From b87e264960ba8868d2b4f49e82fd8c05a8de98db Mon Sep 17 00:00:00 2001 From: Justin Ross Date: Fri, 5 Jan 2024 13:51:31 -0500 Subject: [PATCH] Use an embedded repo instead of git subrepo --- .plano.py | 2 +- .../skewer-main/.github/workflows/main.yaml | 23 + external/skewer-main/.gitignore | 3 + external/skewer-main/.plano.py | 69 + external/skewer-main/LICENSE.txt | 202 ++ external/skewer-main/README.md | 278 +++ .../config/.github/workflows/main.yaml | 41 + external/skewer-main/config/.plano.py | 132 ++ .../plano-main/.github/workflows/main.yaml | 48 + .../external/plano-main/.gitignore | 6 + .../external/plano-main/LICENSE.txt | 202 ++ .../external/plano-main/MANIFEST.in | 1 + .../skewer-main/external/plano-main/Makefile | 70 + .../skewer-main/external/plano-main/README.md | 78 + .../skewer-main/external/plano-main/bin/plano | 31 + .../external/plano-main/bin/plano-test | 31 + .../external/plano-main/docs/conf.py | 34 + .../external/plano-main/docs/index.rst | 4 + .../external/plano-main/pyproject.toml | 23 + .../external/plano-main/src/plano/__init__.py | 24 + .../src/plano/_testproject/.plano.py | 112 ++ .../_testproject/src/chucker/__init__.py | 0 .../plano/_testproject/src/chucker/tests.py | 59 + .../external/plano-main/src/plano/_tests.py | 1277 ++++++++++++ .../external/plano-main/src/plano/command.py | 515 +++++ .../external/plano-main/src/plano/main.py | 1731 +++++++++++++++++ .../external/plano-main/src/plano/test.py | 397 ++++ external/skewer-main/plano | 1 + external/skewer-main/python/plano | 1 + .../skewer-main/python/skewer/__init__.py | 20 + external/skewer-main/python/skewer/main.py | 565 ++++++ .../python/skewer/standardsteps.yaml | 227 +++ external/skewer-main/python/skewer/tests.py | 71 + external/skewer-main/test-example/.gitignore | 1 + external/skewer-main/test-example/.plano.py | 1 + external/skewer-main/test-example/README.md | 462 +++++ .../test-example/external/skewer-main | 1 + .../test-example/images/entities.svg | 3 + .../test-example/images/sequence.svg | 1 + .../test-example/images/sequence.txt | 22 + external/skewer-main/test-example/plano | 1 + .../skewer-main/test-example/python/skewer | 1 + external/skewer-main/test-example/skewer.yaml | 119 ++ plano | 2 +- python/skewer | 2 +- skewer.yaml | 6 +- 46 files changed, 6894 insertions(+), 6 deletions(-) create mode 100644 external/skewer-main/.github/workflows/main.yaml create mode 100644 external/skewer-main/.gitignore create mode 100644 external/skewer-main/.plano.py create mode 100644 external/skewer-main/LICENSE.txt create mode 100644 external/skewer-main/README.md create mode 100644 external/skewer-main/config/.github/workflows/main.yaml create mode 100644 external/skewer-main/config/.plano.py create mode 100644 external/skewer-main/external/plano-main/.github/workflows/main.yaml create mode 100644 external/skewer-main/external/plano-main/.gitignore create mode 100644 external/skewer-main/external/plano-main/LICENSE.txt create mode 100644 external/skewer-main/external/plano-main/MANIFEST.in create mode 100644 external/skewer-main/external/plano-main/Makefile create mode 100644 external/skewer-main/external/plano-main/README.md create mode 100755 external/skewer-main/external/plano-main/bin/plano create mode 100755 external/skewer-main/external/plano-main/bin/plano-test create mode 100644 external/skewer-main/external/plano-main/docs/conf.py create mode 100644 external/skewer-main/external/plano-main/docs/index.rst create mode 100644 external/skewer-main/external/plano-main/pyproject.toml create mode 
100644 external/skewer-main/external/plano-main/src/plano/__init__.py create mode 100644 external/skewer-main/external/plano-main/src/plano/_testproject/.plano.py create mode 100644 external/skewer-main/external/plano-main/src/plano/_testproject/src/chucker/__init__.py create mode 100644 external/skewer-main/external/plano-main/src/plano/_testproject/src/chucker/tests.py create mode 100644 external/skewer-main/external/plano-main/src/plano/_tests.py create mode 100644 external/skewer-main/external/plano-main/src/plano/command.py create mode 100644 external/skewer-main/external/plano-main/src/plano/main.py create mode 100644 external/skewer-main/external/plano-main/src/plano/test.py create mode 120000 external/skewer-main/plano create mode 120000 external/skewer-main/python/plano create mode 100644 external/skewer-main/python/skewer/__init__.py create mode 100644 external/skewer-main/python/skewer/main.py create mode 100644 external/skewer-main/python/skewer/standardsteps.yaml create mode 100644 external/skewer-main/python/skewer/tests.py create mode 100644 external/skewer-main/test-example/.gitignore create mode 120000 external/skewer-main/test-example/.plano.py create mode 100644 external/skewer-main/test-example/README.md create mode 120000 external/skewer-main/test-example/external/skewer-main create mode 100644 external/skewer-main/test-example/images/entities.svg create mode 100644 external/skewer-main/test-example/images/sequence.svg create mode 100644 external/skewer-main/test-example/images/sequence.txt create mode 120000 external/skewer-main/test-example/plano create mode 120000 external/skewer-main/test-example/python/skewer create mode 100644 external/skewer-main/test-example/skewer.yaml diff --git a/.plano.py b/.plano.py index bf2f77c..6b89765 120000 --- a/.plano.py +++ b/.plano.py @@ -1 +1 @@ -subrepos/skewer/config/.plano.py \ No newline at end of file +external/skewer-main/config/.plano.py \ No newline at end of file diff --git a/external/skewer-main/.github/workflows/main.yaml b/external/skewer-main/.github/workflows/main.yaml new file mode 100644 index 0000000..1c2c681 --- /dev/null +++ b/external/skewer-main/.github/workflows/main.yaml @@ -0,0 +1,23 @@ +name: main +on: + push: + pull_request: + schedule: + - cron: "0 0 * * 0" +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: "3.x" + - uses: manusa/actions-setup-minikube@v2.7.2 + with: + minikube version: "v1.28.0" + kubernetes version: "v1.25.4" + github token: ${{secrets.GITHUB_TOKEN}} + - run: pip install pyyaml + - run: curl https://skupper.io/install.sh | sh + - run: echo "$HOME/.local/bin" >> $GITHUB_PATH + - run: ./plano test diff --git a/external/skewer-main/.gitignore b/external/skewer-main/.gitignore new file mode 100644 index 0000000..04d68f4 --- /dev/null +++ b/external/skewer-main/.gitignore @@ -0,0 +1,3 @@ +__pycache__/ +/README.html +/.coverage diff --git a/external/skewer-main/.plano.py b/external/skewer-main/.plano.py new file mode 100644 index 0000000..25398f8 --- /dev/null +++ b/external/skewer-main/.plano.py @@ -0,0 +1,69 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from skewer import * + +@command(passthrough=True) +def test(coverage=False, passthrough_args=[]): + clean() + + args = " ".join(passthrough_args) + + if coverage: + check_program("coverage") + + with working_env(PYTHONPATH="python"): + run(f"coverage run --source skewer -m skewer.tests {args}") + + run("coverage report") + run("coverage html") + + print(f"file:{get_current_dir()}/htmlcov/index.html") + else: + with working_env(PYTHONPATH="python"): + run(f"python -m skewer.tests {args}") + +@command +def render(): + """ + Render README.html from README.md + """ + check_program("pandoc") + + run(f"pandoc -o README.html README.md") + + print(f"file:{get_real_path('README.html')}") + +@command +def clean(): + remove(join("python", "__pycache__")) + remove(join("test-example", "python", "__pycache__")) + remove("README.html") + remove("htmlcov") + remove(".coverage") + +@command +def update_plano(): + """ + Update the embedded Plano repo + """ + + make_dir("external") + remove("external/plano-main") + run("curl -sfL https://github.com/ssorj/plano/archive/main.tar.gz | tar -C external -xz", shell=True) diff --git a/external/skewer-main/LICENSE.txt b/external/skewer-main/LICENSE.txt new file mode 100644 index 0000000..e06d208 --- /dev/null +++ b/external/skewer-main/LICENSE.txt @@ -0,0 +1,202 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/external/skewer-main/README.md b/external/skewer-main/README.md new file mode 100644 index 0000000..0a466a7 --- /dev/null +++ b/external/skewer-main/README.md @@ -0,0 +1,278 @@ +# Skewer + +[![main](https://github.com/skupperproject/skewer/actions/workflows/main.yaml/badge.svg)](https://github.com/skupperproject/skewer/actions/workflows/main.yaml) + +A library for documenting and testing Skupper examples + +A `skewer.yaml` file describes the steps and commands to achieve an +objective using Skupper. Skewer takes the `skewer.yaml` file as input +and produces two outputs: a `README.md` file and a test routine. + +## An example example + +[Example `skewer.yaml` file](test-example/skewer.yaml) + +[Example `README.md` output](test-example/README.md) + +## Setting up Skewer for your own example + +**Note:** This is how you set things up from scratch. You can also +use the [Skupper example template][template] as a starting point. 
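+
+For orientation, the setup steps below produce a layout along these
+lines (a sketch; `<project-dir>` and `skewer.yaml` stand in for your
+example's own name and content):
+
+    <project-dir>/
+    ├── .plano.py -> external/skewer-main/config/.plano.py
+    ├── plano -> external/skewer-main/plano
+    ├── skewer.yaml
+    ├── external/
+    │   └── skewer-main/
+    └── python/
+        └── skewer -> ../external/skewer-main/python/skewer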
+
+[template]: https://github.com/skupperproject/skupper-example-template
+
+Add the Skewer code as a subdirectory in your example project:
+
+    cd <project-dir>/
+    mkdir external
+    curl -sfL https://github.com/skupperproject/skewer/archive/main.tar.gz | tar -C external -xz
+
+Symlink the Skewer library into your `python` directory:
+
+    mkdir -p python
+    ln -s ../external/skewer-main/python/skewer python/skewer
+
+Symlink the `plano` command into the root of your project. Symlink
+the standard `config/.plano.py` as `.plano.py` in the root as well:
+
+    ln -s external/skewer-main/plano
+    ln -s external/skewer-main/config/.plano.py
+
+To use the `./plano` command, you must have the Python `pyyaml`
+package installed. Use `pip` (or `pip3` on some systems) to install
+it:
+
+    pip install pyyaml
+
+Use the `plano update-workflow` command to copy the latest GitHub
+Actions workflow file into your project:
+
+    ./plano update-workflow
+
+Use your editor to create a `skewer.yaml` file in the root of your
+project:
+
+    emacs skewer.yaml
+
+Run the `./plano` command to see the available commands:
+
+~~~ console
+$ ./plano
+usage: plano [--verbose] [--quiet] [--debug] [-h] [-f FILE] {generate,render,run,run-external,demo,test,update-workflow,update-skewer} ...
+
+Run commands defined as Python functions
+
+options:
+  --verbose             Print detailed logging to the console
+  --quiet               Print no logging to the console
+  --debug               Print debugging output to the console
+  -h, --help            Show this help message and exit
+  -f FILE, --file FILE  Load commands from FILE (default '.plano.py')
+
+commands:
+  {generate,render,run,run-external,demo,test,update-workflow,update-skewer}
+    generate            Generate README.md from the data in skewer.yaml
+    render              Render README.html from the data in skewer.yaml
+    run                 Run the example steps using Minikube
+    run-external        Run the example steps against external clusters
+    demo                Run the example steps and pause before cleaning up
+    test                Test README generation and run the steps on Minikube
+    update-workflow     Update the GitHub Actions workflow file
+    update-skewer       Update the embedded Skewer repo
+~~~
+
+## Updating the embedded Skewer repo inside your example project
+
+Use the `./plano update-skewer` command:
+
+    ./plano update-skewer
+
+## Skewer YAML
+
+The top level:
+
+~~~ yaml
+title:              # Your example's title (required)
+subtitle:           # Your chosen subtitle (required)
+github_actions_url: # The URL of your workflow (optional)
+overview:           # Text introducing your example (optional)
+prerequisites:      # Text describing prerequisites (optional, has default text)
+sites:              # A map of named sites (see below)
+steps:              # A list of steps (see below)
+summary:            # Text to summarize what the user did (optional)
+next_steps:         # Text linking to more examples (optional, has default text)
+~~~
+
+A **site**:
+
+~~~ yaml
+<site-name>:
+  title:     # The site title (optional)
+  platform:  # "kubernetes" or "podman" (required)
+  namespace: # The Kubernetes namespace (required for Kubernetes sites)
+  env:       # A map of named environment variables
+~~~
+
+Kubernetes sites must have a `KUBECONFIG` environment variable with a
+path to a kubeconfig file. Podman sites must have a
+`SKUPPER_PLATFORM` variable with the value `podman`.
+
+A tilde (~) in the kubeconfig file path is replaced with a temporary
+working directory during testing.
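+
+As a rough illustration of how these settings get used (a sketch, not
+Skewer's actual implementation), a test harness only needs to overlay
+a site's `env` map on the process environment before running that
+site's commands:
+
+~~~ python
+import os
+import subprocess
+
+def run_site_command(site, command):
+    # Overlay the site's env map (for example KUBECONFIG for a
+    # Kubernetes site, or SKUPPER_PLATFORM for a Podman site) on the
+    # current environment, then run the shell command with it.
+    env = dict(os.environ, **site.get("env", {}))
+    subprocess.run(command, shell=True, check=True, env=env)
+~~~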
+
+Example sites:
+
+~~~ yaml
+sites:
+  east:
+    title: East
+    platform: kubernetes
+    namespace: east
+    env:
+      KUBECONFIG: ~/.kube/config-east
+  west:
+    title: West
+    platform: podman
+    env:
+      SKUPPER_PLATFORM: podman
+~~~
+
+A **step**:
+
+~~~ yaml
+- title:     # The step title (required)
+  preamble:  # Text before the commands (optional)
+  commands:  # Named groups of commands. See below.
+  postamble: # Text after the commands (optional)
+~~~
+
+An example step:
+
+~~~ yaml
+steps:
+  - title: Expose the frontend service
+    preamble: |
+      We have established connectivity between the two namespaces and
+      made the backend in `east` available to the frontend in `west`.
+      Before we can test the application, we need external access to
+      the frontend.
+
+      Use `kubectl expose` with `--type LoadBalancer` to open network
+      access to the frontend service. Use `kubectl get services` to
+      check for the service and its external IP address.
+    commands:
+      east: <list of commands>
+      west: <list of commands>
+~~~
+
+Or you can use a named step from the library of standard steps:
+
+~~~ yaml
+- standard: configure_separate_console_sessions
+~~~
+
+The standard steps are defined in
+[python/skewer/standardsteps.yaml](python/skewer/standardsteps.yaml).
+Note that you should not edit this file. Instead, in your
+`skewer.yaml` file, you can create custom steps based on the standard
+steps. You can override the `title`, `preamble`, `commands`, or
+`postamble` field of a standard step by adding the field in addition
+to `standard`:
+
+~~~ yaml
+- standard: cleaning_up
+  commands:
+    east:
+      - run: skupper delete
+      - run: kubectl delete deployment/database
+    west:
+      - run: skupper delete
+~~~
+
+The initial steps are usually standard ones. There are also some
+standard steps at the end. You may be able to use something like
+this:
+
+~~~ yaml
+steps:
+  - standard: configure_separate_console_sessions
+  - standard: access_your_clusters
+  - standard: set_up_your_namespaces
+  - standard: install_skupper_in_your_namespaces
+  - standard: check_the_status_of_your_namespaces
+  - standard: link_your_namespaces
+  <your-custom-steps>
+  - standard: test_the_application
+  - standard: accessing_the_web_console
+  - standard: cleaning_up
+~~~
+
+Note that the `link_your_namespaces` and `test_the_application` steps
+are less generic than the other steps, so check that the text and
+commands they produce are doing what you need. If not, you'll need to
+provide a custom step.
+
+The step commands are separated into named groups corresponding to the
+sites. Each named group contains a list of command entries. Each
+command entry has a `run` field containing a shell command and other
+fields for awaiting completion or providing sample output.
+
+A **command**:
+
+~~~ yaml
+- run:    # A shell command (required)
+  apply:  # Use this command only for "readme" or "test" (optional, default is both)
+  output: # Sample output to include in the README (optional)
+~~~
+
+Only the `run` and `output` fields are used in the README content.
+The `output` field is used as sample output only, not for any kind of
+testing.
+
+The `apply` field is useful when you want the readme instructions to
+be different from the test procedure, or you simply want to omit
+something.
+
+There are also some special "await" commands that you can use to pause
+for a condition you require before going to the next step. They are
+used only for testing and do not impact the README.
+
+~~~ yaml
+- await_resource:    # A resource (as in, deployment/frontend) for which to await readiness (optional)
+- await_external_ip: # A service (as in, service/frontend) for which to await an external IP (optional)
+- await_http_ok:     # A service and URL template (as in, service/frontend and "http://{}:8080/api/hello")
+                     # for which to await an HTTP OK response (optional)
+~~~
+
+Example commands:
+
+~~~ yaml
+commands:
+  east:
+    - run: kubectl expose deployment/backend --port 8080 --type LoadBalancer
+      output: |
+        service/backend exposed
+  west:
+    - await_resource: service/backend
+    - run: kubectl get service/backend
+      output: |
+        NAME      TYPE        CLUSTER-IP       EXTERNAL-IP   PORT(S)    AGE
+        backend   ClusterIP   10.102.112.121   <none>        8080/TCP   30s
+~~~
+
+## Demo mode
+
+Skewer has a mode where it executes all the steps, but before cleaning
+up and exiting, it pauses so you can inspect things.
+
+It is enabled by setting the environment variable `SKEWER_DEMO` to any
+value when you call `./plano run` or one of its variants. You can
+also use `./plano demo`, which sets the variable for you.
diff --git a/external/skewer-main/config/.github/workflows/main.yaml b/external/skewer-main/config/.github/workflows/main.yaml
new file mode 100644
index 0000000..3266cb2
--- /dev/null
+++ b/external/skewer-main/config/.github/workflows/main.yaml
@@ -0,0 +1,41 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+name: main
+on:
+  push:
+  pull_request:
+  schedule:
+    - cron: "0 0 * * 0"
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: "3.x"
+      - uses: manusa/actions-setup-minikube@v2.7.2
+        with:
+          minikube version: "v1.28.0"
+          kubernetes version: "v1.25.4"
+          github token: ${{secrets.GITHUB_TOKEN}}
+      - run: curl https://skupper.io/install.sh | sh
+      - run: echo "$HOME/.local/bin" >> $GITHUB_PATH
+      - run: ./plano test --debug
diff --git a/external/skewer-main/config/.plano.py b/external/skewer-main/config/.plano.py
new file mode 100644
index 0000000..3cf4633
--- /dev/null
+++ b/external/skewer-main/config/.plano.py
@@ -0,0 +1,132 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from skewer import * + +@command +def generate(): + """ + Generate README.md from the data in skewer.yaml + """ + generate_readme("skewer.yaml", "README.md") + +render_template = """ + + + + + + +
+ +@content@ + +
+ + +""".strip() + +@command +def render(): + """ + Render README.html from the data in skewer.yaml + """ + generate() + + markdown = read("README.md") + data = {"text": markdown} + json = emit_json(data) + content = http_post("https://api.github.com/markdown", json, content_type="application/json") + html = render_template.replace("@content@", content) + + write("README.html", html) + + print(f"file:{get_real_path('README.html')}") + +@command +def clean(): + remove(find(".", "__pycache__")) + remove("README.html") + +@command +def run_(debug=False): + """ + Run the example steps using Minikube + """ + run_steps_minikube("skewer.yaml", debug=debug) + +# XXX +# +# @command +# def run_external(*kubeconfigs, debug=False): +# """ +# Run the example steps with user-provided kubeconfigs +# """ +# run_steps("skewer.yaml", *kubeconfigs, debug=debug) + +@command +def demo(debug=False): + """ + Run the example steps and pause before cleaning up + """ + with working_env(SKEWER_DEMO=1): + run_steps_minikube("skewer.yaml", debug=debug) + +@command +def test_(debug=False): + """ + Test README generation and run the steps on Minikube + """ + generate_readme("skewer.yaml", make_temp_file()) + run_steps_minikube("skewer.yaml", debug=debug) + +@command +def update_workflow(): + """ + Update the GitHub Actions workflow file + """ + copy("external/skewer-main/config/.github/workflows/main.yaml", ".github/workflows/main.yaml") + +@command +def update_skewer(): + """ + Update the embedded Skewer repo + """ + check_program("curl") + + make_dir("external") + remove("external/skewer-main") + + run("curl -sfL https://github.com/skupperproject/skewer/archive/main.tar.gz | tar -C external -xz", shell=True) diff --git a/external/skewer-main/external/plano-main/.github/workflows/main.yaml b/external/skewer-main/external/plano-main/.github/workflows/main.yaml new file mode 100644 index 0000000..83ba30d --- /dev/null +++ b/external/skewer-main/external/plano-main/.github/workflows/main.yaml @@ -0,0 +1,48 @@ +name: main +on: + push: + pull_request: + schedule: + - cron: "0 0 * * 0" +jobs: + main: + strategy: + fail-fast: false + matrix: + os: [macos-latest, ubuntu-latest, windows-latest] + version: [3.7, 3.x] + runs-on: ${{matrix.os}} + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{matrix.version}} + - run: pip install build wheel + - run: python -m build + - run: pip install dist/ssorj_plano-1.0.0-py3-none-any.whl + - run: plano-self-test + cygwin: + runs-on: windows-latest + steps: + - run: git config --global core.autocrlf input + - uses: actions/checkout@v3 + - uses: cygwin/cygwin-install-action@master + with: + packages: python3 + - run: pip install build wheel + shell: C:\cygwin\bin\bash.exe -o igncr '{0}' + - run: make install + shell: C:\cygwin\bin\bash.exe -o igncr '{0}' + - run: echo "C:\Users\runneradmin\AppData\Roaming\Python\Python39\Scripts" >> "$GITHUB_PATH" + shell: C:\cygwin\bin\bash.exe -o igncr '{0}' + - run: plano-self-test + shell: C:\cygwin\bin\bash.exe -o igncr '{0}' + fedora: + runs-on: ubuntu-latest + container: fedora:latest + steps: + - uses: actions/checkout@v3 + - run: dnf -y install make pip python python-build python-wheel + - run: make install + - run: echo "$HOME/.local/bin" >> "$GITHUB_PATH" + - run: plano-self-test diff --git a/external/skewer-main/external/plano-main/.gitignore b/external/skewer-main/external/plano-main/.gitignore new file mode 100644 index 0000000..3af00c3 --- /dev/null +++ 
b/external/skewer-main/external/plano-main/.gitignore @@ -0,0 +1,6 @@ +__pycache__/ +*.egg-info/ +/build +/dist +/.coverage +/htmlcov diff --git a/external/skewer-main/external/plano-main/LICENSE.txt b/external/skewer-main/external/plano-main/LICENSE.txt new file mode 100644 index 0000000..e06d208 --- /dev/null +++ b/external/skewer-main/external/plano-main/LICENSE.txt @@ -0,0 +1,202 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/external/skewer-main/external/plano-main/MANIFEST.in b/external/skewer-main/external/plano-main/MANIFEST.in new file mode 100644 index 0000000..778ca32 --- /dev/null +++ b/external/skewer-main/external/plano-main/MANIFEST.in @@ -0,0 +1 @@ +include src/plano/_testproject/* diff --git a/external/skewer-main/external/plano-main/Makefile b/external/skewer-main/external/plano-main/Makefile new file mode 100644 index 0000000..28212e5 --- /dev/null +++ b/external/skewer-main/external/plano-main/Makefile @@ -0,0 +1,70 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +.NOTPARALLEL: + +# A workaround for an install-with-prefix problem in Fedora 36 +# +# https://docs.fedoraproject.org/en-US/fedora/latest/release-notes/developers/Development_Python/#_pipsetup_py_installation_with_prefix +# https://bugzilla.redhat.com/show_bug.cgi?id=2026979 + +export RPM_BUILD_ROOT := fake + +.PHONY: build +build: + python -m build + +.PHONY: test +test: clean build + python -m venv build/venv + . build/venv/bin/activate && pip install --force-reinstall dist/ssorj_plano-*-py3-none-any.whl + . build/venv/bin/activate && plano-self-test + +.PHONY: qtest +qtest: + PYTHONPATH=src python -m plano._tests + +.PHONY: install +install: build + pip install --user --force-reinstall dist/ssorj_plano-*-py3-none-any.whl + +.PHONY: clean +clean: + rm -rf build dist htmlcov .coverage src/plano/__pycache__ src/plano.egg-info + +.PHONY: docs +docs: + mkdir -p build + sphinx-build -M html docs build/docs + +# XXX Watch out: The 3.11 in this is environment dependent +.PHONY: coverage +coverage: build + python -m venv build/venv + . build/venv/bin/activate && pip install --force-reinstall dist/ssorj_plano-*-py3-none-any.whl + . 
build/venv/bin/activate && PYTHONPATH=build/venv/lib/python3.11/site-packages coverage run \ + --include build/venv/lib/python\*/site-packages/plano/\*,build/venv/bin/\* \ + build/venv/bin/plano-self-test + coverage report + coverage html + @echo "OUTPUT: file:${CURDIR}/htmlcov/index.html" + +.PHONY: upload +upload: build + twine upload --repository testpypi dist/* diff --git a/external/skewer-main/external/plano-main/README.md b/external/skewer-main/external/plano-main/README.md new file mode 100644 index 0000000..2bf8c99 --- /dev/null +++ b/external/skewer-main/external/plano-main/README.md @@ -0,0 +1,78 @@ +# Plano + +[![main](https://github.com/ssorj/plano/workflows/main/badge.svg)](https://github.com/ssorj/plano/actions?query=workflow%3Amain) + +Python functions for writing shell-style system scripts. + +## Installation + +To install plano globally for the current user: + +~~~ +make install +~~~ + +## Example 1 + +`~/.local/bin/widget`: +~~~ python +#!/usr/bin/python + +import sys +from plano import * + +@command +def greeting(message="Howdy"): + print(message) + +if __name__ == "__main__": + PlanoCommand(sys.modules[__name__]).main() +~~~ + +~~~ shell +$ widget greeting --message Hello +--> greeting +Hello +<-- greeting +OK (0s) +~~~ + +## Example 2 + +`~/.local/bin/widget-test`: +~~~ python +import sys +from plano import * + +@test +def check(): + run("widget --message Yo") + +if __name__ == "__main__": + PlanoTestCommand(sys.modules[__name__]).main() +~~~ + +~~~ shell +$ widget-test +=== Configuration === +Modules: __main__ +Test timeout: 5m +Fail fast: False + +=== Module '__main__' === +check ........................................................... PASSED 0.0s + +=== Summary === +Total: 1 +Skipped: 0 +Failed: 0 + +=== RESULT === +All tests passed +~~~ + +## Things to know + +* The plano command accepts command sequences in the form "this,that" + (no spaces). The command arguments are applied to the last command + only. diff --git a/external/skewer-main/external/plano-main/bin/plano b/external/skewer-main/external/plano-main/bin/plano new file mode 100755 index 0000000..9d1e018 --- /dev/null +++ b/external/skewer-main/external/plano-main/bin/plano @@ -0,0 +1,31 @@ +#!/usr/bin/python3 +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +import os +import sys + +if os.path.islink(__file__): + repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + sys.path.insert(0, os.path.join(repo_dir, "src")) + +from plano import PlanoCommand + +if __name__ == "__main__": + PlanoCommand().main() diff --git a/external/skewer-main/external/plano-main/bin/plano-test b/external/skewer-main/external/plano-main/bin/plano-test new file mode 100755 index 0000000..a256740 --- /dev/null +++ b/external/skewer-main/external/plano-main/bin/plano-test @@ -0,0 +1,31 @@ +#!/usr/bin/python3 +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +import os +import sys + +if os.path.islink(__file__): + repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + sys.path.insert(0, os.path.join(repo_dir, "src")) + +from plano import PlanoTestCommand + +if __name__ == "__main__": + PlanoTestCommand().main() diff --git a/external/skewer-main/external/plano-main/docs/conf.py b/external/skewer-main/external/plano-main/docs/conf.py new file mode 100644 index 0000000..3277b1e --- /dev/null +++ b/external/skewer-main/external/plano-main/docs/conf.py @@ -0,0 +1,34 @@ +# import os +# import sys + +# sys.path.insert(0, os.path.abspath("../python")) + +extensions = [ + "sphinx.ext.autodoc", +] + +# autodoc_member_order = "bysource" +# autodoc_default_flags = ["members", "undoc-members", "inherited-members"] + +autodoc_default_options = { + "members": True, + "member-order": "bysource", + "undoc-members": True, + "imported-members": True, + "exclude-members": "PlanoProcess", +} + +master_doc = "index" +project = u"Plano" +copyright = u"1975" +author = u"Justin Ross" + +version = u"0.1.0" +release = u"" + +pygments_style = "sphinx" +html_theme = "nature" + +html_theme_options = { + "nosidebar": True, +} diff --git a/external/skewer-main/external/plano-main/docs/index.rst b/external/skewer-main/external/plano-main/docs/index.rst new file mode 100644 index 0000000..7441b03 --- /dev/null +++ b/external/skewer-main/external/plano-main/docs/index.rst @@ -0,0 +1,4 @@ +Plano +===== + +.. 
automodule:: plano diff --git a/external/skewer-main/external/plano-main/pyproject.toml b/external/skewer-main/external/plano-main/pyproject.toml new file mode 100644 index 0000000..a682141 --- /dev/null +++ b/external/skewer-main/external/plano-main/pyproject.toml @@ -0,0 +1,23 @@ +[build-system] +requires = [ "setuptools", "setuptools-scm" ] +build-backend = "setuptools.build_meta" + +[project] +name = "ssorj-plano" +version = "1.0.0" +authors = [ { name = "Justin Ross", email = "jross@apache.org" } ] +description = "Python functions for writing shell-style system scripts" +license = { file = "LICENSE.txt" } +readme = "README.md" +classifiers = [ "License :: OSI Approved :: Apache Software License" ] +requires-python = ">=3.7" +dependencies = [ "PyYAML" ] + +[project.scripts] +plano = "plano.command:_main" +plano-test = "plano.test:_main" +plano-self-test = "plano._tests:main" + +[project.urls] +"Homepage" = "https://github.com/ssorj/plano" +"Bug Tracker" = "https://github.com/ssorj/plano/issues" diff --git a/external/skewer-main/external/plano-main/src/plano/__init__.py b/external/skewer-main/external/plano-main/src/plano/__init__.py new file mode 100644 index 0000000..3218323 --- /dev/null +++ b/external/skewer-main/external/plano-main/src/plano/__init__.py @@ -0,0 +1,24 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from .main import * +from .main import _default_sigterm_handler + +from .command import * +from .test import * diff --git a/external/skewer-main/external/plano-main/src/plano/_testproject/.plano.py b/external/skewer-main/external/plano-main/src/plano/_testproject/.plano.py new file mode 100644 index 0000000..67904b2 --- /dev/null +++ b/external/skewer-main/external/plano-main/src/plano/_testproject/.plano.py @@ -0,0 +1,112 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +from plano import * + +@command +def base_command(alpha, beta, omega="x"): + """ + Base command help + """ + + print("base", alpha, beta, omega) + +@command(name="extended-command", parent=base_command) +def extended_command(alpha, beta, omega="y"): + print("extended", alpha, omega) + parent(alpha, beta, omega) + +@command(parameters=[CommandParameter("message_", help="The message to print", display_name="message"), + CommandParameter("count", help="Print the message COUNT times"), + CommandParameter("extra", default=1, short_option="e")]) +def echo(message_, count=1, extra=None, trouble=False): + """ + Print a message to the console + """ + + print("Echoing (message={}, count={})".format(message_, count)) + + if trouble: + raise Exception("Trouble") + + for i in range(count): + print(message_) + +@command +def echoecho(message): + echo(message) + +@command +def haberdash(first, *middle, last="bowler"): + """ + Habberdash command help + """ + + data = [first, *middle, last] + write_json("haberdash.json", data) + +@command(parameters=[CommandParameter("optional", positional=True)]) +def balderdash(required, optional="malarkey", other="rubbish", **extra_kwargs): + """ + Balderdash command help + """ + + data = [required, optional, other] + write_json("balderdash.json", data) + +@command +def splasher(): + write_json("splasher.json", [1]) + +@command +def dasher(alpha, beta=123): + pass + +@command(passthrough=True) +def dancer(gamma, omega="abc", passthrough_args=[]): + write_json("dancer.json", passthrough_args) + +# Vixen's parent calls prancer. We are testing to ensure the extended +# prancer (below) is executed. + +from plano._tests import prancer, vixen + +@command(parent=prancer) +def prancer(): + parent() + + notice("Extended prancer") + + write_json("prancer.json", True) + +@command(parent=vixen) +def vixen(): + parent() + +@command +def no_parent(): + parent() + +@command(parameters=[CommandParameter("spinach")]) +def feta(*args, **kwargs): + write_json("feta.json", kwargs["spinach"]) + +@command(hidden=True) +def invisible(something="nothing"): + write_json("invisible.json", something) diff --git a/external/skewer-main/external/plano-main/src/plano/_testproject/src/chucker/__init__.py b/external/skewer-main/external/plano-main/src/plano/_testproject/src/chucker/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/external/skewer-main/external/plano-main/src/plano/_testproject/src/chucker/tests.py b/external/skewer-main/external/plano-main/src/plano/_testproject/src/chucker/tests.py new file mode 100644 index 0000000..a556cc8 --- /dev/null +++ b/external/skewer-main/external/plano-main/src/plano/_testproject/src/chucker/tests.py @@ -0,0 +1,59 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +from plano import * + +@test +def hello(): + print("Hello") + +@test +async def hello_async(): + print("Hello") + +@test +def goodbye(): + print("Goodbye") + +@test(disabled=True) +def badbye(): + print("Badbye") + assert False + +@test(disabled=True) +def skipped(): + skip_test("Skipped") + assert False + +@test(disabled=True) +def keyboard_interrupt(): + raise KeyboardInterrupt() + +@test(disabled=True, timeout=0.05) +def timeout(): + sleep(10, quiet=True) + assert False + +@test(disabled=True) +def process_error(): + run("expr 1 / 0") + +@test(disabled=True) +def system_exit_(): + exit(1) diff --git a/external/skewer-main/external/plano-main/src/plano/_tests.py b/external/skewer-main/external/plano-main/src/plano/_tests.py new file mode 100644 index 0000000..5449155 --- /dev/null +++ b/external/skewer-main/external/plano-main/src/plano/_tests.py @@ -0,0 +1,1277 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +import datetime as _datetime +import getpass as _getpass +import os as _os +import signal as _signal +import socket as _socket +import sys as _sys +import threading as _threading + +try: + import http.server as _http +except ImportError: # pragma: nocover + import BaseHTTPServer as _http + +from .test import * + +test_project_dir = join(get_parent_dir(__file__), "_testproject") + +class test_project(working_dir): + def __enter__(self): + dir = super(test_project, self).__enter__() + copy(test_project_dir, ".", inside=False) + return dir + +TINY_INTERVAL = 0.05 + +@test +def archive_operations(): + with working_dir(): + make_dir("some-dir") + touch("some-dir/some-file") + + make_archive("some-dir") + assert is_file("some-dir.tar.gz"), list_dir() + + extract_archive("some-dir.tar.gz", output_dir="some-subdir") + assert is_dir("some-subdir/some-dir") + assert is_file("some-subdir/some-dir/some-file") + + rename_archive("some-dir.tar.gz", "something-else") + assert is_file("something-else.tar.gz") + + extract_archive("something-else.tar.gz") + assert is_dir("something-else") + assert is_file("something-else/some-file") + +@test +def command_operations(): + class SomeCommand(BaseCommand): + def __init__(self): + self.parser = BaseArgumentParser() + self.parser.add_argument("--interrupt", action="store_true") + self.parser.add_argument("--explode", action="store_true") + + def parse_args(self, args): + return self.parser.parse_args(args) + + def init(self, args): + self.verbose = args.verbose + self.interrupt = args.interrupt + self.explode = args.explode + + def run(self): + if self.verbose: + print("Hello") + + if self.interrupt: + raise KeyboardInterrupt() + + if self.explode: + raise PlanoError("Exploded") + + SomeCommand().main([]) + SomeCommand().main(["--interrupt"]) + SomeCommand().main(["--debug"]) + + with expect_system_exit(): 
+ SomeCommand().main(["--verbose", "--debug", "--explode"]) + +@test +def console_operations(): + eprint("Here's a story") + eprint("About a", "man named Brady") + + pprint(list_dir()) + pprint(PlanoProcess, 1, "abc", end="\n\n") + + flush() + + with console_color("red"): + print("ALERT") + + print(cformat("AMBER ALERT", color="yellow")) + print(cformat("NO ALERT")) + + cprint("CRITICAL ALERT", color="red", bright=True) + +@test +def dir_operations(): + with working_dir(): + test_dir = make_dir("some-dir") + test_file_1 = touch(join(test_dir, "some-file-1")) + test_file_2 = touch(join(test_dir, "some-file-2")) + + result = list_dir(test_dir) + assert join(test_dir, result[0]) == test_file_1, (join(test_dir, result[0]), test_file_1) + + result = list_dir(test_dir, "*-file-1") + assert result == ["some-file-1"], (result, ["some-file-1"]) + + result = list_dir(test_dir, exclude="*-file-1") + assert result == ["some-file-2"], (result, ["some-file-2"]) + + result = list_dir("some-dir", "*.not-there") + assert result == [], result + + with working_dir(): + result = list_dir() + assert result == [], result + + result = find(test_dir) + assert result == [test_file_1, test_file_2], (result, [test_file_1, test_file_2]) + + result = find(test_dir, include="*-file-1") + assert result == [test_file_1], (result, [test_file_1]) + + result = find(test_dir, exclude="*-file-1") + assert result == [test_file_2], (result, [test_file_2]) + + with working_dir(): + result = find() + assert result == [], result + + make_dir("subdir") + + result = find("./subdir") + assert result == [], result + + with working_dir(): + with working_dir("a-dir", quiet=True): + touch("a-file") + + curr_dir = get_current_dir() + prev_dir = change_dir("a-dir") + new_curr_dir = get_current_dir() + new_prev_dir = change_dir(curr_dir) + + assert curr_dir == prev_dir, (curr_dir, prev_dir) + assert new_curr_dir == new_prev_dir, (new_curr_dir, new_prev_dir) + +@test +def env_operations(): + result = join_path_var("a", "b", "c", "a") + assert result == _os.pathsep.join(("a", "b", "c")), result + + curr_dir = get_current_dir() + + with working_dir("."): + assert get_current_dir() == curr_dir, (get_current_dir(), curr_dir) + + result = get_home_dir() + assert result == _os.path.expanduser("~"), (result, _os.path.expanduser("~")) + + result = get_home_dir("alice") + assert result.endswith("alice"), result + + user = _getpass.getuser() + result = get_user() + assert result == user, (result, user) + + result = get_hostname() + assert result, result + + result = get_program_name() + assert result, result + + result = get_program_name("alpha beta") + assert result == "alpha", result + + result = get_program_name("X=Y alpha beta") + assert result == "alpha", result + + result = which("echo") + assert result, result + + with working_env(YES_I_AM_SET=1): + check_env("YES_I_AM_SET") + + with expect_error(): + check_env("NO_I_AM_NOT") + + with working_env(I_AM_SET_NOW=1, amend=False): + check_env("I_AM_SET_NOW") + assert "YES_I_AM_SET" not in ENV, ENV + + with working_env(SOME_VAR=1): + assert ENV["SOME_VAR"] == "1", ENV.get("SOME_VAR") + + with working_env(SOME_VAR=2): + assert ENV["SOME_VAR"] == "2", ENV.get("SOME_VAR") + + with expect_error(): + check_program("not-there") + + with expect_error(): + check_module("not_there") + + with expect_output(contains="ARGS:") as out: + with open(out, "w") as f: + print_env(file=f) + +@test +def file_operations(): + with working_dir(): + alpha_dir = make_dir("alpha-dir") + alpha_file = touch(join(alpha_dir, 
"alpha-file")) + alpha_link = make_link(join(alpha_dir, "alpha-file-link"), "alpha-file") + alpha_broken_link = make_link(join(alpha_dir, "broken-link"), "no-such-file") + + beta_dir = make_dir("beta-dir") + beta_file = touch(join(beta_dir, "beta-file")) + beta_link = make_link(join(beta_dir, "beta-file-link"), "beta-file") + beta_broken_link = make_link(join(beta_dir, "broken-link"), join("..", alpha_dir, "no-such-file")) + beta_another_link = make_link(join(beta_dir, "broken-link"), join("..", alpha_dir, "alpha-file-link")) + + assert exists(beta_link) + assert exists(beta_file) + + with working_dir("beta-dir"): + assert is_file(read_link("beta-file-link")) + + copied_file = copy(alpha_file, beta_dir) + assert copied_file == join(beta_dir, "alpha-file"), copied_file + assert is_file(copied_file), list_dir(beta_dir) + + copied_link = copy(beta_link, join(beta_dir, "beta-file-link-copy")) + assert copied_link == join(beta_dir, "beta-file-link-copy"), copied_link + assert is_link(copied_link), list_dir(beta_dir) + + copied_dir = copy(alpha_dir, beta_dir) + assert copied_dir == join(beta_dir, "alpha-dir"), copied_dir + assert is_link(join(copied_dir, "alpha-file-link")) + + moved_file = move(beta_file, alpha_dir) + assert moved_file == join(alpha_dir, "beta-file"), moved_file + assert is_file(moved_file), list_dir(alpha_dir) + assert not exists(beta_file), list_dir(beta_dir) + + moved_dir = move(beta_dir, alpha_dir) + assert moved_dir == join(alpha_dir, "beta-dir"), moved_dir + assert is_dir(moved_dir), list_dir(alpha_dir) + assert not exists(beta_dir) + + gamma_dir = make_dir("gamma-dir") + gamma_file = touch(join(gamma_dir, "gamma-file")) + + delta_dir = make_dir("delta-dir") + delta_file = touch(join(delta_dir, "delta-file")) + + copy(gamma_dir, delta_dir, inside=False) + assert is_file(join("delta-dir", "gamma-file")) + + move(gamma_dir, delta_dir, inside=False) + assert is_file(join("delta-dir", "gamma-file")) + assert not exists(gamma_dir) + + epsilon_dir = make_dir("epsilon-dir") + epsilon_file_1 = touch(join(epsilon_dir, "epsilon-file-1")) + epsilon_file_2 = touch(join(epsilon_dir, "epsilon-file-2")) + epsilon_file_3 = touch(join(epsilon_dir, "epsilon-file-3")) + epsilon_file_4 = touch(join(epsilon_dir, "epsilon-file-4")) + + remove("not-there") + + remove(epsilon_file_2) + assert not exists(epsilon_file_2) + + remove(epsilon_dir) + assert not exists(epsilon_file_1) + assert not exists(epsilon_dir) + + remove([epsilon_file_3, epsilon_file_4]) + assert not exists(epsilon_file_3) + assert not exists(epsilon_file_4) + + file = write("xes", "x" * 10) + result = get_file_size(file) + assert result == 10, result + +@test +def http_operations(): + class Handler(_http.BaseHTTPRequestHandler): + def do_GET(self): + self.send_response(200) + self.end_headers() + self.wfile.write(b"[1]") + + def do_POST(self): + length = int(self.headers["content-length"]) + content = self.rfile.read(length) + + self.send_response(200) + self.end_headers() + self.wfile.write(content) + + def do_PUT(self): + length = int(self.headers["content-length"]) + content = self.rfile.read(length) + + self.send_response(200) + self.end_headers() + + class ServerThread(_threading.Thread): + def __init__(self, server): + _threading.Thread.__init__(self) + self.server = server + + def run(self): + self.server.serve_forever() + + host, port = "localhost", get_random_port() + url = "http://{}:{}".format(host, port) + + try: + server = _http.HTTPServer((host, port), Handler) + except (OSError, PermissionError): # pragma: 
nocover + # Try one more time + port = get_random_port() + server = _http.HTTPServer((host, port), Handler) + + server_thread = ServerThread(server) + server_thread.start() + + try: + with working_dir(): + result = http_get(url) + assert result == "[1]", result + + result = http_get(url, insecure=True) + assert result == "[1]", result + + result = http_get(url, user="fritz", password="secret") + assert result == "[1]", result + + result = http_get(url, output_file="a") + output = read("a") + assert result is None, result + assert output == "[1]", output + + result = http_get_json(url) + assert result == [1], result + + file_b = write("b", "[2]") + + result = http_post(url, read(file_b), insecure=True) + assert result == "[2]", result + + result = http_post(url, read(file_b), output_file="x") + output = read("x") + assert result is None, result + assert output == "[2]", output + + result = http_post_file(url, file_b) + assert result == "[2]", result + + result = http_post_json(url, parse_json(read(file_b))) + assert result == [2], result + + file_c = write("c", "[3]") + + result = http_put(url, read(file_c), insecure=True) + assert result is None, result + + result = http_put_file(url, file_c) + assert result is None, result + + result = http_put_json(url, parse_json(read(file_c))) + assert result is None, result + finally: + server.shutdown() + server.server_close() + server_thread.join() + +@test +def io_operations(): + with working_dir(): + input_ = "some-text\n" + file_a = write("a", input_) + output = read(file_a) + + assert input_ == output, (input_, output) + + pre_input = "pre-some-text\n" + post_input = "post-some-text\n" + + prepend(file_a, pre_input) + append(file_a, post_input) + + output = tail(file_a, 100) + tailed = tail(file_a, 1) + + assert output.startswith(pre_input), (output, pre_input) + assert output.endswith(post_input), (output, post_input) + assert tailed == post_input, (tailed, post_input) + + input_lines = [ + "alpha\n", + "beta\n", + "gamma\n", + "chi\n", + "psi\n", + "omega\n", + ] + + file_b = write_lines("b", input_lines) + output_lines = read_lines(file_b) + + assert input_lines == output_lines, (input_lines, output_lines) + + pre_lines = ["pre-alpha\n"] + post_lines = ["post-omega\n"] + + prepend_lines(file_b, pre_lines) + append_lines(file_b, post_lines) + + output_lines = tail_lines(file_b, 100) + tailed_lines = tail_lines(file_b, 1) + + assert output_lines[0] == pre_lines[0], (output_lines[0], pre_lines[0]) + assert output_lines[-1] == post_lines[0], (output_lines[-1], post_lines[0]) + assert tailed_lines[0] == post_lines[0], (tailed_lines[0], post_lines[0]) + + file_c = touch("c") + assert is_file(file_c), file_c + + file_d = write("d", "front@middle@@middle@back") + path = replace_in_file(file_d, "@middle@", "M", count=1) + result = read(path) + assert result == "frontM@middle@back", result + + file_e = write("e", "123") + file_f = write("f", "456") + path = concatenate("g", (file_e, "not-there", file_f)) + result = read(path) + assert result == "123456", result + +@test +def iterable_operations(): + result = unique([1, 1, 1, 2, 2, 3]) + assert result == [1, 2, 3], result + + result = skip([1, "", 2, None, 3]) + assert result == [1, 2, 3], result + + result = skip([1, "", 2, None, 3], 2) + assert result == [1, "", None, 3], result + +@test +def json_operations(): + with working_dir(): + input_data = { + "alpha": [1, 2, 3], + } + + file_a = write_json("a", input_data) + output_data = read_json(file_a) + + assert input_data == output_data, (input_data, 
output_data) + + json = read(file_a) + parsed_data = parse_json(json) + emitted_json = emit_json(input_data) + + assert input_data == parsed_data, (input_data, parsed_data) + assert json == emitted_json, (json, emitted_json) + + with expect_output(equals=emitted_json) as out: + with open(out, "w") as f: + print_json(input_data, file=f, end="") + +@test +def link_operations(): + with working_dir(): + make_dir("some-dir") + path = get_absolute_path(touch("some-dir/some-file")) + + with working_dir("another-dir"): + link = make_link("a-link", path) + linked_path = read_link(link) + assert linked_path.endswith(path), (linked_path, path) + +@test +def logging_operations(): + error("Error!") + warning("Warning!") + notice("Take a look!") + notice(123) + debug("By the way") + debug("abc{}{}{}", 1, 2, 3) + + with expect_exception(RuntimeError): + fail(RuntimeError("Error!")) + + with expect_error(): + fail("Error!") + + for level in ("debug", "notice", "warning", "error"): + with expect_output(contains="Hello") as out: + with logging_disabled(): + with logging_enabled(level=level, output=out): + log(level, "hello") + + with expect_output(equals="") as out: + with logging_enabled(output=out): + with logging_disabled(): + error("Yikes") + + with expect_output(contains="flipper") as out: + with logging_enabled(output=out): + with logging_context("flipper"): + notice("Whhat") + + with logging_context("bip"): + with logging_context("boop"): + error("It's alarming!") + +@test +def path_operations(): + abspath = _os.path.abspath + normpath = _os.path.normpath + + with working_dir("/"): + result = get_current_dir() + expect = abspath(_os.sep) + assert result == expect, (result, expect) + + path = "a/b/c" + result = get_absolute_path(path) + expect = join(get_current_dir(), path) + assert result == expect, (result, expect) + + path = "/x/y/z" + result = get_absolute_path(path) + expect = abspath(path) + assert result == expect, (result, expect) + + path = "/x/y/z" + assert is_absolute(path) + + path = "x/y/z" + assert not is_absolute(path) + + path = "a//b/../c/" + result = normalize_path(path) + expect = normpath("a/c") + assert result == expect, (result, expect) + + path = "/a/../c" + result = get_real_path(path) + expect = abspath("/c") + assert result == expect, (result, expect) + + path = abspath("/a/b") + result = get_relative_path(path, "/a/c") + expect = normpath("../b") + assert result == expect, (result, expect) + + path = abspath("/a/b") + result = get_file_url(path) + expect = "file:{}".format(path) + assert result == expect, (result, expect) + + with working_dir(): + result = get_file_url("afile") + expect = join(get_file_url(get_current_dir()), "afile") + assert result == expect, (result, expect) + + path = "/alpha/beta.ext" + path_split = "/alpha", "beta.ext" + path_split_extension = "/alpha/beta", ".ext" + name_split_extension = "beta", ".ext" + + result = join(*path_split) + expect = normpath(path) + assert result == expect, (result, expect) + + result = split(path) + expect = normpath(path_split[0]), normpath(path_split[1]) + assert result == expect, (result, expect) + + result = split_extension(path) + expect = normpath(path_split_extension[0]), normpath(path_split_extension[1]) + assert result == expect, (result, expect) + + result = get_parent_dir(path) + expect = normpath(path_split[0]) + assert result == expect, (result, expect) + + result = get_base_name(path) + expect = normpath(path_split[1]) + assert result == expect, (result, expect) + + result = get_name_stem(path) + expect = 
normpath(name_split_extension[0]) + assert result == expect, (result, expect) + + result = get_name_stem("alpha.tar.gz") + expect = "alpha" + assert result == expect, (result, expect) + + result = get_name_extension(path) + expect = normpath(name_split_extension[1]) + assert result == expect, (result, expect) + + with working_dir(): + touch("adir/afile") + + check_exists("adir") + check_exists("adir/afile") + check_dir("adir") + check_file("adir/afile") + + with expect_error(): + check_exists("adir/notafile") + + with expect_error(): + check_file("adir/notafile") + + with expect_error(): + check_file("adir") + + with expect_error(): + check_dir("not-there") + + with expect_error(): + check_dir("adir/afile") + + await_exists("adir/afile") + + if not WINDOWS: + with expect_timeout(): + await_exists("adir/notafile", timeout=TINY_INTERVAL) + +@test +def port_operations(): + result = get_random_port() + assert result >= 49152 and result <= 65535, result + + server_port = get_random_port() + server_socket = _socket.socket(_socket.AF_INET, _socket.SOCK_STREAM) + + try: + try: + server_socket.bind(("localhost", server_port)) + except (OSError, PermissionError): # pragma: nocover + # Try one more time + server_port = get_random_port() + server_socket.bind(("localhost", server_port)) + + server_socket.listen(5) + + await_port(server_port) + await_port(str(server_port)) + + check_port(server_port) + + # Non-Linux platforms don't seem to produce the expected + # error. + if LINUX: + with expect_error(): + get_random_port(min=server_port, max=server_port) + finally: + server_socket.close() + + if not WINDOWS: + with expect_timeout(): + await_port(get_random_port(), timeout=TINY_INTERVAL) + +@test +def process_operations(): + result = get_process_id() + assert result, result + + proc = run("date") + assert proc is not None, proc + + print(repr(proc)) + + run("date", stash=True) + + run(["echo", 1, 2, 3]) + run(["echo", 1, 2, 3], shell=True) + + proc = run(["echo", "hello"], check=False) + assert proc.exit_code == 0, proc.exit_code + + proc = run("cat /uh/uh", check=False) + assert proc.exit_code > 0, proc.exit_code + + with expect_output() as out: + run("date", output=out) + + run("date", output=DEVNULL) + run("date", stdin=DEVNULL) + run("date", stdout=DEVNULL) + run("date", stderr=DEVNULL) + + run("echo hello", quiet=True) + run("echo hello | cat", shell=True) + run(["echo", "hello"], shell=True) + + with expect_error(): + run("/not/there") + + with expect_error(): + run("cat /whoa/not/really", stash=True) + + result = call("echo hello").strip() + expect = "hello" + assert result == expect, (result, expect) + + result = call("echo hello | cat", shell=True).strip() + expect = "hello" + assert result == expect, (result, expect) + + with expect_error(): + call("cat /whoa/not/really") + + proc = start("sleep 10") + + if not WINDOWS: + with expect_timeout(): + wait(proc, timeout=TINY_INTERVAL) + + proc = start("echo hello") + sleep(TINY_INTERVAL) + stop(proc) + + proc = start("sleep 10") + stop(proc) + + proc = start("sleep 10") + kill(proc) + sleep(TINY_INTERVAL) + stop(proc) + + proc = start("date --not-there") + sleep(TINY_INTERVAL) + stop(proc) + + with start("sleep 10"): + sleep(TINY_INTERVAL) + + with working_dir(): + touch("i") + + with start("date", stdin="i", stdout="o", stderr="e"): + pass + + with expect_system_exit(): + exit() + + with expect_system_exit(): + exit(verbose=True) + + with expect_system_exit(): + exit("abc") + + with expect_system_exit(): + exit("abc", verbose=True) + + with 
expect_system_exit(): + exit(Exception()) + + with expect_system_exit(): + exit(Exception(), verbose=True) + + with expect_system_exit(): + exit(123) + + with expect_system_exit(): + exit(123, verbose=True) + + with expect_system_exit(): + exit(-123) + + with expect_exception(PlanoException): + exit(object()) + +@test +def string_operations(): + result = replace("ab", "a", "b") + assert result == "bb", result + + result = replace("aba", "a", "b", count=1) + assert result == "bba", result + + result = remove_prefix(None, "xxx") + assert result == "", result + + result = remove_prefix("anterior", "ant") + assert result == "erior", result + + result = remove_prefix("anterior", "ext") + assert result == "anterior", result + + result = remove_suffix(None, "xxx") + assert result == "", result + + result = remove_suffix("exterior", "ior") + assert result == "exter", result + + result = remove_suffix("exterior", "nal") + assert result == "exterior" + + result = shorten("abc", 2) + assert result == "ab", result + + result = shorten("abc", None) + assert result == "abc", result + + result = shorten("abc", 10) + assert result == "abc", result + + result = shorten("ellipsis", 6, ellipsis="...") + assert result == "ell...", result + + result = shorten(None, 6) + assert result == "", result + + result = plural(None) + assert result == "", result + + result = plural("") + assert result == "", result + + result = plural("test") + assert result == "tests", result + + result = plural("test", 1) + assert result == "test", result + + result = plural("bus") + assert result == "busses", result + + result = plural("bus", 1) + assert result == "bus", result + + result = plural("terminus", 2, "termini") + assert result == "termini", result + + result = capitalize(None) + assert result == "", result + + result = capitalize("") + assert result == "", result + + result = capitalize("hello, Frank") + assert result == "Hello, Frank", result + + encoded_result = base64_encode(b"abc") + decoded_result = base64_decode(encoded_result) + assert decoded_result == b"abc", decoded_result + + encoded_result = url_encode("abc=123&yeah!") + decoded_result = url_decode(encoded_result) + assert decoded_result == "abc=123&yeah!", decoded_result + + result = parse_url("http://example.net/index.html") + assert result.hostname == "example.net" + +@test +def temp_operations(): + system_temp_dir = get_system_temp_dir() + + result = make_temp_file() + assert result.startswith(system_temp_dir), result + + result = make_temp_file(suffix=".txt") + assert result.endswith(".txt"), result + + result = make_temp_dir() + assert result.startswith(system_temp_dir), result + + with temp_dir() as d: + assert is_dir(d), d + list_dir(d) + + with temp_file() as f: + assert is_file(f), f + write(f, "test") + + with working_dir() as d: + assert is_dir(d), d + list_dir(d) + + user_temp_dir = get_user_temp_dir() + assert user_temp_dir, user_temp_dir + + ENV.pop("XDG_RUNTIME_DIR", None) + + user_temp_dir = get_user_temp_dir() + assert user_temp_dir, user_temp_dir + +@test +def test_operations(): + with test_project(): + with working_module_path("src"): + import chucker + import chucker.tests + + print_tests(chucker.tests) + + for verbose in (False, True): + # Module 'chucker' has no tests + with expect_error(): + run_tests(chucker, verbose=verbose) + + run_tests(chucker.tests, verbose=verbose) + run_tests(chucker.tests, exclude="*hello*", verbose=verbose) + run_tests(chucker.tests, enable="skipped", verbose=verbose) + + with expect_error(): + 
run_tests(chucker.tests, enable="skipped", unskip="*skipped*", verbose=verbose) + + with expect_error(): + run_tests(chucker.tests, enable="*badbye*", verbose=verbose) + + with expect_error(): + run_tests(chucker.tests, enable="*badbye*", fail_fast=True, verbose=verbose) + + with expect_exception(KeyboardInterrupt): + run_tests(chucker.tests, enable="keyboard-interrupt", verbose=verbose) + + with expect_error(): + run_tests(chucker.tests, enable="timeout", verbose=verbose) + + with expect_error(): + run_tests(chucker.tests, enable="process-error", verbose=verbose) + + with expect_error(): + run_tests(chucker.tests, enable="system-exit", verbose=verbose) + + with expect_system_exit(): + PlanoTestCommand().main(["--module", "nosuchmodule"]) + + def run_command(*args): + PlanoTestCommand(chucker.tests).main(args) + + run_command("--verbose") + run_command("--list") + + with expect_system_exit(): + run_command("--enable", "*badbye*") + + with expect_system_exit(): + run_command("--enable", "*badbye*", "--verbose") + + try: + with expect_exception(): + pass + raise Exception() # pragma: nocover + except AssertionError: + pass + + with expect_output(equals="abc123", contains="bc12", startswith="abc", endswith="123") as out: + write(out, "abc123") + +@test +def time_operations(): + start_time = get_time() + + sleep(TINY_INTERVAL) + + assert get_time() - start_time > TINY_INTERVAL + + start_datetime = get_datetime() + + sleep(TINY_INTERVAL) + + assert get_datetime() - start_datetime > _datetime.timedelta(seconds=TINY_INTERVAL) + + timestamp = format_timestamp() + result = parse_timestamp(timestamp) + assert format_timestamp(result) == timestamp + + result = parse_timestamp(None) + assert result is None + + earlier = get_datetime() + result = format_date() + later = _datetime.datetime.strptime(result, "%d %B %Y") + later = later.replace(tzinfo=_datetime.timezone.utc) + assert later - earlier < _datetime.timedelta(days=1) + + now = get_datetime() + result = format_date(now) + assert result == f"{now.day} {now.strftime('%B')} {now.strftime('%Y')}" + + now = get_datetime() + result = format_time() + later = _datetime.datetime.strptime(result, "%H:%M:%S") + later = later.replace(tzinfo=_datetime.timezone.utc) + assert later - earlier < _datetime.timedelta(seconds=1) + + now = get_datetime() + result = format_time(now) + assert result == f"{now.hour}:{now.strftime('%M')}:{now.strftime('%S')}" + + now = get_datetime() + result = format_time(now, precision="minute") + assert result == f"{now.hour}:{now.strftime('%M')}" + + result = format_duration(0.1) + assert result == "0.1s", result + + result = format_duration(1) + assert result == "1s", result + + result = format_duration(1, align=True) + assert result == "1.0s", result + + result = format_duration(60) + assert result == "60s", result + + result = format_duration(3600) + assert result == "1h", result + + with expect_system_exit(): + with start("sleep 10"): + from plano import _default_sigterm_handler + _default_sigterm_handler(_signal.SIGTERM, None) + + with Timer() as timer: + sleep(TINY_INTERVAL) + assert timer.elapsed_time > TINY_INTERVAL + + assert timer.elapsed_time > TINY_INTERVAL + + if not WINDOWS: + with expect_timeout(): + with Timer(timeout=TINY_INTERVAL) as timer: + sleep(10) + +@test +def unique_id_operations(): + id1 = get_unique_id() + id2 = get_unique_id() + + assert id1 != id2, (id1, id2) + + result = get_unique_id(1) + assert len(result) == 2 + + result = get_unique_id(16) + assert len(result) == 32 + +@test +def value_operations(): 
+ result = nvl(None, "a") + assert result == "a", result + + result = nvl("b", "a") + assert result == "b", result + + assert is_string("a") + assert not is_string(1) + + for value in (None, "", (), [], {}): + assert is_empty(value), value + + for value in (object(), " ", (1,), [1], {"a": 1}): + assert not is_empty(value), value + + result = pformat({"z": 1, "a": 2}) + assert result == "{'a': 2, 'z': 1}", result + + result = format_empty((), "[nothing]") + assert result == "[nothing]", result + + result = format_empty((1,), "[nothing]") + assert result == (1,), result + + result = format_not_empty("abc", "[{}]") + assert result == "[abc]", result + + result = format_not_empty({}, "[{}]") + assert result == {}, result + + result = format_repr(Namespace(a=1, b=2), limit=1) + assert result == "Namespace(a=1)", result + + result = Namespace(a=1, b=2) + assert result.a == 1, result + assert result.b == 2, result + assert "a" in result, result + assert "c" not in result, result + repr(result) + + other = Namespace(a=1, b=2, c=3) + assert result != other, (result, other) + +@test +def yaml_operations(): + try: + import yaml as _yaml + except ImportError: # pragma: nocover + raise PlanoTestSkipped("PyYAML is not available") + + with working_dir(): + input_data = { + "alpha": [1, 2, 3], + } + + file_a = write_yaml("a", input_data) + output_data = read_yaml(file_a) + + assert input_data == output_data, (input_data, output_data) + + yaml = read(file_a) + parsed_data = parse_yaml(yaml) + emitted_yaml = emit_yaml(input_data) + + assert input_data == parsed_data, (input_data, parsed_data) + assert yaml == emitted_yaml, (yaml, emitted_yaml) + + with expect_output(equals=emitted_yaml) as out: + with open(out, "w") as f: + print_yaml(input_data, file=f, end="") + +@command +def prancer(): + notice("Base prancer") + +@command +def vixen(): + prancer() + +@test +def plano_command(): + with working_dir(): + PlanoCommand().main([]) + + PlanoCommand(_sys.modules[__name__]).main([]) + + PlanoCommand().main(["-m", "plano.test"]) + + with expect_system_exit(): + PlanoCommand().main(["-m", "nosuchmodule"]) + + with working_dir(): + write(".plano.py", "garbage") + + with expect_system_exit(): + PlanoCommand().main([]) + + with expect_system_exit(): + PlanoCommand().main(["-f", "no-such-file"]) + + def run_command(*args): + PlanoCommand().main(["-f", test_project_dir] + list(args)) + + with test_project(): + run_command() + run_command("--help") + run_command("--quiet") + run_command("--init-only") + + with expect_system_exit(): + run_command("no-such-command") + + with expect_system_exit(): + run_command("no-such-command", "--help") + + with expect_system_exit(): + run_command("--help", "no-such-command") + + run_command("extended-command", "a", "b", "--omega", "z") + + with expect_system_exit(): + run_command("echo") + + with expect_exception(contains="Trouble"): + run_command("echo", "Hello", "--trouble") + + run_command("echo", "Hello", "--count", "5") + + run_command("echoecho", "Greetings") + + with expect_system_exit(): + run_command("echo", "Hello", "--count", "not-an-int") + + run_command("haberdash", "ballcap", "fedora", "hardhat", "--last", "turban") + result = read_json("haberdash.json") + assert result == ["ballcap", "fedora", "hardhat", "turban"], result + + run_command("haberdash", "ballcap", "--last", "turban") + result = read_json("haberdash.json") + assert result == ["ballcap", "turban"], result + + run_command("haberdash", "ballcap") + result = read_json("haberdash.json") + assert result == 
["ballcap", "bowler"], result + + run_command("balderdash", "bunk", "poppycock") + result = read_json("balderdash.json") + assert result == ["bunk", "poppycock", "rubbish"], result + + run_command("balderdash", "bunk") + result = read_json("balderdash.json") + assert result == ["bunk", "malarkey", "rubbish"], result + + run_command("balderdash", "bunk", "--other", "bollocks") + result = read_json("balderdash.json") + assert result == ["bunk", "malarkey", "bollocks"], result + + run_command("splasher,balderdash", "claptrap") + result = read_json("splasher.json") + assert result == [1], result + result = read_json("balderdash.json") + assert result == ["claptrap", "malarkey", "rubbish"], result + + with expect_system_exit(): + run_command("no-such-command,splasher") + + with expect_system_exit(): + run_command("splasher,no-such-command-nope") + + run_command("dasher", "alpha", "--beta", "123") + + # Gamma is an unexpected arg + with expect_system_exit(): + run_command("dasher", "alpha", "--gamma", "123") + + # Args after "xyz" are extra passthrough args + run_command("dancer", "gamma", "--omega", "xyz", "extra1", "--extra2", "extra3") + result = read_json("dancer.json") + assert result == ["extra1", "--extra2", "extra3"], result + + # Ensure indirect calls (through parent commands) are specialized + run_command("vixen") + assert exists("prancer.json") + + with expect_system_exit(): + run_command("no-parent") + + run_command("feta", "--spinach", "oregano") + result = read_json("feta.json") + assert result == "oregano" + + run_command("invisible") + result = read_json("invisible.json") + assert result == "nothing" + +def main(): + PlanoTestCommand(_sys.modules[__name__]).main() + +if __name__ == "__main__": # pragma: nocover + main() diff --git a/external/skewer-main/external/plano-main/src/plano/command.py b/external/skewer-main/external/plano-main/src/plano/command.py new file mode 100644 index 0000000..0439f6b --- /dev/null +++ b/external/skewer-main/external/plano-main/src/plano/command.py @@ -0,0 +1,515 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +from .main import * + +import argparse as _argparse +import importlib as _importlib +import inspect as _inspect +import os as _os +import sys as _sys +import traceback as _traceback + +class BaseCommand: + initial_logging_level = "warning" + verbose_logging_level = "notice" + quiet_logging_level = "error" + + def main(self, args=None): + if args is None: + args = ARGS[1:] + + args = self.parse_args(args) + + assert isinstance(args, _argparse.Namespace), args + + self.verbose = args.verbose or args.debug + self.quiet = args.quiet + self.debug = args.debug + self.init_only = args.init_only + + level = self.initial_logging_level + + if self.verbose: + level = self.verbose_logging_level + + if self.quiet: + level = self.quiet_logging_level + + if self.debug: + level = "debug" + + with logging_enabled(level=level): + try: + self.init(args) + + if self.init_only: + return + + self.run() + except KeyboardInterrupt: + pass + except PlanoError as e: + if self.debug: + _traceback.print_exc() + exit(1) + else: + exit(str(e)) + + def parse_args(self, args): # pragma: nocover + raise NotImplementedError() + + def init(self, args): # pragma: nocover + pass + + def run(self): # pragma: nocover + raise NotImplementedError() + +class BaseArgumentParser(_argparse.ArgumentParser): + def __init__(self, **kwargs): + super().__init__(**kwargs) + + self.allow_abbrev = False + self.formatter_class = _argparse.RawDescriptionHelpFormatter + + self.add_argument("--verbose", action="store_true", + help="Print detailed logging to the console") + self.add_argument("--quiet", action="store_true", + help="Print no logging to the console") + self.add_argument("--debug", action="store_true", + help="Print debugging output to the console") + self.add_argument("--init-only", action="store_true", + help=_argparse.SUPPRESS) + + _capitalize_help(self) + +_plano_command = None + +class PlanoCommand(BaseCommand): + initial_logging_level = "notice" + verbose_logging_level = "debug" + + def __init__(self, module=None, description="Run commands defined as Python functions", epilog=None): + self.module = module + self.bound_commands = dict() + self.running_commands = list() + self.passthrough_args = None + + assert self.module is None or _inspect.ismodule(self.module), self.module + + self.pre_parser = BaseArgumentParser(description=description, add_help=False) + self.pre_parser.add_argument("-h", "--help", action="store_true", + help="Show this help message and exit") + + if self.module is None: + self.pre_parser.add_argument("-f", "--file", help="Load commands from FILE (default '.plano.py')") + self.pre_parser.add_argument("-m", "--module", help="Load commands from MODULE") + + self.parser = _argparse.ArgumentParser(parents=(self.pre_parser,), + description=description, epilog=epilog, + add_help=False, allow_abbrev=False) + + # This is intentionally added after self.pre_parser is passed + # as parent to self.parser, since it is used only in the + # preliminary parsing. 
+ self.pre_parser.add_argument("command", nargs="?", help=_argparse.SUPPRESS) + + global _plano_command + _plano_command = self + + def parse_args(self, args): + pre_args, _ = self.pre_parser.parse_known_args(args) + + if self.module is None: + if pre_args.module is None: + self.module = self._load_file(pre_args.file) + else: + self.module = self._load_module(pre_args.module) + + if self.module is not None: + self._bind_commands(self.module) + + self._process_commands() + + self.preceding_commands = list() + + if pre_args.command is not None and "," in pre_args.command: + names = pre_args.command.split(",") + + for name in names[:-1]: + try: + self.preceding_commands.append(self.bound_commands[name]) + except KeyError: + self.parser.error(f"Command '{name}' is unknown") + + args[args.index(pre_args.command)] = names[-1] + + args, self.passthrough_args = self.parser.parse_known_args(args) + + return args + + def init(self, args): + self.help = args.help + + self.selected_command = None + self.command_args = list() + self.command_kwargs = dict() + + if args.command is not None: + for command in self.preceding_commands: + command() + + self.selected_command = self.bound_commands[args.command] + + if not self.selected_command.passthrough and self.passthrough_args: + self.parser.error(f"unrecognized arguments: {' '.join(self.passthrough_args)}") + + for param in self.selected_command.parameters.values(): + if param.name == "passthrough_args": + continue + + if param.positional: + if param.multiple: + self.command_args.extend(getattr(args, param.name)) + else: + self.command_args.append(getattr(args, param.name)) + else: + self.command_kwargs[param.name] = getattr(args, param.name) + + if self.selected_command.passthrough: + self.command_kwargs["passthrough_args"] = self.passthrough_args + + def run(self): + if self.help or self.module is None or self.selected_command is None: + self.parser.print_help() + return + + with Timer() as timer: + self.selected_command(*self.command_args, **self.command_kwargs) + + cprint("OK", color="green", file=_sys.stderr, end="") + cprint(" ({})".format(format_duration(timer.elapsed_time)), color="magenta", file=_sys.stderr) + + def _load_module(self, name): + try: + return _importlib.import_module(name) + except ImportError: + exit("Module '{}' not found", name) + + def _load_file(self, path): + if path is not None and is_dir(path): + path = self._find_file(path) + + if path is not None and not is_file(path): + exit("File '{}' not found", path) + + if path is None: + path = self._find_file(get_current_dir()) + + if path is None: + return + + debug("Loading '{}'", path) + + _sys.path.insert(0, join(get_parent_dir(path), "python")) + + spec = _importlib.util.spec_from_file_location("_plano", path) + module = _importlib.util.module_from_spec(spec) + _sys.modules["_plano"] = module + + try: + spec.loader.exec_module(module) + except Exception as e: + error(e) + exit("Failure loading {}: {}", path, str(e)) + + return module + + def _find_file(self, dir): + # Planofile and .planofile remain temporarily for backward compatibility + for name in (".plano.py", "Planofile", ".planofile"): + path = join(dir, name) + + if is_file(path): + return path + + def _bind_commands(self, module): + for var in vars(module).values(): + if callable(var) and var.__class__.__name__ == "Command": + self.bound_commands[var.name] = var + + def _process_commands(self): + subparsers = self.parser.add_subparsers(title="commands", dest="command", metavar="{command}") + + for command in 
self.bound_commands.values(): + # This doesn't work yet, but in the future it might. + # https://bugs.python.org/issue22848 + # + # help = _argparse.SUPPRESS if command.hidden else command.help + + help = "[internal]" if command.hidden else command.help + add_help = False if command.passthrough else True + description = nvl(command.description, command.help) + + subparser = subparsers.add_parser(command.name, help=help, add_help=add_help, description=description, + formatter_class=_argparse.RawDescriptionHelpFormatter) + + for param in command.parameters.values(): + if param.positional: + if param.multiple: + subparser.add_argument(param.name, metavar=param.metavar, type=param.type, help=param.help, + nargs="*") + elif param.optional: + subparser.add_argument(param.name, metavar=param.metavar, type=param.type, help=param.help, + nargs="?", default=param.default) + else: + subparser.add_argument(param.name, metavar=param.metavar, type=param.type, help=param.help) + else: + flag_args = list() + + if param.short_option is not None: + flag_args.append("-{}".format(param.short_option)) + + flag_args.append("--{}".format(param.display_name)) + + help = param.help + + if param.default not in (None, False): + if help is None: + help = "Default value is {}".format(repr(param.default)) + else: + help += " (default {})".format(repr(param.default)) + + if param.default is False: + subparser.add_argument(*flag_args, dest=param.name, default=param.default, action="store_true", + help=help) + else: + subparser.add_argument(*flag_args, dest=param.name, default=param.default, + metavar=param.metavar, type=param.type, help=help) + + _capitalize_help(subparser) + +_command_help = { + "build": "Build artifacts from source", + "clean": "Clean up the source tree", + "dist": "Generate distribution artifacts", + "install": "Install the built artifacts on your system", + "test": "Run the tests", +} + +def command(_function=None, name=None, parameters=None, parent=None, passthrough=False, hidden=False): + class Command: + def __init__(self, function): + self.function = function + self.module = _inspect.getmodule(self.function) + + self.name = name + self.parent = parent + + if self.parent is None: + # Strip trailing underscores and convert remaining + # underscores to hyphens + default = self.function.__name__.rstrip("_").replace("_", "-") + + self.name = nvl(self.name, default) + self.parameters = self._process_parameters(parameters) + else: + assert parameters is None + + self.name = nvl(self.name, self.parent.name) + self.parameters = self.parent.parameters + + doc = _inspect.getdoc(self.function) + + if doc is None: + self.help = _command_help.get(self.name) + self.description = self.help + else: + self.help = doc.split("\n")[0] + self.description = doc + + if self.parent is not None: + self.help = nvl(self.help, self.parent.help) + self.description = nvl(self.description, self.parent.description) + + self.passthrough = passthrough + self.hidden = hidden + + debug("Defining {}", self) + + for param in self.parameters.values(): + debug(" {}", str(param).capitalize()) + + def __repr__(self): + return "command '{}:{}'".format(self.module.__name__, self.name) + + def _process_parameters(self, cparams): + # CommandParameter objects from the @command decorator + cparams_in = {x.name: x for x in nvl(cparams, ())} + cparams_out = dict() + + # Parameter objects from the function signature + sig = _inspect.signature(self.function) + sparams = list(sig.parameters.values()) + + if len(sparams) == 2 and sparams[0].name == 
"args" and sparams[1].name == "kwargs": + # Don't try to derive command parameters from *args and **kwargs + return cparams_in + + for sparam in sparams: + try: + cparam = cparams_in[sparam.name] + except KeyError: + cparam = CommandParameter(sparam.name) + + if sparam.kind is sparam.POSITIONAL_ONLY: # pragma: nocover + if sparam.positional is None: + cparam.positional = True + elif sparam.kind is sparam.POSITIONAL_OR_KEYWORD and sparam.default is sparam.empty: + if cparam.positional is None: + cparam.positional = True + elif sparam.kind is sparam.POSITIONAL_OR_KEYWORD and sparam.default is not sparam.empty: + cparam.optional = True + cparam.default = sparam.default + elif sparam.kind is sparam.VAR_POSITIONAL: + if cparam.positional is None: + cparam.positional = True + cparam.multiple = True + elif sparam.kind is sparam.VAR_KEYWORD: + continue + elif sparam.kind is sparam.KEYWORD_ONLY: + cparam.optional = True + cparam.default = sparam.default + else: # pragma: nocover + raise NotImplementedError(sparam.kind) + + if cparam.type is None and cparam.default not in (None, False): # XXX why false? + cparam.type = type(cparam.default) + + cparams_out[cparam.name] = cparam + + return cparams_out + + def __call__(self, *args, **kwargs): + from .command import _plano_command, PlanoCommand + assert isinstance(_plano_command, PlanoCommand), _plano_command + + app = _plano_command + command = app.bound_commands[self.name] + + if command is not self: + # The command bound to this name has been overridden. + # This happens when a parent command invokes a peer + # command that is overridden. + + command(*args, **kwargs) + + return + + debug("Running {} {} {}".format(self, args, kwargs)) + + app.running_commands.append(self) + + dashes = "--- " * (len(app.running_commands) - 1) + display_args = list(self._get_display_args(args, kwargs)) + + with console_color("magenta", file=_sys.stderr): + eprint("{}--> {}".format(dashes, self.name), end="") + + if display_args: + eprint(" ({})".format(", ".join(display_args)), end="") + + eprint() + + self.function(*args, **kwargs) + + cprint("{}<-- {}".format(dashes, self.name), color="magenta", file=_sys.stderr) + + app.running_commands.pop() + + def _get_display_args(self, args, kwargs): + for i, param in enumerate(self.parameters.values()): + if param.positional: + if param.multiple: + for va in args[i:]: + yield repr(va) + elif param.optional: + value = args[i] + + if value == param.default: + continue + + yield repr(value) + else: + yield repr(args[i]) + else: + value = kwargs.get(param.name, param.default) + + if value == param.default: + continue + + if value in (True, False): + value = str(value).lower() + else: + value = repr(value) + + yield "{}={}".format(param.display_name, value) + + if _function is None: + return Command + else: + return Command(_function) + +def parent(*args, **kwargs): + try: + f_locals = _inspect.stack()[2].frame.f_locals + parent_fn = f_locals["self"].parent.function + except: + fail("Missing parent command") + + parent_fn(*args, **kwargs) + +class CommandParameter: + def __init__(self, name, display_name=None, type=None, metavar=None, help=None, short_option=None, default=None, positional=None): + self.name = name + self.display_name = nvl(display_name, self.name.replace("_", "-")) + self.type = type + self.metavar = nvl(metavar, self.display_name.upper()) + self.help = help + self.short_option = short_option + self.default = default + self.positional = positional + + self.optional = False + self.multiple = False + + def 
__repr__(self): + return "argument '{}' (default {})".format(self.name, repr(self.default)) + +# Patch the default help text +def _capitalize_help(parser): + try: + for action in parser._actions: + if action.help and action.help is not _argparse.SUPPRESS: + action.help = capitalize(action.help) + except: # pragma: nocover + pass + +def _main(): # pragma: nocover + PlanoCommand().main() diff --git a/external/skewer-main/external/plano-main/src/plano/main.py b/external/skewer-main/external/plano-main/src/plano/main.py new file mode 100644 index 0000000..7e7b6a0 --- /dev/null +++ b/external/skewer-main/external/plano-main/src/plano/main.py @@ -0,0 +1,1731 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +import base64 as _base64 +import binascii as _binascii +import code as _code +import datetime as _datetime +import fnmatch as _fnmatch +import getpass as _getpass +import json as _json +import os as _os +import pprint as _pprint +import pkgutil as _pkgutil +import random as _random +import re as _re +import shlex as _shlex +import shutil as _shutil +import signal as _signal +import socket as _socket +import subprocess as _subprocess +import sys as _sys +import tempfile as _tempfile +import time as _time +import traceback as _traceback +import urllib as _urllib +import uuid as _uuid + +_max = max + +## Exceptions + +class PlanoException(Exception): + def __init__(self, message=None): + super().__init__(message) + self.message = message + +class PlanoError(PlanoException): + pass + +class PlanoTimeout(PlanoException): + pass + +## Global variables + +ENV = _os.environ +ARGS = _sys.argv + +STDIN = _sys.stdin +STDOUT = _sys.stdout +STDERR = _sys.stderr +DEVNULL = _os.devnull + +LINUX = _sys.platform == "linux" +WINDOWS = _sys.platform in ("win32", "cygwin") + +PLANO_DEBUG = "PLANO_DEBUG" in ENV +PLANO_COLOR = "PLANO_COLOR" in ENV + +## Archive operations + +def make_archive(input_dir, output_file=None, quiet=False): + """ + group: archive_operations + """ + + check_program("tar") + + archive_stem = get_base_name(input_dir) + + if output_file is None: + output_file = "{}.tar.gz".format(join(get_current_dir(), archive_stem)) + + _notice(quiet, "Making archive {} from directory {}", repr(output_file), repr(input_dir)) + + with working_dir(get_parent_dir(input_dir)): + run("tar -czf temp.tar.gz {}".format(archive_stem)) + move("temp.tar.gz", output_file) + + return output_file + +def extract_archive(input_file, output_dir=None, quiet=False): + check_program("tar") + + if output_dir is None: + output_dir = get_current_dir() + + _notice(quiet, "Extracting archive {} to directory {}", repr(input_file), repr(output_dir)) + + input_file = get_absolute_path(input_file) + + with working_dir(output_dir): + copy(input_file, "temp.tar.gz") + + try: + run("tar -xf 
temp.tar.gz") + finally: + remove("temp.tar.gz") + + return output_dir + +def rename_archive(input_file, new_archive_stem, quiet=False): + _notice(quiet, "Renaming archive {} with stem {}", repr(input_file), repr(new_archive_stem)) + + output_dir = get_absolute_path(get_parent_dir(input_file)) + output_file = "{}.tar.gz".format(join(output_dir, new_archive_stem)) + + input_file = get_absolute_path(input_file) + + with working_dir(): + extract_archive(input_file) + + input_name = list_dir()[0] + input_dir = move(input_name, new_archive_stem) + + make_archive(input_dir, output_file=output_file) + + remove(input_file) + + return output_file + +## Console operations + +def flush(): + _sys.stdout.flush() + _sys.stderr.flush() + +def eprint(*args, **kwargs): + print(*args, file=_sys.stderr, **kwargs) + +def pprint(*args, **kwargs): + args = [pformat(x) for x in args] + print(*args, **kwargs) + +_color_codes = { + "black": "\u001b[30", + "red": "\u001b[31", + "green": "\u001b[32", + "yellow": "\u001b[33", + "blue": "\u001b[34", + "magenta": "\u001b[35", + "cyan": "\u001b[36", + "white": "\u001b[37", + "gray": "\u001b[90", +} + +_color_reset = "\u001b[0m" + +def _get_color_code(color, bright): + elems = [_color_codes[color]] + + if bright: + elems.append(";1") + + elems.append("m") + + return "".join(elems) + +def _is_color_enabled(file): + return PLANO_COLOR or hasattr(file, "isatty") and file.isatty() + +class console_color: + def __init__(self, color=None, bright=False, file=_sys.stdout): + self.file = file + self.color_code = None + + if (color, bright) != (None, False): + self.color_code = _get_color_code(color, bright) + + self.enabled = self.color_code is not None and _is_color_enabled(self.file) + + def __enter__(self): + if self.enabled: + print(self.color_code, file=self.file, end="", flush=True) + + def __exit__(self, exc_type, exc_value, traceback): + if self.enabled: + print(_color_reset, file=self.file, end="", flush=True) + +def cformat(value, color=None, bright=False, file=_sys.stdout): + if (color, bright) != (None, False) and _is_color_enabled(file): + return "".join((_get_color_code(color, bright), value, _color_reset)) + else: + return value + +def cprint(*args, **kwargs): + color = kwargs.pop("color", "white") + bright = kwargs.pop("bright", False) + file = kwargs.get("file", _sys.stdout) + + with console_color(color, bright=bright, file=file): + print(*args, **kwargs) + +class output_redirected: + def __init__(self, output, quiet=False): + self.output = output + self.quiet = quiet + + def __enter__(self): + flush() + + _notice(self.quiet, "Redirecting output to file {}", repr(self.output)) + + if is_string(self.output): + output = open(self.output, "w") + + self.prev_stdout, self.prev_stderr = _sys.stdout, _sys.stderr + _sys.stdout, _sys.stderr = output, output + + def __exit__(self, exc_type, exc_value, traceback): + flush() + + _sys.stdout, _sys.stderr = self.prev_stdout, self.prev_stderr + +try: + breakpoint +except NameError: # pragma: nocover + def breakpoint(): + import pdb + pdb.set_trace() + +def repl(locals): # pragma: nocover + _code.InteractiveConsole(locals=locals).interact() + +def print_properties(props, file=None): + size = max([len(x[0]) for x in props]) + + for prop in props: + name = "{}:".format(prop[0]) + template = "{{:<{}}} ".format(size + 1) + + print(template.format(name), prop[1], end="", file=file) + + for value in prop[2:]: + print(" {}".format(value), end="", file=file) + + print(file=file) + +## Directory operations + +def find(dirs=None, 
include="*", exclude=()): + if dirs is None: + dirs = "." + + if is_string(dirs): + dirs = (dirs,) + + if is_string(include): + include = (include,) + + if is_string(exclude): + exclude = (exclude,) + + found = set() + + for dir in dirs: + for root, dir_names, file_names in _os.walk(dir, followlinks=True): + names = dir_names + file_names + + for include_pattern in include: + names = _fnmatch.filter(names, include_pattern) + + for exclude_pattern in exclude: + for name in _fnmatch.filter(names, exclude_pattern): + names.remove(name) + + if root.startswith("./"): + root = remove_prefix(root, "./") + elif root == ".": + root = "" + + found.update([join(root, x) for x in names]) + + return sorted(found) + +def make_dir(dir, quiet=False): + if dir == "": + return dir + + if not exists(dir): + _notice(quiet, "Making directory '{}'", dir) + _os.makedirs(dir) + + return dir + +def make_parent_dir(path, quiet=False): + return make_dir(get_parent_dir(path), quiet=quiet) + +# Returns the current working directory so you can change it back +def change_dir(dir, quiet=False): + _debug(quiet, "Changing directory to {}", repr(dir)) + + prev_dir = get_current_dir() + + if not dir: + return prev_dir + + _os.chdir(dir) + + return prev_dir + +def list_dir(dir=None, include="*", exclude=()): + if dir is None: + dir = get_current_dir() + else: + dir = expand(dir) + + assert is_dir(dir), dir + + if is_string(include): + include = (include,) + + if is_string(exclude): + exclude = (exclude,) + + names = _os.listdir(dir) + + for include_pattern in include: + names = _fnmatch.filter(names, include_pattern) + + for exclude_pattern in exclude: + for name in _fnmatch.filter(names, exclude_pattern): + names.remove(name) + + return sorted(names) + +# No args constructor gets a temp dir +class working_dir: + def __init__(self, dir=None, quiet=False): + self.dir = dir + self.prev_dir = None + self.remove = False + self.quiet = quiet + + if self.dir is None: + self.dir = make_temp_dir() + self.remove = True + else: + self.dir = expand(self.dir) + + def __enter__(self): + if self.dir == ".": + return + + _notice(self.quiet, "Entering directory {}", repr(get_absolute_path(self.dir))) + + make_dir(self.dir, quiet=True) + + self.prev_dir = change_dir(self.dir, quiet=True) + + return self.dir + + def __exit__(self, exc_type, exc_value, traceback): + if self.dir == ".": + return + + _debug(self.quiet, "Returning to directory {}", repr(get_absolute_path(self.prev_dir))) + + change_dir(self.prev_dir, quiet=True) + + if self.remove: + remove(self.dir, quiet=True) + +## Environment operations + +def join_path_var(*paths): + return _os.pathsep.join(unique(skip(paths))) + +def get_current_dir(): + return _os.getcwd() + +def get_home_dir(user=None): + return _os.path.expanduser("~{}".format(user or "")) + +def get_user(): + return _getpass.getuser() + +def get_hostname(): + return _socket.gethostname() + +def get_program_name(command=None): + if command is None: + args = ARGS + else: + args = command.split() + + for arg in args: + if "=" not in arg: + return get_base_name(arg) + +def which(program_name): + return _shutil.which(program_name) + +def check_env(var, message=None): + if var not in _os.environ: + if message is None: + message = "Environment variable {} is not set".format(repr(var)) + + raise PlanoError(message) + +def check_module(module, message=None): + if _pkgutil.find_loader(module) is None: + if message is None: + message = "Python module {} is not found".format(repr(module)) + + raise PlanoError(message) + +def 
check_program(program, message=None): + if which(program) is None: + if message is None: + message = "Program {} is not found".format(repr(program)) + + raise PlanoError(message) + +class working_env: + def __init__(self, **vars): + self.amend = vars.pop("amend", True) + self.vars = vars + + def __enter__(self): + self.prev_vars = dict(_os.environ) + + if not self.amend: + for name, value in list(_os.environ.items()): + if name not in self.vars: + del _os.environ[name] + + for name, value in self.vars.items(): + _os.environ[name] = str(value) + + def __exit__(self, exc_type, exc_value, traceback): + for name, value in self.prev_vars.items(): + _os.environ[name] = value + + for name, value in self.vars.items(): + if name not in self.prev_vars: + del _os.environ[name] + +class working_module_path: + def __init__(self, path, amend=True): + if is_string(path): + if not is_absolute(path): + path = get_absolute_path(path) + + path = [path] + + if amend: + path = path + _sys.path + + self.path = path + + def __enter__(self): + self.prev_path = _sys.path + _sys.path = self.path + + def __exit__(self, exc_type, exc_value, traceback): + _sys.path = self.prev_path + +def print_env(file=None): + props = ( + ("ARGS", ARGS), + ("ENV['PATH']", ENV.get("PATH")), + ("ENV['PYTHONPATH']", ENV.get("PYTHONPATH")), + ("sys.executable", _sys.executable), + ("sys.path", _sys.path), + ("sys.version", _sys.version.replace("\n", "")), + ("get_current_dir()", get_current_dir()), + ("get_home_dir()", get_home_dir()), + ("get_hostname()", get_hostname()), + ("get_program_name()", get_program_name()), + ("get_user()", get_user()), + ("plano.__file__", __file__), + ("which('plano')", which("plano")), + ) + + print_properties(props, file=file) + +## File operations + +def touch(file, quiet=False): + file = expand(file) + + _notice(quiet, "Touching {}", repr(file)) + + try: + _os.utime(file, None) + except OSError: + append(file, "") + + return file + +# symlinks=True - Preserve symlinks +# inside=True - Place from_path inside to_path if to_path is a directory +def copy(from_path, to_path, symlinks=True, inside=True, quiet=False): + from_path = expand(from_path) + to_path = expand(to_path) + + _notice(quiet, "Copying {} to {}", repr(from_path), repr(to_path)) + + if is_dir(to_path) and inside: + to_path = join(to_path, get_base_name(from_path)) + else: + make_parent_dir(to_path, quiet=True) + + if is_dir(from_path): + for name in list_dir(from_path): + copy(join(from_path, name), join(to_path, name), symlinks=symlinks, inside=False, quiet=True) + + _shutil.copystat(from_path, to_path) + elif is_link(from_path) and symlinks: + make_link(to_path, read_link(from_path), quiet=True) + else: + _shutil.copy2(from_path, to_path) + + return to_path + +# inside=True - Place from_path inside to_path if to_path is a directory +def move(from_path, to_path, inside=True, quiet=False): + from_path = expand(from_path) + to_path = expand(to_path) + + _notice(quiet, "Moving {} to {}", repr(from_path), repr(to_path)) + + to_path = copy(from_path, to_path, inside=inside, quiet=True) + remove(from_path, quiet=True) + + return to_path + +def remove(paths, quiet=False): + if is_string(paths): + paths = (paths,) + + for path in paths: + path = expand(path) + + if not exists(path): + continue + + _debug(quiet, "Removing {}", repr(path)) + + if is_dir(path): + _shutil.rmtree(path, ignore_errors=True) + else: + _os.remove(path) + +def get_file_size(file): + file = expand(file) + return _os.path.getsize(file) + +## IO operations + +def read(file): + 
file = expand(file) + + with open(file) as f: + return f.read() + +def write(file, string): + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "w") as f: + f.write(string) + + return file + +def append(file, string): + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "a") as f: + f.write(string) + + return file + +def prepend(file, string): + file = expand(file) + + orig = read(file) + + return write(file, string + orig) + +def tail(file, count): + file = expand(file) + return "".join(tail_lines(file, count)) + +def read_lines(file): + file = expand(file) + + with open(file) as f: + return f.readlines() + +def write_lines(file, lines): + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "w") as f: + f.writelines(lines) + + return file + +def append_lines(file, lines): + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "a") as f: + f.writelines(lines) + + return file + +def prepend_lines(file, lines): + file = expand(file) + + orig_lines = read_lines(file) + + make_parent_dir(file, quiet=True) + + with open(file, "w") as f: + f.writelines(lines) + f.writelines(orig_lines) + + return file + +def tail_lines(file, count): + assert count >= 0, count + + lines = read_lines(file) + + return lines[-count:] + +def replace_in_file(file, expr, replacement, count=0): + file = expand(file) + return write(file, replace(read(file), expr, replacement, count=count)) + +def concatenate(file, input_files): + file = expand(file) + + assert file not in input_files + + make_parent_dir(file, quiet=True) + + with open(file, "wb") as f: + for input_file in input_files: + if not exists(input_file): + continue + + with open(input_file, "rb") as inf: + _shutil.copyfileobj(inf, f) + + return file + +## Iterable operations + +def unique(iterable): + return list(dict.fromkeys(iterable).keys()) + +def skip(iterable, values=(None, "", (), [], {})): + if is_scalar(values): + values = (values,) + + items = list() + + for item in iterable: + if item not in values: + items.append(item) + + return items + +## JSON operations + +def read_json(file): + file = expand(file) + + with open(file) as f: + return _json.load(f) + +def write_json(file, data): + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "w") as f: + _json.dump(data, f, indent=4, separators=(",", ": "), sort_keys=True) + + return file + +def parse_json(json): + return _json.loads(json) + +def emit_json(data): + return _json.dumps(data, indent=4, separators=(",", ": "), sort_keys=True) + +def print_json(data, **kwargs): + print(emit_json(data), **kwargs) + +## HTTP operations + +def _run_curl(method, url, content=None, content_file=None, content_type=None, output_file=None, insecure=False, + user=None, password=None): + check_program("curl") + + args = ["curl", "-sfL"] + + if method != "GET": + args.extend(["-X", method]) + + if content is not None: + assert content_file is None + args.extend(["-H", "Expect:", "-d", "@-"]) + + if content_file is not None: + assert content is None, content + args.extend(["-H", "Expect:", "-d", f"@{content_file}"]) + + if content_type is not None: + args.extend(["-H", f"'Content-Type: {content_type}'"]) + + if output_file is not None: + args.extend(["-o", output_file]) + + if insecure: + args.append("--insecure") + + if user is not None: + assert password is not None + args.extend(["--user", f"{user}:{password}"]) + + args.append(url) + + if output_file is not None: + 
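+        # Make sure the destination's parent directory exists before curl writes the response to output_file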
make_parent_dir(output_file, quiet=True) + + proc = run(args, stdin=_subprocess.PIPE, stdout=_subprocess.PIPE, stderr=_subprocess.PIPE, + input=content, check=False, quiet=True) + + if proc.exit_code > 0: + raise PlanoProcessError(proc) + + if output_file is None: + return proc.stdout_result + +def http_get(url, output_file=None, insecure=False, user=None, password=None): + return _run_curl("GET", url, output_file=output_file, insecure=insecure, user=user, password=password) + +def http_get_json(url, insecure=False, user=None, password=None): + return parse_json(http_get(url, insecure=insecure, user=user, password=password)) + +def http_put(url, content, content_type=None, insecure=False, user=None, password=None): + _run_curl("PUT", url, content=content, content_type=content_type, insecure=insecure, user=user, password=password) + +def http_put_file(url, content_file, content_type=None, insecure=False, user=None, password=None): + _run_curl("PUT", url, content_file=content_file, content_type=content_type, insecure=insecure, user=user, + password=password) + +def http_put_json(url, data, insecure=False, user=None, password=None): + http_put(url, emit_json(data), content_type="application/json", insecure=insecure, user=user, password=password) + +def http_post(url, content, content_type=None, output_file=None, insecure=False, user=None, password=None): + return _run_curl("POST", url, content=content, content_type=content_type, output_file=output_file, + insecure=insecure, user=user, password=password) + +def http_post_file(url, content_file, content_type=None, output_file=None, insecure=False, user=None, password=None): + return _run_curl("POST", url, content_file=content_file, content_type=content_type, output_file=output_file, + insecure=insecure, user=user, password=password) + +def http_post_json(url, data, insecure=False, user=None, password=None): + return parse_json(http_post(url, emit_json(data), content_type="application/json", insecure=insecure, user=user, + password=password)) + +## Link operations + +def make_link(path: str, linked_path: str, quiet=False) -> str: + _notice(quiet, "Making symlink {} to {}", repr(path), repr(linked_path)) + + make_parent_dir(path, quiet=True) + remove(path, quiet=True) + + _os.symlink(linked_path, path) + + return path + +def read_link(path): + return _os.readlink(path) + +## Logging operations + +_logging_levels = ( + "debug", + "notice", + "warning", + "error", + "disabled", +) + +_DEBUG = _logging_levels.index("debug") +_NOTICE = _logging_levels.index("notice") +_WARNING = _logging_levels.index("warning") +_ERROR = _logging_levels.index("error") +_DISABLED = _logging_levels.index("disabled") + +_logging_output = None +_logging_threshold = _NOTICE +_logging_contexts = list() + +def enable_logging(level="notice", output=None, quiet=False): + assert level in _logging_levels, level + + _notice(quiet, "Enabling logging (level={}, output={})", repr(level), repr(nvl(output, "stderr"))) + + global _logging_threshold + _logging_threshold = _logging_levels.index(level) + + if is_string(output): + output = open(output, "w") + + global _logging_output + _logging_output = output + +def disable_logging(quiet=False): + _notice(quiet, "Disabling logging") + + global _logging_threshold + _logging_threshold = _DISABLED + +class logging_enabled: + def __init__(self, level="notice", output=None): + self.level = level + self.output = output + + def __enter__(self): + self.prev_level = _logging_levels[_logging_threshold] + self.prev_output = _logging_output + + if 
self.level == "disabled": + disable_logging(quiet=True) + else: + enable_logging(level=self.level, output=self.output, quiet=True) + + def __exit__(self, exc_type, exc_value, traceback): + if self.prev_level == "disabled": + disable_logging(quiet=True) + else: + enable_logging(level=self.prev_level, output=self.prev_output, quiet=True) + +class logging_disabled(logging_enabled): + def __init__(self): + super().__init__(level="disabled") + +class logging_context: + def __init__(self, name): + self.name = name + + def __enter__(self): + _logging_contexts.append(self.name) + + def __exit__(self, exc_type, exc_value, traceback): + _logging_contexts.pop() + +def fail(message, *args): + error(message, *args) + + if isinstance(message, BaseException): + raise message + + raise PlanoError(message.format(*args)) + +def error(message, *args): + log(_ERROR, message, *args) + +def warning(message, *args): + log(_WARNING, message, *args) + +def notice(message, *args): + log(_NOTICE, message, *args) + +def debug(message, *args): + log(_DEBUG, message, *args) + +def log(level, message, *args): + if is_string(level): + level = _logging_levels.index(level) + + if _logging_threshold <= level: + _print_message(level, message, args) + +def _print_message(level, message, args): + line = list() + out = nvl(_logging_output, _sys.stderr) + + program_text = "{}:".format(get_program_name()) + + line.append(cformat(program_text, color="gray")) + + level_text = "{}:".format(_logging_levels[level]).ljust(8) + level_color = ("white", "cyan", "yellow", "red", None)[level] + level_bright = (False, False, False, True, False)[level] + + line.append(cformat(level_text, color=level_color, bright=level_bright)) + + for name in _logging_contexts: + line.append(cformat("{}:".format(name), color="yellow")) + + if isinstance(message, BaseException): + exception = message + + line.append(type(exception).__name__) + line.append(str(exception)) + + print(" ".join(line), file=out) + + if hasattr(exception, "__traceback__"): + _traceback.print_exception(type(exception), exception, exception.__traceback__, file=out) + else: + message = str(message) + + if args: + message = message.format(*args) + + line.append(capitalize(message)) + + print(" ".join(line), file=out) + + out.flush() + +def _notice(quiet, message, *args): + if quiet: + debug(message, *args) + else: + notice(message, *args) + +def _debug(quiet, message, *args): + if not quiet: + debug(message, *args) + +## Path operations + +def expand(path): + path = _os.path.expanduser(path) + path = _os.path.expandvars(path) + + return path + +def get_absolute_path(path): + path = expand(path) + return _os.path.abspath(path) + +def normalize_path(path): + path = expand(path) + return _os.path.normpath(path) + +def get_real_path(path): + path = expand(path) + return _os.path.realpath(path) + +def get_relative_path(path, start=None): + path = expand(path) + return _os.path.relpath(path, start=start) + +def get_file_url(path): + path = expand(path) + return "file:{}".format(get_absolute_path(path)) + +def exists(path): + path = expand(path) + return _os.path.lexists(path) + +def is_absolute(path): + path = expand(path) + return _os.path.isabs(path) + +def is_dir(path): + path = expand(path) + return _os.path.isdir(path) + +def is_file(path): + path = expand(path) + return _os.path.isfile(path) + +def is_link(path): + path = expand(path) + return _os.path.islink(path) + +def join(*paths): + paths = [expand(x) for x in paths] + + path = _os.path.join(*paths) + path = normalize_path(path) + 
+ return path + +def split(path): + path = expand(path) + path = normalize_path(path) + parent, child = _os.path.split(path) + + return parent, child + +def split_extension(path): + path = expand(path) + path = normalize_path(path) + root, ext = _os.path.splitext(path) + + return root, ext + +def get_parent_dir(path): + path = expand(path) + path = normalize_path(path) + parent, child = split(path) + + return parent + +def get_base_name(path): + path = expand(path) + path = normalize_path(path) + parent, name = split(path) + + return name + +def get_name_stem(file): + file = expand(file) + name = get_base_name(file) + + if name.endswith(".tar.gz"): + name = name[:-3] + + stem, ext = split_extension(name) + + return stem + +def get_name_extension(file): + file = expand(file) + name = get_base_name(file) + stem, ext = split_extension(name) + + return ext + +def _check_path(path, test_func, message): + path = expand(path) + + if not test_func(path): + parent_dir = get_parent_dir(path) + + if is_dir(parent_dir): + found_paths = ", ".join([repr(x) for x in list_dir(parent_dir)]) + message = "{}. The parent directory contains: {}".format(message.format(repr(path)), found_paths) + else: + message = "{}".format(message.format(repr(path))) + + raise PlanoError(message) + +def check_exists(path): + path = expand(path) + _check_path(path, exists, "File or directory {} not found") + +def check_file(path): + path = expand(path) + _check_path(path, is_file, "File {} not found") + +def check_dir(path): + path = expand(path) + _check_path(path, is_dir, "Directory {} not found") + +def await_exists(path, timeout=30, quiet=False): + path = expand(path) + + _notice(quiet, "Waiting for path {} to exist", repr(path)) + + timeout_message = "Timed out waiting for path {} to exist".format(path) + period = 0.03125 + + with Timer(timeout=timeout, timeout_message=timeout_message) as timer: + while True: + try: + check_exists(path) + except PlanoError: + sleep(period, quiet=True) + period = min(1, period * 2) + else: + return + +## Port operations + +def get_random_port(min=49152, max=65535): + ports = [_random.randint(min, max) for _ in range(3)] + + for port in ports: + try: + check_port(port) + except PlanoError: + return port + + raise PlanoError("Random ports unavailable") + +def check_port(port, host="localhost"): + sock = _socket.socket(_socket.AF_INET, _socket.SOCK_STREAM) + sock.setsockopt(_socket.SOL_SOCKET, _socket.SO_REUSEADDR, 1) + + if sock.connect_ex((host, port)) != 0: + raise PlanoError("Port {} (host {}) is not reachable".format(repr(port), repr(host))) + +def await_port(port, host="localhost", timeout=30, quiet=False): + _notice(quiet, "Waiting for port {}", port) + + if is_string(port): + port = int(port) + + timeout_message = "Timed out waiting for port {} to open".format(port) + period = 0.03125 + + with Timer(timeout=timeout, timeout_message=timeout_message) as timer: + while True: + try: + check_port(port, host=host) + except PlanoError: + sleep(period, quiet=True) + period = min(1, period * 2) + else: + return + +## Process operations + +def get_process_id(): + return _os.getpid() + +def _format_command(command, represent=True): + if is_string(command): + args = _shlex.split(command) + else: + args = command + + args = [expand(str(x)) for x in args] + command = " ".join(args) + + if represent: + return repr(command) + else: + return command + +# quiet=False - Don't log at notice level +# stash=False - No output unless there is an error +# output= - Send stdout and stderr to a file +# stdin= - 
XXX +# stdout= - Send stdout to a file +# stderr= - Send stderr to a file +# shell=False - XXX +def start(command, stdin=None, stdout=None, stderr=None, output=None, shell=False, stash=False, quiet=False): + _notice(quiet, "Starting a new process (command {})", _format_command(command)) + + if output is not None: + stdout, stderr = output, output + + if is_string(stdin): + stdin = expand(stdin) + stdin = open(stdin, "r") + + if is_string(stdout): + stdout = expand(stdout) + stdout = open(stdout, "w") + + if is_string(stderr): + stderr = expand(stderr) + stderr = open(stderr, "w") + + if stdin is None: + stdin = _sys.stdin + + if stdout is None: + stdout = _sys.stdout + + if stderr is None: + stderr = _sys.stderr + + stash_file = None + + if stash: + stash_file = make_temp_file() + out = open(stash_file, "w") + stdout = out + stderr = out + + if shell: + if is_string(command): + args = command + else: + args = " ".join(map(str, command)) + else: + if is_string(command): + args = _shlex.split(command) + else: + args = command + + args = [expand(str(x)) for x in args] + + try: + proc = PlanoProcess(args, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=True, stash_file=stash_file) + except OSError as e: + raise PlanoError("Command {}: {}".format(_format_command(command), str(e))) + + _notice(quiet, "{} started", proc) + + return proc + +def stop(proc, timeout=None, quiet=False): + _notice(quiet, "Stopping {}", proc) + + if proc.poll() is not None: + if proc.exit_code == 0: + debug("{} already exited normally", proc) + elif proc.exit_code == -(_signal.SIGTERM): + debug("{} was already terminated", proc) + else: + debug("{} already exited with code {}", proc, proc.exit_code) + + return proc + + kill(proc, quiet=True) + + return wait(proc, timeout=timeout, quiet=True) + +def kill(proc, quiet=False): + _notice(quiet, "Killing {}", proc) + + proc.terminate() + +def wait(proc, timeout=None, check=False, quiet=False): + _notice(quiet, "Waiting for {} to exit", proc) + + try: + proc.wait(timeout=timeout) + except _subprocess.TimeoutExpired: + # XXX warning or error + raise PlanoTimeout() + + if proc.exit_code == 0: + debug("{} exited normally", proc) + elif proc.exit_code < 0: + debug("{} was terminated by signal {}", proc, abs(proc.exit_code)) + else: + if check: + error("{} exited with code {}", proc, proc.exit_code) + else: + debug("{} exited with code {}", proc, proc.exit_code) + + if proc.stash_file is not None: + if proc.exit_code > 0: + eprint(read(proc.stash_file), end="") + + if not WINDOWS: + remove(proc.stash_file, quiet=True) + + if check and proc.exit_code > 0: + raise PlanoProcessError(proc) + + return proc + +# input= - Pipe to the process +def run(command, stdin=None, stdout=None, stderr=None, input=None, output=None, + stash=False, shell=False, check=True, quiet=False): + _notice(quiet, "Running command {}", _format_command(command)) + + if input is not None: + assert stdin in (None, _subprocess.PIPE), stdin + + input = input.encode("utf-8") + stdin = _subprocess.PIPE + + proc = start(command, stdin=stdin, stdout=stdout, stderr=stderr, output=output, + stash=stash, shell=shell, quiet=True) + + proc.stdout_result, proc.stderr_result = proc.communicate(input=input) + + if proc.stdout_result is not None: + proc.stdout_result = proc.stdout_result.decode("utf-8") + + if proc.stderr_result is not None: + proc.stderr_result = proc.stderr_result.decode("utf-8") + + return wait(proc, check=check, quiet=True) + +# input= - Pipe the given input into the process +def 
call(command, input=None, shell=False, quiet=False): + _notice(quiet, "Calling {}", _format_command(command)) + + proc = run(command, stdin=_subprocess.PIPE, stdout=_subprocess.PIPE, stderr=_subprocess.PIPE, + input=input, shell=shell, check=True, quiet=True) + + return proc.stdout_result + +def exit(arg=None, *args, **kwargs): + verbose = kwargs.get("verbose", False) + + if arg in (0, None): + if verbose: + notice("Exiting normally") + + _sys.exit() + + if is_string(arg): + if args: + arg = arg.format(*args) + + if verbose: + error(arg) + + _sys.exit(arg) + + if isinstance(arg, BaseException): + if verbose: + error(arg) + + _sys.exit(str(arg)) + + if isinstance(arg, int): + _sys.exit(arg) + + raise PlanoException("Illegal argument") + +_child_processes = list() + +class PlanoProcess(_subprocess.Popen): + def __init__(self, args, **options): + self.stash_file = options.pop("stash_file", None) + + super().__init__(args, **options) + + self.args = args + self.stdout_result = None + self.stderr_result = None + + _child_processes.append(self) + + @property + def exit_code(self): + return self.returncode + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + stop(self) + + def __repr__(self): + return "process {} (command {})".format(self.pid, _format_command(self.args)) + +class PlanoProcessError(_subprocess.CalledProcessError, PlanoError): + def __init__(self, proc): + super().__init__(proc.exit_code, _format_command(proc.args, represent=False)) + +def _default_sigterm_handler(signum, frame): + for proc in _child_processes: + if proc.poll() is None: + kill(proc, quiet=True) + + exit(-(_signal.SIGTERM)) + +_signal.signal(_signal.SIGTERM, _default_sigterm_handler) + +## String operations + +def replace(string, expr, replacement, count=0): + return _re.sub(expr, replacement, string, count) + +def remove_prefix(string, prefix): + if string is None: + return "" + + if prefix and string.startswith(prefix): + string = string[len(prefix):] + + return string + +def remove_suffix(string, suffix): + if string is None: + return "" + + if suffix and string.endswith(suffix): + string = string[:-len(suffix)] + + return string + +def shorten(string, max, ellipsis=None): + assert max is None or isinstance(max, int) + + if string is None: + return "" + + if max is None or len(string) < max: + return string + else: + if ellipsis is not None: + string = string + ellipsis + end = _max(0, max - len(ellipsis)) + return string[0:end] + ellipsis + else: + return string[0:max] + +def plural(noun, count=0, plural=None): + if noun in (None, ""): + return "" + + if count == 1: + return noun + + if plural is None: + if noun.endswith("s"): + plural = "{}ses".format(noun) + else: + plural = "{}s".format(noun) + + return plural + +def capitalize(string): + if not string: + return "" + + return string[0].upper() + string[1:] + +def base64_encode(string): + return _base64.b64encode(string) + +def base64_decode(string): + return _base64.b64decode(string) + +def url_encode(string): + return _urllib.parse.quote_plus(string) + +def url_decode(string): + return _urllib.parse.unquote_plus(string) + +def parse_url(url): + return _urllib.parse.urlparse(url) + +## Temp operations + +def get_system_temp_dir(): + return _tempfile.gettempdir() + +def get_user_temp_dir(): + try: + return _os.environ["XDG_RUNTIME_DIR"] + except KeyError: + return join(get_system_temp_dir(), get_user()) + +def make_temp_file(prefix="plano-", suffix="", dir=None): + if dir is None: + dir = get_system_temp_dir() + + 
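+    # mkstemp() returns an (fd, path) pair; only the path is returned here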
return _tempfile.mkstemp(prefix=prefix, suffix=suffix, dir=dir)[1] + +def make_temp_dir(prefix="plano-", suffix="", dir=None): + if dir is None: + dir = get_system_temp_dir() + + return _tempfile.mkdtemp(prefix=prefix, suffix=suffix, dir=dir) + +class temp_file: + def __init__(self, suffix="", dir=None): + if dir is None: + dir = get_system_temp_dir() + + self.fd, self.file = _tempfile.mkstemp(prefix="plano-", suffix=suffix, dir=dir) + + def __enter__(self): + return self.file + + def __exit__(self, exc_type, exc_value, traceback): + _os.close(self.fd) + + if not WINDOWS: # XXX + remove(self.file, quiet=True) + +class temp_dir: + def __init__(self, suffix="", dir=None): + self.dir = make_temp_dir(suffix=suffix, dir=dir) + + def __enter__(self): + return self.dir + + def __exit__(self, exc_type, exc_value, traceback): + remove(self.dir, quiet=True) + +## Time operations + +# Unix time +def get_time(): + return _time.time() + +# Python UTC time +def get_datetime(): + return _datetime.datetime.now(tz=_datetime.timezone.utc) + +def parse_timestamp(timestamp, format="%Y-%m-%dT%H:%M:%SZ"): + if timestamp is None: + return None + + datetime = _datetime.datetime.strptime(timestamp, format) + datetime = datetime.replace(tzinfo=_datetime.timezone.utc) + + return datetime + +def format_timestamp(datetime=None, format="%Y-%m-%dT%H:%M:%SZ"): + if datetime is None: + datetime = get_datetime() + + return datetime.strftime(format) + +def format_date(datetime=None): + if datetime is None: + datetime = get_datetime() + + day = datetime.day + month = datetime.strftime("%B") + year = datetime.strftime("%Y") + + return f"{day} {month} {year}" + +def format_time(datetime=None, precision="second"): + if datetime is None: + datetime = get_datetime() + + assert precision in ("minute", "second"), "Illegal precision value" + + hour = datetime.hour + minute = datetime.strftime("%M") + second = datetime.strftime("%S") + + if precision == "second": + return f"{hour}:{minute}:{second}" + else: + return f"{hour}:{minute}" + +def format_duration(seconds, align=False): + assert seconds >= 0 + + if seconds >= 3600: + value = seconds / 3600 + unit = "h" + elif seconds >= 5 * 60: + value = seconds / 60 + unit = "m" + else: + value = seconds + unit = "s" + + if align: + return "{:.1f}{}".format(value, unit) + elif value > 10: + return "{:.0f}{}".format(value, unit) + else: + return remove_suffix("{:.1f}".format(value), ".0") + unit + +def sleep(seconds, quiet=False): + _notice(quiet, "Sleeping for {} {}", seconds, plural("second", seconds)) + + _time.sleep(seconds) + +class Timer: + def __init__(self, timeout=None, timeout_message=None): + self.timeout = timeout + self.timeout_message = timeout_message + + if self.timeout is not None and not hasattr(_signal, "SIGALRM"): # pragma: nocover + self.timeout = None + + self.start_time = None + self.stop_time = None + + def start(self): + self.start_time = get_time() + + if self.timeout is not None: + self.prev_handler = _signal.signal(_signal.SIGALRM, self.raise_timeout) + self.prev_timeout, prev_interval = _signal.setitimer(_signal.ITIMER_REAL, self.timeout) + self.prev_timer_suspend_time = get_time() + + assert prev_interval == 0.0, "This case is not yet handled" + + def stop(self): + self.stop_time = get_time() + + if self.timeout is not None: + assert get_time() - self.prev_timer_suspend_time > 0, "This case is not yet handled" + + _signal.signal(_signal.SIGALRM, self.prev_handler) + _signal.setitimer(_signal.ITIMER_REAL, self.prev_timeout) + + def __enter__(self): + 
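+        # Entering the context starts the timer; when a timeout is set, start() also arms a SIGALRM-based alarm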
self.start() + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.stop() + + @property + def elapsed_time(self): + assert self.start_time is not None + + if self.stop_time is None: + return get_time() - self.start_time + else: + return self.stop_time - self.start_time + + def raise_timeout(self, *args): + raise PlanoTimeout(self.timeout_message) + +## Unique ID operations + +# Length in bytes, renders twice as long in hex +def get_unique_id(bytes=16): + assert bytes >= 1 + assert bytes <= 16 + + uuid_bytes = _uuid.uuid4().bytes + uuid_bytes = uuid_bytes[:bytes] + + return _binascii.hexlify(uuid_bytes).decode("utf-8") + +## Value operations + +def nvl(value, replacement): + if value is None: + return replacement + + return value + +def is_string(value): + return isinstance(value, str) + +def is_scalar(value): + return value is None or isinstance(value, (str, int, float, complex, bool)) + +def is_empty(value): + return value in (None, "", (), [], {}) + +def pformat(value): + return _pprint.pformat(value, width=120) + +def format_empty(value, replacement): + if is_empty(value): + value = replacement + + return value + +def format_not_empty(value, template=None): + if not is_empty(value) and template is not None: + value = template.format(value) + + return value + +def format_repr(obj, limit=None): + attrs = ["{}={}".format(k, repr(v)) for k, v in obj.__dict__.items()] + return "{}({})".format(obj.__class__.__name__, ", ".join(attrs[:limit])) + +class Namespace: + def __init__(self, **kwargs): + for name in kwargs: + setattr(self, name, kwargs[name]) + + def __eq__(self, other): + return vars(self) == vars(other) + + def __contains__(self, key): + return key in self.__dict__ + + def __repr__(self): + return format_repr(self) + +## YAML operations + +def read_yaml(file): + check_module("yaml", "Python module 'yaml' is not found. To install it, run 'pip install pyyaml'.") + + import yaml as _yaml + + file = expand(file) + + with open(file) as f: + return _yaml.safe_load(f) + +def write_yaml(file, data): + check_module("yaml", "Python module 'yaml' is not found. To install it, run 'pip install pyyaml'.") + + import yaml as _yaml + + file = expand(file) + + make_parent_dir(file, quiet=True) + + with open(file, "w") as f: + _yaml.safe_dump(data, f) + + return file + +def parse_yaml(yaml): + check_module("yaml", "Python module 'yaml' is not found. To install it, run 'pip install pyyaml'.") + + import yaml as _yaml + + return _yaml.safe_load(yaml) + +def emit_yaml(data): + check_module("yaml", "Python module 'yaml' is not found. To install it, run 'pip install pyyaml'.") + + import yaml as _yaml + + return _yaml.safe_dump(data) + +def print_yaml(data, **kwargs): + print(emit_yaml(data), **kwargs) + +if PLANO_DEBUG: # pragma: nocover + enable_logging(level="debug") diff --git a/external/skewer-main/external/plano-main/src/plano/test.py b/external/skewer-main/external/plano-main/src/plano/test.py new file mode 100644 index 0000000..9067c33 --- /dev/null +++ b/external/skewer-main/external/plano-main/src/plano/test.py @@ -0,0 +1,397 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from .main import * +from .command import * + +import argparse as _argparse +import asyncio as _asyncio +import fnmatch as _fnmatch +import importlib as _importlib +import inspect as _inspect +import traceback as _traceback + +class PlanoTestCommand(BaseCommand): + def __init__(self, test_modules=[]): + super(PlanoTestCommand, self).__init__() + + self.test_modules = test_modules + + if _inspect.ismodule(self.test_modules): + self.test_modules = [self.test_modules] + + self.parser = BaseArgumentParser() + self.parser.add_argument("include", metavar="PATTERN", nargs="*", default=["*"], + help="Run tests with names matching PATTERN (default '*', all tests)") + self.parser.add_argument("-e", "--exclude", metavar="PATTERN", action="append", default=[], + help="Do not run tests with names matching PATTERN (repeatable)") + self.parser.add_argument("-m", "--module", action="append", default=[], + help="Collect tests from MODULE (repeatable)") + self.parser.add_argument("-l", "--list", action="store_true", + help="Print the test names and exit") + self.parser.add_argument("--enable", metavar="PATTERN", action="append", default=[], + help=_argparse.SUPPRESS) + self.parser.add_argument("--unskip", metavar="PATTERN", action="append", default=[], + help="Run skipped tests matching PATTERN (repeatable)") + self.parser.add_argument("--timeout", metavar="SECONDS", type=int, default=300, + help="Fail any test running longer than SECONDS (default 300)") + self.parser.add_argument("--fail-fast", action="store_true", + help="Exit on the first failure encountered in a test run") + self.parser.add_argument("--iterations", metavar="COUNT", type=int, default=1, + help="Run the tests COUNT times (default 1)") + + def parse_args(self, args): + return self.parser.parse_args(args) + + def init(self, args): + self.list_only = args.list + self.include_patterns = args.include + self.exclude_patterns = args.exclude + self.enable_patterns = args.enable + self.unskip_patterns = args.unskip + self.timeout = args.timeout + self.fail_fast = args.fail_fast + self.iterations = args.iterations + + try: + for name in args.module: + self.test_modules.append(_importlib.import_module(name)) + except ImportError as e: + raise PlanoError(e) + + def run(self): + if self.list_only: + print_tests(self.test_modules) + return + + for i in range(self.iterations): + run_tests(self.test_modules, include=self.include_patterns, + exclude=self.exclude_patterns, + enable=self.enable_patterns, unskip=self.unskip_patterns, + test_timeout=self.timeout, fail_fast=self.fail_fast, + verbose=self.verbose, quiet=self.quiet) + +class PlanoTestSkipped(Exception): + pass + +def test(_function=None, name=None, timeout=None, disabled=False): + class Test: + def __init__(self, function): + self.function = function + self.name = nvl(name, self.function.__name__.rstrip("_").replace("_", "-")) + self.timeout = timeout + self.disabled = disabled + + self.module = _inspect.getmodule(self.function) + + if not hasattr(self.module, "_plano_tests"): + self.module._plano_tests = list() + + self.module._plano_tests.append(self) + + def __call__(self, 
test_run, unskipped): + try: + ret = self.function() + + if _inspect.iscoroutine(ret): + _asyncio.run(ret) + except SystemExit as e: + error(e) + raise PlanoError("System exit with code {}".format(e)) + + def __repr__(self): + return "test '{}:{}'".format(self.module.__name__, self.name) + + if _function is None: + return Test + else: + return Test(_function) + +def skip_test(reason=None): + if _inspect.stack()[2].frame.f_locals["unskipped"]: + return + + raise PlanoTestSkipped(reason) + +class expect_exception: + def __init__(self, exception_type=Exception, contains=None): + self.exception_type = exception_type + self.contains = contains + + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_value, traceback): + if exc_value is None: + assert False, "Never encountered expected exception {}".format(self.exception_type.__name__) + + if self.contains is None: + return isinstance(exc_value, self.exception_type) + else: + return isinstance(exc_value, self.exception_type) and self.contains in str(exc_value) + +class expect_error(expect_exception): + def __init__(self, contains=None): + super().__init__(PlanoError, contains=contains) + +class expect_timeout(expect_exception): + def __init__(self, contains=None): + super().__init__(PlanoTimeout, contains=contains) + +class expect_system_exit(expect_exception): + def __init__(self, contains=None): + super().__init__(SystemExit, contains=contains) + +class expect_output(temp_file): + def __init__(self, equals=None, contains=None, startswith=None, endswith=None): + super().__init__() + self.equals = equals + self.contains = contains + self.startswith = startswith + self.endswith = endswith + + def __exit__(self, exc_type, exc_value, traceback): + result = read(self.file) + + if self.equals is None: + assert len(result) > 0, result + else: + assert result == self.equals, result + + if self.contains is not None: + assert self.contains in result, result + + if self.startswith is not None: + assert result.startswith(self.startswith), result + + if self.endswith is not None: + assert result.endswith(self.endswith), result + + super().__exit__(exc_type, exc_value, traceback) + +def print_tests(modules): + if _inspect.ismodule(modules): + modules = (modules,) + + for module in modules: + for test in module._plano_tests: + flags = "(disabled)" if test.disabled else "" + print(" ".join((str(test), flags)).strip()) + +def run_tests(modules, include="*", exclude=(), enable=(), unskip=(), test_timeout=300, + fail_fast=False, verbose=False, quiet=False): + if _inspect.ismodule(modules): + modules = (modules,) + + if is_string(include): + include = (include,) + + if is_string(exclude): + exclude = (exclude,) + + if is_string(enable): + enable = (enable,) + + if is_string(unskip): + unskip = (unskip,) + + test_run = TestRun(test_timeout=test_timeout, fail_fast=fail_fast, verbose=verbose, quiet=quiet) + + if verbose: + notice("Starting {}", test_run) + elif not quiet: + cprint("=== Configuration ===", color="cyan") + + props = ( + ("Modules", format_empty(", ".join([x.__name__ for x in modules]), "[none]")), + ("Test timeout", format_duration(test_timeout)), + ("Fail fast", fail_fast), + ) + + print_properties(props) + print() + + for module in modules: + if verbose: + notice("Running tests from module {} (file {})", repr(module.__name__), repr(module.__file__)) + elif not quiet: + cprint("=== Module {} ===".format(repr(module.__name__)), color="cyan") + + if not hasattr(module, "_plano_tests"): + warning("Module {} has no tests", 
repr(module.__name__)) + continue + + for test in module._plano_tests: + if test.disabled and not any([_fnmatch.fnmatchcase(test.name, x) for x in enable]): + continue + + included = any([_fnmatch.fnmatchcase(test.name, x) for x in include]) + excluded = any([_fnmatch.fnmatchcase(test.name, x) for x in exclude]) + unskipped = any([_fnmatch.fnmatchcase(test.name, x) for x in unskip]) + + if included and not excluded: + test_run.tests.append(test) + _run_test(test_run, test, unskipped) + + if not verbose and not quiet: + print() + + total = len(test_run.tests) + skipped = len(test_run.skipped_tests) + failed = len(test_run.failed_tests) + + if total == 0: + raise PlanoError("No tests ran") + + notes = "" + + if skipped != 0: + notes = "({} skipped)".format(skipped) + + if failed == 0: + result_message = "All tests passed {}".format(notes).strip() + else: + result_message = "{} {} failed {}".format(failed, plural("test", failed), notes).strip() + + if verbose: + if failed == 0: + notice(result_message) + else: + error(result_message) + elif not quiet: + cprint("=== Summary ===", color="cyan") + + props = ( + ("Total", total), + ("Skipped", skipped, format_not_empty(", ".join([x.name for x in test_run.skipped_tests]), "({})")), + ("Failed", failed, format_not_empty(", ".join([x.name for x in test_run.failed_tests]), "({})")), + ) + + print_properties(props) + print() + + cprint("=== RESULT ===", color="cyan") + + if failed == 0: + cprint(result_message, color="green") + else: + cprint(result_message, color="red", bright="True") + + print() + + if failed != 0: + raise PlanoError(result_message) + +def _run_test(test_run, test, unskipped): + if test_run.verbose: + notice("Running {}", test) + elif not test_run.quiet: + print("{:.<65} ".format(test.name + " "), end="") + + timeout = nvl(test.timeout, test_run.test_timeout) + + with temp_file() as output_file: + try: + with Timer(timeout=timeout) as timer: + if test_run.verbose: + test(test_run, unskipped) + else: + with output_redirected(output_file, quiet=True): + test(test_run, unskipped) + except KeyboardInterrupt: + raise + except PlanoTestSkipped as e: + test_run.skipped_tests.append(test) + + if test_run.verbose: + notice("{} SKIPPED ({})", test, format_duration(timer.elapsed_time)) + elif not test_run.quiet: + _print_test_result("SKIPPED", timer, "yellow") + print("Reason: {}".format(str(e))) + except Exception as e: + test_run.failed_tests.append(test) + + if test_run.verbose: + _traceback.print_exc() + + if isinstance(e, PlanoTimeout): + error("{} **FAILED** (TIMEOUT) ({})", test, format_duration(timer.elapsed_time)) + else: + error("{} **FAILED** ({})", test, format_duration(timer.elapsed_time)) + elif not test_run.quiet: + if isinstance(e, PlanoTimeout): + _print_test_result("**FAILED** (TIMEOUT)", timer, color="red", bright=True) + else: + _print_test_result("**FAILED**", timer, color="red", bright=True) + + _print_test_error(e) + _print_test_output(output_file) + + if test_run.fail_fast: + return True + else: + test_run.passed_tests.append(test) + + if test_run.verbose: + notice("{} PASSED ({})", test, format_duration(timer.elapsed_time)) + elif not test_run.quiet: + _print_test_result("PASSED", timer) + +def _print_test_result(status, timer, color="white", bright=False): + cprint("{:<7}".format(status), color=color, bright=bright, end="") + print("{:>6}".format(format_duration(timer.elapsed_time, align=True))) + +def _print_test_error(e): + cprint("--- Error ---", color="yellow") + + if isinstance(e, PlanoProcessError): + print("> 
{}".format(str(e))) + else: + lines = _traceback.format_exc().rstrip().split("\n") + lines = ["> {}".format(x) for x in lines] + + print("\n".join(lines)) + +def _print_test_output(output_file): + if get_file_size(output_file) == 0: + return + + cprint("--- Output ---", color="yellow") + + with open(output_file, "r") as out: + for line in out: + print("> {}".format(line), end="") + +class TestRun: + def __init__(self, test_timeout=None, fail_fast=False, verbose=False, quiet=False): + self.test_timeout = test_timeout + self.fail_fast = fail_fast + self.verbose = verbose + self.quiet = quiet + + self.tests = list() + self.skipped_tests = list() + self.failed_tests = list() + self.passed_tests = list() + + def __repr__(self): + return format_repr(self) + +def _main(): # pragma: nocover + PlanoTestCommand().main() diff --git a/external/skewer-main/plano b/external/skewer-main/plano new file mode 120000 index 0000000..48d44a8 --- /dev/null +++ b/external/skewer-main/plano @@ -0,0 +1 @@ +external/plano-main/bin/plano \ No newline at end of file diff --git a/external/skewer-main/python/plano b/external/skewer-main/python/plano new file mode 120000 index 0000000..a1aa027 --- /dev/null +++ b/external/skewer-main/python/plano @@ -0,0 +1 @@ +../external/plano-main/src/plano \ No newline at end of file diff --git a/external/skewer-main/python/skewer/__init__.py b/external/skewer-main/python/skewer/__init__.py new file mode 100644 index 0000000..3324b21 --- /dev/null +++ b/external/skewer-main/python/skewer/__init__.py @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from .main import * diff --git a/external/skewer-main/python/skewer/main.py b/external/skewer-main/python/skewer/main.py new file mode 100644 index 0000000..6b15bee --- /dev/null +++ b/external/skewer-main/python/skewer/main.py @@ -0,0 +1,565 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +from plano import * + +standard_steps_yaml = read(join(get_parent_dir(__file__), "standardsteps.yaml")) +standard_steps = parse_yaml(standard_steps_yaml) + +example_suite_para = """ +This example is part of a [suite of examples][examples] showing the +different ways you can use [Skupper][website] to connect services +across cloud providers, data centers, and edge sites. + +[website]: https://skupper.io/ +[examples]: https://skupper.io/examples/index.html +""".strip() + +standard_prerequisites = """ +* The `kubectl` command-line tool, version 1.15 or later + ([installation guide][install-kubectl]) + +* Access to at least one Kubernetes cluster, from [any provider you + choose][kube-providers] + +[install-kubectl]: https://kubernetes.io/docs/tasks/tools/install-kubectl/ +[kube-providers]: https://skupper.io/start/kubernetes.html +""".strip() + +standard_next_steps = """ +Check out the other [examples][examples] on the Skupper website. +""".strip() + +about_this_example = """ +This example was produced using [Skewer][skewer], a library for +documenting and testing Skupper examples. + +[skewer]: https://github.com/skupperproject/skewer + +Skewer provides utility functions for generating the README and +running the example steps. Use the `./plano` command in the project +root to see what is available. + +To quickly stand up the example using Minikube, try the `./plano demo` +command. +""".strip() + +def check_environment(): + check_program("base64") + check_program("curl") + check_program("kubectl") + check_program("skupper") + +# Eventually Kubernetes will make this nicer: +# https://github.com/kubernetes/kubernetes/pull/87399 +# https://github.com/kubernetes/kubernetes/issues/80828 +# https://github.com/kubernetes/kubernetes/issues/83094 +def await_resource(resource, timeout=240): + assert "/" in resource, resource + + start_time = get_time() + + while True: + notice(f"Waiting for {resource} to become available") + + if run(f"kubectl get {resource}", output=DEVNULL, check=False, quiet=True).exit_code == 0: + break + + if get_time() - start_time > timeout: + fail(f"Timed out waiting for {resource}") + + sleep(5, quiet=True) + + if resource.startswith("deployment/"): + try: + run(f"kubectl wait --for condition=available --timeout {timeout}s {resource}", quiet=True, stash=True) + except: + run(f"kubectl logs {resource}") + raise + +def await_external_ip(service, timeout=240): + assert service.startswith("service/"), service + + start_time = get_time() + + await_resource(service, timeout=timeout) + + while True: + notice(f"Waiting for external IP from {service} to become available") + + if call(f"kubectl get {service} -o jsonpath='{{.status.loadBalancer.ingress}}'", quiet=True) != "": + break + + if get_time() - start_time > timeout: + fail(f"Timed out waiting for external IP for {service}") + + sleep(5, quiet=True) + + return call(f"kubectl get {service} -o jsonpath='{{.status.loadBalancer.ingress[0].ip}}'", quiet=True) + +def await_http_ok(service, url_template, user=None, password=None, timeout=240): + assert service.startswith("service/"), service + + start_time = get_time() + + ip = await_external_ip(service, timeout=timeout) + url = url_template.format(ip) + insecure = url.startswith("https") + + while True: + notice(f"Waiting for HTTP OK from {url}") + + try: + http_get(url, insecure=insecure, user=user, password=password) + except PlanoError: + if get_time() - start_time > timeout: + fail(f"Timed out waiting for HTTP OK from {url}") + + sleep(5, quiet=True) + else: + break + +def 
await_console_ok(): + password = call("kubectl get secret/skupper-console-users -o jsonpath={.data.admin}", quiet=True) + password = base64_decode(password) + + await_http_ok("service/skupper", "https://{}:8010/", user="admin", password=password) + +def run_steps_minikube(skewer_file, debug=False): + work_dir = make_temp_dir() + + notice("Running the steps on Minikube") + notice(" Skewer file: " + get_absolute_path(skewer_file)) + notice(" Work dir: " + get_absolute_path(work_dir)) + + check_environment() + check_program("minikube") + + skewer_data = read_yaml(skewer_file) + + try: + run("minikube -p skewer start") + + for site_name, site_data in skewer_data["sites"].items(): + for name, value in site_data["env"].items(): + site_data["env"][name] = value.replace("~", work_dir) + + site = Site(site_name, site_data) + site.check() + + with site: + run("minikube -p skewer update-context") + check_file(ENV["KUBECONFIG"]) + + with open("/tmp/minikube-tunnel-output", "w") as tunnel_output_file: + with start("minikube -p skewer tunnel", output=tunnel_output_file): + run_steps(work_dir, skewer_data, debug=debug) + finally: + run("minikube -p skewer delete") + +def run_steps(work_dir, skewer_data, debug=False): + check_environment() + + apply_standard_steps(skewer_data) + + try: + for step in skewer_data["steps"]: + if step.get("id") == "cleaning_up": + continue + + run_step(work_dir, skewer_data, step) + + if "SKEWER_DEMO" in ENV: + pause_for_demo(work_dir, skewer_data) + except: + if debug: + print("TROUBLE!") + print("-- Start of debug output") + + for site_name, site_data in skewer_data["sites"].items(): + site = Site(site_name, site_data) + + print(f"---- Debug output for site '{site.name}'") + + with site: + if site.platform == "kubernetes": + run("kubectl get services", check=False) + run("kubectl get deployments", check=False) + run("kubectl get statefulsets", check=False) + run("kubectl get pods", check=False) + run("kubectl get events", check=False) + + run("skupper version", check=False) + run("skupper status", check=False) + run("skupper link status", check=False) + run("skupper service status", check=False) + run("skupper network status", check=False) + run("skupper debug events", check=False) + + if site.platform == "kubernetes": + run("kubectl logs deployment/skupper-router", check=False) + run("kubectl logs deployment/skupper-service-controller", check=False) + + print("-- End of debug output") + + raise + finally: + for step in skewer_data["steps"]: + if step.get("id") == "cleaning_up": + run_step(work_dir, skewer_data, step, check=False) + break + +def run_step(work_dir, skewer_data, step_data, check=True): + if "commands" not in step_data: + return + + if "title" in step_data: + notice("Running step '{}'", step_data["title"]) + + for site_name, commands in step_data["commands"].items(): + site_data = skewer_data["sites"][site_name] + + with Site(site_name, site_data) as site: + if site.platform == "kubernetes": + run(f"kubectl config set-context --current --namespace {site.namespace}", stdout=DEVNULL, quiet=True) + + for command in commands: + if command.get("apply") == "readme": + continue + + if "run" in command: + run(command["run"].replace("~", work_dir), shell=True, check=check) + + if "await_resource" in command: + resource = command["await_resource"] + await_resource(resource) + + if "await_external_ip" in command: + service = command["await_external_ip"] + await_external_ip(service) + + if "await_http_ok" in command: + service, url_template = command["await_http_ok"] + 
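+                    # The await_http_ok value is unpacked as a (service, URL template) pair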
await_http_ok(service, url_template) + + if "await_console_ok" in command: + await_console_ok() + +def pause_for_demo(work_dir, skewer_data): + sites = list() + frontend_url = None + + for site_name, site_data in skewer_data["sites"].items(): + site = Site(site_name, site_data) + sites.append(site) + + if sites[0].platform == "kubernetes": + with sites[0]: + console_ip = await_external_ip("service/skupper") + console_url = f"https://{console_ip}:8010/" + password_data = call("kubectl get secret skupper-console-users -o jsonpath='{.data.admin}'", quiet=True) + password = base64_decode(password_data).decode("ascii") + + if run("kubectl get service/frontend", check=False, output=DEVNULL, quiet=True).exit_code == 0: + if call("kubectl get service/frontend -o jsonpath='{.spec.type}'", quiet=True) == "LoadBalancer": + frontend_ip = await_external_ip("service/frontend") + frontend_url = f"http://{frontend_ip}:8080/" + + print() + print("Demo time!") + print() + print("Sites:") + + for site in sites: + if site.platform == "kubernetes": + kubeconfig = site.env["KUBECONFIG"] + print(f" {site.name}: export KUBECONFIG={kubeconfig}") + + if frontend_url: + print() + print(f"Frontend URL: {frontend_url}") + + print() + print(f"Console URL: {console_url}") + print( "Console user: admin") + print(f"Console password: {password}") + print() + + if "SKEWER_DEMO_NO_WAIT" not in ENV: + while input("Are you done (yes)? ") != "yes": # pragma: nocover + pass + +def generate_readme(skewer_file, output_file): + notice("Generating the readme") + notice(" Skewer file: " + get_absolute_path(skewer_file)) + notice(" Output file: " + get_absolute_path(output_file)) + + skewer_data = read_yaml(skewer_file) + out = list() + + out.append(f"# {skewer_data['title']}") + out.append("") + + if "github_actions_url" in skewer_data: + url = skewer_data["github_actions_url"] + out.append(f"[![main]({url}/badge.svg)]({url})") + out.append("") + + if "subtitle" in skewer_data: + out.append(f"#### {skewer_data['subtitle']}") + out.append("") + + out.append(example_suite_para) + out.append("") + out.append("#### Contents") + out.append("") + + if "overview" in skewer_data: + out.append("* [Overview](#overview)") + + out.append("* [Prerequisites](#prerequisites)") + + apply_standard_steps(skewer_data) + + for i, step_data in enumerate(skewer_data["steps"], 1): + if step_data.get("numbered", True): + title = f"Step {i}: {step_data['title']}" + else: + title = step_data['title'] + + fragment = replace(title, r"[ -]", "_") + fragment = replace(fragment, r"[\W]", "") + fragment = replace(fragment, "_", "-") + fragment = fragment.lower() + + out.append(f"* [{title}](#{fragment})") + + if "summary" in skewer_data: + out.append("* [Summary](#summary)") + + if "next_steps" in skewer_data: + out.append("* [Next steps](#next-steps)") + + out.append("* [About this example](#about-this-example)") + out.append("") + + if "overview" in skewer_data: + out.append("## Overview") + out.append("") + out.append(skewer_data["overview"].strip()) + out.append("") + + prerequisites = standard_prerequisites + + if "prerequisites" in skewer_data: + prerequisites = skewer_data["prerequisites"].strip() + + out.append("## Prerequisites") + out.append("") + out.append(prerequisites) + out.append("") + + for i, step_data in enumerate(skewer_data["steps"], 1): + notice("Generating step '{}'", step_data["title"]) + + if step_data.get("numbered", True): + title = f"Step {i}: {step_data['title']}" + else: + title = step_data["title"] + + out.append(f"## {title}") + 
out.append("") + out.append(generate_readme_step(skewer_data, step_data)) + out.append("") + + if "summary" in skewer_data: + out.append("## Summary") + out.append("") + out.append(skewer_data["summary"].strip()) + out.append("") + + next_steps = standard_next_steps + + if "next_steps" in skewer_data: + next_steps = skewer_data["next_steps"].strip() + + out.append("## Next steps") + out.append("") + out.append(next_steps) + out.append("") + + out.append("## About this example") + out.append("") + out.append(about_this_example) + out.append("") + + write(output_file, "\n".join(out).strip() + "\n") + +def generate_readme_step(skewer_data, step_data): + out = list() + + if "preamble" in step_data: + out.append(step_data["preamble"].strip()) + out.append("") + + if "commands" in step_data: + items = step_data["commands"].items() + + for i, item in enumerate(items): + site_name, commands = item + namespace = skewer_data["sites"][site_name].get("namespace") + title = skewer_data["sites"][site_name].get("title", namespace) + + if title is None: + fail(f"Site '{site_name}' has no namespace or title") + + outputs = list() + + out.append(f"_**Console for {title}:**_") + out.append("") + out.append("~~~ shell") + + for command in commands: + if command.get("apply") == "test": + continue + + if "run" in command: + out.append(command["run"]) + + if "output" in command: + assert "run" in command, command + + outputs.append((command["run"], command["output"])) + + out.append("~~~") + out.append("") + + if outputs: + out.append("_Sample output:_") + out.append("") + out.append("~~~ console") + out.append("\n\n".join((f"$ {run}\n{output.strip()}" for run, output in outputs))) + out.append("~~~") + out.append("") + + if "postamble" in step_data: + out.append(step_data["postamble"].strip()) + + return "\n".join(out).strip() + +def apply_standard_steps(skewer_data): + notice("Applying standard steps") + + for step_data in skewer_data["steps"]: + if "standard" not in step_data: + continue + + standard_step_data = standard_steps[step_data["standard"]] + + if "id" not in step_data: + step_data["id"] = standard_step_data.get("id") + + if "title" not in step_data: + step_data["title"] = standard_step_data["title"] + + if "numbered" not in step_data: + step_data["numbered"] = standard_step_data.get("numbered", True) + + if "preamble" not in step_data: + if "preamble" in standard_step_data: + step_data["preamble"] = standard_step_data["preamble"] + + if "postamble" not in step_data: + if "postamble" in standard_step_data: + step_data["postamble"] = standard_step_data["postamble"] + + if "commands" not in step_data: + if "commands" in standard_step_data: + step_data["commands"] = dict() + + for i, site_item in enumerate(skewer_data["sites"].items()): + site_name, site = site_item + + if str(i) in standard_step_data["commands"]: + # Is a specific index in the standard commands? + commands = standard_step_data["commands"][str(i)] + step_data["commands"][site_name] = resolve_commands(commands, site) + elif "*" in standard_step_data["commands"]: + # Is "*" in the standard commands? 
+ commands = standard_step_data["commands"]["*"] + step_data["commands"][site_name] = resolve_commands(commands, site) + else: + # Otherwise, omit commands for this site + continue + +def resolve_commands(commands, site): + resolved_commands = list() + + for command in commands: + resolved_command = dict(command) + + if "run" in command: + resolved_command["run"] = command["run"] + + if site["platform"] == "kubernetes": + resolved_command["run"] = resolved_command["run"].replace("@kubeconfig@", site["env"]["KUBECONFIG"]) + resolved_command["run"] = resolved_command["run"].replace("@namespace@", site["namespace"]) + + if "output" in command: + resolved_command["output"] = command["output"] + + if site["platform"] == "kubernetes": + resolved_command["output"] = resolved_command["output"].replace("@kubeconfig@", site["env"]["KUBECONFIG"]) + resolved_command["output"] = resolved_command["output"].replace("@namespace@", site["namespace"]) + + resolved_commands.append(resolved_command) + + return resolved_commands + +class Site: + def __init__(self, name, data): + assert name is not None + + self.name = name + self.title = data.get("title", capitalize(self.name)) + self.platform = data["platform"] + self.namespace = data.get("namespace") + self.env = data["env"] + + self._logging_context = logging_context(self.name) + self._working_env = working_env(**self.env) + + def __enter__(self): + self._logging_context.__enter__() + self._working_env.__enter__() + + return self + + def __exit__(self, exc_type, exc_value, traceback): + self._working_env.__exit__(exc_type, exc_value, traceback) + self._logging_context.__exit__(exc_type, exc_value, traceback) + + def check(self): + if self.platform == "kubernetes": + assert self.namespace is not None + assert "KUBECONFIG" in self.env + + if self.platform == "podman": + assert "SKUPPER_PLATFORM" in self.env + assert self.env["SKUPPER_PLATFORM"] == "podman" diff --git a/external/skewer-main/python/skewer/standardsteps.yaml b/external/skewer-main/python/skewer/standardsteps.yaml new file mode 100644 index 0000000..4c51f9d --- /dev/null +++ b/external/skewer-main/python/skewer/standardsteps.yaml @@ -0,0 +1,227 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +install_the_skupper_command_line_tool: + title: Install the Skupper command-line tool + preamble: | + The `skupper` command-line tool is the entrypoint for installing + and configuring Skupper. You need to install the `skupper` + command only once for each development environment. + + On Linux or Mac, you can use the install script (inspect it + [here][install-script]) to download and extract the command: + + ~~~ shell + curl https://skupper.io/install.sh | sh + ~~~ + + The script installs the command under your home directory. 
It + prompts you to add the command to your path if necessary. + + For Windows and other installation options, see [Installing + Skupper][install-docs]. + + [install-script]: https://github.com/skupperproject/skupper-website/blob/main/docs/install.sh + [install-docs]: https://skupper.io/install/index.html +configure_separate_console_sessions: + title: Configure separate console sessions + preamble: | + Skupper is designed for use with multiple namespaces, usually on + different clusters. The `skupper` and `kubectl` commands use your + [kubeconfig][kubeconfig] and current context to select the + namespace where they operate. + + [kubeconfig]: https://kubernetes.io/docs/concepts/configuration/organize-cluster-access-kubeconfig/ + + Your kubeconfig is stored in a file in your home directory. The + `skupper` and `kubectl` commands use the `KUBECONFIG` environment + variable to locate it. + + A single kubeconfig supports only one active context per user. + Since you will be using multiple contexts at once in this + exercise, you need to create distinct kubeconfigs. + + Start a console session for each of your namespaces. Set the + `KUBECONFIG` environment variable to a different path in each + session. + commands: + "*": + - run: export KUBECONFIG=@kubeconfig@ +access_your_clusters: + title: Access your clusters + preamble: | + + The procedure for accessing a Kubernetes cluster varies by + provider. [Find the instructions for your chosen + provider][kube-providers] and use them to authenticate and + configure access for each console session. + + [kube-providers]: https://skupper.io/start/kubernetes.html +set_up_your_namespaces: + title: Set up your namespaces + preamble: | + Use `kubectl create namespace` to create the namespaces you wish + to use (or use existing namespaces). Use `kubectl config + set-context` to set the current namespace for each session. + commands: + "*": + - run: kubectl create namespace @namespace@ + - run: kubectl config set-context --current --namespace @namespace@ +install_skupper_in_your_namespaces: + title: Install Skupper in your namespaces + preamble: | + The `skupper init` command installs the Skupper router and + controller in the current namespace. Run the `skupper init` command + in each namespace. + + **Note:** If you are using Minikube, [you need to start `minikube + tunnel`][minikube-tunnel] before you install Skupper. + + [minikube-tunnel]: https://skupper.io/start/minikube.html#running-minikube-tunnel + commands: + "0": + - run: skupper init --enable-console --enable-flow-collector + "*": + - run: skupper init + postamble: | + _Sample output:_ + + ~~~ console + $ skupper init + Waiting for LoadBalancer IP or hostname... + Waiting for status... + Skupper is now installed in namespace ''. Use 'skupper status' to get more information. + ~~~ +check_the_status_of_your_namespaces: + title: Check the status of your namespaces + preamble: | + Use `skupper status` in each console to check that Skupper is + installed. + commands: + "*": + - run: skupper status + postamble: | + _Sample output:_ + + ~~~ console + Skupper is enabled for namespace "" in interior mode. It is connected to 1 other site. It has 1 exposed service. + The site console url is: + The credentials for internal console-auth mode are held in secret: 'skupper-console-users' + ~~~ + + As you move through the steps below, you can use `skupper status` at + any time to check your progress. 
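+# An illustrative sketch (not part of the original file): an example's
+# skewer.yaml pulls in the standard steps above by key and may override
+# individual fields, for instance:
+#
+#   steps:
+#     - standard: install_the_skupper_command_line_tool
+#     - standard: check_the_status_of_your_namespaces
+#       title: Check the status of your sites   # overrides the standard title
+#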
+link_your_namespaces:
+  title: Link your namespaces
+  preamble: |
+    Creating a link requires use of two `skupper` commands in
+    conjunction, `skupper token create` and `skupper link create`.
+
+    The `skupper token create` command generates a secret token that
+    signifies permission to create a link. The token also carries the
+    link details. Then, in a remote namespace, the `skupper link
+    create` command uses the token to create a link to the namespace
+    that generated it.
+
+    **Note:** The link token is truly a *secret*. Anyone who has the
+    token can link to your namespace. Make sure that only those you
+    trust have access to it.
+
+    First, use `skupper token create` in one namespace to generate the
+    token. Then, use `skupper link create` in the other to create a
+    link.
+  commands:
+    "0":
+      - run: skupper token create ~/secret.token
+        output: |
+          Token written to ~/secret.token
+    "1":
+      - run: skupper link create ~/secret.token
+        output: |
+          Site configured to link to https://10.105.193.154:8081/ed9c37f6-d78a-11ec-a8c7-04421a4c5042 (name=link1)
+          Check the status of the link using 'skupper link status'.
+      - run: skupper link status --wait 60
+        apply: test
+  postamble: |
+    If your console sessions are on different machines, you may need
+    to use `scp` or a similar tool to transfer the token securely. By
+    default, tokens expire after a single use or 15 minutes after
+    creation.
+test_the_application:
+  title: Test the application
+  preamble: |
+    Now we're ready to try it out. Use `kubectl get service/frontend`
+    to look up the external IP of the frontend service. Then use
+    `curl` or a similar tool to request the `/api/health` endpoint at
+    that address.
+
+    **Note:** The `` field in the following commands is a
+    placeholder. The actual value is an IP address.
+  commands:
+    "0":
+      - run: kubectl get service/frontend
+        apply: readme
+        output: |
+          NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE
+          frontend LoadBalancer 10.103.232.28 8080:30407/TCP 15s
+      - run: curl http://:8080/api/health
+        apply: readme
+        output: |
+          OK
+      - await_http_ok: [service/frontend, "http://{}:8080/api/health"]
+  postamble: |
+    If everything is in order, you can now access the web interface by
+    navigating to `http://:8080/` in your browser.
+accessing_the_web_console:
+  title: Accessing the web console
+  numbered: false
+  preamble: |
+    Skupper includes a web console you can use to view the application
+    network. To access it, use `skupper status` to look up the URL of
+    the web console. Then use `kubectl get
+    secret/skupper-console-users` to look up the console admin
+    password.
+
+    **Note:** The `` and `` fields in the
+    following output are placeholders. The actual values are specific
+    to your environment.
+  commands:
+    "0":
+      - run: skupper status
+        output: |
+          Skupper is enabled for namespace "@namespace@". It is connected to 1 other site. It has 1 exposed service.
+          The site console url is:
+          The credentials for internal console-auth mode are held in secret: 'skupper-console-users'
+      - run: kubectl get secret/skupper-console-users -o jsonpath={.data.admin} | base64 -d
+        output: |
+
+      - await_console_ok:
+  postamble: |
+    Navigate to `` in your browser. When prompted, log
+    in as user `admin` and enter the password.
+cleaning_up:
+  id: cleaning_up
+  title: Cleaning up
+  numbered: false
+  preamble: |
+    To remove Skupper and the other resources from this exercise, use
+    the following commands.
+ commands: + "*": + - run: skupper delete diff --git a/external/skewer-main/python/skewer/tests.py b/external/skewer-main/python/skewer/tests.py new file mode 100644 index 0000000..b1008a8 --- /dev/null +++ b/external/skewer-main/python/skewer/tests.py @@ -0,0 +1,71 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from skewer import * + +@test +def check_environment_(): + check_environment() + +@test +def plano_(): + with working_dir("test-example"): + run("./plano") + run("./plano generate") + +@test +def workflow(): + parse_yaml(read("config/.github/workflows/main.yaml")) + +@test +def generate_readme_(): + with working_dir("test-example"): + generate_readme("skewer.yaml", "README.md") + check_file("README.md") + +@test +def await_operations(): + try: + run("minikube -p skewer start") + + with expect_error(): + await_resource("deployment/not-there", timeout=1) + + with expect_error(): + await_external_ip("service/not-there", timeout=1) + finally: + run("minikube -p skewer delete") + +@test +def run_steps_demo(): + with working_dir("test-example"): + with working_env(SKEWER_DEMO=1, SKEWER_DEMO_NO_WAIT=1): + run_steps_minikube("skewer.yaml", debug=True) + +@test +def run_steps_debug(): + with working_dir("test-example"): + with expect_error(): + with working_env(SKEWER_FAIL=1): + run_steps_minikube("skewer.yaml", debug=True) + +if __name__ == "__main__": + import sys + + PlanoTestCommand(sys.modules[__name__]).main() diff --git a/external/skewer-main/test-example/.gitignore b/external/skewer-main/test-example/.gitignore new file mode 100644 index 0000000..7bd2dc8 --- /dev/null +++ b/external/skewer-main/test-example/.gitignore @@ -0,0 +1 @@ +/README.html diff --git a/external/skewer-main/test-example/.plano.py b/external/skewer-main/test-example/.plano.py new file mode 120000 index 0000000..6b89765 --- /dev/null +++ b/external/skewer-main/test-example/.plano.py @@ -0,0 +1 @@ +external/skewer-main/config/.plano.py \ No newline at end of file diff --git a/external/skewer-main/test-example/README.md b/external/skewer-main/test-example/README.md new file mode 100644 index 0000000..1dea610 --- /dev/null +++ b/external/skewer-main/test-example/README.md @@ -0,0 +1,462 @@ +# Skupper Hello World + +[![main](https://github.com/skupperproject/skewer/actions/workflows/main.yaml/badge.svg)](https://github.com/skupperproject/skewer/actions/workflows/main.yaml) + +#### A minimal HTTP application deployed across Kubernetes clusters using Skupper + +This example is part of a [suite of examples][examples] showing the +different ways you can use [Skupper][website] to connect services +across cloud providers, data centers, and edge sites. 
+ +[website]: https://skupper.io/ +[examples]: https://skupper.io/examples/index.html + +#### Contents + +* [Overview](#overview) +* [Prerequisites](#prerequisites) +* [Step 1: Install the Skupper command-line tool](#step-1-install-the-skupper-command-line-tool) +* [Step 2: Configure separate console sessions](#step-2-configure-separate-console-sessions) +* [Step 3: Access your clusters](#step-3-access-your-clusters) +* [Step 4: Set up your namespaces](#step-4-set-up-your-namespaces) +* [Step 5: Install Skupper in your namespaces](#step-5-install-skupper-in-your-namespaces) +* [Step 6: Check the status of your namespaces](#step-6-check-the-status-of-your-namespaces) +* [Step 7: Link your namespaces](#step-7-link-your-namespaces) +* [Step 8: Fail on demand](#step-8-fail-on-demand) +* [Step 9: Deploy the frontend and backend services](#step-9-deploy-the-frontend-and-backend-services) +* [Step 10: Expose the backend service](#step-10-expose-the-backend-service) +* [Step 11: Expose the frontend service](#step-11-expose-the-frontend-service) +* [Step 12: Test the application](#step-12-test-the-application) +* [Accessing the web console](#accessing-the-web-console) +* [Cleaning up](#cleaning-up) +* [Summary](#summary) +* [Next steps](#next-steps) +* [About this example](#about-this-example) + +## Overview + +This example is a very simple multi-service HTTP application that can +be deployed across multiple Kubernetes clusters using Skupper. + +It contains two services: + +* A backend service that exposes an `/api/hello` endpoint. It + returns greetings of the form `Hi, . I am + ()`. + +* A frontend service that sends greetings to the backend and + fetches new greetings in response. + +With Skupper, you can place the backend in one cluster and the +frontend in another and maintain connectivity between the two +services without exposing the backend to the public internet. + + + +## Prerequisites + +Custom prerequisites + +## Step 1: Install the Skupper command-line tool + +The `skupper` command-line tool is the entrypoint for installing +and configuring Skupper. You need to install the `skupper` +command only once for each development environment. + +On Linux or Mac, you can use the install script (inspect it +[here][install-script]) to download and extract the command: + +~~~ shell +curl https://skupper.io/install.sh | sh +~~~ + +The script installs the command under your home directory. It +prompts you to add the command to your path if necessary. + +For Windows and other installation options, see [Installing +Skupper][install-docs]. + +[install-script]: https://github.com/skupperproject/skupper-website/blob/main/docs/install.sh +[install-docs]: https://skupper.io/install/index.html + +## Step 2: Configure separate console sessions + +Skupper is designed for use with multiple namespaces, usually on +different clusters. The `skupper` and `kubectl` commands use your +[kubeconfig][kubeconfig] and current context to select the +namespace where they operate. + +[kubeconfig]: https://kubernetes.io/docs/concepts/configuration/organize-cluster-access-kubeconfig/ + +Your kubeconfig is stored in a file in your home directory. The +`skupper` and `kubectl` commands use the `KUBECONFIG` environment +variable to locate it. + +A single kubeconfig supports only one active context per user. +Since you will be using multiple contexts at once in this +exercise, you need to create distinct kubeconfigs. + +Start a console session for each of your namespaces. 
Set the +`KUBECONFIG` environment variable to a different path in each +session. + +_**Console for West:**_ + +~~~ shell +export KUBECONFIG=~/.kube/config-west +~~~ + +_**Console for East:**_ + +~~~ shell +export KUBECONFIG=~/.kube/config-east +~~~ + +## Step 3: Access your clusters + +The procedure for accessing a Kubernetes cluster varies by +provider. [Find the instructions for your chosen +provider][kube-providers] and use them to authenticate and +configure access for each console session. + +[kube-providers]: https://skupper.io/start/kubernetes.html + +## Step 4: Set up your namespaces + +Use `kubectl create namespace` to create the namespaces you wish +to use (or use existing namespaces). Use `kubectl config +set-context` to set the current namespace for each session. + +_**Console for West:**_ + +~~~ shell +kubectl create namespace west +kubectl config set-context --current --namespace west +~~~ + +_**Console for East:**_ + +~~~ shell +kubectl create namespace east +kubectl config set-context --current --namespace east +~~~ + +## Step 5: Install Skupper in your namespaces + +The `skupper init` command installs the Skupper router and +controller in the current namespace. Run the `skupper init` command +in each namespace. + +**Note:** If you are using Minikube, [you need to start `minikube +tunnel`][minikube-tunnel] before you install Skupper. + +[minikube-tunnel]: https://skupper.io/start/minikube.html#running-minikube-tunnel + +_**Console for West:**_ + +~~~ shell +skupper init --enable-console --enable-flow-collector +~~~ + +_**Console for East:**_ + +~~~ shell +skupper init +~~~ + +_Sample output:_ + +~~~ console +$ skupper init +Waiting for LoadBalancer IP or hostname... +Waiting for status... +Skupper is now installed in namespace ''. Use 'skupper status' to get more information. +~~~ + +## Step 6: Check the status of your namespaces + +Use `skupper status` in each console to check that Skupper is +installed. + +_**Console for West:**_ + +~~~ shell +skupper status +~~~ + +_**Console for East:**_ + +~~~ shell +skupper status +~~~ + +_Sample output:_ + +~~~ console +Skupper is enabled for namespace "" in interior mode. It is connected to 1 other site. It has 1 exposed service. +The site console url is: +The credentials for internal console-auth mode are held in secret: 'skupper-console-users' +~~~ + +As you move through the steps below, you can use `skupper status` at +any time to check your progress. + +## Step 7: Link your namespaces + +Creating a link requires use of two `skupper` commands in +conjunction, `skupper token create` and `skupper link create`. + +The `skupper token create` command generates a secret token that +signifies permission to create a link. The token also carries the +link details. Then, in a remote namespace, The `skupper link +create` command uses the token to create a link to the namespace +that generated it. + +**Note:** The link token is truly a *secret*. Anyone who has the +token can link to your namespace. Make sure that only those you +trust have access to it. + +First, use `skupper token create` in one namespace to generate the +token. Then, use `skupper link create` in the other to create a +link. 
+ +_**Console for West:**_ + +~~~ shell +skupper token create ~/secret.token +~~~ + +_Sample output:_ + +~~~ console +$ skupper token create ~/secret.token +Token written to ~/secret.token +~~~ + +_**Console for East:**_ + +~~~ shell +skupper link create ~/secret.token +~~~ + +_Sample output:_ + +~~~ console +$ skupper link create ~/secret.token +Site configured to link to https://10.105.193.154:8081/ed9c37f6-d78a-11ec-a8c7-04421a4c5042 (name=link1) +Check the status of the link using 'skupper link status'. +~~~ + +If your console sessions are on different machines, you may need +to use `scp` or a similar tool to transfer the token securely. By +default, tokens expire after a single use or 15 minutes after +creation. + +## Step 8: Fail on demand + +_**Console for West:**_ + +~~~ shell +if [ -n "${SKEWER_FAIL}" ]; then expr 1 / 0; fi + +~~~ + +## Step 9: Deploy the frontend and backend services + +Use `kubectl create deployment` to deploy the frontend service +in `west` and the backend service in `east`. + +_**Console for West:**_ + +~~~ shell +kubectl create deployment frontend --image quay.io/skupper/hello-world-frontend +~~~ + +_Sample output:_ + +~~~ console +$ kubectl create deployment frontend --image quay.io/skupper/hello-world-frontend +deployment.apps/frontend created +~~~ + +_**Console for East:**_ + +~~~ shell +kubectl create deployment backend --image quay.io/skupper/hello-world-backend --replicas 3 +~~~ + +_Sample output:_ + +~~~ console +$ kubectl create deployment backend --image quay.io/skupper/hello-world-backend --replicas 3 +deployment.apps/backend created +~~~ + +## Step 10: Expose the backend service + +We now have two namespaces linked to form a Skupper network, but +no services are exposed on it. Skupper uses the `skupper +expose` command to select a service from one namespace for +exposure on all the linked namespaces. + +Use `skupper expose` to expose the backend service to the +frontend service. + +_**Console for East:**_ + +~~~ shell +skupper expose deployment/backend --port 8080 +~~~ + +_Sample output:_ + +~~~ console +$ skupper expose deployment/backend --port 8080 +deployment backend exposed as backend +~~~ + +## Step 11: Expose the frontend service + +We have established connectivity between the two namespaces and +made the backend in `east` available to the frontend in `west`. +Before we can test the application, we need external access to +the frontend. + +Use `kubectl expose` with `--type LoadBalancer` to open network +access to the frontend service. + +_**Console for West:**_ + +~~~ shell +kubectl expose deployment/frontend --port 8080 --type LoadBalancer +~~~ + +_Sample output:_ + +~~~ console +$ kubectl expose deployment/frontend --port 8080 --type LoadBalancer +service/frontend exposed +~~~ + +## Step 12: Test the application + +Now we're ready to try it out. Use `kubectl get service/frontend` +to look up the external IP of the frontend service. Then use +`curl` or a similar tool to request the `/api/health` endpoint at +that address. + +**Note:** The `` field in the following commands is a +placeholder. The actual value is an IP address. 
+ +_**Console for West:**_ + +~~~ shell +kubectl get service/frontend +curl http://:8080/api/health +~~~ + +_Sample output:_ + +~~~ console +$ kubectl get service/frontend +NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE +frontend LoadBalancer 10.103.232.28 8080:30407/TCP 15s + +$ curl http://:8080/api/health +OK +~~~ + +If everything is in order, you can now access the web interface by +navigating to `http://:8080/` in your browser. + +## Accessing the web console + +Skupper includes a web console you can use to view the application +network. To access it, use `skupper status` to look up the URL of +the web console. Then use `kubectl get +secret/skupper-console-users` to look up the console admin +password. + +**Note:** The `` and `` fields in the +following output are placeholders. The actual values are specific +to your environment. + +_**Console for West:**_ + +~~~ shell +skupper status +kubectl get secret/skupper-console-users -o jsonpath={.data.admin} | base64 -d +~~~ + +_Sample output:_ + +~~~ console +$ skupper status +Skupper is enabled for namespace "west". It is connected to 1 other site. It has 1 exposed service. +The site console url is: +The credentials for internal console-auth mode are held in secret: 'skupper-console-users' + +$ kubectl get secret/skupper-console-users -o jsonpath={.data.admin} | base64 -d + +~~~ + +Navigate to `` in your browser. When prompted, log +in as user `admin` and enter the password. + +## Cleaning up + +To remove Skupper and the other resources from this exercise, use +the following commands. + +_**Console for West:**_ + +~~~ shell +skupper delete +kubectl delete service/frontend +kubectl delete deployment/frontend +~~~ + +_**Console for East:**_ + +~~~ shell +skupper delete +kubectl delete deployment/backend +~~~ + +## Summary + +This example locates the frontend and backend services in different +namespaces, on different clusters. Ordinarily, this means that they +have no way to communicate unless they are exposed to the public +internet. + +Introducing Skupper into each namespace allows us to create a virtual +application network that can connect services in different clusters. +Any service exposed on the application network is represented as a +local service in all of the linked namespaces. + +The backend service is located in `east`, but the frontend service +in `west` can "see" it as if it were local. When the frontend +sends a request to the backend, Skupper forwards the request to the +namespace where the backend is running and routes the response back to +the frontend. + + + +## Next steps + +Custom next steps + +## About this example + +This example was produced using [Skewer][skewer], a library for +documenting and testing Skupper examples. + +[skewer]: https://github.com/skupperproject/skewer + +Skewer provides utility functions for generating the README and +running the example steps. Use the `./plano` command in the project +root to see what is available. + +To quickly stand up the example using Minikube, try the `./plano demo` +command. diff --git a/external/skewer-main/test-example/external/skewer-main b/external/skewer-main/test-example/external/skewer-main new file mode 120000 index 0000000..c25bddb --- /dev/null +++ b/external/skewer-main/test-example/external/skewer-main @@ -0,0 +1 @@ +../.. 
\ No newline at end of file diff --git a/external/skewer-main/test-example/images/entities.svg b/external/skewer-main/test-example/images/entities.svg new file mode 100644 index 0000000..6a1ab87 --- /dev/null +++ b/external/skewer-main/test-example/images/entities.svg @@ -0,0 +1,3 @@ + + +
[entities.svg diagram labels (SVG markup not preserved in this view): Frontend service and Skupper in namespace "west" on Kubernetes cluster 1; Backend service and Skupper in namespace "east" on Kubernetes cluster 2; Public network between them]
diff --git a/external/skewer-main/test-example/images/sequence.svg b/external/skewer-main/test-example/images/sequence.svg
new file mode 100644
index 0000000..20d27c1
--- /dev/null
+++ b/external/skewer-main/test-example/images/sequence.svg
@@ -0,0 +1 @@
+[sequence.svg: rendered sequence diagram of the request flow (Curl -> Frontend -> Skupper (west) -> Skupper (east) -> Backend and back); SVG markup not preserved in this view, see sequence.txt below for the source]
diff --git a/external/skewer-main/test-example/images/sequence.txt b/external/skewer-main/test-example/images/sequence.txt
new file mode 100644
index 0000000..6d081ea
--- /dev/null
+++ b/external/skewer-main/test-example/images/sequence.txt
@@ -0,0 +1,22 @@
+participant Curl
+
+participantgroup #cce5ff eu-north
+participant Frontend
+participant "Skupper" as Skupper1 #lightgreen
+end
+
+participantgroup #ffe6cc us-east
+participant "Skupper" as Skupper2 #lightgreen
+participant Backend #yellow
+end
+
+abox over Skupper1 #yellow: Backend
+
+Curl->Frontend: GET /
+Frontend->Skupper1: GET /api/hello
+Skupper1->Skupper2: GET /api/hello
+Skupper2->Backend: GET /api/hello
+Skupper2<-Backend: "Hello 1"
+Skupper1<-Skupper2: "Hello 1"
+Frontend<-Skupper1: "Hello 1"
+Curl<-Frontend: "Hello 1"
diff --git a/external/skewer-main/test-example/plano b/external/skewer-main/test-example/plano
new file mode 120000
index 0000000..c04aa5c
--- /dev/null
+++ b/external/skewer-main/test-example/plano
@@ -0,0 +1 @@
+external/skewer-main/plano
\ No newline at end of file
diff --git a/external/skewer-main/test-example/python/skewer b/external/skewer-main/test-example/python/skewer
new file mode 120000
index 0000000..6ea41b9
--- /dev/null
+++ b/external/skewer-main/test-example/python/skewer
@@ -0,0 +1 @@
+../external/skewer-main/python/skewer
\ No newline at end of file
diff --git a/external/skewer-main/test-example/skewer.yaml b/external/skewer-main/test-example/skewer.yaml
new file mode 100644
index 0000000..ab2a493
--- /dev/null
+++ b/external/skewer-main/test-example/skewer.yaml
@@ -0,0 +1,119 @@
+title: Skupper Hello World
+subtitle: A minimal HTTP application deployed across Kubernetes clusters using Skupper
+github_actions_url: https://github.com/skupperproject/skewer/actions/workflows/main.yaml
+overview: |
+  This example is a very simple multi-service HTTP application that can
+  be deployed across multiple Kubernetes clusters using Skupper.
+
+  It contains two services:
+
+  * A backend service that exposes an `/api/hello` endpoint. It
+    returns greetings of the form `Hi, . I am
+    ()`.
+
+  * A frontend service that sends greetings to the backend and
+    fetches new greetings in response.
+
+  With Skupper, you can place the backend in one cluster and the
+  frontend in another and maintain connectivity between the two
+  services without exposing the backend to the public internet.
+ + +prerequisites: | + Custom prerequisites +sites: + west: + title: West + platform: kubernetes + namespace: west + env: + KUBECONFIG: ~/.kube/config-west + east: + title: East + platform: kubernetes + namespace: east + env: + KUBECONFIG: ~/.kube/config-east +steps: + - standard: install_the_skupper_command_line_tool + - standard: configure_separate_console_sessions + - standard: access_your_clusters + - standard: set_up_your_namespaces + - standard: install_skupper_in_your_namespaces + - standard: check_the_status_of_your_namespaces + - standard: link_your_namespaces + - title: Fail on demand + commands: + west: + - run: | + if [ -n "${SKEWER_FAIL}" ]; then expr 1 / 0; fi + - title: Deploy the frontend and backend services + preamble: | + Use `kubectl create deployment` to deploy the frontend service + in `west` and the backend service in `east`. + commands: + west: + - run: kubectl create deployment frontend --image quay.io/skupper/hello-world-frontend + output: deployment.apps/frontend created + east: + - run: kubectl create deployment backend --image quay.io/skupper/hello-world-backend --replicas 3 + output: deployment.apps/backend created + - title: Expose the backend service + preamble: | + We now have two namespaces linked to form a Skupper network, but + no services are exposed on it. Skupper uses the `skupper + expose` command to select a service from one namespace for + exposure on all the linked namespaces. + + Use `skupper expose` to expose the backend service to the + frontend service. + commands: + east: + - await: deployment/backend + - run: skupper expose deployment/backend --port 8080 + output: deployment backend exposed as backend + - title: Expose the frontend service + preamble: | + We have established connectivity between the two namespaces and + made the backend in `east` available to the frontend in `west`. + Before we can test the application, we need external access to + the frontend. + + Use `kubectl expose` with `--type LoadBalancer` to open network + access to the frontend service. + commands: + west: + - await: deployment/frontend + - run: kubectl expose deployment/frontend --port 8080 --type LoadBalancer + output: service/frontend exposed + - standard: test_the_application + - standard: accessing_the_web_console + - standard: cleaning_up + commands: + west: + - run: skupper delete + - run: kubectl delete service/frontend + - run: kubectl delete deployment/frontend + east: + - run: skupper delete + - run: kubectl delete deployment/backend +summary: | + This example locates the frontend and backend services in different + namespaces, on different clusters. Ordinarily, this means that they + have no way to communicate unless they are exposed to the public + internet. + + Introducing Skupper into each namespace allows us to create a virtual + application network that can connect services in different clusters. + Any service exposed on the application network is represented as a + local service in all of the linked namespaces. + + The backend service is located in `east`, but the frontend service + in `west` can "see" it as if it were local. When the frontend + sends a request to the backend, Skupper forwards the request to the + namespace where the backend is running and routes the response back to + the frontend. 
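+# An illustrative note (not part of the original file): when the readme is
+# generated from this file, each step's commands become a per-site shell
+# block, and a command with an "output" value also gets a sample-output
+# block. The "Deploy the frontend and backend services" step above renders
+# roughly as:
+#
+#   _**Console for West:**_
+#
+#   ~~~ shell
+#   kubectl create deployment frontend --image quay.io/skupper/hello-world-frontend
+#   ~~~
+#
+#   _Sample output:_
+#
+#   ~~~ console
+#   $ kubectl create deployment frontend --image quay.io/skupper/hello-world-frontend
+#   deployment.apps/frontend created
+#   ~~~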
+ + +next_steps: | + Custom next steps diff --git a/plano b/plano index 0f4ec84..c04aa5c 120000 --- a/plano +++ b/plano @@ -1 +1 @@ -subrepos/skewer/plano \ No newline at end of file +external/skewer-main/plano \ No newline at end of file diff --git a/python/skewer b/python/skewer index 0785527..6ea41b9 120000 --- a/python/skewer +++ b/python/skewer @@ -1 +1 @@ -../subrepos/skewer/python/skewer \ No newline at end of file +../external/skewer-main/python/skewer \ No newline at end of file diff --git a/skewer.yaml b/skewer.yaml index a5a14ff..7b3c476 100644 --- a/skewer.yaml +++ b/skewer.yaml @@ -15,9 +15,9 @@ overview: | * A frontend service that sends greetings to the backend and fetches new greetings in response. - The two services run in two different clusters. The frontend runs - in a namespace on cluster 1 called West, and the backend runs in a - namespace on cluster 2 called East. + In this scenario, each service runs in a different Kubernetes + cluster. The frontend runs in a namespace on cluster 1 called West, + and the backend runs in a namespace on cluster 2 called East.
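As a quick orientation to the files added above, the following is a minimal sketch of how the embedded Skewer tooling is typically driven from the project root. It assumes the `./plano` symlink updated by this patch and, for the demo command, a local Minikube and `kubectl` setup.

~~~ shell
# List the commands provided by the embedded Skewer config
./plano

# Regenerate README.md from skewer.yaml
./plano generate

# Stand the example up on Minikube and pause for exploration
./plano demo
~~~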