diff --git a/simulator/.dockerignore b/simulator/.dockerignore new file mode 100644 index 00000000..d74481d7 --- /dev/null +++ b/simulator/.dockerignore @@ -0,0 +1,14 @@ +# ignore environment variables files +.env + +# virtual environment +venv/ +.venv/ + +# ignore standard files and folders +__pycache__/ +.git/ +.vscode/ + +# notebooks +*.ipynb diff --git a/simulator/Dockerfile b/simulator/Dockerfile new file mode 100644 index 00000000..355391c5 --- /dev/null +++ b/simulator/Dockerfile @@ -0,0 +1,35 @@ + +# Use an official Python runtime as a parent image +FROM python:3.11-slim-buster + +# Variables +ENV KWOK_REPO=kubernetes-sigs/kwok +ENV KWOK_LATEST_RELEASE=v0.0.0 + +# Set the working directory to /app +WORKDIR /app + +# Copy the current directory contents into the container at /app +COPY . /app + +# Install necessary packages +RUN apt-get update && \ + apt-get install -y curl gzip ca-certificates apt-transport-https && \ + curl -sSL https://get.docker.com | sh && \ + curl -sSL -o /usr/local/bin/kind "https://kind.sigs.k8s.io/dl/v0.20.0/kind-linux-amd64" && \ + chmod +x /usr/local/bin/kind && \ + curl -sLO "https://github.com/argoproj/argo-workflows/releases/download/v3.4.13/argo-linux-amd64.gz" && \ + gzip -d argo-linux-amd64.gz && \ + mv ./argo-linux-amd64 /usr/local/bin/argo && \ + chmod +x /usr/local/bin/argo && \ + curl -sSL -o /usr/local/bin/kubectl "https://storage.googleapis.com/kubernetes-release/release/v1.22.0/bin/linux/amd64/kubectl" && \ + chmod +x /usr/local/bin/kubectl && \ + curl -sSL -o /usr/local/bin/kwokctl "https://github.com/kubernetes-sigs/kwok/releases/download/v0.4.0/kwokctl-linux-amd64" && \ + chmod +x /usr/local/bin/kwokctl && \ + curl -sSL -o /usr/local/bin/kwok "https://github.com/kubernetes-sigs/kwok/releases/download/v0.4.0/kwok-linux-amd64" && \ + chmod +x /usr/local/bin/kwok && \ + pip install poetry && \ + poetry install && \ + rm -rf /var/lib/apt/lists/* + +CMD [ "poetry", "run", "python", "run_simulation.py", "-v"] diff --git 
a/simulator/README.md b/simulator/README.md new file mode 100644 index 00000000..4cfb77bb --- /dev/null +++ b/simulator/README.md @@ -0,0 +1,28 @@ +# Simulate pipeline +Set up a KWOK cluster and submit an argo workflow to the cluster to simulate the pipeline deployment. + +# Python poetry +Run simulation + +First install dependencies: +```bash +poetry install +``` + +Run example simulation: +```bash +poetry run python run_simulation.py -v +``` + +Pass workflow file to simulate workflow: +```bash +poetry run python run_simulation.py -v --workflow <path-to-workflow.yaml> +``` + +# Docker +Create and run docker image + +```bash +docker build -f Dockerfile -t kwoksim . +docker run -v /var/run/docker.sock:/var/run/docker.sock kwoksim +``` \ No newline at end of file diff --git a/simulator/config/hello-world.yaml b/simulator/config/hello-world.yaml new file mode 100644 index 00000000..1e94f466 --- /dev/null +++ b/simulator/config/hello-world.yaml @@ -0,0 +1,17 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: hello-world- + labels: + workflows.argoproj.io/archive-strategy: "false" + annotations: + workflows.argoproj.io/description: | + This is a simple hello world example. 
+spec: + entrypoint: whalesay + templates: + - name: whalesay + container: + image: docker/whalesay:latest + command: [cowsay] + args: ["hello world"] \ No newline at end of file diff --git a/simulator/config/nodes.yaml b/simulator/config/nodes.yaml new file mode 100644 index 00000000..65a36bef --- /dev/null +++ b/simulator/config/nodes.yaml @@ -0,0 +1,63 @@ +apiVersion: v1 +kind: Node +metadata: + annotations: + node.alpha.kubernetes.io/ttl: "0" + kwok.x-k8s.io/node: fake + labels: + beta.kubernetes.io/arch: arm64 + beta.kubernetes.io/os: linux + kubernetes.io/arch: arm64 + kubernetes.io/hostname: worker1 + kubernetes.io/os: linux + kubernetes.io/role: worker + node-role.kubernetes.io/worker: "" + type: kwok + name: worker1 +status: + allocatable: + cpu: "42" + memory: 42Gi + pods: "42" + capacity: + cpu: "42" + memory: 42Gi + pods: "42" + nodeInfo: + architecture: arm64 + kubeProxyVersion: fake + kubeletVersion: fake + operatingSystem: linux + +--- + +apiVersion: v1 +kind: Node +metadata: + annotations: + node.alpha.kubernetes.io/ttl: "0" + kwok.x-k8s.io/node: fake + labels: + beta.kubernetes.io/arch: arm64 + beta.kubernetes.io/os: linux + kubernetes.io/arch: arm64 + kubernetes.io/hostname: worker2 + kubernetes.io/os: linux + kubernetes.io/role: worker + node-role.kubernetes.io/worker: "" + type: kwok + name: worker2 +status: + allocatable: + cpu: "99" + memory: 99Gi + pods: "99" + capacity: + cpu: "99" + memory: 99Gi + pods: "99" + nodeInfo: + architecture: arm64 + kubeProxyVersion: fake + kubeletVersion: fake + operatingSystem: linux \ No newline at end of file diff --git a/simulator/config/stages.yaml b/simulator/config/stages.yaml new file mode 100644 index 00000000..a994aa64 --- /dev/null +++ b/simulator/config/stages.yaml @@ -0,0 +1,124 @@ +kind: Stage +apiVersion: kwok.x-k8s.io/v1alpha1 +metadata: + name: pod-ready +spec: + resourceRef: + apiGroup: v1 + kind: Pod + selector: + matchExpressions: + - key: '.metadata.deletionTimestamp' + operator: 
'DoesNotExist' + - key: '.status.podIP' + operator: 'DoesNotExist' + next: + finalizers: + add: + - value: 'kwok.x-k8s.io/fake' + statusTemplate: | + {{ $now := Now }} + + conditions: + - lastTransitionTime: {{ $now }} + status: "True" + type: Initialized + - lastTransitionTime: {{ $now }} + status: "True" + type: Ready + - lastTransitionTime: {{ $now }} + status: "True" + type: ContainersReady + {{ range .spec.readinessGates }} + - lastTransitionTime: {{ $now }} + status: "True" + type: {{ .conditionType }} + {{ end }} + + containerStatuses: + {{ range .spec.containers }} + - image: {{ .image }} + name: {{ .name }} + ready: true + restartCount: 0 + state: + running: + startedAt: {{ $now }} + {{ end }} + + initContainerStatuses: + {{ range .spec.initContainers }} + - image: {{ .image }} + name: {{ .name }} + ready: true + restartCount: 0 + state: + terminated: + exitCode: 0 + finishedAt: {{ $now }} + reason: Completed + startedAt: {{ $now }} + {{ end }} + + hostIP: {{ NodeIPWith .spec.nodeName }} + podIP: {{ PodIPWith .spec.nodeName ( or .spec.hostNetwork false ) ( or .metadata.uid "" ) ( or .metadata.name "" ) ( or .metadata.namespace "" ) }} + phase: Running + startTime: {{ $now }} +--- +kind: Stage +apiVersion: kwok.x-k8s.io/v1alpha1 +metadata: + name: pod-complete +spec: + resourceRef: + apiGroup: v1 + kind: Pod + selector: + matchExpressions: + - key: '.metadata.deletionTimestamp' + operator: 'DoesNotExist' + - key: '.status.phase' + operator: 'In' + values: + - 'Running' + - key: '.metadata.ownerReferences.[].kind' + operator: 'In' + values: + - 'Workflow' + next: + statusTemplate: | + {{ $now := Now }} + {{ $root := . 
class Kwok:
    """Provision a KWOK cluster and simulate an Argo workflow on it.

    Shells out to ``kwokctl``, ``kubectl`` and ``argo`` (all of which must be
    on PATH).  Instantiating the class immediately starts a cluster; call
    :meth:`run_simulation` to submit the loaded workflow,
    :meth:`is_simulation_complete` to poll it, and :meth:`cleanup` to tear the
    cluster down afterwards.
    """

    # Immutable cluster configuration, shared by all instances.
    runtime = "kind"
    argo_version = "v3.4.13"
    config_yaml = "config/stages.yaml"
    nodes_yaml = "config/nodes.yaml"
    controlplane_node_name = "kwok-kwok-control-plane"
    basic_configuration_file = "~/.kwok/kwok.yaml"
    # Ordered shell commands run by start_kwok_cluster() to bring the cluster up.
    steps = [
        {
            "name": "create cluster",
            "command": f"kwokctl create cluster --runtime {runtime} --config {config_yaml}",
        },
        {"name": "define nodes", "command": f"kubectl apply -f {nodes_yaml}"},
        {
            "name": "create namespace for argo",
            "command": "kubectl create namespace argo",
        },
        {
            "name": "install argo",
            "command": f"kubectl apply -n argo -f https://github.com/argoproj/argo-workflows/releases/download/{argo_version}/install.yaml",
        },
        {
            "name": "migrate argo-server onto control plane",
            "command": 'kubectl patch deploy argo-server -n argo --type=json -p [{"op":"add","path":"/spec/template/spec/nodeName","value":"kwok-kwok-control-plane"}]',
        },
        {
            "name": "migrate workflow-controller onto control plane",
            "command": 'kubectl patch deploy workflow-controller -n argo --type=json -p [{"op":"add","path":"/spec/template/spec/nodeName","value":"kwok-kwok-control-plane"}]',
        },
    ]

    def __init__(self, argo_workflow_file: str = None, runtime: str = runtime):
        """Start a kwok cluster, optionally loading a workflow first.

        :param argo_workflow_file: optional path to an Argo workflow YAML file
        :param runtime: kwokctl runtime to use (defaults to the class default)
        """
        # BUGFIX: mutable per-run state used to live on the class, so every
        # instance shared a single ``logs`` list (and leaked workflow state
        # between runs).  Initialise it per instance instead.
        self.logs = []                     # parsed JSON log entries from subprocess output
        self.workflow = None               # input workflow (parsed YAML dict)
        self.workflow4kwok = None          # workflow modified for kwok
        self._workflow_4kwok_filename = ".kwok-workflow.yaml"
        self.workflow_name = None          # name assigned by argo on submission
        self._simulation_status = None     # last fetched workflow status (dict)
        if runtime:
            self.runtime = runtime
        if argo_workflow_file:
            self.load_workflow(argo_workflow_file)
        self.start_kwok_cluster()

    def setup_simulation(self):
        """Set up simulation parameters (not implemented yet)."""
        pass

    def run_simulation(self):
        """Submit the kwok-adapted workflow to the cluster.

        Writes the modified workflow to a temporary file, submits it with
        ``argo submit``, records the assigned workflow name, then removes
        the temporary file.
        """
        self._write_workflow()  # create tmp local workflow file
        self.call_argo(
            f"argo submit {self._workflow_4kwok_filename}"
        )  # submit workflow to kwok cluster
        self.get_submitted_workflow_name()
        self._delete_workflow()  # delete the tmp local workflow

    def is_simulation_complete(self) -> bool:
        """Return True once the workflow reached a terminal phase.

        Also refreshes the cached simulation status object (see
        :meth:`get_simulation_progress`).
        """
        raw_json_string = self.get_results(output_format="json")
        self._simulation_status = json.loads(raw_json_string)["status"]
        phase = self._simulation_status.get("phase")
        # BUGFIX: only "Succeeded" used to count as complete, so callers
        # polling this method would loop forever on a failed workflow.
        return phase in ("Succeeded", "Failed", "Error")

    def get_simulation_progress(self) -> dict:
        """Return the most recently fetched workflow status object.

        May be ``None`` until :meth:`is_simulation_complete` has been called.
        """
        return self._simulation_status

    def get_results(self, output_format: str = None) -> str:
        """Fetch the current workflow state via ``argo get``.

        :param output_format: ``"json"``, ``"yaml"``, or ``None`` for the
            default human-readable rendering
        :return: raw stdout of the ``argo get`` call
        """
        command = f"argo get {self.workflow_name}"
        if output_format in ("json", "yaml"):
            command += f" -o {output_format}"
        proc = self.call_argo(command)
        return proc.stdout.decode("utf-8")

    def get_submitted_workflow_name(self) -> str:
        """Record and return the name of the most recently submitted workflow.

        Call this right after submitting the workflow.
        """
        proc = self.call_argo("argo list @latest -o json")
        latest = json.loads(proc.stdout.decode("utf-8"))
        self.workflow_name = latest[0]["metadata"]["name"]
        return self.workflow_name

    def update_kwok_config(self):
        """Update the kwok config file.

        TODO: this updates the config file, but somehow does not update the
        running kwok cluster — seems to be a kwokctl bug; needs investigation.
        """
        self.call_kwokctl(f"kwokctl config tidy --config {self.config_yaml}")

    def _write_workflow(self, output_file: str = None):
        """Serialise ``workflow4kwok`` to a temporary YAML file.

        :param output_file: optional path whose stem/suffix are reused for
            the temporary file name; defaults to a hidden file in the CWD
        """
        if output_file is None:
            # BUGFIX: used ".kwok_workflow.yaml" here while the class default
            # was ".kwok-workflow.yaml" — unified on the class default.
            self._workflow_4kwok_filename = ".kwok-workflow.yaml"
        else:
            ifile = Path(output_file)
            self._workflow_4kwok_filename = (
                ifile.parent / f"{ifile.stem}_kwok_inputworkflow{ifile.suffix}"
            )

        with open(self._workflow_4kwok_filename, "w") as f:
            yaml.dump(self.workflow4kwok, f)

    def _delete_workflow(self):
        """Delete the temporary workflow file."""
        os.remove(self._workflow_4kwok_filename)

    def collect_logs(
        self,
        completed_process: subprocess.CompletedProcess,
        print_json_logs: bool = True,
        print_std_logs: bool = True,
    ):
        """Parse a finished subprocess's stdout and accumulate JSON log lines.

        Lines that parse as JSON are appended to ``self.logs`` (and printed
        when ``print_json_logs``); other lines are printed verbatim when
        ``print_std_logs``.
        """
        for line in completed_process.stdout.decode("utf-8").split("\n"):
            if line != "":
                try:
                    log_entry = json.loads(line)
                    if print_json_logs:
                        print(
                            "{level} {time} {msg}".format(
                                level=log_entry["level"],
                                time=log_entry["time"],
                                msg=log_entry["msg"],
                            )
                        )
                    self.logs.append(log_entry)
                except json.JSONDecodeError:
                    if print_std_logs:
                        print(line)

    def start_kwok_cluster(self):
        """Run every command in ``steps`` to bring up the kwok cluster."""
        for step in self.steps:
            print(f"Running step: {step['name']}")
            self.collect_logs(
                subprocess.run(
                    step["command"].split(),
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT,
                )
            )

    def cleanup(self):
        """Clean up the kwok cluster (currently just deletes it)."""
        self.delete_cluster()

    def delete_cluster(self):
        """Delete the kwok cluster via ``kwokctl delete cluster``."""
        self.collect_logs(
            subprocess.run(
                ["kwokctl", "delete", "cluster"],
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
            )
        )

    def call_kwokctl(self, command: str) -> subprocess.CompletedProcess:
        """Run a kwokctl command, collect its logs, and return the process.

        BUGFIX: previously returned ``collect_logs(...)``, which is ``None``.
        """
        p = subprocess.run(
            command.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        self.collect_logs(p)
        return p

    def call_argo(self, command: str) -> subprocess.CompletedProcess:
        """Run an argo CLI command and return the completed process."""
        return subprocess.run(
            command.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )

    def call_kubectl(self, command: str):
        """Run a kubectl command and return its output.

        On success with ``-o json``/``-o yaml`` the output is parsed into a
        Python object; otherwise the raw stdout (or stderr on failure) is
        returned as a string.
        """
        cmd = command.split()
        p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if p.returncode == 0:
            output = p.stdout.decode("utf-8")
        else:
            output = p.stderr.decode("utf-8")
        if "-o" in cmd:
            output_format = cmd[cmd.index("-o") + 1]
            if output_format == "json":
                return json.loads(output)
            elif output_format == "yaml":
                return yaml.load(output, Loader=yaml.FullLoader)
            else:
                return output
        else:
            return output

    def load_workflow(self, workflow_file: str):
        """Load an Argo workflow YAML file and derive its kwok-adapted copy.

        :param workflow_file: path to the input workflow file (yaml)
        """
        ifile = Path(workflow_file)
        # BUGFIX: close the file handle (was a bare open() that leaked).
        with open(ifile) as f:
            self.workflow = yaml.load(f, Loader=yaml.FullLoader)
        self.workflow4kwok = self._modify_workflow_for_kwok(ifile)

    def _modify_workflow_for_kwok(self, ifile: Path) -> dict:
        """Return a copy of the workflow stripped of inputs/outputs/arguments.

        Re-reads the file so the returned dict is independent of
        ``self.workflow``.  Removal is applied to each template and to each
        element of its ``steps``/``dag`` sections.

        :param ifile: the input workflow file
        """
        with open(ifile) as f:
            workflow = yaml.load(f, Loader=yaml.FullLoader)
        for template in workflow["spec"]["templates"]:
            look_for = ["steps", "dag"]
            remove = ["inputs", "outputs", "arguments"]
            for key in remove:
                if key in template:
                    template.pop(key)
                for subsection in look_for:
                    if subsection in template.keys():
                        for step in template[subsection]:
                            for element in step:
                                if key in element:
                                    element.pop(key)

        return workflow
"PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = 
"sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
import sys
import time
import argparse
import subprocess
from pathlib import Path
from kwoksim import Kwok

# External command-line tools the simulator shells out to.
DEPENDENCIES = ["docker", "kind", "kubectl", "kwok", "kwokctl"]

###################################################################################################


def install_dependencies(tools: list = DEPENDENCIES, verbose: bool = False):
    """Install the given tools for running a kwok simulation.

    Only macOS (homebrew) installation is automated; any other platform
    raises with a hint to install the tools manually.

    :param tools: tool names to install (defaults to all DEPENDENCIES)
    :param verbose: forward verbose output to the installer
    :raises Exception: on unsupported platforms
    """
    if sys.platform == "darwin":
        # BUGFIX: ``tools``/``verbose`` were previously ignored and every
        # dependency was (re)installed regardless of what was requested.
        install_darwin(tools=tools, verbose=verbose)
    else:
        raise Exception(
            f"Unsupported platform: {sys.platform}. Install dependencies manually: {tools}"
        )


def check_dependencies(verbose: bool = False) -> bool:
    """Probe each dependency with a version command; install any missing ones.

    Missing tools are handed to :func:`install_dependencies` inline, so this
    always returns True (best effort) rather than reporting remaining gaps.
    """
    tools_not_installed = []
    for tool in DEPENDENCIES:
        # kubectl's plain "version" would try to contact a cluster; probe the client only.
        if tool == "kubectl":
            cmd = f"{tool} version --client"
        else:
            cmd = f"{tool} --version"
        p = subprocess.run(
            cmd,
            shell=True,
            capture_output=True,
            check=False,
        )
        if verbose and p.returncode == 0:
            print(p.stdout.decode("utf-8"), end="")
        if p.returncode != 0:
            print(f"{tool} not installed")
            tools_not_installed.append(tool)
    if tools_not_installed:
        print(tools_not_installed)
        install_dependencies(tools=tools_not_installed, verbose=verbose)
    return True


def install_darwin(tools: list = DEPENDENCIES, verbose: bool = False):
    """brew-install each tool; failures are reported but not fatal."""
    for tool in tools:
        try:
            subprocess.run(["brew", "install", tool], capture_output=True, check=True)
        # BUGFIX: was a blanket ``except Exception`` — catch only the install
        # failure (non-zero exit) and a missing ``brew`` binary.
        except (subprocess.CalledProcessError, FileNotFoundError) as e:
            print(f"Could not install {tool}")
            if verbose:
                print(e)


def main(args: argparse.Namespace):
    """Run the simulation: start a kwok cluster, submit the workflow, poll,
    print the result, and tear the cluster down."""
    if not args.workflow:
        args.workflow = Path("config/hello-world.yaml").resolve()
    else:
        # BUGFIX: argparse hands us a plain string; the original called
        # ``.exists()`` on it below and crashed with AttributeError.
        args.workflow = Path(args.workflow).resolve()
    if args.workflow.exists() is False:
        raise Exception(
            "No workflow file provided. Use --workflow <path-to-workflow.yaml>"
        )
    if not check_dependencies(args.verbose):
        install_dependencies()

    kwok = Kwok(argo_workflow_file=args.workflow)

    kwok.run_simulation()

    while kwok.is_simulation_complete() is False:
        if args.verbose:
            j = kwok.get_simulation_progress()
            try:
                print(j["progress"], end=" ", flush=True)
            except KeyError:
                print(".", end=" ", flush=True)
        # BUGFIX: avoid a hot busy-wait loop hammering ``argo get``.
        time.sleep(1)

    result = kwok.get_results()

    kwok.cleanup()

    print(result)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Run kwok simulation")
    parser.add_argument("--workflow", type=str, help="path to argo workflow file")
    parser.add_argument("-v", "--verbose", action="store_true", help="verbose output")
    args = parser.parse_args()
    main(args)