diff --git a/.gitignore b/.gitignore index 1170286..3b542f3 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ build/ .tox/ __pycache__ *.charm +.vscode diff --git a/config.yaml b/config.yaml index 87672b7..45976f3 100644 --- a/config.yaml +++ b/config.yaml @@ -14,3 +14,7 @@ options: type: boolean default: false description: Whether cookies should require HTTPS + volume-viewer-image: + type: string + default: filebrowser/filebrowser:latest + description: Volume Viewer OCI Image (PVCViewer) diff --git a/metadata.yaml b/metadata.yaml index 7edb79d..85bc2a4 100755 --- a/metadata.yaml +++ b/metadata.yaml @@ -13,7 +13,7 @@ resources: type: oci-image description: 'Backing OCI image' auto-fetch: true - upstream-source: docker.io/kubeflownotebookswg/volumes-web-app:v1.7.0 + upstream-source: docker.io/kubeflownotebookswg/volumes-web-app:v1.8.0-rc.0 requires: ingress: interface: ingress diff --git a/requirements-unit.txt b/requirements-unit.txt index 342b2a6..fa848db 100644 --- a/requirements-unit.txt +++ b/requirements-unit.txt @@ -20,8 +20,12 @@ importlib-resources==6.0.1 # via jsonschema iniconfig==2.0.0 # via pytest +jinja2==3.1.2 + # via -r requirements.in jsonschema==4.17.3 # via serialized-data-interface +markupsafe==2.1.3 + # via jinja2 oci-image==1.0.0 # via -r requirements.in ops==2.6.0 diff --git a/requirements.in b/requirements.in index 0e54c27..21c27c0 100644 --- a/requirements.in +++ b/requirements.in @@ -4,3 +4,4 @@ ops oci-image serialized-data-interface +jinja2 diff --git a/requirements.txt b/requirements.txt index e4c9141..fb00d67 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,8 +14,12 @@ idna==3.4 # via requests importlib-resources==6.0.1 # via jsonschema +jinja2==3.1.2 + # via -r requirements.in jsonschema==4.17.3 # via serialized-data-interface +markupsafe==2.1.3 + # via jinja2 oci-image==1.0.0 # via -r requirements.in ops==2.6.0 diff --git a/src/charm.py b/src/charm.py index e74e17d..801de2e 100755 --- a/src/charm.py +++ 
b/src/charm.py @@ -3,11 +3,14 @@ # See LICENSE file for licensing details. import logging +from pathlib import Path +from typing import Dict from charms.kubeflow_dashboard.v0.kubeflow_dashboard_links import ( DashboardLink, KubeflowDashboardLinksRequirer, ) +from jinja2 import Template from oci_image import OCIImageResource, OCIImageResourceError from ops.charm import CharmBase from ops.main import main @@ -15,6 +18,26 @@ from serialized_data_interface import NoCompatibleVersions, NoVersionsListed, get_interfaces +def render_template(template_path: str, context: Dict) -> str: + """ + Render a Jinja2 template. + + This function takes the file path of a Jinja2 template and a context dictionary + containing the variables for template rendering. It loads the template, + substitutes the variables in the context, and returns the rendered content. + + Args: + template_path (str): The file path of the Jinja2 template. + context (Dict): A dictionary containing the variables for template rendering. + + Returns: + str: The rendered template content. + """ + template = Template(Path(template_path).read_text()) + rendered_template = template.render(**context) + return rendered_template + + class CheckFailed(Exception): """Raise this exception if one of the checks in main fails.""" @@ -120,6 +143,11 @@ def main(self, event): "resources": ["notebooks"], "verbs": ["list"], }, + { + "apiGroups": ["kubeflow.org"], + "resources": ["pvcviewers"], + "verbs": ["get", "list", "create", "delete"], + }, ], } ] @@ -134,11 +162,35 @@ def main(self, event): "APP_SECURE_COOKIES": str(config["secure-cookies"]).lower(), "BACKEND_MODE": config["backend-mode"], "APP_PREFIX": "/volumes", + "VOLUME_VIEWER_IMAGE": config["volume-viewer-image"], }, "ports": [{"name": "http", "containerPort": config["port"]}], + "volumeConfig": [ + { + "name": "viewer-spec", + "mountPath": "/etc/config/", # TODO: confirm whether the trailing .yaml file name should be excluded from this mount path 
+ "files": [ + { + "path": "viewer-spec.yaml", + "content": render_template( + "src/templates/viewer-spec.yaml.j2", {} + ), + } + ], + }, + ], } ], }, + k8s_resources={ + "configMaps": { + "volumes-web-app-viewer-spec-ck6bhh4bdm": { + "viewer-spec.yaml": render_template( + "src/templates/viewer-spec.yaml.j2", {} + ), + }, + }, + }, ) self.model.unit.status = ActiveStatus() diff --git a/src/templates/viewer-spec.yaml.j2 b/src/templates/viewer-spec.yaml.j2 new file mode 100644 index 0000000..fdd6619 --- /dev/null +++ b/src/templates/viewer-spec.yaml.j2 @@ -0,0 +1,38 @@ +# Source: manifests/apps/volumes-web-app/upstream/base/viewer-spec.yaml +# Note: the volumes-web-app allows expanding strings using ${VAR_NAME} +# You may use any environment variable. This lets us e.g. specify images that can be modified using kustomize's image transformer. +# Additionally, 'PVC_NAME', 'NAME' and 'NAMESPACE' are defined +# Name of the pvc is set by the volumes web app +pvc: $NAME +podTemplate: + containers: + - name: main + image: $VOLUME_VIEWER_IMAGE + env: + - name: FB_ADDRESS + value: "0.0.0.0" + - name: FB_PORT + value: "8080" + - name: FB_DATABASE + value: /tmp/filebrowser.db + - name: FB_NOAUTH + value: "true" + - name: FB_BASEURL + value: /pvcviewers/$NAMESPACE/$NAME/ + readinessProbe: + tcpSocket: + port: 8080 + initialDelaySeconds: 2 + periodSeconds: 10 + # viewer-volume is provided automatically by the volumes web app + volumeMounts: + - name: viewer-volume + mountPath: /data + workingDir: /data + serviceAccountName: default-editor +networking: + targetPort: 8080 + basePrefix: "/pvcviewers" + rewrite: "/" + timeout: 30s +rwoScheduling: true diff --git a/tests/integration/config-map.yaml b/tests/integration/config-map.yaml new file mode 100644 index 0000000..35b00c0 --- /dev/null +++ b/tests/integration/config-map.yaml @@ -0,0 +1,39 @@ +viewer-spec.yaml: |- + # Source: manifests/apps/volumes-web-app/upstream/base/viewer-spec.yaml + # Note: the volumes-web-app allows 
expanding strings using ${VAR_NAME} + # You may use any environment variable. This lets us e.g. specify images that can be modified using kustomize's image transformer. + # Additionally, 'PVC_NAME', 'NAME' and 'NAMESPACE' are defined + # Name of the pvc is set by the volumes web app + pvc: $NAME + podTemplate: + containers: + - name: main + image: $VOLUME_VIEWER_IMAGE + env: + - name: FB_ADDRESS + value: "0.0.0.0" + - name: FB_PORT + value: "8080" + - name: FB_DATABASE + value: /tmp/filebrowser.db + - name: FB_NOAUTH + value: "true" + - name: FB_BASEURL + value: /pvcviewers/$NAMESPACE/$NAME/ + readinessProbe: + tcpSocket: + port: 8080 + initialDelaySeconds: 2 + periodSeconds: 10 + # viewer-volume is provided automatically by the volumes web app + volumeMounts: + - name: viewer-volume + mountPath: /data + workingDir: /data + serviceAccountName: default-editor + networking: + targetPort: 8080 + basePrefix: "/pvcviewers" + rewrite: "/" + timeout: 30s + rwoScheduling: true diff --git a/tests/integration/test_charm.py b/tests/integration/test_charm.py index ef382b6..02cc115 100644 --- a/tests/integration/test_charm.py +++ b/tests/integration/test_charm.py @@ -4,10 +4,10 @@ import logging from pathlib import Path -# from lightkube import Client -# from lightkube.resources.core_v1 import Service import pytest import yaml +from lightkube import Client +from lightkube.resources.core_v1 import ConfigMap from pytest_operator.plugin import OpsTest # from random import choices @@ -24,19 +24,26 @@ log = logging.getLogger(__name__) METADATA = yaml.safe_load(Path("./metadata.yaml").read_text()) +CONFIG_MAP = "volumes-web-app-viewer-spec-ck6bhh4bdm" +CHARM_NAME = METADATA["name"] +EXPECTED_CONFIG_MAP = yaml.safe_load(Path("./tests/integration/config-map.yaml").read_text()) + + +@pytest.fixture(scope="session") +def lightkube_client() -> Client: + client = Client(field_manager=CHARM_NAME) + return client @pytest.mark.abort_on_fail async def test_build_and_deploy(ops_test: OpsTest): 
- charm_name = METADATA["name"] - my_charm = await ops_test.build_charm(".") image_path = METADATA["resources"]["oci-image"]["upstream-source"] await ops_test.model.deploy(my_charm, resources={"oci-image": image_path}) await ops_test.model.wait_for_idle( - [charm_name], + [CHARM_NAME], wait_for_active=True, raise_on_blocked=True, raise_on_error=True, @@ -44,6 +51,14 @@ async def test_build_and_deploy(ops_test: OpsTest): ) +@pytest.mark.abort_on_fail +async def test_configmap_created(lightkube_client: Client, ops_test: OpsTest): + """Test configmaps contents with default config.""" + config_map = lightkube_client.get(ConfigMap, CONFIG_MAP, namespace=ops_test.model_name) + + assert config_map.data == EXPECTED_CONFIG_MAP + + @pytest.mark.abort_on_fail async def test_relate_dependencies(ops_test: OpsTest): await ops_test.model.deploy( @@ -74,8 +89,10 @@ async def test_relate_dependencies(ops_test: OpsTest): await ops_test.model.add_relation("kubeflow-dashboard", "kubeflow-profiles") await ops_test.model.add_relation("istio-pilot:ingress", "kubeflow-dashboard:ingress") await ops_test.model.add_relation("istio-pilot", "kubeflow-volumes") + # raise_on_blocked=False to avoid flakiness due to kubeflow-dashboard going to + # Blocked((install) Add required relation to kubeflow-profiles) although it has been added await ops_test.model.wait_for_idle( - raise_on_blocked=True, + raise_on_blocked=False, raise_on_error=True, timeout=300, ) @@ -102,6 +119,7 @@ async def test_relate_dependencies(ops_test: OpsTest): # Disabled until we re-enable the selenium tests below +# When reenabling, we should add Service to "from lightkube.resources.core_v1 import" # @pytest.fixture() # def driver(request, ops_test, profile): # profile_name = profile diff --git a/tools/get-images.sh b/tools/get-images.sh index 08522eb..0477ad1 100755 --- a/tools/get-images.sh +++ b/tools/get-images.sh @@ -5,4 +5,5 @@ # dynamic list IMAGE_LIST=() IMAGE_LIST+=($(find -type f -name metadata.yaml -exec yq 
'.resources | to_entries | .[] | .value | ."upstream-source"' {} \;)) +IMAGE_LIST+=($(find -type f -name config.yaml -exec yq '.options | ."volume-viewer-image" | .default' {} \;)) printf "%s\n" "${IMAGE_LIST[@]}"