fix: configure proxy env vars storage init container (#257)
* fix: add configs for proxy envs in storage-initializer container
* add integration test
* fix: config description
* refactor serverless namespace fixture
* refactor common sklearn example
NohaIhab committed Aug 19, 2024
1 parent 838e558 commit 61745b4
Showing 4 changed files with 118 additions and 45 deletions.
13 changes: 12 additions & 1 deletion charms/kserve-controller/config.yaml
@@ -35,4 +35,15 @@ options:
description: >
YAML or JSON formatted input defining images to use in Katib
For usage details, see https://github.com/canonical/kserve-operators.
http-proxy:
default: ""
description: The value of HTTP_PROXY environment variable in the storage-initializer container.
type: string
https-proxy:
default: ""
description: The value of HTTPS_PROXY environment variable in the storage-initializer container.
type: string
no-proxy:
default: ""
description: The value of NO_PROXY environment variable in the storage-initializer container.
type: string
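
With these options in place, an operator can point the storage-initializer at a proxy with a standard charm config command. The command below is an illustrative sketch: the proxy endpoints and no-proxy list are placeholder values, and the application name matches the one used in the integration tests.

juju config kserve-controller \
  http-proxy="http://10.0.0.1:3128" \
  https-proxy="http://10.0.0.1:3128" \
  no-proxy="localhost,127.0.0.1,.svc,.svc.cluster.local"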
3 changes: 3 additions & 0 deletions charms/kserve-controller/src/charm.py
@@ -174,6 +174,9 @@ def _context(self):
"app_name": self.app.name,
"namespace": self.model.name,
"cert": f"'{ca_context.decode('utf-8')}'",
"http_proxy": self.model.config["http-proxy"],
"https_proxy": self.model.config["https-proxy"],
"no_proxy": self.model.config["no-proxy"],
}

@property
@@ -13,6 +13,21 @@ spec:
requests:
cpu: 100m
memory: 100Mi
{% if http_proxy or https_proxy or no_proxy %}
env:
{% if http_proxy %}
- name: HTTP_PROXY
value: {{ http_proxy }}
{% endif %}
{% if https_proxy %}
- name: HTTPS_PROXY
value: {{ https_proxy }}
{% endif %}
{% if no_proxy %}
- name: NO_PROXY
value: {{ no_proxy }}
{% endif %}
{% endif %}
supportedUriFormats:
- prefix: gs://
- prefix: s3://
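
As context for the guarded block above: the following is a minimal, self-contained Jinja2 sketch (not the charm's actual rendering code, and the template string is abbreviated) showing that the default empty-string configs leave the storage-initializer spec untouched, while any non-empty value adds the matching environment variable.

from jinja2 import Template

# Abbreviated copy of the guarded env section added to the template above.
SNIPPET = """\
{% if http_proxy or https_proxy or no_proxy %}
env:
{% if http_proxy %}
- name: HTTP_PROXY
  value: {{ http_proxy }}
{% endif %}
{% endif %}
"""

# Default config values are empty strings, so the env section is omitted
# entirely (the rendered output is only whitespace).
print(Template(SNIPPET).render(http_proxy="", https_proxy="", no_proxy=""))

# A non-empty value renders the corresponding env entry.
print(Template(SNIPPET).render(
    http_proxy="http://10.0.0.1:3128", https_proxy="", no_proxy=""
))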
132 changes: 88 additions & 44 deletions charms/kserve-controller/tests/integration/test_charm.py
@@ -29,6 +29,7 @@
ServiceAccount,
)
from pytest_operator.plugin import OpsTest
from tenacity import Retrying, stop_after_delay, wait_fixed

logger = logging.getLogger(__name__)

@@ -63,6 +64,18 @@
TESTING_NAMESPACE_NAME = "raw-deployment"
KSERVE_WORKLOAD_CONTAINER = "kserve-container"

ISVC = lightkube.generic_resource.create_namespaced_resource(
group="serving.kserve.io",
version="v1beta1",
kind="InferenceService",
plural="inferenceservices",
verbs=None,
)

SKLEARN_INF_SVC_YAML = yaml.safe_load(Path("./tests/integration/sklearn-iris.yaml").read_text())
SKLEARN_INF_SVC_OBJECT = lightkube.codecs.load_all_yaml(yaml.dump(SKLEARN_INF_SVC_YAML))[0]
SKLEARN_INF_SVC_NAME = SKLEARN_INF_SVC_OBJECT.metadata.name


def deploy_k8s_resources(template_files: str):
"""Deploy k8s resources from template files."""
@@ -153,15 +166,19 @@ def namespace(lightkube_client: lightkube.Client):
delete_all_from_yaml(yaml_text, lightkube_client)


@pytest.fixture
def cleanup_namespaces_after_execution(request):
"""Removes the namespaces used for deploying inferenceservices."""
yield
@pytest.fixture(scope="function")
def serverless_namespace(lightkube_client):
"""Create a namespaces used for deploying inferenceservices, cleaning it up afterwards."""

namespace_name = "serverless-namespace"
lightkube_client.create(Namespace(metadata=ObjectMeta(name=namespace_name)))

yield namespace_name

try:
lightkube_client = lightkube.Client()
lightkube_client.delete(Namespace, name=request.param)
lightkube_client.delete(Namespace, name=namespace_name)
except ApiError:
logger.warning(f"The {request.param} namespace could not be removed.")
logger.warning(f"The {namespace_name} namespace could not be removed.")
pass


Expand Down Expand Up @@ -263,14 +280,8 @@ def test_inference_service_raw_deployment(
test_namespace: None, lightkube_client: lightkube.Client, inference_file, ops_test: OpsTest
):
"""Validates that an InferenceService can be deployed."""
# Read InferenceService example and create namespaced resource
inference_service_resource = lightkube.generic_resource.create_namespaced_resource(
group="serving.kserve.io",
version="v1beta1",
kind="InferenceService",
plural="inferenceservices",
verbs=None,
)
# Read InferenceService example

inf_svc_yaml = yaml.safe_load(Path(inference_file).read_text())
inf_svc_object = lightkube.codecs.load_all_yaml(yaml.dump(inf_svc_yaml))[0]
inf_svc_name = inf_svc_object.metadata.name
@@ -291,9 +302,7 @@ def create_inf_svc():
reraise=True,
)
def assert_inf_svc_state():
inf_svc = lightkube_client.get(
inference_service_resource, inf_svc_name, namespace=TESTING_NAMESPACE_NAME
)
inf_svc = lightkube_client.get(ISVC, inf_svc_name, namespace=TESTING_NAMESPACE_NAME)
conditions = inf_svc.get("status", {}).get("conditions")
logger.info(
f"INFO: Inspecting InferenceService {inf_svc.metadata.name} in namespace {inf_svc.metadata.namespace}"
@@ -378,40 +387,19 @@ async def test_deploy_knative_dependencies(ops_test: OpsTest):
)


@pytest.mark.parametrize(
"cleanup_namespaces_after_execution", ["serverless-namespace"], indirect=True
)
def test_inference_service_serverless_deployment(
cleanup_namespaces_after_execution, ops_test: OpsTest
):
def test_inference_service_serverless_deployment(serverless_namespace, ops_test: OpsTest):
"""Validates that an InferenceService can be deployed."""
# Instantiate a lightkube client
lightkube_client = lightkube.Client()

# Read InferenceService example and create namespaced resource
inference_service_resource = lightkube.generic_resource.create_namespaced_resource(
group="serving.kserve.io",
version="v1beta1",
kind="InferenceService",
plural="inferenceservices",
verbs=None,
)
inf_svc_yaml = yaml.safe_load(Path("./tests/integration/sklearn-iris.yaml").read_text())
inf_svc_object = lightkube.codecs.load_all_yaml(yaml.dump(inf_svc_yaml))[0]
inf_svc_name = inf_svc_object.metadata.name
serverless_mode_namespace = "serverless-namespace"

# Create Serverless namespace
lightkube_client.create(Namespace(metadata=ObjectMeta(name=serverless_mode_namespace)))

# Create InferenceService from example file
@tenacity.retry(
wait=tenacity.wait_exponential(multiplier=1, min=1, max=15),
stop=tenacity.stop_after_delay(30),
reraise=True,
)
def create_inf_svc():
lightkube_client.create(inf_svc_object, namespace=serverless_mode_namespace)
lightkube_client.create(SKLEARN_INF_SVC_OBJECT, namespace=serverless_namespace)

# Assert InferenceService state is Available
@tenacity.retry(
Expand All @@ -420,9 +408,7 @@ def create_inf_svc():
reraise=True,
)
def assert_inf_svc_state():
inf_svc = lightkube_client.get(
inference_service_resource, inf_svc_name, namespace=serverless_mode_namespace
)
inf_svc = lightkube_client.get(ISVC, SKLEARN_INF_SVC_NAME, namespace=serverless_namespace)
conditions = inf_svc.get("status", {}).get("conditions")
for condition in conditions:
if condition.get("status") == "False":
@@ -562,6 +548,64 @@ async def test_new_user_namespace_has_manifests(
assert service_account.secrets[0].name == manifests_name


RETRY_FOR_THREE_MINUTES = Retrying(
stop=stop_after_delay(60 * 3),
wait=wait_fixed(5),
reraise=True,
)


async def test_inference_service_proxy_envs_configuration(
serverless_namespace, ops_test: OpsTest, lightkube_client: lightkube.Client
):
"""Changes `http-proxy`, `https-proxy` and `no-proxy` configs and asserts that
the InferenceService Pod is using the values from configs as environment variables."""

# Set Proxy envs by setting the charm configs
test_http_proxy = "my_http_proxy"
test_https_proxy = "my_https_proxy"
test_no_proxy = "no_proxy"

await ops_test.model.applications["kserve-controller"].set_config(
{"http-proxy": test_http_proxy, "https-proxy": test_https_proxy, "no-proxy": test_no_proxy}
)

await ops_test.model.wait_for_idle(
["kserve-controller"],
status="active",
raise_on_blocked=False,
timeout=60 * 1,
)

# Create InferenceService from example file
for attempt in RETRY_FOR_THREE_MINUTES:
with attempt:
lightkube_client.create(SKLEARN_INF_SVC_OBJECT, namespace=serverless_namespace)

# Assert InferenceService Pod specifies the proxy envs for the initContainer
for attempt in RETRY_FOR_THREE_MINUTES:
with attempt:
pods_list = lightkube_client.list(
res=Pod,
namespace=serverless_namespace,
labels={"serving.kserve.io/inferenceservice": SKLEARN_INF_SVC_NAME},
)
isvc_pod = next(pods_list)
init_env_vars = isvc_pod.spec.initContainers[0].env

for env_var in init_env_vars:
if env_var.name == "HTTP_PROXY":
http_proxy_env = env_var.value
elif env_var.name == "HTTPS_PROXY":
https_proxy_env = env_var.value
elif env_var.name == "NO_PROXY":
no_proxy_env = env_var.value

assert http_proxy_env == test_http_proxy
assert https_proxy_env == test_https_proxy
assert no_proxy_env == test_no_proxy


async def test_blocked_on_invalid_config(ops_test: OpsTest):
"""
Test whether the application is blocked on providing an invalid configuration.
