(cherry-pick to track/0.11) fix: configure proxy env vars storage init container (#257) #267

Merged
13 changes: 12 additions & 1 deletion charms/kserve-controller/config.yaml
@@ -35,4 +35,15 @@ options:
description: >
YAML or JSON formatted input defining images to use in Katib
For usage details, see https://github.com/canonical/kserve-operators.

http-proxy:
default: ""
    description: The value of the HTTP_PROXY environment variable in the storage-initializer container.
type: string
https-proxy:
default: ""
    description: The value of the HTTPS_PROXY environment variable in the storage-initializer container.
type: string
no-proxy:
default: ""
    description: The value of the NO_PROXY environment variable in the storage-initializer container.
type: string
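
These options feed directly into the storage-initializer spec that the charm renders. As a usage sketch (not part of this diff), they can be set through python-libjuju's set_config, the same call the integration test below uses; the proxy endpoints here are placeholder values:

import asyncio

from juju.model import Model


async def configure_proxies():
    # Connect to the currently active Juju model (assumes kserve-controller is deployed).
    model = Model()
    await model.connect()
    try:
        app = model.applications["kserve-controller"]
        # Placeholder proxy endpoints; substitute the environment's real values.
        await app.set_config(
            {
                "http-proxy": "http://squid.internal:3128",
                "https-proxy": "http://squid.internal:3128",
                "no-proxy": "localhost,127.0.0.1,.svc,.cluster.local",
            }
        )
    finally:
        await model.disconnect()


asyncio.run(configure_proxies())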
4 changes: 4 additions & 0 deletions charms/kserve-controller/src/charm.py
@@ -68,6 +68,7 @@
"src/templates/auth_manifests.yaml.j2",
"src/templates/serving_runtimes_manifests.yaml.j2",
"src/templates/webhook_manifests.yaml.j2",
"src/templates/cluster_storage_containers.yaml.j2",
]

# Values for MinIO manifests https://kserve.github.io/website/0.11/modelserving/storage/s3/s3/
@@ -164,6 +165,9 @@ def _context(self):
"app_name": self.app.name,
"namespace": self.model.name,
"cert": f"'{ca_context.decode('utf-8')}'",
"http_proxy": self.model.config["http-proxy"],
"https_proxy": self.model.config["https-proxy"],
"no_proxy": self.model.config["no-proxy"],
}

@property
38 changes: 38 additions & 0 deletions charms/kserve-controller/src/templates/cluster_storage_containers.yaml.j2
@@ -0,0 +1,38 @@
apiVersion: serving.kserve.io/v1alpha1
kind: ClusterStorageContainer
metadata:
name: default
spec:
container:
image: {{ configmap__storageInitializer }}
name: storage-initializer
resources:
limits:
cpu: "1"
memory: 1Gi
requests:
cpu: 100m
memory: 100Mi
{% if http_proxy or https_proxy or no_proxy %}
env:
{% if http_proxy %}
- name: HTTP_PROXY
value: {{ http_proxy }}
{% endif %}
{% if https_proxy %}
- name: HTTPS_PROXY
value: {{ https_proxy }}
{% endif %}
{% if no_proxy %}
- name: NO_PROXY
value: {{ no_proxy }}
{% endif %}
{% endif %}
supportedUriFormats:
- prefix: gs://
- prefix: s3://
- prefix: hdfs://
- prefix: webhdfs://
- regex: https://(.+?).blob.core.windows.net/(.+)
- regex: https://(.+?).file.core.windows.net/(.+)
- regex: https?://(.+)/(.+)
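
A quick way to sanity-check the conditional env block is to render the template locally with Jinja2. This is only an illustrative sketch: the context values are made up, and the charm's own rendering path is not shown in this diff.

from pathlib import Path

from jinja2 import Template

template_text = Path(
    "charms/kserve-controller/src/templates/cluster_storage_containers.yaml.j2"
).read_text()

# Sample context mirroring the keys added to _context in charm.py above.
context = {
    "configmap__storageInitializer": "example.com/storage-initializer:latest",  # placeholder image
    "http_proxy": "http://squid.internal:3128",  # placeholder proxy values
    "https_proxy": "http://squid.internal:3128",
    "no_proxy": "localhost,127.0.0.1,.svc",
}

rendered = Template(template_text).render(context)
# With any proxy value set, the rendered ClusterStorageContainer includes the env entries;
# with all three empty, the env block is omitted entirely.
print(rendered)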
132 changes: 88 additions & 44 deletions charms/kserve-controller/tests/integration/test_charm.py
@@ -25,6 +25,7 @@
ServiceAccount,
)
from pytest_operator.plugin import OpsTest
from tenacity import Retrying, stop_after_delay, wait_fixed

logger = logging.getLogger(__name__)

@@ -73,6 +74,18 @@
TESTING_NAMESPACE_NAME = "raw-deployment"
KSERVE_WORKLOAD_CONTAINER = "kserve-container"

ISVC = lightkube.generic_resource.create_namespaced_resource(
group="serving.kserve.io",
version="v1beta1",
kind="InferenceService",
plural="inferenceservices",
verbs=None,
)

SKLEARN_INF_SVC_YAML = yaml.safe_load(Path("./tests/integration/sklearn-iris.yaml").read_text())
SKLEARN_INF_SVC_OBJECT = lightkube.codecs.load_all_yaml(yaml.dump(SKLEARN_INF_SVC_YAML))[0]
SKLEARN_INF_SVC_NAME = SKLEARN_INF_SVC_OBJECT.metadata.name


def deploy_k8s_resources(template_files: str):
"""Deploy k8s resources from template files."""
@@ -163,15 +176,19 @@ def namespace(lightkube_client: lightkube.Client):
delete_all_from_yaml(yaml_text, lightkube_client)


@pytest.fixture
def cleanup_namespaces_after_execution(request):
"""Removes the namespaces used for deploying inferenceservices."""
yield
@pytest.fixture(scope="function")
def serverless_namespace(lightkube_client):
"""Create a namespaces used for deploying inferenceservices, cleaning it up afterwards."""

namespace_name = "serverless-namespace"
lightkube_client.create(Namespace(metadata=ObjectMeta(name=namespace_name)))

yield namespace_name

try:
lightkube_client = lightkube.Client()
lightkube_client.delete(Namespace, name=request.param)
lightkube_client.delete(Namespace, name=namespace_name)
except ApiError:
logger.warning(f"The {request.param} namespace could not be removed.")
logger.warning(f"The {namespace_name} namespace could not be removed.")
pass


@@ -268,14 +285,8 @@ def test_inference_service_raw_deployment(
test_namespace: None, lightkube_client: lightkube.Client, inference_file, ops_test: OpsTest
):
"""Validates that an InferenceService can be deployed."""
# Read InferenceService example and create namespaced resource
inference_service_resource = lightkube.generic_resource.create_namespaced_resource(
group="serving.kserve.io",
version="v1beta1",
kind="InferenceService",
plural="inferenceservices",
verbs=None,
)
# Read InferenceService example

inf_svc_yaml = yaml.safe_load(Path(inference_file).read_text())
inf_svc_object = lightkube.codecs.load_all_yaml(yaml.dump(inf_svc_yaml))[0]
inf_svc_name = inf_svc_object.metadata.name
@@ -296,9 +307,7 @@ def create_inf_svc():
reraise=True,
)
def assert_inf_svc_state():
inf_svc = lightkube_client.get(
inference_service_resource, inf_svc_name, namespace=TESTING_NAMESPACE_NAME
)
inf_svc = lightkube_client.get(ISVC, inf_svc_name, namespace=TESTING_NAMESPACE_NAME)
conditions = inf_svc.get("status", {}).get("conditions")
logger.info(
f"INFO: Inspecting InferenceService {inf_svc.metadata.name} in namespace {inf_svc.metadata.namespace}"
@@ -379,40 +388,19 @@ async def test_deploy_knative_dependencies(ops_test: OpsTest):
)


@pytest.mark.parametrize(
"cleanup_namespaces_after_execution", ["serverless-namespace"], indirect=True
)
def test_inference_service_serverless_deployment(
cleanup_namespaces_after_execution, ops_test: OpsTest
):
def test_inference_service_serverless_deployment(serverless_namespace, ops_test: OpsTest):
"""Validates that an InferenceService can be deployed."""
# Instantiate a lightkube client
lightkube_client = lightkube.Client()

# Read InferenceService example and create namespaced resource
inference_service_resource = lightkube.generic_resource.create_namespaced_resource(
group="serving.kserve.io",
version="v1beta1",
kind="InferenceService",
plural="inferenceservices",
verbs=None,
)
inf_svc_yaml = yaml.safe_load(Path("./tests/integration/sklearn-iris.yaml").read_text())
inf_svc_object = lightkube.codecs.load_all_yaml(yaml.dump(inf_svc_yaml))[0]
inf_svc_name = inf_svc_object.metadata.name
serverless_mode_namespace = "serverless-namespace"

# Create Serverless namespace
lightkube_client.create(Namespace(metadata=ObjectMeta(name=serverless_mode_namespace)))

# Create InferenceService from example file
@tenacity.retry(
wait=tenacity.wait_exponential(multiplier=1, min=1, max=15),
stop=tenacity.stop_after_delay(30),
reraise=True,
)
def create_inf_svc():
lightkube_client.create(inf_svc_object, namespace=serverless_mode_namespace)
lightkube_client.create(SKLEARN_INF_SVC_OBJECT, namespace=serverless_namespace)

# Assert InferenceService state is Available
@tenacity.retry(
@@ -421,9 +409,7 @@ def create_inf_svc():
reraise=True,
)
def assert_inf_svc_state():
inf_svc = lightkube_client.get(
inference_service_resource, inf_svc_name, namespace=serverless_mode_namespace
)
inf_svc = lightkube_client.get(ISVC, SKLEARN_INF_SVC_NAME, namespace=serverless_namespace)
conditions = inf_svc.get("status", {}).get("conditions")
for condition in conditions:
if condition.get("status") == "False":
@@ -560,6 +546,64 @@ async def test_new_user_namespace_has_manifests(
assert service_account.secrets[0].name == manifests_name


RETRY_FOR_THREE_MINUTES = Retrying(
stop=stop_after_delay(60 * 3),
wait=wait_fixed(5),
reraise=True,
)


async def test_inference_service_proxy_envs_configuration(
serverless_namespace, ops_test: OpsTest, lightkube_client: lightkube.Client
):
"""Changes `http-proxy`, `https-proxy` and `no-proxy` configs and asserts that
the InferenceService Pod is using the values from configs as environment variables."""

# Set Proxy envs by setting the charm configs
test_http_proxy = "my_http_proxy"
test_https_proxy = "my_https_proxy"
test_no_proxy = "no_proxy"

await ops_test.model.applications["kserve-controller"].set_config(
{"http-proxy": test_http_proxy, "https-proxy": test_https_proxy, "no-proxy": test_no_proxy}
)

await ops_test.model.wait_for_idle(
["kserve-controller"],
status="active",
raise_on_blocked=False,
timeout=60 * 1,
)

# Create InferenceService from example file
for attempt in RETRY_FOR_THREE_MINUTES:
with attempt:
lightkube_client.create(SKLEARN_INF_SVC_OBJECT, namespace=serverless_namespace)

# Assert InferenceService Pod specifies the proxy envs for the initContainer
for attempt in RETRY_FOR_THREE_MINUTES:
with attempt:
pods_list = lightkube_client.list(
res=Pod,
namespace=serverless_namespace,
labels={"serving.kserve.io/inferenceservice": SKLEARN_INF_SVC_NAME},
)
isvc_pod = next(pods_list)
init_env_vars = isvc_pod.spec.initContainers[0].env

for env_var in init_env_vars:
if env_var.name == "HTTP_PROXY":
http_proxy_env = env_var.value
elif env_var.name == "HTTPS_PROXY":
https_proxy_env = env_var.value
elif env_var.name == "NO_PROXY":
no_proxy_env = env_var.value

assert http_proxy_env == test_http_proxy
assert https_proxy_env == test_https_proxy
assert no_proxy_env == test_no_proxy


async def test_blocked_on_invalid_config(ops_test: OpsTest):
"""
Test whether the application is blocked on providing an invalid configuration.
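
Beyond the pod-level assertion in the new integration test, the ClusterStorageContainer created by the charm can also be read back directly with lightkube. This is an illustrative, out-of-band check rather than part of the PR; the resource name "default" comes from the template above.

import lightkube
from lightkube.generic_resource import create_global_resource

# ClusterStorageContainer is cluster-scoped, so a global (non-namespaced) resource is used.
ClusterStorageContainer = create_global_resource(
    group="serving.kserve.io",
    version="v1alpha1",
    kind="ClusterStorageContainer",
    plural="clusterstoragecontainers",
)

client = lightkube.Client()
csc = client.get(ClusterStorageContainer, name="default")
# Generic resources are dict-like; env is present only when a proxy config was set.
print(csc.get("spec", {}).get("container", {}).get("env"))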