Add test for empty string on toxicity (#142)
jamie256 authored Aug 30, 2023
1 parent 2d2161a commit a469cfa
Showing 1 changed file with 35 additions and 0 deletions.
langkit/tests/test_toxicity.py: 35 additions, 0 deletions
@@ -1,7 +1,11 @@
from logging import getLogger
import whylogs as why
import pytest


TEST_LOGGER = getLogger(__name__)


@pytest.mark.load
def test_toxicity():
    from langkit import toxicity  # noqa
@@ -28,3 +32,34 @@ def test_toxicity_long_response(long_response):
    text_schema = udf_schema()
    profile = why.log(long_response, schema=text_schema).profile()
    assert profile


@pytest.mark.load
def test_empty_toxicity():
    from langkit import toxicity  # noqa
    from whylogs.experimental.core.udf_schema import udf_schema

    text_schema = udf_schema()
    test_prompt = "hi."
    test_response = ""
    test_message = {"prompt": test_prompt, "response": test_response}
    profile = why.log(test_message, schema=text_schema).profile()
    prompt_score = (
        profile.view()
        .get_column("prompt.toxicity")
        .get_metric("distribution")
        .to_summary_dict()["mean"]
    )
    response_score = (
        profile.view()
        .get_column("response.toxicity")
        .get_metric("distribution")
        .to_summary_dict()["mean"]
    )

    TEST_LOGGER.info(f"running toxicity metrics on {test_message}")
    TEST_LOGGER.info(
        f"prompt score is: {prompt_score} and response score is: {response_score}"
    )
    assert prompt_score < 0.1
    assert response_score < 0.1
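
Note: like the existing toxicity tests, the new test is gated behind the `load` pytest marker. Assuming that marker is registered in the project's pytest configuration, one likely way to run just this file locally is:

    pytest -m load langkit/tests/test_toxicity.py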
