From 092cdde68f6640e1d8b954278e7780e2db6fc91d Mon Sep 17 00:00:00 2001 From: Show <35062952+BrunooShow@users.noreply.github.com> Date: Sun, 27 Oct 2024 22:11:04 +0100 Subject: [PATCH 1/2] Add timestamp_granularities parameter to transcription API --- litellm/main.py | 2 ++ litellm/utils.py | 1 + 2 files changed, 3 insertions(+) diff --git a/litellm/main.py b/litellm/main.py index f6680f2df879..29b46054f74f 100644 --- a/litellm/main.py +++ b/litellm/main.py @@ -4789,6 +4789,7 @@ def transcription( response_format: Optional[ Literal["json", "text", "srt", "verbose_json", "vtt"] ] = None, + timestamp_granularities: Optional[List[Literal["word", "segment"]]] = None, temperature: Optional[int] = None, # openai defaults this to 0 ## LITELLM PARAMS ## user: Optional[str] = None, @@ -4842,6 +4843,7 @@ def transcription( language=language, prompt=prompt, response_format=response_format, + timestamp_granularities=timestamp_granularities, temperature=temperature, custom_llm_provider=custom_llm_provider, drop_params=drop_params, diff --git a/litellm/utils.py b/litellm/utils.py index deb3ae8c63ef..7ca3c020e38a 100644 --- a/litellm/utils.py +++ b/litellm/utils.py @@ -2161,6 +2161,7 @@ def get_optional_params_transcription( prompt: Optional[str] = None, response_format: Optional[str] = None, temperature: Optional[int] = None, + timestamp_granularities: Optional[List[Literal["word", "segment"]]] = None, custom_llm_provider: Optional[str] = None, drop_params: Optional[bool] = None, **kwargs, From e93473a6eda7a59f993d4f63494e492c9877fe02 Mon Sep 17 00:00:00 2001 From: Show <35062952+BrunooShow@users.noreply.github.com> Date: Sun, 27 Oct 2024 22:17:11 +0100 Subject: [PATCH 2/2] add param to the local test --- tests/local_testing/test_whisper.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/local_testing/test_whisper.py b/tests/local_testing/test_whisper.py index f66ad8b133fc..7d5d0d710e9f 100644 --- a/tests/local_testing/test_whisper.py +++ 
b/tests/local_testing/test_whisper.py @@ -53,8 +53,9 @@ ) @pytest.mark.parametrize("response_format", ["json", "vtt"]) @pytest.mark.parametrize("sync_mode", [True, False]) +@pytest.mark.parametrize("timestamp_granularities", [["word"], ["segment"]]) @pytest.mark.asyncio -async def test_transcription(model, api_key, api_base, response_format, sync_mode): +async def test_transcription(model, api_key, api_base, response_format, sync_mode, timestamp_granularities): if sync_mode: transcript = litellm.transcription( model=model, @@ -62,6 +63,7 @@ async def test_transcription(model, api_key, api_base, response_format, sync_mod api_key=api_key, api_base=api_base, response_format=response_format, + timestamp_granularities=timestamp_granularities, drop_params=True, ) else: