plugins: add docstrings explaining API keys #672

Merged 2 commits on Aug 24, 2024
Changes from 1 commit
livekit-plugins/livekit-plugins-anthropic/livekit/plugins/anthropic/llm.py
@@ -48,6 +48,18 @@
user: str | None = None,
client: anthropic.AsyncClient | None = None,
) -> None:
"""
Create a new instance of Anthropic LLM.

``api_key`` must be set to your Anthropic API key, either using the argument or by setting
the ``ANTHROPIC_API_KEY`` environmental variable.
"""

# throw an error on our end
api_key = api_key or os.environ.get("ANTHROPIC_API_KEY")

Check failure (GitHub Actions / build, Ruff F821): livekit-plugins/livekit-plugins-anthropic/livekit/plugins/anthropic/llm.py:59:30: Undefined name `os`
if api_key is None:
raise ValueError("Anthropic API key is required")

self._opts = LLMOptions(model=model, user=user)
self._client = client or anthropic.AsyncClient(
api_key=api_key,
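As a quick illustration of the behavior this docstring describes, here is a minimal usage sketch; the `livekit.plugins.anthropic` import path and the `LLM` class name are assumed from the plugin layout shown in this diff.

```python
import os

from livekit.plugins import anthropic  # import path assumed from the plugin layout

# Option 1: let the plugin read the key from the environment.
os.environ["ANTHROPIC_API_KEY"] = "sk-ant-..."  # placeholder value
llm = anthropic.LLM()

# Option 2: pass the key explicitly; the argument takes precedence over the environment.
llm = anthropic.LLM(api_key="sk-ant-...")

# With neither set, the constructor raises:
#   ValueError: Anthropic API key is required
```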
@@ -45,6 +45,13 @@ def __init__(
num_channels: int = 1,
languages: list[str] = [], # when empty, auto-detect the language
):
"""
Create a new instance of Azure STT.

``speech_key`` and ``speech_region`` must be set, either using arguments or by setting the
``AZURE_SPEECH_KEY`` and ``AZURE_SPEECH_REGION`` environmental variables, respectively.
"""

super().__init__(
capabilities=stt.STTCapabilities(streaming=True, interim_results=True)
)
@@ -42,6 +42,13 @@ def __init__(
speech_region: str | None = None,
voice: str | None = None,
) -> None:
"""
Create a new instance of Azure TTS.

``speech_key`` and ``speech_region`` must be set, either using arguments or by setting the
``AZURE_SPEECH_KEY`` and ``AZURE_SPEECH_REGION`` environmental variables, respectively.
"""

super().__init__(
capabilities=tts.TTSCapabilities(
streaming=False,
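The Azure STT and TTS docstrings above describe the same two-variable setup; a minimal sketch follows, assuming the classes are exposed as `STT` and `TTS` under `livekit.plugins.azure`.

```python
import os

from livekit.plugins import azure  # import path assumed

# Either export the keys before constructing...
os.environ["AZURE_SPEECH_KEY"] = "<your-speech-key>"  # placeholder
os.environ["AZURE_SPEECH_REGION"] = "<your-region>"   # e.g. "eastus"
stt = azure.STT()
tts = azure.TTS()

# ...or pass them explicitly, which overrides the environment.
stt = azure.STT(speech_key="<your-speech-key>", speech_region="<your-region>")
```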
@@ -57,6 +57,13 @@ def __init__(
api_key: str | None = None,
http_session: aiohttp.ClientSession | None = None,
) -> None:
"""
Create a new instance of Cartesia TTS.

``api_key`` must be set to your Cartesia API key, either using the argument or by setting
the ``CARTESIA_API_KEY`` environmental variable.
"""

super().__init__(
capabilities=tts.TTSCapabilities(streaming=True),
sample_rate=sample_rate,
@@ -39,6 +39,13 @@ def __init__(
http_session: Optional[aiohttp.ClientSession] = None,
threshold: float = 0.5,
):
"""
Create a new instance of Clova STT.

``secret`` and ``invoke_url`` must be set, either using arguments or by setting the
``CLOVA_STT_SECRET_KEY`` and ``CLOVA_STT_INVOKE_URL`` environmental variables, respectively.
"""

super().__init__(
capabilities=STTCapabilities(streaming=False, interim_results=True)
)
@@ -68,6 +68,13 @@ def __init__(
api_key: str | None = None,
http_session: aiohttp.ClientSession | None = None,
) -> None:
"""
Create a new instance of Deepgram STT.

``api_key`` must be set to your Deepgram API key, either using the argument or by setting
the ``DEEPGRAM_API_KEY`` environmental variable.
"""

super().__init__(
capabilities=stt.STTCapabilities(
streaming=True, interim_results=interim_results
@@ -104,6 +104,13 @@ def __init__(
chunk_length_schedule: list[int] = [80, 120, 200, 260], # range is [50, 500]
http_session: aiohttp.ClientSession | None = None,
) -> None:
"""
Create a new instance of ElevenLabs TTS.

``api_key`` must be set to your ElevenLabs API key, either using the argument or by setting
the ``ELEVEN_API_KEY`` environmental variable.
"""

super().__init__(
capabilities=tts.TTSCapabilities(
streaming=True,
@@ -58,8 +58,11 @@ def __init__(
credentials_file: str | None = None,
):
"""
if no credentials is provided, it will use the credentials on the environment
GOOGLE_APPLICATION_CREDENTIALS (default behavior of Google SpeechAsyncClient)
Create a new instance of Google STT.

Credentials must be provided, either by using the ``credentials_info`` dict, or reading
from the file specified in ``credentials_file`` or the ``GOOGLE_APPLICATION_CREDENTIALS``
environmental variable.
"""
super().__init__(
capabilities=stt.STTCapabilities(streaming=True, interim_results=True)
@@ -51,9 +51,13 @@ def __init__(
credentials_file: str | None = None,
) -> None:
"""
if no credentials is provided, it will use the credentials on the environment
GOOGLE_APPLICATION_CREDENTIALS (default behavior of Google TextToSpeechAsyncClient)
Create a new instance of Google TTS.

Credentials must be provided, either by using the ``credentials_info`` dict, or reading
from the file specified in ``credentials_file`` or the ``GOOGLE_APPLICATION_CREDENTIALS``
environmental variable.
"""

super().__init__(
capabilities=tts.TTSCapabilities(
streaming=False,
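The Google STT and TTS docstrings describe three ways to supply credentials; the sketch below walks through them, assuming the classes are exposed as `STT` and `TTS` under `livekit.plugins.google` and using placeholder credential values.

```python
from livekit.plugins import google  # import path assumed

# 1. Pass service-account info directly as a dict.
stt = google.STT(credentials_info={"type": "service_account", "project_id": "..."})  # placeholder dict

# 2. Point at a service-account JSON key file.
tts = google.TTS(credentials_file="/path/to/service-account.json")

# 3. Pass neither; the underlying Google client falls back to the
#    GOOGLE_APPLICATION_CREDENTIALS environment variable.
stt = google.STT()
```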
livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/llm.py
@@ -52,6 +52,18 @@
user: str | None = None,
client: openai.AsyncClient | None = None,
) -> None:
"""
Create a new instance of OpenAI LLM.

``api_key`` must be set to your OpenAI API key, either using the argument or by setting the
``OPENAI_API_KEY`` environmental variable.
"""

# throw an error on our end
api_key = api_key or os.environ.get("OPENAI_API_KEY")

Check failure (GitHub Actions / build, Ruff F821): livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/llm.py:63:30: Undefined name `os`
if api_key is None:
raise ValueError("OpenAI API key is required")

self._opts = LLMOptions(model=model, user=user)
self._client = client or openai.AsyncClient(
api_key=api_key,
@@ -116,6 +128,18 @@
client: openai.AsyncClient | None = None,
user: str | None = None,
) -> LLM:
"""
Create a new instance of Fireworks LLM.

``api_key`` must be set to your Fireworks API key, either using the argument or by setting
the ``FIREWORKS_API_KEY`` environmental variable.
"""

# shim for not using OPENAI_API_KEY
api_key = api_key or os.environ.get("FIREWORKS_API_KEY")

Check failure (GitHub Actions / build, Ruff F821): livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/llm.py:139:30: Undefined name `os`
if api_key is None:
raise ValueError("Fireworks API key is required")

return LLM(
model=model, api_key=api_key, base_url=base_url, client=client, user=user
)
@@ -129,6 +153,18 @@
client: openai.AsyncClient | None = None,
user: str | None = None,
) -> LLM:
"""
Create a new instance of Groq LLM.

``api_key`` must be set to your Groq API key, either using the argument or by setting
the ``GROQ_API_KEY`` environmental variable.
"""

# shim for not using OPENAI_API_KEY
api_key = api_key or os.environ.get("GROQ_API_KEY")

Check failure (GitHub Actions / build, Ruff F821): livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/llm.py:164:30: Undefined name `os`
if api_key is None:
raise ValueError("Groq API key is required")

return LLM(
model=model, api_key=api_key, base_url=base_url, client=client, user=user
)
@@ -142,6 +178,18 @@
client: openai.AsyncClient | None = None,
user: str | None = None,
) -> LLM:
"""
Create a new instance of OctoAI LLM.

``api_key`` must be set to your OctoAI API key, either using the argument or by setting
the ``OCTO_API_KEY`` environmental variable.
"""

# shim for not using OPENAI_API_KEY
api_key = api_key or os.environ.get("OCTO_API_KEY")

Check failure (GitHub Actions / build, Ruff F821): livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/llm.py:189:30: Undefined name `os`
if api_key is None:
raise ValueError("OctoAI API key is required")

return LLM(
model=model, api_key=api_key, base_url=base_url, client=client, user=user
)
@@ -153,6 +201,10 @@
base_url: str | None = "http://localhost:11434/v1",
client: openai.AsyncClient | None = None,
) -> LLM:
"""
Create a new instance of Ollama LLM.
"""

return LLM(model=model, api_key="ollama", base_url=base_url, client=client)

@staticmethod
@@ -177,6 +229,18 @@
client: openai.AsyncClient | None = None,
user: str | None = None,
) -> LLM:
"""
Create a new instance of TogetherAI LLM.

``api_key`` must be set to your TogetherAI API key, either using the argument or by setting
the ``TOGETHER_API_KEY`` environmental variable.
"""

# shim for not using OPENAI_API_KEY
api_key = api_key or os.environ.get("TOGETHER_API_KEY")

Check failure (GitHub Actions / build, Ruff F821): livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/llm.py:240:30: Undefined name `os`
if api_key is None:
raise ValueError("TogetherAI API key is required")

return LLM(
model=model, api_key=api_key, base_url=base_url, client=client, user=user
)
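Every constructor and provider shim in this file resolves its key with the same fallback, and the Ruff F821 failures above indicate that the `import os` this fallback relies on is still missing from the module in this commit. The sketch below restates that shared pattern in isolation; `resolve_api_key` is a hypothetical helper written only for illustration, not part of the plugin.

```python
import os  # the import the F821 failures report as missing from llm.py in this commit


def resolve_api_key(explicit_key: str | None, env_var: str, provider: str) -> str:
    """Hypothetical helper mirroring the fallback each shim in this diff performs."""
    key = explicit_key or os.environ.get(env_var)
    if key is None:
        raise ValueError(f"{provider} API key is required")
    return key


# For example, the Groq shim effectively does the following before delegating to LLM(...):
os.environ.setdefault("GROQ_API_KEY", "<your-groq-key>")  # placeholder so this snippet runs
api_key = resolve_api_key(None, "GROQ_API_KEY", "Groq")
```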
livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/stt.py
@@ -47,6 +47,13 @@
api_key: str | None = None,
client: openai.AsyncClient | None = None,
):
"""
Create a new instance of OpenAI STT.

``api_key`` must be set to your OpenAI API key, either using the argument or by setting the
``OPENAI_API_KEY`` environmental variable.
"""

super().__init__(
capabilities=stt.STTCapabilities(streaming=False, interim_results=False)
)
@@ -59,6 +66,11 @@
model=model,
)

# throw an error on our end
api_key = api_key or os.environ.get("OPENAI_API_KEY")

Check failure (GitHub Actions / build, Ruff F821): livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/stt.py:70:30: Undefined name `os`
if api_key is None:
raise ValueError("OpenAI API key is required")

self._client = client or openai.AsyncClient(
api_key=api_key,
base_url=base_url,
livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/tts.py
@@ -48,6 +48,13 @@
api_key: str | None = None,
client: openai.AsyncClient | None = None,
) -> None:
"""
Create a new instance of OpenAI TTS.

``api_key`` must be set to your OpenAI API key, either using the argument or by setting the
``OPENAI_API_KEY`` environmental variable.
"""

super().__init__(
capabilities=tts.TTSCapabilities(
streaming=False,
@@ -56,6 +63,11 @@
num_channels=OPENAI_TTS_CHANNELS,
)

# throw an error on our end
api_key = api_key or os.environ.get("OPENAI_API_KEY")

Check failure (GitHub Actions / build, Ruff F821): livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/tts.py:67:30: Undefined name `os`
if api_key is None:
raise ValueError("OpenAI API key is required")

self._client = client or openai.AsyncClient(
api_key=api_key,
base_url=base_url,