-
Notifications
You must be signed in to change notification settings - Fork 0
/
ollamahandler.py
91 lines (85 loc) · 3.45 KB
/
ollamahandler.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
# ollamahandler.py (c) 2024 Gregory L. Magnusson
# renamed to ollamahandler because ollama is a module
# provides the list of installed Ollama models, offers installation on Linux, and shows Ollama info
# provides an async streamed input/response for UI/UX interaction
import logging
import subprocess
import asyncio
class OllamaModel:
    """
    Interact with models served by a local Ollama instance.

    Provides async streamed generation, service introspection, model
    listing, and a helper that installs Ollama on Linux.
    """

    def __init__(self):
        # Base URL of the local Ollama HTTP API (default port 11434).
        # NOTE(review): only the CLI is used below; api_url is kept for callers.
        self.api_url = "http://localhost:11434/api"

    async def generate_response_async(self, knowledge, model="llama3"):
        """
        Generate a streamed response from *model* for the prompt *knowledge*.

        Returns:
            The concatenated response text, or an error string if the
            Ollama API is unreachable or fails.
        """
        try:
            # Lazy import: `ollama` was referenced but never imported in the
            # original file; importing here keeps the module loadable even
            # when the package is absent.
            import ollama

            # ollama.chat(stream=True) yields a *synchronous* generator, so
            # `async for` over it raises TypeError. AsyncClient provides the
            # true async streaming interface.
            client = ollama.AsyncClient()
            stream = await client.chat(
                model=model,
                messages=[{'role': 'user', 'content': knowledge}],
                stream=True,
            )
            # Collect chunks and join once instead of quadratic `+=`.
            parts = []
            async for chunk in stream:
                parts.append(chunk['message']['content'])
            return "".join(parts)
        except Exception as e:
            logging.error(f"ollama api error: {e}")
            return "error: unable to generate a response due to an issue with the ollama api."

    async def show_ollama_info_async(self):
        """
        Show information about the Ollama service via `ollama show`.

        Returns:
            The command's stdout on success, "" on any failure.
        """
        # NOTE(review): `ollama show` normally requires a model name argument;
        # with none it exits non-zero and this returns "" — confirm intent.
        try:
            proc = await asyncio.create_subprocess_exec(
                "ollama", "show",
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
            )
            stdout, stderr = await proc.communicate()
            if proc.returncode == 0:
                return stdout.decode().strip()
            logging.error(f"ollama api error: {stderr.decode().strip()}")
            return ""
        except Exception as e:
            logging.error(f"ollama api error: {e}")
            return ""

    def list_models(self):
        """
        List all models installed in the local Ollama service.

        Returns:
            The output lines of `ollama list` (header included), or []
            when the command fails or the binary is missing.
        """
        try:
            # List-form argv with shell=False: no shell needed for a fixed
            # command. A missing binary raises FileNotFoundError, caught below.
            result = subprocess.run(["ollama", "list"], capture_output=True, text=True)
            if result.returncode == 0:
                return result.stdout.strip().splitlines()
            logging.error(f"ollama api error: {result.stderr}")
            return []
        except Exception as e:
            logging.error(f"ollama api error: {e}")
            return []

    def install_ollama(self):
        """
        Install Ollama using the official installation script (Linux).

        Returns:
            A human-readable success or error message.
        """
        # Security note: this pipes a remote script straight into `sh`, which
        # is the vendor-documented install method but inherently trusts
        # ollama.com. shell=True is required here for the pipe.
        command = "curl -fsSL https://ollama.com/install.sh | sh"
        try:
            result = subprocess.run(command, shell=True, capture_output=True, text=True)
            if result.returncode == 0:
                return "Ollama installation successful."
            logging.error(f"ollama install error: {result.stderr}")
            return "error: unable to install ollama."
        except Exception as e:
            logging.error(f"ollama install error: {e}")
            return "error: unable to install ollama."
def check_ollama_installation():
    """
    Check whether the `ollama` CLI is installed and responds to `ollama list`.

    Returns:
        True when the command exits successfully, False otherwise
        (missing binary, non-zero exit, or any other failure).
    """
    try:
        # List-form argv with shell=False avoids spawning a shell for a
        # fixed command; a missing binary raises FileNotFoundError, which
        # the except clause turns into False.
        result = subprocess.run(["ollama", "list"], capture_output=True, text=True)
    except Exception as e:
        logging.error(f"Failed to check Ollama installation: {e}")
        return False
    if result.returncode == 0:
        logging.info("Ollama is installed and accessible.")
        return True
    logging.error("Ollama is not accessible.")
    return False