Merge pull request #31 from zanderlewis/main
0.6.2 - 0.6.3
Zander Lewis authored May 26, 2024
2 parents ca31de4 + 6102359 commit 5ab8ef2
Showing 3 changed files with 37 additions and 24 deletions.
50 changes: 26 additions & 24 deletions llamascript/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "0.6.1"
+__version__ = "0.6.3"
 
 import asyncio
 import ollama
@@ -7,6 +7,12 @@
 import subprocess
 import os
 
+dbg = False
+
+def debug(message):
+    if dbg:
+        print(message)
+
 # Set up logging
 logging.basicConfig(level=logging.WARNING)

@@ -41,11 +47,13 @@ def SYSTEM(self, line="", p=""):
     def CHAT(self, stream: bool = False):
         for _ in range(3):
             try:
+                debug("Attempting to chat with model...")
                 response = ollama.chat(
                     model=self.model,
                     messages=self.system + [{"role": "user", "content": self.data}],
                     stream=stream,
                 )
+                debug("Chat successful.")
                 if stream:
                     for message in response:
                         print(message["message"]["content"], end="")
@@ -100,10 +108,7 @@ def CREATE_MODEL(self, filename, parameters, model_name):
         except Exception as e:
             logging.error("Error creating model file: %s", e)
             print(f"Error creating model file {filename}.")
-
-    def REPEAT(self, command, repeat_count):
-        for _ in range(repeat_count):
-            self.execute_command(command)
+            sys.exit(1)
 
     def execute_command(self, command):
         if command.startswith("PROMPT INPUT"):
@@ -124,17 +129,7 @@ async def read(self, filename):
                     i += 1
                     continue
                 command = line.split(" ")
-                if command[0] == "REPEAT":
-                    repeat_count = int(command[1]) if len(command) > 1 else 1
-                    repeat_commands = []
-                    i += 1
-                    while i < len(lines) and not lines[i].strip().startswith("ENDREPEAT"):
-                        repeat_commands.append(lines[i].strip())
-                        i += 1
-                    for _ in range(repeat_count):
-                        for repeat_command in repeat_commands:
-                            self.execute_command(repeat_command)
-                elif command[0] == "USE":
+                if command[0] == "USE":
                     self.USE(line)
                 elif len(command) > 1 and command[1] == "INPUT":
                     self.INPUT(command[0])
@@ -155,17 +150,13 @@ async def read(self, filename):
                     }
                     self.CREATE_MODEL("Modelfile", parameters, model_name)
                 elif command[0] == "CHAT":
-                    if len(command) > 1 and command[1] == "STREAM":
-                        stream = command[1] == True
+                    if len(command) > 1 and command[1] == "STREAM":\
+                        self.CHAT(stream=True)
                     else:
-                        stream = False
-                    self.CHAT(stream=stream)
-                elif command[0] == "REPEAT":
-                    repeat_count = int(command[1]) if len(command) > 1 else 1
-                    repeat_command = " ".join(command[2:])
-                    self.REPEAT(repeat_command, repeat_count)
+                        self.CHAT()
                 else:
                     raise ValueError("Invalid command")
+                i += 1
             except FileNotFoundError:
                 logging.error("File %s not found.", filename)
                 print(f"File {filename} not found.")
@@ -177,9 +168,20 @@ async def read(self, filename):
 def run():
     parser = argparse.ArgumentParser(description="Run llama script.")
     parser.add_argument("file_name", type=str, help="The name of the file to run")
+    parser.add_argument(
+        "-v",
+        "--version",
+        action="version",
+        version=f"LlamaScript version {__version__}",
+        help="Display version information",
+    )
 
     args = parser.parse_args()
 
+    if args.version:
+        print(f"llamascript version {__version__}")
+        sys.exit(0)
+
     if not (args.file_name.endswith(".llama") or args.file_name == "llama"):
         logging.error("Invalid file type. Please provide a .llama or llama file.")
         print("Invalid file type. Please provide a .llama or llama file.")
3 changes: 3 additions & 0 deletions llamascript/llama
@@ -0,0 +1,3 @@
+USE llama3
+PROMPT why is the sky blue?
+CHAT STREAM
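For context, running this example script (USE llama3, PROMPT why is the sky blue?, CHAT STREAM) boils down to roughly the following direct ollama call. This is a minimal sketch based on the CHAT method shown above; the retry loop, system prompt handling, and debug logging are omitted.

import ollama

# Rough equivalent of `USE llama3` + `PROMPT why is the sky blue?` + `CHAT STREAM`
response = ollama.chat(
    model="llama3",
    messages=[{"role": "user", "content": "why is the sky blue?"}],
    stream=True,
)
for message in response:
    print(message["message"]["content"], end="")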
8 changes: 8 additions & 0 deletions upload.py
@@ -1,4 +1,12 @@
 import subprocess
+import os
 
+if os.path.exists("dist"):
+    subprocess.run(["rm", "-r", "dist"])
+if os.path.exists("build"):
+    subprocess.run(["rm", "-r", "build"])
+if os.path.exists("llamascript.egg-info"):
+    subprocess.run(["rm", "-r", "llamascript.egg-info"])
+
 subprocess.run(["python3", "setup.py", "sdist", "bdist_wheel"])
 subprocess.run(["twine", "upload", "dist/*"])
