Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

switch to AI Horde Worker Scribe #441

Merged
merged 16 commits
Aug 22, 2023
9 changes: 3 additions & 6 deletions .gitmodules
Original file line number Diff line number Diff line change
@@ -1,6 +1,3 @@
[submodule "KoboldAI-Horde"]
path = KoboldAI-Horde
url = https://github.com/db0/KoboldAI-Horde-Bridge
[submodule "KoboldAI-Horde-Bridge"]
path = KoboldAI-Horde-Bridge
url = https://github.com/db0/KoboldAI-Horde-Bridge
[submodule "AI-Horde-Worker"]
path = AI-Horde-Worker
url = https://github.com/Haidra-Org/AI-Horde-Worker/
1 change: 1 addition & 0 deletions AI-Horde-Worker
Submodule AI-Horde-Worker added at 960723
1 change: 0 additions & 1 deletion KoboldAI-Horde-Bridge
Submodule KoboldAI-Horde-Bridge deleted from 20e870
5 changes: 5 additions & 0 deletions aiserver.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,11 @@
import traceback

import lupa
# Hack to make the new Horde worker understand its imports: the
# AI-Horde-Worker submodule expects its own directory on sys.path so that
# its internal packages (worker.*) resolve as top-level imports.
# NOTE: sys.path.append on a string cannot raise, so the previous bare
# try/except-pass guard only hid real errors (e.g. a missing os import)
# and has been removed.
sys.path.append(os.path.abspath("AI-Horde-Worker"))

# KoboldAI
import fileops
Expand Down
1 change: 1 addition & 0 deletions colabkobold.sh
Original file line number Diff line number Diff line change
Expand Up @@ -152,6 +152,7 @@ if [ "$init" != "skip" ]; then
cp -rn softprompts/* /content/drive/MyDrive/KoboldAI/softprompts/
cp -rn presets/* /content/drive/MyDrive/KoboldAI/presets/
cp -rn themes/* /content/drive/MyDrive/KoboldAI/themes/
rm -rf AI-Horde-Worker/
rm -rf KoboldAI-Horde-Bridge/
rm stories
rm -rf stories/
Expand Down
2 changes: 2 additions & 0 deletions environments/huggingface.yml
Original file line number Diff line number Diff line change
Expand Up @@ -54,3 +54,5 @@ dependencies:
- einops
- peft==0.3.0
- scipy
- windows-curses; sys_platform == 'win32'
- pynvml
2 changes: 2 additions & 0 deletions environments/rocm.yml
Original file line number Diff line number Diff line change
Expand Up @@ -44,3 +44,5 @@ dependencies:
- git+https://github.com/0cc4m/hf_bleeding_edge/
- einops
- peft==0.3.0
- windows-curses; sys_platform == 'win32'
- pynvml
62 changes: 30 additions & 32 deletions koboldai_settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -1350,38 +1350,36 @@ def __setattr__(self, name, value):
self._koboldai_var.calc_ai_text()

if name == 'horde_share':
if self.on_colab == False:
if os.path.exists("./KoboldAI-Horde-Bridge"):
if value == True:
if self._horde_pid is None:
logger.info("Starting Horde bridge")
bridge = importlib.import_module("KoboldAI-Horde-Bridge.bridge")
self._horde_pid = bridge.kai_bridge()
try:
bridge_cd = importlib.import_module("KoboldAI-Horde-Bridge.clientData")
cluster_url = bridge_cd.cluster_url
kai_name = bridge_cd.kai_name
if kai_name == "My Awesome Instance":
kai_name = f"KoboldAI UI Instance #{random.randint(-100000000, 100000000)}"
api_key = bridge_cd.api_key
priority_usernames = bridge_cd.priority_usernames
except:
cluster_url = "https://horde.koboldai.net"
kai_name = self._koboldai_var.horde_worker_name
if kai_name == "My Awesome Instance":
kai_name = f"KoboldAI UI Instance #{random.randint(-100000000, 100000000)}"
api_key = self._koboldai_var.horde_api_key
priority_usernames = []
# Always use the local URL & port
kai_url = f'http://127.0.0.1:{self.port}'

logger.info(f"Name: {kai_name} on {kai_url}")
threading.Thread(target=self._horde_pid.bridge, args=(1, api_key, kai_name, kai_url, cluster_url, priority_usernames)).run()
else:
if self._horde_pid is not None:
logger.info("Killing Horde bridge")
self._horde_pid.stop()
self._horde_pid = None
if self.on_colab is True:
return
if not os.path.exists("./AI-Horde-Worker"):
return
if value is True:
if self._horde_pid is None:
self._horde_pid = "Pending" # Hack to make sure we don't launch twice while it loads
logger.info("Starting Horde bridge")
bd_module = importlib.import_module("AI-Horde-Worker.worker.bridge_data.scribe")
bridge_data = bd_module.KoboldAIBridgeData()
bridge_data.reload_data()
bridge_data.kai_url = f'http://127.0.0.1:{self.port}'
bridge_data.horde_url = self._koboldai_var.horde_url
bridge_data.api_key = self._koboldai_var.horde_api_key
bridge_data.scribe_name = self._koboldai_var.horde_worker_name
bridge_data.disable_terminal_ui = False
if bridge_data.worker_name == "My Awesome Instance":
bridge_data.worker_name = f"KoboldAI UI Instance #{random.randint(-100000000, 100000000)}"
worker_module = importlib.import_module("AI-Horde-Worker.worker.workers.scribe")
self._horde_pid = worker_module.ScribeWorker(bridge_data)
new_thread = threading.Thread(target=self._horde_pid.start)
new_thread.daemon = True
new_thread.start()

else:
if self._horde_pid is not None:
logger.info("Killing Horde bridge")
self._horde_pid.stop()
self._horde_pid = None


class KoboldStoryRegister(object):
def __init__(self, socketio, story_settings, koboldai_vars, tokenizer=None, sequence=[]):
Expand Down
13 changes: 13 additions & 0 deletions logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@ class Colors:
STDOUT_LEVELS = ["GENERATION", "PROMPT"]
INIT_LEVELS = ["INIT", "INIT_OK", "INIT_WARN", "INIT_ERR"]
MESSAGE_LEVELS = ["MESSAGE"]
STATS_LEVELS = ["STATS"]

# By default we're at error level or higher
verbosity = 20
quiet = 0
Expand Down Expand Up @@ -54,6 +56,16 @@ def is_msg_log(record):
return(False)
return(True)

def is_stats_log(record):
    """Loguru filter predicate: True only for records at a STATS level.

    Mirrors the other is_*_log filters in this module; returns the
    membership test directly instead of the verbose if/return pattern.
    """
    return record["level"].name in STATS_LEVELS

def is_not_stats_log(record):
    """Loguru filter predicate: True for records NOT at a STATS level.

    Logical complement of is_stats_log; returns the membership test
    directly instead of the verbose if/return pattern.
    """
    return record["level"].name not in STATS_LEVELS

def is_stderr_log(record):
if record["level"].name in STDOUT_LEVELS + INIT_LEVELS + MESSAGE_LEVELS:
return(False)
Expand Down Expand Up @@ -91,6 +103,7 @@ def test_logger():
# Messages contain important information without which this application might not be able to be used
# As such, they have the highest priority
logger.level("MESSAGE", no=61, color="<green>")
logger.level("STATS", no=19, color="<blue>")

logger.__class__.generation = partialmethod(logger.__class__.log, "GENERATION")
logger.__class__.prompt = partialmethod(logger.__class__.log, "PROMPT")
Expand Down
2 changes: 2 additions & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -42,3 +42,5 @@ git+https://github.com/0cc4m/hf_bleeding_edge/
einops
peft==0.3.0
scipy
windows-curses; sys_platform == 'win32'
pynvml