
Commit e1834f8
Fix code style issues with Black
lint-action committed Jul 18, 2023
1 parent d4b88e1 commit e1834f8
Showing 5 changed files with 27 additions and 19 deletions.
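All of the changes below are mechanical reformatting applied by Black and pushed by the lint-action bot. Two patterns recur in this diff: long boolean conditions get wrapped in parentheses with one clause per line, and long call or signature argument lists get split one argument per line. As a rough illustration, here is a minimal sketch using Black's Python API; it assumes Black is installed (pip install black) and that the format_str/Mode API is available in the installed version. The check() function is an invented example, not code from this repository.

import black

# A condition similar in shape to the one reformatted in deepL_translator.py below.
before = """\
def check(lang, score, target_lang, use_spacy_to_detect_lang_if_needed=True):
    if lang == "en" and score > 0.8 and target_lang == "EN-US" and use_spacy_to_detect_lang_if_needed:
        return True
"""

# Black wraps the over-long condition in parentheses, one clause per line,
# which is the style applied throughout this commit.
after = black.format_str(before, mode=black.Mode(line_length=88))
print(after)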
6 changes: 5 additions & 1 deletion QAChat/Common/deepL_translator.py
@@ -43,7 +43,11 @@ def __init__(self):
     def translate_to(self, text, target_lang, use_spacy_to_detect_lang_if_needed=True):
         if use_spacy_to_detect_lang_if_needed:
             doc = self.muulti_lang_nlp(text)
-            if doc._.language["language"] == "en" and doc._.language["score"] > 0.8 and target_lang == "EN-US":
+            if (
+                doc._.language["language"] == "en"
+                and doc._.language["score"] > 0.8
+                and target_lang == "EN-US"
+            ):
                 return Result(text, "EN_US")
 
         result = self.translator.translate_text(
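For context, the reformatted condition above is an early return: when the spaCy language detector already reports the text as English with a score above 0.8 and the requested target is EN-US, the translator skips the DeepL call and returns the text unchanged. Below is a minimal sketch of that short-circuit pattern; detect_language and the Result fields are hypothetical stand-ins for the spaCy extension (doc._.language) and the repository's Result class, neither of which is defined in this diff.

from dataclasses import dataclass


@dataclass
class Result:
    # Hypothetical stand-in for the repository's Result type.
    text: str
    detected_source_lang: str


def detect_language(text: str) -> dict:
    # Hypothetical stand-in for doc._.language from a spaCy language-detection pipeline.
    return {"language": "en", "score": 0.99}


def translate_to(text: str, target_lang: str) -> Result:
    lang = detect_language(text)
    # Early return: confidently-English text with target EN-US skips the translation API call.
    if lang["language"] == "en" and lang["score"] > 0.8 and target_lang == "EN-US":
        return Result(text, "EN_US")
    raise NotImplementedError("a real implementation would call the DeepL API here")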
2 changes: 1 addition & 1 deletion QAChat/Data_Processing/data_preprocessor.py
@@ -12,7 +12,7 @@
(Whitespace-only change: Black re-indents the continuation line of the load_preprocessed_data signature.)

class DataPreprocessor(ABC):
    @abstractmethod
    def load_preprocessed_data(
        self, end_of_timeframe: datetime, start_of_timeframe: datetime
    ) -> List[DataInformation]:
        """
        Loads preprocessed data of a specific type that was created or modified
2 changes: 1 addition & 1 deletion QAChat/Data_Processing/dummy_preprocessor.py
@@ -13,7 +13,7 @@
(Whitespace-only change: Black re-indents the continuation line of the load_preprocessed_data signature.)

class DummyPreprocessor(DataPreprocessor):
    def load_preprocessed_data(
        self, end_of_timeframe: datetime, start_of_timeframe: datetime
    ) -> List[DataInformation]:
        df = pd.read_csv("../../DummyData/qa_less_50.csv", sep=";")
        raw_data = []
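Both files touched above follow the same abstract-base-class pattern: DataPreprocessor declares load_preprocessed_data as an abstract method, and DummyPreprocessor implements it. A minimal self-contained sketch of that relationship follows; DataInformation is stubbed here because its definition is not part of this diff.

from abc import ABC, abstractmethod
from datetime import datetime
from typing import List


class DataInformation:
    # Stub stand-in; the real class lives elsewhere in the repository.
    def __init__(self, text: str):
        self.text = text


class DataPreprocessor(ABC):
    @abstractmethod
    def load_preprocessed_data(
        self, end_of_timeframe: datetime, start_of_timeframe: datetime
    ) -> List[DataInformation]:
        ...


class DummyPreprocessor(DataPreprocessor):
    def load_preprocessed_data(
        self, end_of_timeframe: datetime, start_of_timeframe: datetime
    ) -> List[DataInformation]:
        # The real implementation reads ../../DummyData/qa_less_50.csv with pandas;
        # this sketch just returns a fixed record.
        return [DataInformation("example question;example answer")]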
32 changes: 17 additions & 15 deletions QAChat/QA_Bot/qa_bot.py
@@ -23,14 +23,14 @@
(Whitespace-only change: Black re-indents the __init__ parameter block.)

class QABot:
    def __init__(
        self,
        embeddings=None,
        database=None,
        model=None,
        translator=None,
        embeddings_gpu=False,
        repo_id="TheBloke/WizardLM-13B-V1-1-SuperHOT-8K-GGML",
        filename="wizardlm-13b-v1.1-superhot-8k.ggmlv3.q5_0.bin",
    ):
        self.answer = None
        self.context = None
@@ -61,11 +61,11 @@ def __init__(
(Whitespace-only change: Black re-indents the get_llama_model parameter block.)

        self.translator = DeepLTranslator()

    def get_llama_model(
        self,
        repo_id,
        filename,
        n_ctx=2048,
        max_tokens=512,
    ):
        path = hf_hub_download(repo_id=repo_id, filename=filename)

@@ -81,7 +81,7 @@ def get_llama_model(
(Whitespace-only change: Black re-indents the continuation line of the answer_question_with_context signature.)

        )

    def answer_question_with_context(
        self, question: str, context: List[str], handler=None
    ) -> str:
        """
        This method takes a question and a list of context strings as input, and attempts to answer the question using the provided context.
@@ -150,7 +150,9 @@ def __sim_search(self, question: str) -> List[str]:
         ]
 
     def translate_text(self, question, language="EN-US"):
-        return self.translator.translate_to(question, language, use_spacy_to_detect_lang_if_needed=False)
+        return self.translator.translate_to(
+            question, language, use_spacy_to_detect_lang_if_needed=False
+        )
 
     def answer_question(self, question: str, handler: StreamLLMCallbackHandler | None):
         """
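The get_llama_model hunk above resolves the model weights through huggingface_hub before constructing the local LLM. A minimal sketch of that download step, using the default repo_id and filename shown in the diff (run it only if downloading a multi-gigabyte file is acceptable):

from huggingface_hub import hf_hub_download

# Downloads (or reuses from the local cache) the GGML weights referenced above
# and returns the local file path that the model loader would receive.
path = hf_hub_download(
    repo_id="TheBloke/WizardLM-13B-V1-1-SuperHOT-8K-GGML",
    filename="wizardlm-13b-v1.1-superhot-8k.ggmlv3.q5_0.bin",
)
print(path)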
4 changes: 3 additions & 1 deletion QAChat/QA_Bot/stream_LLM_callback_handler.py
@@ -22,5 +22,7 @@ def on_llm_new_token(self, token: str, **kwargs):
 
     def send_response(self, text):
         if self.lang != "EN-US":
-            text = self.translator.translate_to(text, self.lang, use_spacy_to_detect_lang_if_needed=False).text
+            text = self.translator.translate_to(
+                text, self.lang, use_spacy_to_detect_lang_if_needed=False
+            ).text
         return json.dumps({"text": text}) + "\n"
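send_response above emits one JSON object per line ({"text": ...} followed by a newline), translating the text first when the session language is not EN-US. A small assumed client-side sketch (not part of the repository) for consuming such a newline-delimited JSON stream:

import io
import json

# Simulated stream of chunks in the shape produced by send_response.
stream = io.StringIO('{"text": "Hallo"}\n{"text": " Welt"}\n')

for line in stream:
    if line.strip():
        chunk = json.loads(line)
        print(chunk["text"], end="")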
