File "/home/ubuntu/h2ogpt/gradio_runner.py", line 1067, in bot
for output_fun in fun1(*tuple(args_list)):
File "/home/ubuntu/h2ogpt/generate.py", line 1056, in evaluate
for r in run_qa_db(query=query,
File "/home/ubuntu/h2ogpt/gpt_langchain.py", line 1250, in _run_qa_db
raise thread.exc
File "/home/ubuntu/h2ogpt/utils.py", line 314, in run
self._return = self._target(*self._args, **self._kwargs)
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/langchain/chains/base.py", line 140, in __call__
raise e
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/langchain/chains/base.py", line 134, in __call__
self._call(inputs, run_manager=run_manager)
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/langchain/chains/combine_documents/base.py", line 84, in _call
output, extra_return_dict = self.combine_docs(
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/langchain/chains/combine_documents/stuff.py", line 87, in combine_docs
return self.llm_chain.predict(callbacks=callbacks, **inputs), {}
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/langchain/chains/llm.py", line 213, in predict
return self(kwargs, callbacks=callbacks)[self.output_key]
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/langchain/chains/base.py", line 140, in __call__
raise e
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/langchain/chains/base.py", line 134, in __call__
self._call(inputs, run_manager=run_manager)
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/langchain/chains/llm.py", line 69, in _call
response = self.generate([inputs], run_manager=run_manager)
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/langchain/chains/llm.py", line 79, in generate
return self.llm.generate_prompt(
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/langchain/llms/base.py", line 134, in generate_prompt
return self.generate(prompt_strings, stop=stop, callbacks=callbacks)
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/langchain/llms/base.py", line 191, in generate
raise e
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/langchain/llms/base.py", line 185, in generate
self._generate(prompts, stop=stop, run_manager=run_manager)
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/langchain/llms/base.py", line 436, in _generate
self._call(prompt, stop=stop, run_manager=run_manager)
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/langchain/llms/huggingface_pipeline.py", line 168, in _call
response = self.pipeline(prompt)
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/transformers/pipelines/text_generation.py", line 209, in __call__
return super().__call__(text_inputs, **kwargs)
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/transformers/pipelines/base.py", line 1109, in __call__
return self.run_single(inputs, preprocess_params, forward_params, postprocess_params)
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/transformers/pipelines/base.py", line 1115, in run_single
model_inputs = self.preprocess(inputs, **preprocess_params)
File "/home/ubuntu/h2ogpt/h2oai_pipeline.py", line 79, in preprocess
return super().preprocess(prompt_text, prefix=prefix, handle_long_generation=handle_long_generation,
File "/home/ubuntu/miniconda3/envs/h2ollm/lib/python3.10/site-packages/transformers/pipelines/text_generation.py", line 228, in preprocess
raise ValueError(
ValueError: We cannot use `hole` to handle this generation the number of desired tokens exceeds the models max length
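
For context, this ValueError comes from transformers' text-generation pipeline, not from h2ogpt or LangChain: with `handle_long_generation="hole"`, `preprocess()` tries to keep only the last `model_max_length - max_new_tokens` prompt tokens, and raises exactly this error when `max_new_tokens` alone already meets or exceeds the tokenizer's `model_max_length`, so no prompt tokens can be kept at all. Below is a minimal sketch that reproduces the condition outside h2ogpt; `gpt2` and the token counts are only illustrative assumptions (h2ogpt goes through its own `H2OTextGenerationPipeline` subclass, as the trace shows):

```python
# Minimal sketch reproducing the ValueError, assuming gpt2 as a
# stand-in small-context model (tokenizer model_max_length = 1024).
from transformers import pipeline

pipe = pipeline("text-generation", model="gpt2")

prompt = "word " * 3000  # far longer than the 1024-token context

# handle_long_generation="hole" keeps only the last
# (model_max_length - max_new_tokens) prompt tokens. Because
# max_new_tokens (2048) >= model_max_length (1024), nothing can be
# kept, and preprocess() raises:
#   ValueError: We cannot use `hole` to handle this generation
#   the number of desired tokens exceeds the models max length
pipe(prompt, handle_long_generation="hole", max_new_tokens=2048)
```

In other words, the prompt assembled by LangChain's `stuff` chain from the retrieved documents, plus the requested number of new tokens, must fit in the model's context window. Consistent with the trace, lowering `max_new_tokens`, retrieving fewer or shorter chunks, or using a model/tokenizer with a larger `model_max_length` should all avoid the condition.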