
Commit

set model via default config instead
CTY-git committed Feb 5, 2025
1 parent aee5307 commit f315559
Showing 3 changed files with 5 additions and 8 deletions.
6 changes: 4 additions & 2 deletions patchwork/patchflows/PRReview/PRReview.py
@@ -120,7 +120,8 @@ def run(self) -> dict:
                 Ensure that you include all rules in your response, even if there\'s no violation. The output should directly reflect the reasoning in your thinking section.
                 ''',
                 json_schema={"review": "The markdown text of the reviews"},
-                model="claude-3-5-sonnet-latest")
+                **self.inputs
+            )
         ).run()
 
         llm2_outputs = SimplifiedLLMOnce(
@@ -136,7 +137,8 @@ def run(self) -> dict:
                 You should return an empty response if there are no code reviews that is actionable or useful.
                 ''',
                 json_schema={"review": "The reviews curated"},
-                model="claude-3-5-sonnet-latest")
+                **self.inputs
+            )
         ).run()
 
         review = llm2_outputs.get("review")
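In both hunks above, the hardcoded `model="claude-3-5-sonnet-latest"` keyword is dropped and the patchflow's configured inputs are unpacked into the call instead, so the model used by `SimplifiedLLMOnce` now comes from `defaults.yml` (or a user override). Below is a minimal sketch of that pattern; `simplified_llm_once` and the shape of `inputs` are illustrative stand-ins, not the actual patchwork API:

```python
# Illustrative stand-in for SimplifiedLLMOnce; only the keywords touched by this
# commit are modeled here. Not the real patchwork implementation.
def simplified_llm_once(prompt_user: str, json_schema: dict, model: str, **_extra) -> dict:
    return {"review": f"(review text generated by {model})"}

# Inputs as resolved from defaults.yml plus any user overrides (assumed shape).
inputs = {"model": "gpt-4o-mini"}

# Before: the model was pinned in code, ignoring configured defaults.
# simplified_llm_once(prompt_user=..., json_schema=..., model="claude-3-5-sonnet-latest")

# After: the configured inputs decide which model is used.
outputs = simplified_llm_once(
    prompt_user="Review the diff against the listed rules",
    json_schema={"review": "The markdown text of the reviews"},
    **inputs,
)
print(outputs["review"])  # -> (review text generated by gpt-4o-mini)
```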
5 changes: 1 addition & 4 deletions patchwork/patchflows/PRReview/defaults.yml
@@ -1,7 +1,4 @@
 # PRReview Inputs
 diff_summary: long
 diff_suggestion: false
-
-
 # ReadPRDiffs Inputs
 # github_api_key: required-for-github-scm
@@ -14,7 +11,7 @@ diff_suggestion: false
 # CallLLM Inputs
 # openai_api_key: required-for-chatgpt
 # google_api_key: required-for-gemini
-# model: gpt-4o
+model: gpt-4o-mini
 # client_base_url: https://api.openai.com/v1
 # Example HF model
 # client_base_url: https://api-inference.huggingface.co/models/codellama/CodeLlama-70b-Instruct-hf/v1
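With the `model:` key uncommented and set to `gpt-4o-mini`, the default config now supplies the model that `**self.inputs` forwards in the patchflow above, while users can still override it. A rough sketch of how such a defaults file could be merged with user overrides (the loading logic here is assumed for illustration; the actual patchwork loader may differ):

```python
# Illustration only: merge a defaults.yml like the one above with user overrides.
import yaml  # PyYAML

DEFAULTS_YML = """
diff_summary: long
diff_suggestion: false
model: gpt-4o-mini
"""

def resolve_inputs(user_overrides: dict) -> dict:
    defaults = yaml.safe_load(DEFAULTS_YML)
    # User-supplied values take precedence over defaults.yml entries.
    return {**defaults, **user_overrides}

print(resolve_inputs({})["model"])                   # gpt-4o-mini (from defaults)
print(resolve_inputs({"model": "gpt-4o"})["model"])  # gpt-4o (explicit override)
```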
2 changes: 0 additions & 2 deletions patchwork/step.py
@@ -73,8 +73,6 @@ def __init__(self, inputs: DataPoint):
         self.run = self.__managed_run
 
     def __init_subclass__(cls, input_class: Optional[Type] = None, output_class: Optional[Type] = None, **kwargs):
-        if cls.__name__ == "PreparePR":
-            print(1)
         input_class = input_class or getattr(cls, "input_class", None)
         if input_class is not None and not is_typeddict(input_class):
             input_class = None
