From d7f348175e1d6953c22a724963db2b5827b7d26f Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 1 Jan 2024 19:10:24 +0000
Subject: [PATCH 1/2] [pre-commit.ci] pre-commit suggestions
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- https://github.com/myint/docformatter → https://github.com/PyCQA/docformatter
- [github.com/PyCQA/isort: 5.12.0 → 5.13.2](https://github.com/PyCQA/isort/compare/5.12.0...5.13.2)
- [github.com/psf/black: 22.12.0 → 23.12.1](https://github.com/psf/black/compare/22.12.0...23.12.1)
- https://github.com/charliermarsh/ruff-pre-commit → https://github.com/astral-sh/ruff-pre-commit
- [github.com/astral-sh/ruff-pre-commit: v0.1.6 → v0.1.9](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.6...v0.1.9)
---
 .pre-commit-config.yaml | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index bb34476..b57df78 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -28,19 +28,19 @@ repos:
         args: [--py38-plus]
         name: Upgrade code
 
-  - repo: https://github.com/myint/docformatter
+  - repo: https://github.com/PyCQA/docformatter
     rev: v1.7.5
     hooks:
       - id: docformatter
         args: [--in-place, --wrap-summaries=120, --wrap-descriptions=120]
 
   - repo: https://github.com/PyCQA/isort
-    rev: 5.12.0
+    rev: 5.13.2
     hooks:
       - id: isort
 
   - repo: https://github.com/psf/black
-    rev: 22.12.0
+    rev: 23.12.1
     hooks:
       - id: black
         name: Black code
@@ -67,8 +67,8 @@ repos:
           #- flake8-return
           #- flake8-simplify
 
-  - repo: https://github.com/charliermarsh/ruff-pre-commit
-    rev: v0.1.6
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.1.9
     hooks:
       - id: ruff
         args: ["--fix"]

From 57eb455537eca02748cd310c43fcd8eb965b369d Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 1 Jan 2024 19:10:59 +0000
Subject: [PATCH 2/2] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 models/chatglm/modeling_chatglm.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/models/chatglm/modeling_chatglm.py b/models/chatglm/modeling_chatglm.py
index 3dcb087..6f8a122 100644
--- a/models/chatglm/modeling_chatglm.py
+++ b/models/chatglm/modeling_chatglm.py
@@ -877,7 +877,6 @@ def forward(
         output_hidden_states: Optional[bool] = None,
         return_dict: Optional[bool] = None,
     ) -> Union[Tuple[torch.Tensor, ...], BaseModelOutputWithPast]:
-
         output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
         output_hidden_states = (
             output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
@@ -949,7 +948,6 @@ def forward(
         attention_mask = attention_mask.to(hidden_states.device)
 
         for i, layer in enumerate(self.layers):
-
             if output_hidden_states:
                 all_hidden_states = all_hidden_states + (hidden_states,)
             layer_past = past_key_values[i]