Commit

format
robertgshaw2-neuralmagic committed Aug 31, 2024
1 parent 0c129f2 commit 550d9b9
Showing 4 changed files with 5 additions and 738 deletions.
9 changes: 5 additions & 4 deletions vllm/engine/output_processor/multi_step.py
@@ -88,12 +88,13 @@ def process_outputs(self,
         # TODO: Add support for async if necessary
         assert not is_async
 
-        # Sequences can only be in the RUNNING and FINISHED_ABORTED state
-        # once they are scheduled. A sequence is moved to FINSIHED_ABORTED
-        # client disconnects from the server, which can occur while
+        # Sequences can be in the RUNNING or FINISHED_ABORTED state.
+        # FINISHED_ABORTED occurs when the client disconnects from
+        # the server, which can happen while a SequenceGroup is scheduled.
         seqs = sequence_group.get_seqs(status=SequenceStatus.RUNNING)
         if seqs is None:
-            seqs = sequence_group.get_seqs(status=SequenceStatus.FINISHED_ABORTED)
+            seqs = sequence_group.get_seqs(
+                status=SequenceStatus.FINISHED_ABORTED)
 
         assert seqs, "Expected RUNNING or FINISHED_ABORTED sequences"
         assert len(seqs) == 1, (
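
For illustrative context only (not part of this commit), the reformatted hunk above is a lookup that prefers RUNNING sequences and falls back to FINISHED_ABORTED ones when the client has disconnected. A minimal, self-contained sketch of that pattern follows, using simplified stand-ins for vLLM's SequenceStatus and SequenceGroup rather than the real classes:

# Minimal sketch of the RUNNING -> FINISHED_ABORTED fallback shown above.
# SequenceStatus and SequenceGroup here are simplified stand-ins, not
# vLLM's actual classes.
from enum import Enum, auto


class SequenceStatus(Enum):
    RUNNING = auto()
    FINISHED_ABORTED = auto()


class SequenceGroup:
    def __init__(self, statuses):
        # statuses maps sequence id -> SequenceStatus
        self.statuses = statuses

    def get_seqs(self, status):
        # Return the ids of sequences currently in the requested state.
        return [sid for sid, s in self.statuses.items() if s is status]


# A group whose only sequence was aborted (e.g. the client disconnected).
sequence_group = SequenceGroup({0: SequenceStatus.FINISHED_ABORTED})

seqs = sequence_group.get_seqs(status=SequenceStatus.RUNNING)
if not seqs:
    seqs = sequence_group.get_seqs(
        status=SequenceStatus.FINISHED_ABORTED)

assert seqs, "Expected RUNNING or FINISHED_ABORTED sequences"
assert len(seqs) == 1
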
50 changes: 0 additions & 50 deletions vllm/entrypoints/openai/rpc/__init__.py

This file was deleted.