Merge branch 'main' into mrwyattii/add-sd-tests
loadams authored Apr 8, 2024
2 parents 40dc447 + 26a853d commit af0ea39
Showing 7 changed files with 22 additions and 6 deletions.
.github/workflows/nv-v100-legacy.yml (2 changes: 1 addition & 1 deletion)

@@ -19,7 +19,7 @@ concurrency:

 jobs:
   unit-tests:
-    runs-on: [self-hosted, nvidia, cu116, v100]
+    runs-on: [self-hosted, nvidia, cu117, v100]

     steps:
       - uses: actions/checkout@v4
mii/aml_related/templates.py (3 changes: 2 additions & 1 deletion)

@@ -162,7 +162,8 @@
 CONDA_DEFAULT_ENV=amlenv \
 PATH=$PATH:/usr/local/cuda/bin
-RUN /opt/miniconda/envs/amlenv/bin/pip install -r "$BUILD_DIR/requirements.txt" --extra-index-url https://download.pytorch.org/whl/cu113 && \
+RUN /opt/miniconda/envs/amlenv/bin/pip install torch torchvision --index-url https://download.pytorch.org/whl/cu113 && \
+    /opt/miniconda/envs/amlenv/bin/pip install -r "$BUILD_DIR/requirements.txt" && \
 /opt/miniconda/envs/amlenv/bin/pip install azureml-inference-server-http && \
 /opt/miniconda/envs/amlenv/bin/pip install git+https://github.com/microsoft/DeepSpeed.git && \
 /opt/miniconda/envs/amlenv/bin/pip install git+https://github.com/microsoft/DeepSpeed-MII.git && \
mii/api.py (1 change: 1 addition & 0 deletions)

@@ -50,6 +50,7 @@ def _parse_kwargs_to_model_config(

     # Create the ModelConfig object and return it with remaining kwargs
     model_config = ModelConfig(**model_config)
+
     return model_config, remaining_kwargs
mii/config.py (13 changes: 13 additions & 0 deletions)

@@ -131,6 +131,12 @@ class ModelConfig(MIIConfigModel):
     `inference_engine_config`.
     """

+    quantization_mode: Optional[str] = None
+    """
+    The quantization mode in string format. The supported modes are as follows:
+        - 'wf6af16', weight-only quantization with FP6 weight and FP16 activation.
+    """
+
     inference_engine_config: RaggedInferenceEngineConfig = {}
     """
     DeepSpeed inference engine config. This is automatically generated, but you

@@ -210,6 +216,13 @@ def propagate_tp_size(cls, values: Dict[str, Any]) -> Dict[str, Any]:
         values.get("inference_engine_config").tensor_parallel.tp_size = tensor_parallel
         return values

+    @root_validator
+    def propagate_quantization_mode(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+        quantization_mode = values.get("quantization_mode")
+        values.get(
+            "inference_engine_config").quantization.quantization_mode = quantization_mode
+        return values
+
     @root_validator
     def check_replica_config(cls, values: Dict[str, Any]) -> Dict[str, Any]:
         num_replica_config = len(values.get("replica_configs"))
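For context, a minimal usage sketch of the new option (not part of this commit): because quantization_mode is a plain ModelConfig field and _parse_kwargs_to_model_config in mii/api.py forwards matching keyword arguments into ModelConfig, it should be reachable as a keyword on the public entry points. The mii.pipeline call, the model name, and max_new_tokens below are illustrative assumptions; only the 'wf6af16' mode string comes from the docstring added above.

import mii

# Hypothetical example: request FP6 weight-only quantization via the new
# ModelConfig field; the root validator added in this commit copies it into
# inference_engine_config.quantization.quantization_mode.
pipe = mii.pipeline("mistralai/Mistral-7B-v0.1", quantization_mode="wf6af16")
response = pipe(["DeepSpeed is"], max_new_tokens=64)
print(response)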
mii/legacy/aml_related/templates.py (3 changes: 2 additions & 1 deletion)

@@ -162,7 +162,8 @@
 CONDA_DEFAULT_ENV=amlenv \
 PATH=$PATH:/usr/local/cuda/bin
-RUN /opt/miniconda/envs/amlenv/bin/pip install -r "$BUILD_DIR/requirements.txt" --extra-index-url https://download.pytorch.org/whl/cu113 && \
+RUN /opt/miniconda/envs/amlenv/bin/pip install torch torchvision --index-url https://download.pytorch.org/whl/cu113 && \
+    /opt/miniconda/envs/amlenv/bin/pip install -r "$BUILD_DIR/requirements.txt" && \
 /opt/miniconda/envs/amlenv/bin/pip install azureml-inference-server-http && \
 /opt/miniconda/envs/amlenv/bin/pip install git+https://github.com/microsoft/DeepSpeed.git && \
 /opt/miniconda/envs/amlenv/bin/pip install git+https://github.com/microsoft/DeepSpeed-MII.git && \
requirements/requirements.txt (4 changes: 2 additions & 2 deletions)

@@ -1,14 +1,14 @@
 asyncio
-deepspeed>=0.13.0
+deepspeed>=0.14.0
 deepspeed-kernels
 Flask-RESTful
 grpcio
 grpcio-tools
 Pillow
 pydantic
 pyzmq
 safetensors
 torch
 transformers
 ujson
 Werkzeug
 zmq
version.txt (2 changes: 1 addition & 1 deletion)

@@ -1 +1 @@
-0.2.3
+0.2.4
