Add test for deepseek_vl
meenakshiramanathan1 committed Feb 7, 2025
1 parent 8d30bab commit 2dc0a52
Showing 15 changed files with 2,925 additions and 0 deletions.
1 change: 1 addition & 0 deletions .gitattributes
@@ -0,0 +1 @@
forge/test/models/pytorch/multimodal/deepseek/image/training_pipelines.jpg filter=lfs diff=lfs merge=lfs -text
2 changes: 2 additions & 0 deletions env/core_requirements.txt
@@ -51,3 +51,5 @@ pytorch_forecasting==1.0.0
patool
openpyxl==3.1.5
GitPython==3.1.44
dotmap
einops
46 changes: 46 additions & 0 deletions forge/test/models/pytorch/multimodal/deepseek/test_deepseek_vl.py
@@ -0,0 +1,46 @@
# SPDX-FileCopyrightText: © 2024 Tenstorrent AI ULC
#
# SPDX-License-Identifier: Apache-2.0
import subprocess

import pytest

import forge

# Install timm (required by the DeepSeek-VL model utilities) before importing them.
subprocess.run(["pip", "install", "timm>=0.9.16"])
from test.models.pytorch.multimodal.deepseek.utils.models import (
generate_model_deepseek_vl_pytorch,
generation,
)
from test.models.utils import Framework, Source, Task, build_module_name


@pytest.mark.parametrize("variant", ["deepseek-ai/deepseek-vl-1.3b-base"])
def test_deepseek_vl_no_cache_cpu_pytorch(record_forge_property, variant):

framework_model, vl_gpt, tokenizer, inputs_embeds = generate_model_deepseek_vl_pytorch(variant)
answer = generation(
max_new_tokens=512, model=framework_model, inputs_embeds=inputs_embeds, tokenizer=tokenizer, vl_gpt=vl_gpt
)

print(f"{prepare_inputs['sft_format'][0]}", answer)


@pytest.mark.parametrize("variant", ["deepseek-ai/deepseek-vl-1.3b-base"])
def test_deepseek_vl_pytorch(record_forge_property, variant):

# Build Module Name
module_name = build_module_name(
framework=Framework.PYTORCH, model="deepseek", variant=variant, task=Task.QA, source=Source.HUGGINGFACE
)

# Record Forge Property
record_forge_property("model_name", module_name)

framework_model, vl_gpt, tokenizer, inputs_embeds = generate_model_deepseek_vl_pytorch(variant)
compiled_model = forge.compile(framework_model, sample_inputs=[inputs_embeds], module_name=module_name)
answer = generation(
max_new_tokens=1, model=compiled_model, inputs_embeds=inputs_embeds, tokenizer=tokenizer, vl_gpt=vl_gpt
)

print(f"{prepare_inputs['sft_format'][0]}", answer)
