Skip to content

Commit

Permalink
Merge branch 'main' into log_config
Browse files Browse the repository at this point in the history
  • Loading branch information
RdoubleA committed Sep 6, 2024
2 parents 0383a73 + 31a95a9 commit 0b586b1
Show file tree
Hide file tree
Showing 10 changed files with 33 additions and 16 deletions.
13 changes: 13 additions & 0 deletions docs/source/api_ref_generation.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
.. _generation:

====================
torchtune.generation
====================

.. currentmodule:: torchtune.generation

.. autosummary::
:toctree: generated/
:nosignatures:

generate
1 change: 0 additions & 1 deletion docs/source/api_ref_utilities.rst
Original file line number Diff line number Diff line change
Expand Up @@ -16,5 +16,4 @@ Miscellaneous

get_device
get_logger
generate
torch_version_ge
1 change: 1 addition & 0 deletions docs/source/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,7 @@ torchtune tutorials.
api_ref_config
api_ref_data
api_ref_datasets
api_ref_generation
api_ref_models
api_ref_modules
api_ref_training
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,9 @@
import torch

from tests.test_utils import fixed_init_model
from torchtune.generation._generation import generate, sample

from torchtune import utils
from torchtune.models.llama2 import llama2
from torchtune.utils._generation import sample


class TestTextGenerate:
Expand Down Expand Up @@ -119,7 +118,7 @@ def test_reproducibility(self, request, model1, model2, prompt):
top_k = 100

torch.manual_seed(42)
outputs_first = utils.generate(
outputs_first = generate(
model=model1,
prompt=prompt,
max_generated_tokens=10,
Expand All @@ -128,7 +127,7 @@ def test_reproducibility(self, request, model1, model2, prompt):
)

torch.manual_seed(42)
outputs_second = utils.generate(
outputs_second = generate(
model=model2,
prompt=prompt,
max_generated_tokens=10,
Expand All @@ -145,7 +144,7 @@ def test_batched_generate(self, generation_model_batched, prompt_tokens_batched)

torch.manual_seed(42)

output = utils.generate(
output = generate(
model=generation_model_batched,
prompt=prompt_tokens_batched,
max_generated_tokens=10,
Expand Down Expand Up @@ -215,7 +214,7 @@ def test_stop_tokens(self, generation_model, prompt_tokens):

torch.manual_seed(42)

outputs = utils.generate(
outputs = generate(
model=generation_model,
prompt=prompt_tokens,
max_generated_tokens=10,
Expand All @@ -242,7 +241,7 @@ def test_stop_tokens_batched(self, generation_model_batched, prompt_tokens_batch

torch.manual_seed(42)

outputs = utils.generate(
outputs = generate(
model=generation_model_batched,
prompt=prompt_tokens_batched,
max_generated_tokens=10,
Expand Down Expand Up @@ -275,7 +274,7 @@ def test_stop_tokens_batched_uneven_stopping(

torch.manual_seed(42)

outputs = utils.generate(
outputs = generate(
model=generation_model_batched,
prompt=prompt_tokens_batched,
max_generated_tokens=10,
Expand Down
2 changes: 1 addition & 1 deletion tests/torchtune/modules/rlhf/test_generation.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,9 @@

import torch
from tests.test_utils import fixed_init_model
from torchtune.generation._generation import sample
from torchtune.models.llama2 import llama2
from torchtune.modules import rlhf
from torchtune.utils._generation import sample


class TestGenerateNextTokenWithLogits:
Expand Down
4 changes: 2 additions & 2 deletions torchtune/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,6 @@
"""
) from e

from torchtune import datasets, models, modules, utils
from torchtune import datasets, generation, models, modules, utils

__all__ = [datasets, models, modules, utils]
__all__ = [datasets, models, modules, utils, generation]
9 changes: 9 additions & 0 deletions torchtune/generation/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

"""Public entry point for ``torchtune.generation``.

Re-exports :func:`generate` and :func:`generate_next_token` from the
private ``_generation`` submodule so callers can import them from the
package root. ``__all__`` restricts star-imports to these two names.
"""

from ._generation import generate, generate_next_token

__all__ = ["generate", "generate_next_token"]
File renamed without changes.
1 change: 0 additions & 1 deletion torchtune/training/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

from torchtune.training._distributed import (
contains_fsdp,
FSDPPolicyType,
Expand Down
3 changes: 0 additions & 3 deletions torchtune/utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
# LICENSE file in the root directory of this source tree.

from ._device import get_device
from ._generation import generate, generate_next_token

from ._version import torch_version_ge
from .logging import get_logger
Expand All @@ -14,6 +13,4 @@
"get_device",
"get_logger",
"torch_version_ge",
"generate",
"generate_next_token",
]

0 comments on commit 0b586b1

Please sign in to comment.