Remove Apex dependency if not using norm (#10468)
Signed-off-by: Chen Cui <chcui@nvidia.com>
Co-authored-by: Pablo Garay <palenq@gmail.com>
cuichenx and pablo-garay committed Sep 16, 2024
1 parent b5798de · commit 9621be2
Showing 1 changed file with 1 addition and 3 deletions.
@@ -157,9 +157,6 @@ def __init__(
         **kwargs,
     ):
         super().__init__()
-        if not HAVE_APEX:
-            logging.info("Apex is required to use ParallelLinearAdapters.")
-            raise RuntimeError("ParallelLinearAdapter can not run without Apex.")
         if not HAVE_MEGATRON_CORE:
             logging.info("Megatron-core is required to use ParallelLinearAdapters.")
             raise RuntimeError("ParallelLinearAdapter can not run without Megatron-core.")
@@ -227,6 +224,7 @@ def __init__(
         if self.norm_position in ["pre", "post"]:
             ln_features = in_features if self.norm_position == "pre" else out_features
             if norm_type == 'mixedfusedlayernorm':
+                assert HAVE_APEX, "Apex is required to use MixedFusedLayerNorm"
                 self.layer_norm = MixedFusedLayerNorm(ln_features, 1e-5, sequence_parallel_enbaled=False)
             elif norm_type == 'layernorm':
                 self.layer_norm = nn.LayerNorm(ln_features)
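For context: the constructor-level RuntimeError is replaced by a point-of-use assert, so Apex is required only when norm_type is 'mixedfusedlayernorm'. Below is a minimal sketch of the guarded-import pattern that typically sets a flag like HAVE_APEX; the exact import path is an assumption for illustration, not the repository's verbatim code.

# Guarded optional import: HAVE_APEX records whether Apex is installed.
# The import path below is an assumption for illustration.
try:
    from apex.normalization import MixedFusedLayerNorm

    HAVE_APEX = True
except (ImportError, ModuleNotFoundError):
    HAVE_APEX = False

# Apex availability is then checked only where it is actually needed,
# as in the second hunk above:
#     assert HAVE_APEX, "Apex is required to use MixedFusedLayerNorm"

This pattern lets the module import cleanly without Apex and defers the failure to the one code path that genuinely depends on it.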
