Merge pull request #5 from NVIDIA/bbonev/0.1.0
Addressing logic in the big skip connection
bonevbs authored Jan 22, 2024
2 parents 94d9e45 + 6cc4b10 · commit 0218658
Showing 2 changed files with 5 additions and 5 deletions.
makani/__init__.py (1 addition & 1 deletion)

@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__version__ = "0.1.0a1"
+__version__ = "0.1.0"
 
 from .utils.trainer import Trainer
 from .utils.inferencer import Inferencer
makani/models/networks/sfnonet.py (4 additions & 4 deletions)

@@ -460,10 +460,10 @@ def __init__(

         # output transform
         if self.big_skip:
-            self.residual_transform = nn.Conv2d(self.out_chans, self.out_chans, 1, bias=False)
+            self.residual_transform = nn.Conv2d(self.inp_chans, self.out_chans, 1, bias=False)
             self.residual_transform.weight.is_shared_mp = ["spatial"]
             self.residual_transform.weight.sharded_dims_mp = [None, None, None, None]
-            scale = math.sqrt(0.5 / self.out_chans)
+            scale = math.sqrt(0.5 / self.inp_chans)
             nn.init.normal_(self.residual_transform.weight, mean=0.0, std=scale)
 
         # learned position embedding
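
In words: the 1x1 skip projection now maps all inp_chans input channels to the out_chans outputs, and its init scale uses the conv's true fan-in (inp_chans). Below is a minimal stand-alone sketch of that wiring; the channel counts are hypothetical, not taken from makani's configs.

import math

import torch
import torch.nn as nn

inp_chans, out_chans = 73, 69  # hypothetical channel counts for illustration

# 1x1 conv projects the full input (inp_chans) down to the predicted
# channels (out_chans), so the big skip no longer assumes the first
# out_chans input channels line up with the outputs
residual_transform = nn.Conv2d(inp_chans, out_chans, 1, bias=False)

# fan-in scaled init: the std now depends on inp_chans, the conv's fan-in
scale = math.sqrt(0.5 / inp_chans)
nn.init.normal_(residual_transform.weight, mean=0.0, std=scale)

x = torch.randn(2, inp_chans, 32, 64)  # (batch, channels, lat, lon)
assert residual_transform(x).shape == (2, out_chans, 32, 64)
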
@@ -591,15 +591,15 @@ def forward(self, x):
             if self.out_shape != self.inp_shape:
                 xtype = x.dtype
                 # only take the predicted channels as residual
-                residual = x[..., : self.out_chans, :, :].to(torch.float32)
+                residual = x.to(torch.float32)
                 with amp.autocast(enabled=False):
                     residual = self.trans_down(residual)
                     residual = residual.contiguous()
                     residual = self.itrans_up(residual)
                     residual = residual.to(dtype=xtype)
             else:
                 # only take the predicted channels
-                residual = x[..., : self.out_chans, :, :].contiguous()
+                residual = x
 
         if comm.get_size("fin") > 1:
             x = scatter_to_parallel_region(x, 1, "fin")
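
For context, here is a stand-alone sketch of the residual path this hunk produces: the full input tensor (all channels) is used as the residual, and channel selection is deferred to the 1x1 residual_transform above. The function name big_skip_residual and the identity lambdas standing in for self.trans_down / self.itrans_up (makani's grid resampling transforms) are illustrative, not makani API; the amp import assumes the torch.cuda.amp alias matches the one used in sfnonet.py.

import torch
from torch.cuda import amp

def big_skip_residual(x, inp_shape, out_shape,
                      trans_down=lambda t: t, itrans_up=lambda t: t):
    # hypothetical free-standing version of the fixed residual logic
    if out_shape != inp_shape:
        xtype = x.dtype
        # resample the full input (all channels) to the output grid in fp32
        residual = x.to(torch.float32)
        with amp.autocast(enabled=False):
            residual = trans_down(residual)
            residual = residual.contiguous()
            residual = itrans_up(residual)
            residual = residual.to(dtype=xtype)
    else:
        # grids match, so the full input serves as the residual directly
        residual = x
    return residual

x = torch.randn(2, 73, 32, 64)  # (batch, channels, lat, lon)
assert big_skip_residual(x, (32, 64), (32, 64)).shape == x.shape
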
