Skip to content

Commit

Permalink
Fixes to allow auto-formatting (#804)
Browse files Browse the repository at this point in the history
co-author @mvertens
  • Loading branch information
DeniseWorthen authored Sep 26, 2022
1 parent c6408fa commit 6b630ed
Show file tree
Hide file tree
Showing 31 changed files with 621 additions and 589 deletions.
4 changes: 2 additions & 2 deletions model/src/PDLIB/yowelementpool.F90
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ function belongTo(ele_in, rank)
belongTo = .true.
END IF
END DO
end function
end function belongTo


subroutine finalizeElementpool()
Expand All @@ -104,5 +104,5 @@ subroutine finalizeElementpool()
if(allocated(INE)) deallocate(INE)
!if(allocated(INE_global)) deallocate(INE_global)
if(allocated(ielg)) deallocate(ielg)
end subroutine
end subroutine finalizeElementpool
end module yowElementpool
4 changes: 2 additions & 2 deletions model/src/PDLIB/yowerr.F90
Original file line number Diff line number Diff line change
Expand Up @@ -158,7 +158,7 @@ subroutine abort(string, line, file, errno)
write(*,*)
stop

end subroutine
end subroutine abort

!> print warning
!> Call this to print a warning string and optional line number, and file
Expand Down Expand Up @@ -216,5 +216,5 @@ subroutine warn(string, line, file)
endif

write(*,*)
end subroutine
end subroutine warn
end module yowerr
20 changes: 10 additions & 10 deletions model/src/PDLIB/yowexchangeModule.F90
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ module yowExchangeModule
procedure :: finalize
procedure :: createMPIType

end type
end type t_neighborDomain

!> Knows for all domains neighbors, which node we must send or revc from neighbor domains
!> from 1 to nConnDomains
Expand Down Expand Up @@ -141,7 +141,7 @@ subroutine finalize(this)
if(ierr /= MPI_SUCCESS) CALL PARALLEL_ABORT("freeMPItype", ierr)
call mpi_type_free(this%p2DRrecvType2, ierr)
if(ierr /= MPI_SUCCESS) CALL PARALLEL_ABORT("freeMPItype", ierr)
end subroutine
end subroutine finalize

! create MPI indexed datatype for this neighborDomain
subroutine createMPIType(this)
Expand Down Expand Up @@ -215,7 +215,7 @@ subroutine createMPIType(this)
if(ierr /= MPI_SUCCESS) CALL PARALLEL_ABORT("createMPIType", ierr)


end subroutine
end subroutine createMPIType

subroutine initNbrDomains(nConnD)
use yowerr
Expand All @@ -227,7 +227,7 @@ subroutine initNbrDomains(nConnD)
nConnDomains = nConnD
allocate(neighborDomains(nConnDomains), stat=stat)
if(stat/=0) CALL ABORT('neighborDomains allocation failure')
end subroutine
end subroutine initNbrDomains

subroutine createMPITypes()
implicit none
Expand All @@ -236,7 +236,7 @@ subroutine createMPITypes()
do i=1, nConnDomains
call neighborDomains(i)%createMPIType()
end do
end subroutine
end subroutine createMPITypes

!> exchange values in U.
!> \param[inout] U array with values to exchange. np+ng long.
Expand Down Expand Up @@ -289,7 +289,7 @@ subroutine PDLIB_exchange1Dreal(U)
if(ierr/=MPI_SUCCESS) CALL PARALLEL_ABORT("waitall", ierr)
call mpi_waitall(nConnDomains, sendRqst, sendStat,ierr)
if(ierr/=MPI_SUCCESS) CALL PARALLEL_ABORT("waitall", ierr)
end subroutine
end subroutine PDLIB_exchange1Dreal


!> \overload PDLIB_exchange1Dreal
Expand Down Expand Up @@ -362,7 +362,7 @@ subroutine PDLIB_exchange2Dreal(U)
WRITE(740+IAPROC,*) 'PDLIB_exchange2Dreal, step 12'
FLUSH(740+IAPROC)
#endif
end subroutine
end subroutine PDLIB_exchange2Dreal


!> set the size of the second and third dimension for exchange
Expand All @@ -384,7 +384,7 @@ subroutine finalizeExchangeModule()
end do
deallocate(neighborDomains)
endif
end subroutine
end subroutine finalizeExchangeModule
!> exchange values in U.
!> \param[inout] U array with values to exchange. np+ng+1 long.
!> U[0:npa] Send values from U(1:np) to other threads.
Expand Down Expand Up @@ -458,7 +458,7 @@ subroutine PDLIB_exchange1Dreal_zero(U)
if(ierr/=MPI_SUCCESS) CALL PARALLEL_ABORT("waitall", ierr)
call mpi_waitall(nConnDomains, sendRqst, sendStat,ierr)
if(ierr/=MPI_SUCCESS) CALL PARALLEL_ABORT("waitall", ierr)
end subroutine
end subroutine PDLIB_exchange1Dreal_zero
!> \note MPI recv tag: 30001 + MPI rank
!> \note MPI send tag: 30001 + neighbor MPI rank
subroutine PDLIB_exchange2Dreal_zero(U)
Expand Down Expand Up @@ -533,7 +533,7 @@ subroutine PDLIB_exchange2Dreal_zero(U)
if(ierr/=MPI_SUCCESS) CALL PARALLEL_ABORT("waitall", ierr)
call mpi_waitall(nConnDomains, sendRqst, sendStat,ierr)
if(ierr/=MPI_SUCCESS) CALL PARALLEL_ABORT("waitall", ierr)
end subroutine
end subroutine PDLIB_exchange2Dreal_zero


end module yowExchangeModule
8 changes: 4 additions & 4 deletions model/src/PDLIB/yowfunction.F90
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ SUBROUTINE PDLIB_ABORT(istat)
integer, intent(in) :: istat
Print *, 'Error with istat=', istat
CALL ABORT
END SUBROUTINE
END SUBROUTINE PDLIB_ABORT
!**********************************************************************
!* *
!**********************************************************************
Expand Down Expand Up @@ -187,7 +187,7 @@ SUBROUTINE ComputeListNP_ListNPA_ListIPLG_Kernel
WRITE(740+IAPROC,*) 'ComputeListNP_ListNPA_Kernel, step 8'
FLUSH(740+IAPROC)
#endif
END SUBROUTINE
END SUBROUTINE ComputeListNP_ListNPA_ListIPLG_Kernel
!**********************************************************************
!* *
!**********************************************************************
Expand Down Expand Up @@ -275,7 +275,7 @@ SUBROUTINE ComputeListNP_ListNPA_ListIPLG
FLUSH(740+IAPROC)
#endif
END IF
END SUBROUTINE
END SUBROUTINE ComputeListNP_ListNPA_ListIPLG
!**********************************************************************
!* *
!**********************************************************************
Expand All @@ -301,5 +301,5 @@ SUBROUTINE ComputeBoundaryInformation
END DO
NbSend(IPROC)=eSend
END DO
END SUBROUTINE
END SUBROUTINE ComputeBoundaryInformation
end module yowfunction
12 changes: 6 additions & 6 deletions model/src/PDLIB/yownodepool.F90
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ module yowNodepool

!> returns true if this node is a ghost node
procedure :: isGhost
end type
end type t_Node

!> coordinates of the local + ghost nodes. range [1:npa]
real(rkind), public, target, allocatable :: x(:), y(:), z(:)
Expand Down Expand Up @@ -149,7 +149,7 @@ function connNodes(this, i)
integer, intent(in) :: i
type(t_Node), pointer :: connNodes
connNodes => nodes_global(connNodes_data(this%id_global, i))
end function
end function connNodes

!> return pointer to the (global) node from the local id.
!> This is in effekt iplg(id_local)
Expand All @@ -160,7 +160,7 @@ function nodes(id_local)
integer, intent(in) :: id_local
type(t_Node), pointer :: nodes
nodes => nodes_global(iplg(id_local))
end function
end function nodes

!> return pointer to the (global) (ghost) node
!> Ghost nodes are nodes in the global node array, with the particularity
Expand All @@ -173,7 +173,7 @@ function ghosts(id)
integer, intent(in) :: id
type(t_Node), pointer :: ghosts
ghosts => nodes_global(ghostlg(id))
end function
end function ghosts

!> Insert a node number to the end of the conntected node array
!> \param index optional - node number to insert. If it not present, just increas temporarily array lenght for later allocation
Expand Down Expand Up @@ -214,7 +214,7 @@ function isGhost(this)
else
isGhost = .true.
endif
end function
end function isGhost

subroutine finalizeNodepool()
implicit none
Expand All @@ -230,5 +230,5 @@ subroutine finalizeNodepool()
if(allocated(ghostgl)) deallocate(ghostgl)
if(allocated(np_perProc)) deallocate(np_perProc)
if(allocated(np_perProcSum)) deallocate(np_perProcSum)
end subroutine
end subroutine finalizeNodepool
end module yowNodepool
18 changes: 9 additions & 9 deletions model/src/PDLIB/yowpdlibmain.F90
Original file line number Diff line number Diff line change
Expand Up @@ -249,7 +249,7 @@ SUBROUTINE REAL_MPI_BARRIER_PDLIB(TheComm, string)
CALL MPI_RECV(iField, 1, MPI_INTEGER, 0, 712, TheComm, istatus, ierr)
END IF
! Print *, 'Passing barrier string=', string
END SUBROUTINE
END SUBROUTINE REAL_MPI_BARRIER_PDLIB
!--------------------------------------------------------------------------
! Init MPI
!--------------------------------------------------------------------------
Expand Down Expand Up @@ -458,7 +458,7 @@ subroutine findConnNodes(INE_global)
node => nodes_global(i)
ns_global = ns_global + node%nConnNodes
end do
end subroutine
end subroutine findConnNodes


!------------------------------------------------------------------------
Expand Down Expand Up @@ -993,7 +993,7 @@ subroutine findGhostNodes
end do

iplg(np+1: npa) = ghostlg(1:ng)
end subroutine
end subroutine findGhostNodes
!-------------------------------------------------------------------------------
! find the number of connected domains and their ghosts
!-------------------------------------------------------------------------------
Expand Down Expand Up @@ -1336,7 +1336,7 @@ subroutine postPartition2(INE_global)
z(np+i) = zb(IP_glob)
end do

end subroutine
end subroutine postPartition2
!**********************************************************************
!* *
!**********************************************************************
Expand Down Expand Up @@ -1401,7 +1401,7 @@ subroutine ComputeTRIA_IEN_SI_CCON
PDLIB_TRIA03(IE) = TRIA03
ENDDO
CALL PDLIB_exchange1Dreal(PDLIB_SI)
end subroutine
end subroutine ComputeTRIA_IEN_SI_CCON
!**********************************************************************
!* *
!**********************************************************************
Expand All @@ -1420,7 +1420,7 @@ subroutine ELEMENT_CROSSES_DATELINE(RX1, RX2, RX3, CROSSES_DATELINE)
! if R1GT180+R2GT180+R3GT180 .eq. 1 the element contains the pole
! if R1GT180+R2GT180+R3GT180 .eq. 2 the element crosses the dateline
CROSSES_DATELINE = R1GT180+R2GT180+R3GT180 .EQ. 2
end subroutine
end subroutine ELEMENT_CROSSES_DATELINE
!**********************************************************************
!* *
!**********************************************************************
Expand All @@ -1436,7 +1436,7 @@ subroutine CORRECT_DX_GT180(DXP)
IF (DXP .ge. 180) THEN
DXP=DXP - 360
END IF
end subroutine
end subroutine CORRECT_DX_GT180
!**********************************************************************
!* *
!**********************************************************************
Expand Down Expand Up @@ -1630,7 +1630,7 @@ subroutine ComputeIA_JA_POSI_NNZ
END DO
END DO
deallocate(PTABLE)
end subroutine
end subroutine ComputeIA_JA_POSI_NNZ
!**********************************************************************
!* *
!**********************************************************************
Expand All @@ -1645,7 +1645,7 @@ subroutine finalizePD()
call finalizeExchangeModule()
call finalizeElementpool()
call finalizeNodepool()
end subroutine
end subroutine finalizePD

end module yowpdlibMain
!**********************************************************************
Expand Down
12 changes: 6 additions & 6 deletions model/src/PDLIB/yowrankModule.F90
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ module yowRankModule

!> global start node number for every thread
integer:: IStart = 0
end type
end type t_rank

!> Provides access to some information of all threads e.g. iplg
!> \note range [1:nTasks]
Expand All @@ -83,7 +83,7 @@ subroutine initRankModule()

call exchangeIPLG()
call calcISTART()
end subroutine
end subroutine initRankModule

!> send iplg from this thread to every neighbor thread
!> \internal
Expand Down Expand Up @@ -231,7 +231,7 @@ subroutine exchangeIPLG()
IPglob=rank(myrank+1)%iplg(J)
ipgl_npa(IPglob)=J
END DO
end subroutine
end subroutine exchangeIPLG

!> \internal
subroutine calcISTART()
Expand All @@ -243,7 +243,7 @@ subroutine calcISTART()
do ir=2, nTasks
rank(ir)%IStart = rank(ir-1)%IStart + rank(ir-1)%np
end do
end subroutine
end subroutine calcISTART

subroutine finalizeRankModule()
implicit none
Expand All @@ -255,5 +255,5 @@ subroutine finalizeRankModule()
end do
deallocate(rank)
endif
end subroutine
end module
end subroutine finalizeRankModule
end module yowRankModule
12 changes: 6 additions & 6 deletions model/src/constants.F90
Original file line number Diff line number Diff line change
Expand Up @@ -299,7 +299,7 @@ SUBROUTINE KZEONE(X, Y, RE0, IM0, RE1, IM1)
P2 = DSQRT(R2)
L = 2.106D0*P2 + 4.4D0
IF (P2.LT.8.0D-1) L = 2.129D0*P2 + 4.0D0
DO 20 N=1,INT(L)
DO N=1,INT(L)
P1 = N
P2 = N*N
R1 = RTERM
Expand All @@ -312,7 +312,7 @@ SUBROUTINE KZEONE(X, Y, RE0, IM0, RE1, IM1)
T1 = T1 + 0.5D0/P1
RE1 = RE1 + (T1*RTERM-T2*ITERM)/P1
IM1 = IM1 + (T1*ITERM+T2*RTERM)/P1
20 CONTINUE
END DO
R1 = X/R2 - 0.5D0*(X*RE1-Y*IM1)
R2 = -Y/R2 - 0.5D0*(X*IM1+Y*RE1)
P1 = DEXP(X)
Expand All @@ -334,7 +334,7 @@ SUBROUTINE KZEONE(X, Y, RE0, IM0, RE1, IM1)
IM0 = T1/P2
RE1 = 0.0D0
IM1 = 0.0D0
DO 40 N=2,8
DO N=2,8
T2 = X2 + TSQ(N)
P1 = DSQRT(T2*T2+R1)
P2 = DSQRT(P1+T2)
Expand All @@ -344,7 +344,7 @@ SUBROUTINE KZEONE(X, Y, RE0, IM0, RE1, IM1)
T1 = EXSQ(N)*TSQ(N)
RE1 = RE1 + T1*P2
IM1 = IM1 + T1/P2
40 CONTINUE
END DO
T2 = -Y2*IM0
RE1 = RE1/R2
R2 = Y2*IM1/R2
Expand Down Expand Up @@ -374,7 +374,7 @@ SUBROUTINE KZEONE(X, Y, RE0, IM0, RE1, IM1)
R2 = 1.0D0
M = -8
K = 3
DO 60 N=1,INT(L)
DO N=1,INT(L)
M = M + 8
K = K - M
R1 = FLOAT(K-4)*R1
Expand All @@ -387,7 +387,7 @@ SUBROUTINE KZEONE(X, Y, RE0, IM0, RE1, IM1)
IM0 = IM0 + R1*ITERM
RE1 = RE1 + R2*RTERM
IM1 = IM1 + R2*ITERM
60 CONTINUE
END DO
T1 = DSQRT(P2+X)
T2 = -Y/T1
P1 = 8.86226925452758D-1/P2
Expand Down
2 changes: 1 addition & 1 deletion model/src/ctest.F90
Original file line number Diff line number Diff line change
Expand Up @@ -42,4 +42,4 @@ SUBROUTINE CTEST
!/
!/ End of CTEST ----------------------------------------------------- /
!/
END SUBROUTINE
END SUBROUTINE CTEST
Loading

0 comments on commit 6b630ed

Please sign in to comment.