MPI and OpenMP memory allocation optimisations #262

Merged: 31 commits, Mar 23, 2022

Commits (31)
e260ba7
rename mpi_stack to mpi_memory
conradtchan Mar 10, 2022
0a420c6
call mpi memory allocation from main memory subroutine
conradtchan Mar 10, 2022
a4157cd
set mpi "stacksize" dynamically
conradtchan Mar 10, 2022
b296377
allocate memory for omp cache, instead of using threadprivate variable
dliptai Mar 10, 2022
14b2cbf
Merge branch 'memory-optimisation' of github.com:ADACS-Australia/phan…
dliptai Mar 10, 2022
8aa2abc
get minpart from options rather than hardcoding 10
conradtchan Mar 10, 2022
585c24f
remove remote_export from cell
conradtchan Mar 10, 2022
0672c87
remove maxprocs parameter and set arrays based on nprocs at runtime
conradtchan Mar 10, 2022
0f80e28
bugfix: 585c24ff09014fb7934582229dca877ed007b87f broke the message tag
conradtchan Mar 10, 2022
37b7cbf
amend previous commit
conradtchan Mar 10, 2022
a39c846
remove maxprocs from config
conradtchan Mar 10, 2022
25e4434
fix serial compile errors
conradtchan Mar 11, 2022
82a3284
Revert b296377762a9cf786f17ea5f69d25e711a771dc2
conradtchan Mar 11, 2022
9d9754f
decrease xyzcache size to 10000
conradtchan Mar 11, 2022
9d782f1
fix error caused by revert commit
conradtchan Mar 11, 2022
900fae2
automatically expand mpi stacks as required
conradtchan Mar 11, 2022
6e171e0
add deallocate subroutines for mpi arrays
conradtchan Mar 11, 2022
b8ef9c3
mpi stack allocation fix
conradtchan Mar 11, 2022
ba30600
Merge branch 'master' into memory-optimisation
conradtchan Mar 11, 2022
2d9615d
print MPI id when increasing stacksize
conradtchan Mar 16, 2022
7a09cb7
bugfix: reallocated MPI stacks had the wrong memory address
conradtchan Mar 16, 2022
8f5e8f0
Merge branch 'master' into memory-optimisation
conradtchan Mar 17, 2022
da6bf17
add check for if global node refinement exceeds ncellsmax
conradtchan Mar 17, 2022
cbfacac
safety factor of 4 for MPI stack size
conradtchan Mar 17, 2022
5537198
move dynamic mem calculation inside allocate_memory subroutine
conradtchan Mar 17, 2022
167a189
remove unused import
conradtchan Mar 17, 2022
6c25c8a
allocate a larger array for the global tree
conradtchan Mar 18, 2022
4c981be
change allocate_memory safety factor from 8 to 4
conradtchan Mar 18, 2022
eca2545
fix integer type mismatches
conradtchan Mar 18, 2022
a973b88
Merge remote-tracking branch 'origin/master' into memory-optimisation
conradtchan Mar 20, 2022
030e6d6
Initialise dt_in and twas to zero
conradtchan Mar 23, 2022
6 changes: 3 additions & 3 deletions build/Makefile
@@ -503,7 +503,7 @@ SOURCES= physcon.f90 ${CONFIG} ${SRCKERNEL} io.F90 units.f90 boundary.f90 \
centreofmass.f90 ${SRCPOT} damping.f90 checkconserved.f90 \
partinject.F90 utils_inject.f90 utils_filenames.f90 utils_summary.F90 ${SRCCHEM}\
${SRCDUST} dust_formation.F90 ptmass_radiation.F90 ptmass_heating.F90 \
mpi_dens.F90 mpi_force.F90 mpi_stack.F90 mpi_derivs.F90 kdtree.F90 linklist_kdtree.F90 ${SRCTURB} \
mpi_dens.F90 mpi_force.F90 mpi_memory.F90 mpi_derivs.F90 kdtree.F90 linklist_kdtree.F90 ${SRCTURB} \
${SRCPHOTO} ${SRCINJECT} ${SRCKROME} memory.F90 ${SRCREADWRITE_DUMPS} \
quitdump.f90 ptmass.F90 \
readwrite_infile.F90 dens.F90 force.F90 utils_deriv.f90 deriv.F90 energies.F90 sort_particles.F90 \
@@ -576,7 +576,7 @@ edit: checksetup
SRCDUMP= physcon.f90 ${CONFIG} ${SRCKERNEL} io.F90 units.f90 boundary.f90 mpi_utils.F90 \
utils_timing.f90 utils_infiles.f90 dtype_kdtree.f90 utils_allocate.f90 part.F90 ${DOMAIN} \
mpi_dens.F90 mpi_force.F90 \
mpi_balance.F90 mpi_stack.F90 mpi_derivs.F90 kdtree.F90 linklist_kdtree.F90 \
mpi_balance.F90 mpi_memory.F90 mpi_derivs.F90 kdtree.F90 linklist_kdtree.F90 \
utils_dumpfiles.f90 utils_vectors.f90 utils_mathfunc.f90 \
utils_datafiles.f90 utils_filenames.f90 utils_tables.f90 datafiles.f90 gitinfo.f90 \
centreofmass.f90 \
@@ -1018,7 +1018,7 @@ SRCMULT = physcon.f90 ${CONFIG} ${SRCKERNEL} io.F90 mpi_utils.F90 utils_timing.f
utils_filenames.f90 utils_mathfunc.f90 utils_vectors.f90 utils_omp.F90 utils_datafiles.f90 datafiles.f90 utils_tables.f90 utils_infiles.f90 \
${SRCEOS} viscosity.f90 options.f90 damping.f90 utils_gravwave.f90 \
utils_dumpfiles.f90 utils_summary.f90 centreofmass.f90 \
${SRCCHEM} ${DOMAIN} ${SRCPOT} dust_formation.F90 ptmass_radiation.F90 mpi_balance.F90 mpi_stack.F90 mpi_force.F90 mpi_dens.F90 mpi_derivs.F90 kdtree.F90 linklist_kdtree.F90 ptmass.F90 ${SRCTURB} \
${SRCCHEM} ${DOMAIN} ${SRCPOT} dust_formation.F90 ptmass_radiation.F90 mpi_balance.F90 mpi_memory.F90 mpi_force.F90 mpi_dens.F90 mpi_derivs.F90 kdtree.F90 linklist_kdtree.F90 ptmass.F90 ${SRCTURB} \
checkconserved.f90 prompting.f90 ${SRCDUST} ${SRCNIMHD} readwrite_infile.f90 ${MULTIRUNFILE}
OBJM1 = ${SRCMULT:.f90=.o}
OBJMULT = ${OBJM1:.F90=.o}
24 changes: 12 additions & 12 deletions src/main/config.F90
@@ -77,7 +77,8 @@ module dim
#endif

! maxmimum storage in linklist
integer :: ncellsmax
integer :: ncellsmax
integer(kind=8) :: ncellsmaxglobal

!------
! Dust
@@ -152,18 +153,10 @@ module dim
radenxpartvecforce + &
maxxpartvecGR

! cell storage
integer, parameter :: maxprocs = 32
#ifdef STACKSIZE
integer, parameter :: stacksize = STACKSIZE
#else
#ifdef MPI
integer, parameter :: stacksize = 200000
logical, parameter :: mpi = .true.
#else
integer, parameter :: stacksize = 0
logical, parameter :: mpi = .false.
#endif
#endif

! storage for artificial viscosity switch
@@ -372,8 +365,9 @@ module dim

contains

subroutine update_max_sizes(n)
integer, intent(in) :: n
subroutine update_max_sizes(n,ntot)
integer, intent(in) :: n
integer(kind=8), optional, intent(in) :: ntot

maxp = n

@@ -386,9 +380,15 @@ subroutine update_max_sizes(n)
#endif

#ifdef NCELLSMAX
ncellsmax = NCELLSMAX
ncellsmax = NCELLSMAX
ncellsmaxglobal = NCELLSMAX
#else
ncellsmax = 2*maxp
if (present(ntot)) then
ncellsmaxglobal = 2*ntot
else
ncellsmaxglobal = ncellsmax
endif
#endif

#ifdef DUST
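
The config.F90 change above replaces the fixed maxprocs/stacksize parameters with runtime sizing: update_max_sizes now takes an optional global particle count, so the global-tree limit ncellsmaxglobal can be sized from the total across all MPI ranks rather than from the per-rank maximum. A minimal sketch of the new call pattern, using illustrative caller-side variables that are not part of this diff:

 subroutine set_array_limits(npart_local,ntot_global)
  use dim, only:update_max_sizes,ncellsmax,ncellsmaxglobal
  integer,         intent(in) :: npart_local   ! particles on this MPI rank
  integer(kind=8), intent(in) :: ntot_global   ! particles summed over all ranks

  ! local tree arrays are sized from the per-rank count,
  ! global tree arrays from the total across all ranks
  call update_max_sizes(npart_local,ntot=ntot_global)

  ! with NCELLSMAX undefined this gives ncellsmax = 2*npart_local
  ! and ncellsmaxglobal = 2*ntot_global (see the #else branch above)
 end subroutine set_array_limits
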
33 changes: 13 additions & 20 deletions src/main/dens.F90
@@ -16,7 +16,7 @@ module densityforce
! :Runtime parameters: None
!
! :Dependencies: boundary, dim, io, io_summary, kdtree, kernel, linklist,
! mpidens, mpiderivs, mpistack, mpiutils, options, part, timestep,
! mpidens, mpiderivs, mpimemory, mpiutils, options, part, timestep,
! timing, viscosity
!
use dim, only:maxdvdx,maxp,maxrhosum,maxdustlarge
@@ -99,7 +99,7 @@ module densityforce

!--kernel related parameters
!real, parameter :: cnormk = 1./pi, wab0 = 1., gradh0 = -3.*wab0, radkern2 = 4.0
integer, parameter :: isizecellcache = 50000
integer, parameter :: isizecellcache = 10000
integer, parameter :: isizeneighcache = 0
integer, parameter :: maxdensits = 50

@@ -127,10 +127,10 @@ subroutine densityiterate(icall,npart,nactive,xyzh,vxyzu,divcurlv,divcurlB,Bevol
use part, only:mhd,rhoh,dhdrho,rhoanddhdrho,ll,get_partinfo,iactive,&
hrho,iphase,igas,idust,iamgas,periodic,all_active,dustfrac
use mpiutils, only:reduceall_mpi,barrier_mpi,reduce_mpi,reduceall_mpi
use mpistack, only:reserve_stack,swap_stacks,reset_stacks
use mpistack, only:stack_remote => dens_stack_1
use mpistack, only:stack_waiting => dens_stack_2
use mpistack, only:stack_redo => dens_stack_3
use mpimemory, only:reserve_stack,swap_stacks,reset_stacks
use mpimemory, only:stack_remote => dens_stack_1
use mpimemory, only:stack_waiting => dens_stack_2
use mpimemory, only:stack_redo => dens_stack_3
use mpiderivs, only:send_cell,recv_cells,check_send_finished,init_cell_exchange,&
finish_cell_exchange,recv_while_wait,reset_cell_counters
use timestep, only:rhomaxnow
@@ -302,7 +302,6 @@ subroutine densityiterate(icall,npart,nactive,xyzh,vxyzu,divcurlv,divcurlB,Bevol
cell%owner = id
cell%nits = 0
cell%nneigh = 0
cell%remote_export(1:nprocs) = remote_export

call start_cell(cell,iphase,xyzh,vxyzu,fxyzu,fext,Bevol,rad)
call get_cell_location(icell,cell%xpos,cell%xsizei,cell%rcuti)
@@ -314,7 +313,7 @@ subroutine densityiterate(icall,npart,nactive,xyzh,vxyzu,divcurlv,divcurlB,Bevol
if (do_export) then
if (stack_waiting%n > 0) call check_send_finished(stack_remote,irequestsend,irequestrecv,xrecvbuf)
call reserve_stack(stack_waiting,cell%waiting_index) ! make a reservation on the stack
call send_cell(cell,0,irequestsend,xsendbuf) ! export the cell: direction 0 for exporting
call send_cell(cell,remote_export,irequestsend,xsendbuf) ! send the cell to remote
endif
!$omp end critical (send_and_recv_remote)
endif
@@ -336,14 +335,13 @@ subroutine densityiterate(icall,npart,nactive,xyzh,vxyzu,divcurlv,divcurlB,Bevol
call get_neighbour_list(-1,listneigh,nneigh,xyzh,xyzcache,isizecellcache,getj=.false., &
cell_xpos=cell%xpos,cell_xsizei=cell%xsizei,cell_rcuti=cell%rcuti, &
remote_export=remote_export)
cell%remote_export(1:nprocs) = remote_export

if (any(remote_export)) then
do_export = .true.
!$omp critical (send_and_recv_remote)
if (stack_waiting%n > 0) call check_send_finished(stack_remote,irequestsend,irequestrecv,xrecvbuf)
call reserve_stack(stack_waiting,cell%waiting_index)
call send_cell(cell,0,irequestsend,xsendbuf) ! direction export (0)
call send_cell(cell,remote_export,irequestsend,xsendbuf) ! send the cell to remote
!$omp end critical (send_and_recv_remote)
endif
nrelink = nrelink + 1
@@ -407,14 +405,14 @@ subroutine densityiterate(icall,npart,nactive,xyzh,vxyzu,divcurlv,divcurlB,Bevol

call compute_cell(cell,listneigh,nneigh,getdv,getdB,Bevol,xyzh,vxyzu,fxyzu,fext,xyzcache,rad)

cell%remote_export(id+1) = .false.
remote_export = .false.
remote_export(cell%owner+1) = .true. ! use remote_export array to send back to the owner

! communication happened while computing contributions to remote cells
!$omp critical (send_and_recv_waiting)
call recv_cells(stack_waiting,xrecvbuf,irequestrecv)
call check_send_finished(stack_waiting,irequestsend,irequestrecv,xrecvbuf)
! direction return (1)
call send_cell(cell,1,irequestsend,xsendbuf)
call send_cell(cell,remote_export,irequestsend,xsendbuf) ! send the cell back to owner
!$omp end critical (send_and_recv_waiting)
enddo over_remote
!$omp enddo
@@ -441,10 +439,6 @@ subroutine densityiterate(icall,npart,nactive,xyzh,vxyzu,divcurlv,divcurlB,Bevol
over_waiting: do i = 1, stack_waiting%n
cell = stack_waiting%cells(i)

if (any(cell%remote_export(1:nprocs))) then
call fatal('dens', 'not all results returned from remote processor')
endif

if (calculate_density) then
call finish_cell(cell,converged)
call compute_hmax(cell,redo_neighbours)
@@ -461,12 +455,11 @@ subroutine densityiterate(icall,npart,nactive,xyzh,vxyzu,divcurlv,divcurlB,Bevol
call get_neighbour_list(-1,listneigh,nneigh,xyzh,xyzcache,isizecellcache,getj=.false., &
cell_xpos=cell%xpos,cell_xsizei=cell%xsizei,cell_rcuti=cell%rcuti, &
remote_export=remote_export)
cell%remote_export(1:nprocs) = remote_export

!$omp critical (send_and_recv_remote)
call check_send_finished(stack_remote,irequestsend,irequestrecv,xrecvbuf)
call reserve_stack(stack_redo,cell%waiting_index)
! direction export (0)
call send_cell(cell,0,irequestsend,xsendbuf)
call send_cell(cell,remote_export,irequestsend,xsendbuf) ! send the cell to remote
!$omp end critical (send_and_recv_remote)
call compute_cell(cell,listneigh,nneigh,getdv,getdB,Bevol,xyzh,vxyzu,fxyzu,fext,xyzcache,rad)

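
The structural change in dens.F90 is that cells no longer carry a remote_export array; the destination mask is passed straight to send_cell, replacing the old integer direction argument (0 for export, 1 for return). A condensed sketch of the two calling patterns, using only names that appear in this diff (the send_cell interface itself lives in mpi_derivs.F90 and is not shown here):

 logical :: remote_export(nprocs)

 ! exporting: the neighbour search marks every rank that needs a copy of this cell
 if (any(remote_export)) then
    call reserve_stack(stack_waiting,cell%waiting_index)
    call send_cell(cell,remote_export,irequestsend,xsendbuf)   ! send to remotes
 endif

 ! returning: reuse the same mask, but target only the owning rank
 remote_export = .false.
 remote_export(cell%owner+1) = .true.
 call send_cell(cell,remote_export,irequestsend,xsendbuf)      ! send back to owner
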
46 changes: 12 additions & 34 deletions src/main/force.F90
@@ -41,9 +41,9 @@ module forces
!
! :Dependencies: boundary, cooling, dim, dust, eos, eos_shen, fastmath,
! growth, io, io_summary, kdtree, kernel, linklist, metric_tools,
! mpiderivs, mpiforce, mpistack, mpiutils, nicil, options, part, physcon,
! ptmass, ptmass_heating, radiation_utils, timestep, timestep_ind,
! timestep_sts, units, utils_gr, viscosity
! mpiderivs, mpiforce, mpimemory, mpiutils, nicil, omp_cache, options,
! part, physcon, ptmass, ptmass_heating, radiation_utils, timestep,
! timestep_ind, timestep_sts, units, utils_gr, viscosity
!
use dim, only:maxfsum,maxxpartveciforce,maxp,ndivcurlB,ndivcurlv,&
maxdusttypes,maxdustsmall,do_radiation
@@ -56,7 +56,7 @@ module forces
character(len=80), parameter, public :: & ! module version
modid="$Id$"

integer, parameter :: maxcellcache = 50000
integer, parameter :: maxcellcache = 10000

public :: force, reconstruct_dv ! latter to avoid compiler warning

@@ -220,9 +220,9 @@ subroutine force(icall,npart,xyzh,vxyzu,fxyzu,divcurlv,divcurlB,Bevol,dBevol,&
#endif
use mpiderivs, only:send_cell,recv_cells,check_send_finished,init_cell_exchange,&
finish_cell_exchange,recv_while_wait,reset_cell_counters
use mpistack, only:reserve_stack,reset_stacks
use mpistack, only:stack_remote => force_stack_1
use mpistack, only:stack_waiting => force_stack_2
use mpimemory, only:reserve_stack,reset_stacks
use mpimemory, only:stack_remote => force_stack_1
use mpimemory, only:stack_waiting => force_stack_2
use io_summary, only:iosumdtr
integer, intent(in) :: icall,npart
real, intent(in) :: xyzh(:,:)
@@ -456,7 +456,7 @@ subroutine force(icall,npart,xyzh,vxyzu,fxyzu,divcurlv,divcurlB,Bevol,dBevol,&
getj=.true.,f=cell%fgrav,remote_export=remote_export)

cell%owner = id
cell%remote_export(1:nprocs) = remote_export

do_export = any(remote_export)

if (mpi) then
Expand All @@ -465,7 +465,7 @@ subroutine force(icall,npart,xyzh,vxyzu,fxyzu,divcurlv,divcurlB,Bevol,dBevol,&
if (do_export) then
if (stack_waiting%n > 0) call check_send_finished(stack_remote,irequestsend,irequestrecv,xrecvbuf)
call reserve_stack(stack_waiting,cell%waiting_index)
call send_cell(cell,0,irequestsend,xsendbuf) ! export the cell: direction 0 for exporting
call send_cell(cell,remote_export,irequestsend,xsendbuf) ! send to remote
endif
!$omp end critical (send_and_recv_remote)
endif
@@ -522,12 +522,12 @@ subroutine force(icall,npart,xyzh,vxyzu,fxyzu,divcurlv,divcurlB,Bevol,dBevol,&
dustfrac,dustprop,gradh,ibinnow_m1,ibin_wake,stressmax,xyzcache,&
rad,radprop,dens,metrics)

cell%remote_export(id+1) = .false.

remote_export = .false.
remote_export(cell%owner+1) = .true. ! use remote_export array to send back to the owner
!$omp critical (send_and_recv_waiting)
call recv_cells(stack_waiting,xrecvbuf,irequestrecv)
call check_send_finished(stack_waiting,irequestsend,irequestrecv,xrecvbuf)
call send_cell(cell,1,irequestsend,xsendbuf)
call send_cell(cell,remote_export,irequestsend,xsendbuf) ! send the cell back to owner
!$omp end critical (send_and_recv_waiting)

enddo over_remote
@@ -553,10 +553,6 @@ subroutine force(icall,npart,xyzh,vxyzu,fxyzu,divcurlv,divcurlB,Bevol,dBevol,&
over_waiting: do i = 1, stack_waiting%n
cell = stack_waiting%cells(i)

if (any(cell%remote_export(1:nprocs))) then
call fatal('force', 'not all results returned from remote processor')
endif

call finish_cell_and_store_results(icall,cell,fxyzu,xyzh,vxyzu,poten,dt,dvdx, &
divBsymm,divcurlv,dBevol,ddustevol,deltav,dustgasprop, &
dtcourant,dtforce,dtvisc,dtohm,dthall,dtambi,dtdiff,dtmini,dtmaxi, &
@@ -2999,24 +2995,6 @@ subroutine finish_cell_and_store_results(icall,cell,fxyzu,xyzh,vxyzu,poten,dt,dv
enddo over_parts
end subroutine finish_cell_and_store_results

pure subroutine combine_cells(cella, cellb)
type(cellforce), intent(inout) :: cella
type(cellforce), intent(in) :: cellb

integer :: i

do i = 1,cella%npcell
cella%fsums(:,i) = cella%fsums(:,i) + cellb%fsums(:,i)
enddo

cella%ndrag = cella%ndrag + cellb%ndrag
cella%nstokes = cella%nstokes + cellb%nstokes
cella%nsuper = cella%nsuper + cellb%nsuper

cella%remote_export = (cella%remote_export .and. cellb%remote_export)

end subroutine combine_cells

!-----------------------------------------------------------------------------
!+
! Apply reconstruction to velocity gradients
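
Both dens.F90 and force.F90 shrink the per-thread neighbour cell cache from 50000 to 10000 entries. A rough back-of-envelope estimate of the saving per OpenMP thread; the assumption of four 8-byte values per cached entry is illustrative and not taken from this diff:

 program cache_saving
  implicit none
  integer, parameter :: old_size = 50000, new_size = 10000
  integer, parameter :: vals_per_entry = 4   ! assumed layout of the cell cache
  integer, parameter :: bytes_per_val  = 8   ! double precision
  real :: saving_mb

  ! every OpenMP thread holds its own cache, so the total saving scales with thread count
  saving_mb = real((old_size-new_size)*vals_per_entry*bytes_per_val)/1.e6
  print "(a,f6.2,a)", ' cache shrinks by ~', saving_mb, ' MB per thread'
 end program cache_saving
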
12 changes: 3 additions & 9 deletions src/main/initial.F90
@@ -19,7 +19,7 @@ module initial
! deriv, dim, dust, energies, eos, evwrite, extern_gr, externalforces,
! fastmath, fileutils, forcing, growth, inject, io, io_summary,
! krome_interface, linklist, metric_tools, mf_write, mpibalance,
! mpiderivs, mpidomain, mpistack, mpiutils, nicil, nicil_sup, omputils,
! mpiderivs, mpidomain, mpimemory, mpiutils, nicil, nicil_sup, omputils,
! options, part, photoevap, ptmass, radiation_utils, readwrite_dumps,
! readwrite_infile, timestep, timestep_ind, timestep_sts, timing, units,
! writeheader
@@ -54,8 +54,6 @@ subroutine initialise()
use mpidomain, only:init_domains
use cpuinfo, only:print_cpuinfo
use checkoptions, only:check_compile_time_settings
use mpiderivs, only:init_tree_comms
use mpistack, only:init_mpi_memory
use readwrite_dumps, only:init_readwrite_dumps
integer :: ierr
!
@@ -100,10 +98,6 @@ subroutine initialise()
!--initialise MPI domains
!
call init_domains(nprocs)
if (mpi) then
call init_tree_comms()
call init_mpi_memory()
endif

call init_readwrite_dumps()

@@ -773,11 +767,11 @@ end subroutine startrun
subroutine finalise()
use dim, only: mpi
use mpiderivs, only:finish_tree_comms
use mpistack, only:finish_mpi_memory
use mpimemory, only:deallocate_mpi_memory

if (mpi) then
call finish_tree_comms()
call finish_mpi_memory()
call deallocate_mpi_memory()
endif

end subroutine finalise
12 changes: 7 additions & 5 deletions src/main/kdtree.F90
@@ -1499,16 +1499,16 @@ subroutine maketreeglobal(nodeglobal,node,nodemap,globallevel,refinelevels,xyzh,
use part, only:isdead_or_accreted,iactive,ibelong
use timing, only:increment_timer,get_timings,itimer_balance

type(kdnode), intent(out) :: nodeglobal(:) !ncellsmax+1)
type(kdnode), intent(out) :: node(:) !ncellsmax+1)
integer, intent(out) :: nodemap(:) !ncellsmax+1)
type(kdnode), intent(out) :: nodeglobal(:) ! ncellsmax+1
type(kdnode), intent(out) :: node(:) ! ncellsmax+1
integer, intent(out) :: nodemap(:) ! ncellsmax+1
integer, intent(out) :: globallevel
integer, intent(out) :: refinelevels
integer, intent(inout) :: np
integer, intent(in) :: ndim
real, intent(inout) :: xyzh(:,:)
integer, intent(out) :: cellatid(:) !ncellsmax+1)
integer, intent(out) :: ifirstincell(:) !ncellsmax+1)
integer, intent(out) :: cellatid(:) ! ncellsmax+1
integer, intent(out) :: ifirstincell(:) ! ncellsmax+1)
real :: xmini(ndim),xmaxi(ndim)
real :: xminl(ndim),xmaxl(ndim)
real :: xminr(ndim),xmaxr(ndim)
@@ -1650,6 +1650,8 @@ subroutine maketreeglobal(nodeglobal,node,nodemap,globallevel,refinelevels,xyzh,
nnodestart = offset
nnodeend = 2*nnodestart-1

if (nnodeend > ncellsmax) call fatal('kdtree', 'global tree refinement has exceeded ncellsmax')

locstart = roffset
locend = 2*locstart-1

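
The new fatal call in kdtree.F90 guards the global tree refinement: nnodeend = 2*nnodestart-1 means the node range roughly doubles with each refinement level, so a deep global refinement can outgrow arrays sized by ncellsmax. A toy loop showing that growth and where the check would fire, with an illustrative limit standing in for the real ncellsmax:

 program node_growth
  implicit none
  integer, parameter :: ncellsmax_demo = 1000   ! stand-in for the real limit
  integer :: level, nnodestart, nnodeend

  nnodestart = 1
  do level = 1, 12
     nnodeend = 2*nnodestart - 1                ! same update as in maketreeglobal
     print "(a,i3,a,i6)", ' level ', level, '  nnodeend = ', nnodeend
     if (nnodeend > ncellsmax_demo) then
        print *, ' would trigger: global tree refinement has exceeded ncellsmax'
        exit
     endif
     nnodestart = nnodeend + 1                  ! next level starts after this one
  enddo
 end program node_growth
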
12 changes: 6 additions & 6 deletions src/main/linklist_kdtree.F90
@@ -21,7 +21,7 @@ module linklist
! :Dependencies: allocutils, boundary, dim, dtypekdtree, infile_utils, io,
! kdtree, kernel, mpiutils, part
!
use dim, only:ncellsmax
use dim, only:ncellsmax,ncellsmaxglobal
use part, only:ll
use dtypekdtree, only:kdnode
implicit none
@@ -58,11 +58,11 @@ subroutine allocate_linklist
use kdtree, only:allocate_kdtree
use dim, only:maxp

call allocate_array('cellatid', cellatid, ncellsmax+1)
call allocate_array('ifirstincell', ifirstincell, ncellsmax+1)
call allocate_array('nodeglobal', nodeglobal, ncellsmax+1)
call allocate_array('node', node, ncellsmax+1)
call allocate_array('nodemap', nodemap, ncellsmax+1)
call allocate_array('cellatid', cellatid, ncellsmaxglobal+1 )
call allocate_array('ifirstincell', ifirstincell, ncellsmax+1 )
call allocate_array('nodeglobal', nodeglobal, ncellsmaxglobal+1 )
call allocate_array('node', node, ncellsmax+1 )
call allocate_array('nodemap', nodemap, ncellsmax+1 )
call allocate_kdtree()
!$omp parallel
call allocate_array('listneigh',listneigh,maxp)
Expand Down
Loading