diff --git a/cicecore/cicedynB/analysis/ice_diagnostics.F90 b/cicecore/cicedynB/analysis/ice_diagnostics.F90 index 3eaf9d057..6b9b32301 100644 --- a/cicecore/cicedynB/analysis/ice_diagnostics.F90 +++ b/cicecore/cicedynB/analysis/ice_diagnostics.F90 @@ -39,7 +39,7 @@ module ice_diagnostics print_global ! if true, print global data integer (kind=int_kind), public :: & - debug_model_step = 999999999 ! begin printing at istep1=debug_model_step + debug_model_step = 0 ! begin printing at istep1=debug_model_step integer (kind=int_kind), parameter, public :: & npnt = 2 ! total number of points to be printed @@ -73,6 +73,12 @@ module ice_diagnostics integer (kind=int_kind), dimension(npnt), public :: & piloc, pjloc, pbloc, pmloc ! location of diagnostic points + integer (kind=int_kind), public :: & + debug_model_i = -1, & ! location of debug_model point, local i index + debug_model_j = -1, & ! location of debug_model point, local j index + debug_model_iblk = -1, & ! location of debug_model point, local block number + debug_model_task = -1 ! location of debug_model point, local task number + ! for hemispheric water and heat budgets real (kind=dbl_kind) :: & totmn , & ! total ice/snow water mass (nh) @@ -1432,9 +1438,9 @@ subroutine init_diags write(nu_diag,*) ' Find indices of diagnostic points ' endif - piloc(:) = 0 - pjloc(:) = 0 - pbloc(:) = 0 + piloc(:) = -1 + pjloc(:) = -1 + pbloc(:) = -1 pmloc(:) = -999 plat(:) = -999._dbl_kind plon(:) = -999._dbl_kind @@ -1535,16 +1541,29 @@ subroutine debug_ice(iblk, plabeld) integer (kind=int_kind) :: i, j, m character(len=*), parameter :: subname='(debug_ice)' -! tcraig, do this only on one point, the first point -! do m = 1, npnt - m = 1 - if (istep1 >= debug_model_step .and. & - iblk == pbloc(m) .and. my_task == pmloc(m)) then - i = piloc(m) - j = pjloc(m) - call print_state(plabeld,i,j,iblk) + if (istep1 >= debug_model_step) then + + ! set debug point to 1st global point if not set as local values + if (debug_model_i < 0 .and. debug_model_j < 0 .and. & + debug_model_iblk < 0 .and. debug_model_task < 0) then + debug_model_i = piloc(1) + debug_model_j = pjloc(1) + debug_model_task = pmloc(1) + debug_model_iblk = pbloc(1) + endif + + ! if debug point is messed up, abort + if (debug_model_i < 0 .or. debug_model_j < 0 .or. & + debug_model_iblk < 0 .or. debug_model_task < 0) then + call abort_ice (subname//'ERROR: debug_model_[i,j,iblk,mytask] not set correctly') endif -! enddo + + ! write out debug info + if (debug_model_iblk == iblk .and. debug_model_task == my_task) then + call print_state(plabeld,debug_model_i,debug_model_j,debug_model_iblk) + endif + + endif end subroutine debug_ice diff --git a/cicecore/cicedynB/general/ice_flux.F90 b/cicecore/cicedynB/general/ice_flux.F90 index 53b326808..bcc7305ff 100644 --- a/cicecore/cicedynB/general/ice_flux.F90 +++ b/cicecore/cicedynB/general/ice_flux.F90 @@ -720,10 +720,8 @@ subroutine init_coupler_flux ffep (:,:,:,:)= c0 ffed (:,:,:,:)= c0 - if (send_i2x_per_cat) then - allocate(fswthrun_ai(nx_block,ny_block,ncat,max_blocks)) - fswthrun_ai(:,:,:,:) = c0 - endif + allocate(fswthrun_ai(nx_block,ny_block,ncat,max_blocks)) + fswthrun_ai(:,:,:,:) = c0 !----------------------------------------------------------------- ! derived or computed fields diff --git a/cicecore/cicedynB/general/ice_forcing.F90 b/cicecore/cicedynB/general/ice_forcing.F90 index 200b3d00b..a71e6dd17 100755 --- a/cicecore/cicedynB/general/ice_forcing.F90 +++ b/cicecore/cicedynB/general/ice_forcing.F90 @@ -158,7 +158,7 @@ module ice_forcing trest ! 
restoring time scale (sec) logical (kind=log_kind), public :: & - forcing_diag ! prints forcing debugging output if true + debug_forcing ! prints forcing debugging output if true real (dbl_kind), dimension(:), allocatable, public :: & jday_atm ! jday time vector from atm forcing files @@ -173,7 +173,7 @@ module ice_forcing mixed_layer_depth_default = c20 ! default mixed layer depth in m logical (kind=log_kind), parameter :: & - forcing_debug = .false. ! local debug flag + local_debug = .false. ! local debug flag !======================================================================= @@ -187,7 +187,7 @@ subroutine alloc_forcing integer (int_kind) :: ierr character(len=*), parameter :: subname = '(alloc_forcing)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' allocate ( & cldf(nx_block,ny_block, max_blocks), & ! cloud fraction @@ -235,13 +235,13 @@ subroutine init_forcing_atmo integer (kind=int_kind) :: modadj ! adjustment for mod function character(len=*), parameter :: subname = '(init_forcing_atmo)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' modadj = abs((min(0,myear-fyear_init)/ycycle+1)*ycycle) fyear = fyear_init + mod(myear-fyear_init+modadj,ycycle) fyear_final = fyear_init + ycycle - 1 ! last year in forcing cycle - if (forcing_debug .and. my_task == master_task) then + if (local_debug .and. my_task == master_task) then write(nu_diag,*) subname,'fdbg fyear = ',fyear,fyear_init,fyear_final write(nu_diag,*) subname,'fdbg atm_data_type = ',trim(atm_data_type) endif @@ -344,7 +344,7 @@ subroutine init_forcing_ocn(dt) character(len=*), parameter :: subname = '(init_forcing_ocn)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) @@ -389,7 +389,7 @@ subroutine init_forcing_ocn(dt) sss(:,:,:) = c0 do k = 1,12 ! loop over 12 months - call ice_read (nu_forcing, k, work1, 'rda8', forcing_diag, & + call ice_read (nu_forcing, k, work1, 'rda8', debug_forcing, & field_loc_center, field_type_scalar) !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks @@ -436,7 +436,7 @@ subroutine init_forcing_ocn(dt) if (my_task == master_task) & call ice_open (nu_forcing, sst_file, nbits) - call ice_read (nu_forcing, mmonth, sst, 'rda8', forcing_diag, & + call ice_read (nu_forcing, mmonth, sst, 'rda8', debug_forcing, & field_loc_center, field_type_scalar) if (my_task == master_task) close(nu_forcing) @@ -520,7 +520,7 @@ subroutine ocn_freezing_temperature character(len=*), parameter :: subname = '(ocn_freezing_temperature)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks @@ -565,7 +565,7 @@ subroutine get_forcing_atmo character(len=*), parameter :: subname = '(get_forcing_atmo)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call ice_timer_start(timer_forcing) @@ -588,7 +588,7 @@ subroutine get_forcing_atmo ! 
Read and interpolate atmospheric data !------------------------------------------------------------------- - if (forcing_debug .and. my_task == master_task) then + if (local_debug .and. my_task == master_task) then write(nu_diag,*) subname,'fdbg fyear = ',fyear write(nu_diag,*) subname,'fdbg atm_data_type = ',trim(atm_data_type) endif @@ -688,11 +688,11 @@ subroutine get_forcing_ocn (dt) character(len=*), parameter :: subname = '(get_forcing_ocn)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call ice_timer_start(timer_forcing) - if (forcing_debug .and. my_task == master_task) then + if (local_debug .and. my_task == master_task) then write(nu_diag,*) subname,'fdbg fyear = ',fyear write(nu_diag,*) subname,'fdbg ocn_data_type = ',trim(ocn_data_type) endif @@ -770,15 +770,15 @@ subroutine read_data (flag, recd, yr, ixm, ixx, ixp, & character(len=*), parameter :: subname = '(read_data)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call ice_timer_start(timer_readwrite) ! reading/writing nbits = 64 ! double precision data - if (istep1 > debug_model_step) forcing_diag = .true. !! debugging + if (istep1 > debug_model_step) debug_forcing = .true. !! debugging - if (my_task==master_task .and. (forcing_diag)) then + if (my_task==master_task .and. (debug_forcing)) then write(nu_diag,*) ' ', trim(data_file) endif @@ -816,7 +816,7 @@ subroutine read_data (flag, recd, yr, ixm, ixx, ixp, & arg = 1 nrec = recd + n2 call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', forcing_diag, field_loc, field_type) + 'rda8', debug_forcing, field_loc, field_type) if (ixx==1 .and. my_task == master_task) close(nu_forcing) endif ! ixm ne -99 @@ -828,7 +828,7 @@ subroutine read_data (flag, recd, yr, ixm, ixx, ixp, & arg = arg + 1 nrec = recd + ixx call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', forcing_diag, field_loc, field_type) + 'rda8', debug_forcing, field_loc, field_type) if (ixp /= -99) then ! currently in latter half of data interval @@ -853,7 +853,7 @@ subroutine read_data (flag, recd, yr, ixm, ixx, ixp, & arg = arg + 1 nrec = recd + n4 call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', forcing_diag, field_loc, field_type) + 'rda8', debug_forcing, field_loc, field_type) endif ! ixp /= -99 if (my_task == master_task) close(nu_forcing) @@ -923,13 +923,13 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & character(len=*), parameter :: subname = '(read_data_nc)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call ice_timer_start(timer_readwrite) ! reading/writing - if (istep1 > debug_model_step) forcing_diag = .true. !! debugging + if (istep1 > debug_model_step) debug_forcing = .true. !! debugging - if (my_task==master_task .and. (forcing_diag)) then + if (my_task==master_task .and. 
(debug_forcing)) then write(nu_diag,*) ' ', trim(data_file) endif @@ -968,7 +968,7 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & nrec = recd + n2 call ice_read_nc & - (fid, nrec, fieldname, field_data(:,:,arg,:), forcing_diag, & + (fid, nrec, fieldname, field_data(:,:,arg,:), debug_forcing, & field_loc, field_type) if (ixx==1) call ice_close_nc(fid) @@ -982,7 +982,7 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & nrec = recd + ixx call ice_read_nc & - (fid, nrec, fieldname, field_data(:,:,arg,:), forcing_diag, & + (fid, nrec, fieldname, field_data(:,:,arg,:), debug_forcing, & field_loc, field_type) if (ixp /= -99) then @@ -1008,7 +1008,7 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & nrec = recd + n4 call ice_read_nc & - (fid, nrec, fieldname, field_data(:,:,arg,:), forcing_diag, & + (fid, nrec, fieldname, field_data(:,:,arg,:), debug_forcing, & field_loc, field_type) endif ! ixp /= -99 @@ -1061,13 +1061,13 @@ subroutine read_data_nc_hycom (flag, recd, & character(len=*), parameter :: subname = '(read_data_nc_hycom)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call ice_timer_start(timer_readwrite) ! reading/writing - if (istep1 > debug_model_step) forcing_diag = .true. !! debugging + if (istep1 > debug_model_step) debug_forcing = .true. !! debugging - if (my_task==master_task .and. (forcing_diag)) then + if (my_task==master_task .and. (debug_forcing)) then write(nu_diag,*) ' ', trim(data_file) endif @@ -1078,11 +1078,11 @@ subroutine read_data_nc_hycom (flag, recd, & ! read data !----------------------------------------------------------------- call ice_read_nc & - (fid, recd , fieldname, field_data(:,:,1,:), forcing_diag, & + (fid, recd , fieldname, field_data(:,:,1,:), debug_forcing, & field_loc, field_type) call ice_read_nc & - (fid, recd+1, fieldname, field_data(:,:,2,:), forcing_diag, & + (fid, recd+1, fieldname, field_data(:,:,2,:), debug_forcing, & field_loc, field_type) call ice_close_nc(fid) @@ -1131,15 +1131,15 @@ subroutine read_clim_data (readflag, recd, ixm, ixx, ixp, & character(len=*), parameter :: subname = '(read_clim_data)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call ice_timer_start(timer_readwrite) ! reading/writing nbits = 64 ! double precision data - if (istep1 > debug_model_step) forcing_diag = .true. !! debugging + if (istep1 > debug_model_step) debug_forcing = .true. !! debugging - if (my_task==master_task .and. (forcing_diag)) & + if (my_task==master_task .and. 
(debug_forcing)) & write(nu_diag,*) ' ', trim(data_file) if (readflag) then @@ -1155,19 +1155,19 @@ subroutine read_clim_data (readflag, recd, ixm, ixx, ixp, & arg = 1 nrec = recd + ixm call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', forcing_diag, field_loc, field_type) + 'rda8', debug_forcing, field_loc, field_type) endif arg = arg + 1 nrec = recd + ixx call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', forcing_diag, field_loc, field_type) + 'rda8', debug_forcing, field_loc, field_type) if (ixp /= -99) then arg = arg + 1 nrec = recd + ixp call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', forcing_diag, field_loc, field_type) + 'rda8', debug_forcing, field_loc, field_type) endif if (my_task == master_task) close (nu_forcing) @@ -1218,13 +1218,13 @@ subroutine read_clim_data_nc (readflag, recd, ixm, ixx, ixp, & character(len=*), parameter :: subname = '(read_clim_data_nc)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call ice_timer_start(timer_readwrite) ! reading/writing - if (istep1 > debug_model_step) forcing_diag = .true. !! debugging + if (istep1 > debug_model_step) debug_forcing = .true. !! debugging - if (my_task==master_task .and. (forcing_diag)) & + if (my_task==master_task .and. (debug_forcing)) & write(nu_diag,*) ' ', trim(data_file) if (readflag) then @@ -1241,21 +1241,21 @@ subroutine read_clim_data_nc (readflag, recd, ixm, ixx, ixp, & nrec = recd + ixm call ice_read_nc & (fid, nrec, fieldname, field_data(:,:,arg,:), & - forcing_diag, field_loc, field_type) + debug_forcing, field_loc, field_type) endif arg = arg + 1 nrec = recd + ixx call ice_read_nc & (fid, nrec, fieldname, field_data(:,:,arg,:), & - forcing_diag, field_loc, field_type) + debug_forcing, field_loc, field_type) if (ixp /= -99) then arg = arg + 1 nrec = recd + ixp call ice_read_nc & (fid, nrec, fieldname, field_data(:,:,arg,:), & - forcing_diag, field_loc, field_type) + debug_forcing, field_loc, field_type) endif if (my_task == master_task) call ice_close_nc (fid) @@ -1286,7 +1286,7 @@ subroutine interp_coeff_monthly (recslot) character(len=*), parameter :: subname = '(interp_coeff_monthly)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) @@ -1355,7 +1355,7 @@ subroutine interp_coeff (recnum, recslot, secint, dataloc) character(len=*), parameter :: subname = '(interp_coeff)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) @@ -1387,7 +1387,7 @@ subroutine interp_coeff (recnum, recslot, secint, dataloc) c1intp = abs((t2 - tt) / (t2 - t1)) c2intp = c1 - c1intp - if (forcing_debug .and. my_task == master_task) then + if (local_debug .and. my_task == master_task) then write(nu_diag,*) subname,'fdbg yday,sec = ',yday,msec write(nu_diag,*) subname,'fdbg tt = ',tt write(nu_diag,*) subname,'fdbg c12intp = ',c1intp,c2intp @@ -1408,7 +1408,7 @@ subroutine interp_coeff2 (tt, t1, t2) t1, t2 ! first+last decimal daynumber character(len=*), parameter :: subname = '(interp_coeff2)' - if (forcing_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' ! Compute coefficients c1intp = abs((t2 - tt) / (t2 - t1)) @@ -1438,7 +1438,7 @@ subroutine interpolate_data (field_data, field) character(len=*), parameter :: subname = '(interpolate data)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks @@ -1471,7 +1471,7 @@ subroutine file_year (data_file, yr) character(len=*), parameter :: subname = '(file_year)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' if (trim(atm_data_type) == 'hadgem') then ! netcdf i = index(data_file,'.nc') - 5 @@ -1559,7 +1559,7 @@ subroutine prepare_forcing (nx_block, ny_block, & character(len=*), parameter :: subname = '(prepare_forcing)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call icepack_query_parameters(Tffresh_out=Tffresh, puny_out=puny) call icepack_query_parameters(secday_out=secday) @@ -1779,7 +1779,7 @@ subroutine longwave_parkinson_washington(Tair, cldf, flw) character(len=*), parameter :: subname = '(longwave_parkinson_washington)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call icepack_query_parameters(Tffresh_out=Tffresh, & stefan_boltzmann_out=stefan_boltzmann) @@ -1831,7 +1831,7 @@ subroutine longwave_rosati_miyakoda(cldf, Tsfc, & character(len=*), parameter :: subname = '(longwave_rosati_miyakoda)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call icepack_query_parameters(Tffresh_out=Tffresh, & stefan_boltzmann_out=stefan_boltzmann, & @@ -1870,7 +1870,7 @@ subroutine ncar_files (yr) character(len=*), parameter :: subname = '(ncar_files)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' fsw_file = & trim(atm_data_dir)//'/MONTHLY/swdn.1996.dat' @@ -1943,7 +1943,7 @@ subroutine ncar_data character(len=*), parameter :: subname = '(ncar_data)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) @@ -2097,7 +2097,7 @@ subroutine LY_files (yr) character(len=*), parameter :: subname = '(LY_files)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' flw_file = & trim(atm_data_dir)//'/MONTHLY/cldf.omip.dat' @@ -2144,7 +2144,7 @@ subroutine JRA55_gx1_files(yr) character(len=*), parameter :: subname = '(JRA55_gx1_files)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' uwind_file = & trim(atm_data_dir)//'/8XDAILY/JRA55_03hr_forcing_2005.nc' @@ -2165,7 +2165,7 @@ subroutine JRA55_tx1_files(yr) character(len=*), parameter :: subname = '(JRA55_tx1_files)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' uwind_file = & trim(atm_data_dir)//'/8XDAILY/JRA55_03hr_forcing_tx1_2005.nc' @@ -2186,7 +2186,7 @@ subroutine JRA55_gx3_files(yr) character(len=*), parameter :: subname = '(JRA55_gx3_files)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' uwind_file = & trim(atm_data_dir)//'/8XDAILY/JRA55_gx3_03hr_forcing_2005.nc' @@ -2237,7 +2237,7 @@ subroutine LY_data character(len=*), parameter :: subname = '(LY_data)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call icepack_query_parameters(Tffresh_out=Tffresh) call icepack_query_parameters(secday_out=secday) @@ -2386,7 +2386,7 @@ subroutine LY_data ! Save record number oldrecnum = recnum - if (forcing_diag) then + if (debug_forcing) then if (my_task == master_task) write (nu_diag,*) 'LY_bulk_data' vmin = global_minval(fsw,distrb_info,tmask) @@ -2418,7 +2418,7 @@ subroutine LY_data if (my_task.eq.master_task) & write (nu_diag,*) 'Qa',vmin,vmax - endif ! forcing_diag + endif ! debug_forcing end subroutine LY_data @@ -2458,7 +2458,7 @@ subroutine JRA55_data character (char_len_long) :: uwind_file_old character(len=*), parameter :: subname = '(JRA55_data)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call icepack_query_parameters(Tffresh_out=Tffresh) call icepack_query_parameters(secday_out=secday) @@ -2469,7 +2469,7 @@ subroutine JRA55_data sec3hr = secday/c8 ! seconds in 3 hours maxrec = days_per_year * 8 - if (forcing_debug .and. my_task == master_task) then + if (local_debug .and. my_task == master_task) then write(nu_diag,*) subname,'fdbg dpy, maxrec = ',days_per_year,maxrec endif @@ -2521,7 +2521,7 @@ subroutine JRA55_data endif endif - if (forcing_debug .and. my_task == master_task) then + if (local_debug .and. 
my_task == master_task) then write(nu_diag,*) subname,'fdbg read recnum = ',recnum,n1 endif @@ -2545,37 +2545,37 @@ subroutine JRA55_data else fieldname = 'airtmp' - call ice_read_nc(ncid,recnum,fieldname,Tair_data(:,:,n1,:),forcing_debug, & + call ice_read_nc(ncid,recnum,fieldname,Tair_data(:,:,n1,:),local_debug, & field_loc=field_loc_center, & field_type=field_type_scalar) fieldname = 'wndewd' - call ice_read_nc(ncid,recnum,fieldname,uatm_data(:,:,n1,:),forcing_debug, & + call ice_read_nc(ncid,recnum,fieldname,uatm_data(:,:,n1,:),local_debug, & field_loc=field_loc_center, & field_type=field_type_scalar) fieldname = 'wndnwd' - call ice_read_nc(ncid,recnum,fieldname,vatm_data(:,:,n1,:),forcing_debug, & + call ice_read_nc(ncid,recnum,fieldname,vatm_data(:,:,n1,:),local_debug, & field_loc=field_loc_center, & field_type=field_type_scalar) fieldname = 'spchmd' - call ice_read_nc(ncid,recnum,fieldname,Qa_data(:,:,n1,:),forcing_debug, & + call ice_read_nc(ncid,recnum,fieldname,Qa_data(:,:,n1,:),local_debug, & field_loc=field_loc_center, & field_type=field_type_scalar) fieldname = 'glbrad' - call ice_read_nc(ncid,recnum,fieldname,fsw_data(:,:,n1,:),forcing_debug, & + call ice_read_nc(ncid,recnum,fieldname,fsw_data(:,:,n1,:),local_debug, & field_loc=field_loc_center, & field_type=field_type_scalar) fieldname = 'dlwsfc' - call ice_read_nc(ncid,recnum,fieldname,flw_data(:,:,n1,:),forcing_debug, & + call ice_read_nc(ncid,recnum,fieldname,flw_data(:,:,n1,:),local_debug, & field_loc=field_loc_center, & field_type=field_type_scalar) fieldname = 'ttlpcp' - call ice_read_nc(ncid,recnum,fieldname,fsnow_data(:,:,n1,:),forcing_debug, & + call ice_read_nc(ncid,recnum,fieldname,fsnow_data(:,:,n1,:),local_debug, & field_loc=field_loc_center, & field_type=field_type_scalar) endif ! copy data from n1=2 from last timestep to n1=1 @@ -2603,7 +2603,7 @@ subroutine JRA55_data call abort_ice (error_message=subname//' ERROR: c2intp out of range', & file=__FILE__, line=__LINE__) endif - if (forcing_debug .and. my_task == master_task) then + if (local_debug .and. my_task == master_task) then write(nu_diag,*) subname,'fdbg c12intp = ',c1intp,c2intp endif @@ -2644,7 +2644,7 @@ subroutine JRA55_data enddo ! iblk !$OMP END PARALLEL DO - if (forcing_diag .or. forcing_debug) then + if (debug_forcing .or. local_debug) then if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg JRA55_bulk_data' vmin = global_minval(fsw,distrb_info,tmask) vmax = global_maxval(fsw,distrb_info,tmask) @@ -2667,7 +2667,7 @@ subroutine JRA55_data vmin = global_minval(Qa,distrb_info,tmask) vmax = global_maxval(Qa,distrb_info,tmask) if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg Qa',vmin,vmax - endif ! forcing_diag + endif ! debug_forcing end subroutine JRA55_data @@ -2714,7 +2714,7 @@ subroutine compute_shortwave(nx_block, ny_block, & character(len=*), parameter :: subname = '(compute_shortwave)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call icepack_query_parameters(secday_out=secday, pi_out=pi) call icepack_warnings_flush(nu_diag) @@ -2778,7 +2778,7 @@ subroutine Qa_fixLY(nx_block, ny_block, Tair, Qa) character(len=*), parameter :: subname = '(Qa_fixLY)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' call icepack_query_parameters(Tffresh_out=Tffresh, puny_out=puny) call icepack_warnings_flush(nu_diag) @@ -2822,7 +2822,7 @@ subroutine hadgem_files (yr) character(len=*), parameter :: subname = '(hadgem_files)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call icepack_query_parameters(calc_strair_out=calc_strair, & calc_Tsfc_out=calc_Tsfc) @@ -3022,7 +3022,7 @@ subroutine hadgem_data character(len=*), parameter :: subname = '(hadgem_data)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call icepack_query_parameters(Lsub_out=Lsub) call icepack_query_parameters(calc_strair_out=calc_strair, & @@ -3253,7 +3253,7 @@ subroutine monthly_files (yr) character(len=*), parameter :: subname = '(monthly_files)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' flw_file = & trim(atm_data_dir)//'/MONTHLY/cldf.omip.dat' @@ -3326,7 +3326,7 @@ subroutine monthly_data character(len=*), parameter :: subname = '(monthly_data)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' !------------------------------------------------------------------- ! monthly data @@ -3425,7 +3425,7 @@ subroutine monthly_data enddo ! iblk !$OMP END PARALLEL DO - if (forcing_diag) then + if (debug_forcing) then if (my_task == master_task) write (nu_diag,*) 'LY_bulk_data' vmin = global_minval(fsw,distrb_info,tmask) vmax = global_maxval(fsw,distrb_info,tmask) @@ -3460,7 +3460,7 @@ subroutine monthly_data if (my_task.eq.master_task) & write (nu_diag,*) 'Qa',vmin,vmax - endif ! forcing_diag + endif ! debug_forcing end subroutine monthly_data @@ -3507,7 +3507,7 @@ subroutine oned_data character(len=*), parameter :: subname = '(oned_data)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' diag = .false. ! write diagnostic information @@ -3584,7 +3584,7 @@ subroutine oned_files character(len=*), parameter :: subname = '(oned_files)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' fsw_file = & trim(atm_data_dir)//'/hourlysolar_brw1989_5yr.nc' @@ -3651,7 +3651,7 @@ subroutine ocn_data_clim (dt) character(len=*), parameter :: subname = '(ocn_data_clim)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' if (my_task == master_task .and. istep == 1) then if (trim(ocn_data_type)=='clim') then @@ -3809,7 +3809,7 @@ subroutine ocn_data_ncar_init character(len=*), parameter :: subname = '(ocn_data_ncar_init)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' if (my_task == master_task) then @@ -3861,10 +3861,10 @@ subroutine ocn_data_ncar_init ! Note: netCDF does single to double conversion if necessary ! 
if (n >= 4 .and. n <= 7) then -! call ice_read_nc(fid, m, vname(n), work1, forcing_diag, & +! call ice_read_nc(fid, m, vname(n), work1, debug_forcing, & ! field_loc_NEcorner, field_type_vector) ! else - call ice_read_nc(fid, m, vname(n), work1, forcing_diag, & + call ice_read_nc(fid, m, vname(n), work1, debug_forcing, & field_loc_center, field_type_scalar) ! endif @@ -3889,10 +3889,10 @@ subroutine ocn_data_ncar_init do m=1,12 nrec = nrec + 1 if (n >= 4 .and. n <= 7) then - call ice_read (nu_forcing, nrec, work1, 'rda8', forcing_diag, & + call ice_read (nu_forcing, nrec, work1, 'rda8', debug_forcing, & field_loc_NEcorner, field_type_vector) else - call ice_read (nu_forcing, nrec, work1, 'rda8', forcing_diag, & + call ice_read (nu_forcing, nrec, work1, 'rda8', debug_forcing, & field_loc_center, field_type_scalar) endif ocn_frc_m(:,:,:,n,m) = work1(:,:,:) @@ -3969,7 +3969,7 @@ subroutine ocn_data_ncar_init_3D character(len=*), parameter :: subname = '(ocn_data_ncar_init_3D)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' if (my_task == master_task) then @@ -4023,10 +4023,10 @@ subroutine ocn_data_ncar_init_3D ! Note: netCDF does single to double conversion if necessary if (n == 4 .or. n == 5) then ! 3D currents nzlev = 1 ! surface currents - call ice_read_nc_uv(fid, m, nzlev, vname(n), work1, forcing_diag, & + call ice_read_nc_uv(fid, m, nzlev, vname(n), work1, debug_forcing, & field_loc_center, field_type_scalar) else - call ice_read_nc(fid, m, vname(n), work1, forcing_diag, & + call ice_read_nc(fid, m, vname(n), work1, debug_forcing, & field_loc_center, field_type_scalar) endif @@ -4108,7 +4108,7 @@ subroutine ocn_data_ncar(dt) character(len=*), parameter :: subname = '(ocn_data_ncar)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' !------------------------------------------------------------------- ! monthly data @@ -4213,7 +4213,7 @@ subroutine ocn_data_ncar(dt) !$OMP END PARALLEL DO endif - if (forcing_diag) then + if (debug_forcing) then if (my_task == master_task) & write (nu_diag,*) 'ocn_data_ncar' vmin = global_minval(Tf,distrb_info,tmask) @@ -4267,7 +4267,7 @@ subroutine ocn_data_oned character(len=*), parameter :: subname = '(ocn_data_oned)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' sss (:,:,:) = 34.0_dbl_kind ! sea surface salinity (ppt) @@ -4324,7 +4324,7 @@ subroutine ocn_data_hadgem(dt) character(len=*), parameter :: subname = '(ocn_data_hadgem)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' !------------------------------------------------------------------- ! monthly data @@ -4482,7 +4482,7 @@ subroutine ocn_data_hycom_init character(len=*), parameter :: subname = '(ocn_data_hycom_init)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' if (trim(ocn_data_type) == 'hycom') then sss_file = trim(ocn_data_dir)//'ice.restart.surf.nc' @@ -4494,7 +4494,7 @@ subroutine ocn_data_hycom_init fieldname = 'sss' call ice_open_nc (sss_file, fid) - call ice_read_nc (fid, 1 , fieldname, sss, forcing_diag, & + call ice_read_nc (fid, 1 , fieldname, sss, debug_forcing, & field_loc_center, field_type_scalar) call ice_close_nc(fid) @@ -4509,7 +4509,7 @@ subroutine ocn_data_hycom_init fieldname = 'sst' call ice_open_nc (sst_file, fid) - call ice_read_nc (fid, 1 , fieldname, sst, forcing_diag, & + call ice_read_nc (fid, 1 , fieldname, sst, debug_forcing, & field_loc_center, field_type_scalar) call ice_close_nc(fid) @@ -4539,7 +4539,7 @@ subroutine hycom_atm_files varname ! variable name in netcdf file character(len=*), parameter :: subname = '(hycom_atm_files)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' fsw_file = trim(atm_data_dir)//'/forcing.shwflx.nc' flw_file = trim(atm_data_dir)//'/forcing.radflx.nc' @@ -4602,7 +4602,7 @@ subroutine hycom_atm_data character(len=*), parameter :: subname = '(hycom_atm_data)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call icepack_query_parameters(Tffresh_out=Tffresh) call icepack_query_parameters(secday_out=secday) @@ -4682,7 +4682,7 @@ subroutine hycom_atm_data endif ! Interpolate - if (forcing_diag) then + if (debug_forcing) then if (my_task == master_task) then write(nu_diag,*)'CICE: Atm. interpolate: = ',& hcdate,c1intp,c2intp @@ -4768,15 +4768,15 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & character(len=*), parameter :: subname = '(read_data_nc_point)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call ice_timer_start(timer_readwrite) ! reading/writing field_data = c0 ! to satisfy intent(out) attribute - if (istep1 > debug_model_step) forcing_diag = .true. !! debugging + if (istep1 > debug_model_step) debug_forcing = .true. !! debugging - if (my_task==master_task .and. (forcing_diag)) then + if (my_task==master_task .and. (debug_forcing)) then write(nu_diag,*) ' ', trim(data_file) endif @@ -4823,7 +4823,7 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & nrec = recd + n2 call ice_read_nc & - (fid, nrec, fieldname, field_data(arg), forcing_diag, & + (fid, nrec, fieldname, field_data(arg), debug_forcing, & field_loc, field_type) !if (ixx==1) call ice_close_nc(fid) @@ -4838,7 +4838,7 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & nrec = recd + ixx call ice_read_nc & - (fid, nrec, fieldname, field_data(arg), forcing_diag, & + (fid, nrec, fieldname, field_data(arg), debug_forcing, & field_loc, field_type) if (ixp /= -99) then @@ -4864,7 +4864,7 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & nrec = recd + n4 call ice_read_nc & - (fid, nrec, fieldname, field_data(arg), forcing_diag, & + (fid, nrec, fieldname, field_data(arg), debug_forcing, & field_loc, field_type) endif ! ixp /= -99 @@ -4882,7 +4882,7 @@ subroutine ISPOL_files character(len=*), parameter :: subname = '(ISPOL_files)' - if (forcing_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' fsw_file = & trim(atm_data_dir)//'/fsw_sfc_4Xdaily.nc' @@ -4975,7 +4975,7 @@ subroutine ISPOL_data character(len=*), parameter :: subname = '(ISPOL_data)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) @@ -5175,7 +5175,7 @@ subroutine ocn_data_ispol_init character(len=*), parameter :: subname = '(ocn_data_ispol_init)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' if (my_task == master_task) then @@ -5202,10 +5202,10 @@ subroutine ocn_data_ispol_init do m=1,12 ! Note: netCDF does single to double conversion if necessary if (n >= 4 .and. n <= 7) then - call ice_read_nc(fid, m, vname(n), work, forcing_diag, & + call ice_read_nc(fid, m, vname(n), work, debug_forcing, & field_loc_NEcorner, field_type_vector) else - call ice_read_nc(fid, m, vname(n), work, forcing_diag, & + call ice_read_nc(fid, m, vname(n), work, debug_forcing, & field_loc_center, field_type_scalar) endif ocn_frc_m(:,:,:,n,m) = work @@ -5255,7 +5255,7 @@ subroutine box2001_data character(len=*), parameter :: subname = '(box2001_data)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call icepack_query_parameters(pi_out=pi, pi2_out=pi2, puny_out=puny) call icepack_query_parameters(secday_out=secday) @@ -5348,7 +5348,7 @@ subroutine get_wave_spec logical (kind=log_kind) :: wave_spec character(len=*), parameter :: subname = '(get_wave_spec)' - if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' call ice_timer_start(timer_fsd) @@ -5361,7 +5361,7 @@ subroutine get_wave_spec ! if no wave data is provided, wave_spectrum is zero everywhere wave_spectrum(:,:,:,:) = c0 wave_spec_dir = ocn_data_dir - forcing_diag = .false. + debug_forcing = .false. ! wave spectrum and frequencies if (wave_spec) then @@ -5379,7 +5379,7 @@ subroutine get_wave_spec else #ifdef USE_NETCDF call ice_open_nc(wave_spec_file,fid) - call ice_read_nc_xyf (fid, 1, 'efreq', wave_spectrum(:,:,:,:), forcing_diag, & + call ice_read_nc_xyf (fid, 1, 'efreq', wave_spectrum(:,:,:,:), debug_forcing, & field_loc_center, field_type_scalar) call ice_close_nc(fid) #else diff --git a/cicecore/cicedynB/general/ice_forcing_bgc.F90 b/cicecore/cicedynB/general/ice_forcing_bgc.F90 index d9408c304..383d388de 100644 --- a/cicecore/cicedynB/general/ice_forcing_bgc.F90 +++ b/cicecore/cicedynB/general/ice_forcing_bgc.F90 @@ -17,7 +17,7 @@ module ice_forcing_bgc use ice_calendar, only: dt, istep, msec, mday, mmonth use ice_fileunits, only: nu_diag use ice_arrays_column, only: restore_bgc, & - bgc_data_dir, fe_data_type + bgc_data_dir, fe_data_type, optics_file, optics_file_fieldname use ice_constants, only: c0, p1 use ice_constants, only: field_loc_center, field_type_scalar use ice_exit, only: abort_ice @@ -861,7 +861,7 @@ subroutine faero_optics kaer_bc_tab, & ! BC mass extinction cross section (m2/kg) waer_bc_tab, & ! BC single scatter albedo (fraction) gaer_bc_tab, & ! 
BC aerosol asymmetry parameter (cos(theta))
-         bcenh               ! BC absorption enhancement facto
+         bcenh               ! BC absorption enhancement factor

 #ifdef USE_NETCDF
       use netcdf
 #endif
@@ -883,7 +883,6 @@ subroutine faero_optics
          fid              ! file id for netCDF file

       character (char_len_long) :: &
-         optics_file,   & ! netcdf filename
          fieldname        ! field name in netcdf file

       character(len=*), parameter :: subname = '(faero_optics)'
@@ -963,20 +962,16 @@ subroutine faero_optics

       if (modal_aero) then
 #ifdef USE_NETCDF
-         optics_file = &
-            '/usr/projects/climate/njeffery/DATA/CAM/snicar/snicar_optics_5bnd_mam_c140303.nc'
-         if (my_task == master_task) then
-            write (nu_diag,*) ' '
-            write (nu_diag,*) 'Read optics for modal aerosol treament in'
-            write (nu_diag,*) trim(optics_file)
-            call ice_open_nc(optics_file,fid)
-         endif
+            write (nu_diag,*) ' '
+            write (nu_diag,*) 'Read optics for modal aerosol treatment in'
+            write (nu_diag,*) trim(optics_file)
+            write (nu_diag,*) 'Read optics file field name = ',trim(optics_file_fieldname)
+            call ice_open_nc(optics_file,fid)

-         fieldname='bcint_enh_mam_cice'
-         if (my_task == master_task) then
+            fieldname=optics_file_fieldname

-            status = nf90_inq_varid(fid, trim(fieldname), varid)
+            status = nf90_inq_varid(fid, trim(fieldname), varid)

             if (status /= nf90_noerr) then
                call abort_ice (subname//'ERROR: Cannot find variable '//trim(fieldname))
             endif
             status = nf90_get_var( fid, varid, bcenh, &
                        start=(/1,1,1,1/), &
                        count=(/3,10,8,1/) )
             do n=1,10
-               amin = minval(bcenh(:,n,:))
-               amax = maxval(bcenh(:,n,:))
-               asum = sum   (bcenh(:,n,:))
-               write(nu_diag,*) ' min, max, sum =', amin, amax, asum
+               amin = minval(bcenh(:,n,:))
+               amax = maxval(bcenh(:,n,:))
+               asum = sum   (bcenh(:,n,:))
+               write(nu_diag,*) ' min, max, sum =', amin, amax, asum
             enddo
             call ice_close_nc(fid)
-         endif !master_task
-         do n=1,3
-            do k=1,8
-               call broadcast_array(bcenh(n,:,k), master_task)
-            enddo
-         enddo
+         endif !master_task
+         do n=1,3
+            do k=1,8
+               call broadcast_array(bcenh(n,:,k), master_task)
+            enddo
+         enddo
 #else
-         call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', &
+         call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', &
             file=__FILE__, line=__LINE__)
 #endif
       endif  ! modal_aero
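The hard-coded optics path above now comes in through namelist. A minimal sketch of the corresponding entries (the group name zbgc_nml is an assumption, since the hunk does not show where optics_file is read; the path and field name are the values that were previously hard-coded, not shipped defaults):

    &zbgc_nml
       optics_file           = '/usr/projects/climate/njeffery/DATA/CAM/snicar/snicar_optics_5bnd_mam_c140303.nc'  ! example: former hard-coded path
       optics_file_fieldname = 'bcint_enh_mam_cice'                                                                ! example: former hard-coded field name
    /
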
diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90
index 5e5fd144f..a0b050b63 100644
--- a/cicecore/cicedynB/general/ice_init.F90
+++ b/cicecore/cicedynB/general/ice_init.F90
@@ -59,7 +59,8 @@ subroutine input_data
       use ice_broadcast, only: broadcast_scalar, broadcast_array
       use ice_diagnostics, only: diag_file, print_global, print_points, latpnt, lonpnt, &
-                          debug_model, debug_model_step
+                          debug_model, debug_model_step, debug_model_task, &
+                          debug_model_i, debug_model_j, debug_model_iblk
       use ice_domain, only: close_boundaries, orca_halogrid
       use ice_domain_size, only: ncat, nilyr, nslyr, nblyr, nfsd, nfreq, &
                                  n_iso, n_aero, n_zaero, n_algae, &
@@ -84,7 +85,7 @@ subroutine input_data
       use ice_flux, only: default_season
       use ice_flux_bgc, only: cpl_bgc
       use ice_forcing, only: &
-         ycycle, fyear_init, forcing_diag, &
+         ycycle, fyear_init, debug_forcing, &
          atm_data_type, atm_data_dir, precip_units, rotate_wind, &
          atm_data_format, ocn_data_format, &
          bgc_data_type, &
@@ -127,7 +128,7 @@ subroutine input_data
         mu_rdg, hs0, dpscale, rfracmin, rfracmax, pndaspect, hs1, hp1, &
         a_rapid_mode, Rac_rapid_mode, aspect_rapid_mode, dSdt_slow_mode, &
         phi_c_slow_mode, phi_i_mushy, kalg, atmiter_conv, Pstar, Cstar, &
-        sw_frac, sw_dtemp, floediam, hfrazilmin, iceruf
+        sw_frac, sw_dtemp, floediam, hfrazilmin, iceruf, iceruf_ocn

       integer (kind=int_kind) :: ktherm, kstrength, krdg_partic, krdg_redist, natmiter, &
         kitd, kcatbound, ktransport
@@ -136,7 +137,7 @@ subroutine input_data
        tfrz_option, frzpnd, atmbndy, wave_spec_type

       logical (kind=log_kind) :: calc_Tsfc, formdrag, highfreq, calc_strair, wave_spec, &
-        sw_redist
+        sw_redist, calc_dragio

       logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond
       logical (kind=log_kind) :: tr_iso, tr_aero, tr_fsd
@@ -164,9 +165,10 @@ subroutine input_data
         pointer_file, dumpfreq, dumpfreq_n, dump_last, &
         diagfreq, diag_type, diag_file, history_format,&
         print_global, print_points, latpnt, lonpnt, &
-        forcing_diag, histfreq, histfreq_n, hist_avg, &
+        debug_forcing, histfreq, histfreq_n, hist_avg, &
         history_dir, history_file, history_precision, cpl_bgc, &
         conserv_check, debug_model, debug_model_step, &
+        debug_model_i, debug_model_j, debug_model_iblk, debug_model_task, &
         year_init, month_init, day_init, sec_init, &
         write_ic, incond_dir, incond_file, version_name
@@ -226,8 +228,8 @@ subroutine input_data
       namelist /forcing_nml/ &
         formdrag, atmbndy, calc_strair, calc_Tsfc, &
-        highfreq, natmiter, atmiter_conv, &
-        ustar_min, emissivity, iceruf, &
+        highfreq, natmiter, atmiter_conv, calc_dragio, &
+        ustar_min, emissivity, iceruf, iceruf_ocn, &
         fbot_xfer_type, update_ocn_f, l_mpond_fresh, tfrz_option, &
         oceanmixed_ice, restore_ice, restore_ocn, trestore, &
         precip_units, default_season, wave_spec_type,nfreq, &
@@ -267,7 +269,11 @@ subroutine input_data
       npt_unit = '1'            ! units of npt 'y', 'm', 'd', 's', '1'
       diagfreq = 24             ! how often diag output is written
       debug_model = .false.     ! debug output
-      debug_model_step = 999999999 ! debug model after this step number
+      debug_model_step = 0      ! debug model after this step number
+      debug_model_i = -1        ! debug model local i index
+      debug_model_j = -1        ! debug model local j index
+      debug_model_iblk = -1     ! debug model local iblk number
+      debug_model_task = -1     ! debug model local task number
       print_points = .false.    ! if true, print point data
       print_global = .true.     ! if true, print global diagnostic data
       bfbflag = 'off'           ! off = optimized
@@ -291,12 +297,10 @@ subroutine input_data
       dumpfreq='y'              !
restart frequency option dumpfreq_n = 1 ! restart frequency dump_last = .false. ! write restart on last time step - restart = .false. ! if true, read restart files for initialization restart_dir = './' ! write to executable dir for default restart_file = 'iced' ! restart file name prefix restart_ext = .false. ! if true, read/write ghost cells restart_coszen = .false. ! if true, read/write coszen - use_restart_time = .true. ! if true, use time info written in file pointer_file = 'ice.restart_file' restart_format = 'default' ! restart file format lcdf64 = .false. ! 64 bit offset for netCDF @@ -380,6 +384,8 @@ subroutine input_data update_ocn_f = .false. ! include fresh water and salt fluxes for frazil ustar_min = 0.005 ! minimum friction velocity for ocean heat flux (m/s) iceruf = 0.0005_dbl_kind ! ice surface roughness at atmosphere interface (m) + iceruf_ocn = 0.03_dbl_kind ! under-ice roughness (m) + calc_dragio = .false. ! compute dragio from iceruf_ocn and thickness of first ocean level emissivity = 0.985 ! emissivity of snow and ice l_mpond_fresh = .false. ! logical switch for including meltpond freshwater ! flux feedback to ocean model @@ -436,7 +442,7 @@ subroutine input_data restore_ocn = .false. ! restore sst if true trestore = 90 ! restoring timescale, days (0 instantaneous) restore_ice = .false. ! restore ice state on grid edges if true - forcing_diag = .false. ! true writes diagnostics for input forcing + debug_forcing = .false. ! true writes diagnostics for input forcing latpnt(1) = 90._dbl_kind ! latitude of diagnostic point 1 (deg) lonpnt(1) = 0._dbl_kind ! longitude of point 1 (deg) @@ -446,6 +452,8 @@ subroutine input_data #ifndef CESMCOUPLED runid = 'unknown' ! run ID used in CESM and for machine 'bering' runtype = 'initial' ! run type: 'initial', 'continue' + restart = .false. ! if true, read restart files for initialization + use_restart_time = .true. ! if true, use time info written in file #endif ! 
extra tracers
@@ -604,6 +612,10 @@ subroutine input_data
       call broadcast_scalar(diagfreq,          master_task)
       call broadcast_scalar(debug_model,       master_task)
       call broadcast_scalar(debug_model_step,  master_task)
+      call broadcast_scalar(debug_model_i,     master_task)
+      call broadcast_scalar(debug_model_j,     master_task)
+      call broadcast_scalar(debug_model_iblk,  master_task)
+      call broadcast_scalar(debug_model_task,  master_task)
       call broadcast_scalar(print_points,      master_task)
       call broadcast_scalar(print_global,      master_task)
       call broadcast_scalar(bfbflag,           master_task)
@@ -739,6 +751,8 @@ subroutine input_data
       call broadcast_scalar(l_mpond_fresh,     master_task)
       call broadcast_scalar(ustar_min,         master_task)
       call broadcast_scalar(iceruf,            master_task)
+      call broadcast_scalar(iceruf_ocn,        master_task)
+      call broadcast_scalar(calc_dragio,       master_task)
       call broadcast_scalar(emissivity,        master_task)
       call broadcast_scalar(fbot_xfer_type,    master_task)
       call broadcast_scalar(precip_units,      master_task)
@@ -758,7 +772,7 @@ subroutine input_data
       call broadcast_scalar(restore_ocn,       master_task)
       call broadcast_scalar(trestore,          master_task)
       call broadcast_scalar(restore_ice,       master_task)
-      call broadcast_scalar(forcing_diag,      master_task)
+      call broadcast_scalar(debug_forcing,     master_task)
       call broadcast_array (latpnt(1:2),       master_task)
       call broadcast_array (lonpnt(1:2),       master_task)
       call broadcast_scalar(runid,             master_task)
@@ -1543,6 +1557,15 @@ subroutine input_data
          endif
          write(nu_diag,1030) ' fbot_xfer_type   = ', trim(fbot_xfer_type),trim(tmpstr2)
          write(nu_diag,1000) ' ustar_min        = ', ustar_min,' : minimum value of ocean friction velocity'
+         if (calc_dragio) then
+            tmpstr2 = ' : dragio computed from iceruf_ocn'
+         else
+            tmpstr2 = ' : dragio hard-coded'
+         endif
+         write(nu_diag,1010) ' calc_dragio      = ', calc_dragio,trim(tmpstr2)
+         if (calc_dragio) then
+            write(nu_diag,1002) ' iceruf_ocn       = ', iceruf_ocn,' : under-ice roughness length'
+         endif

       if (tr_fsd) then
          write(nu_diag,1002) ' floediam         = ', floediam, ' constant floe diameter'
@@ -1649,6 +1672,10 @@ subroutine input_data
          write(nu_diag,1011) ' print_points     = ', print_points
          write(nu_diag,1011) ' debug_model      = ', debug_model
          write(nu_diag,1022) ' debug_model_step = ', debug_model_step
+         write(nu_diag,1021) ' debug_model_i    = ', debug_model_i
+         write(nu_diag,1021) ' debug_model_j    = ', debug_model_j
+         write(nu_diag,1021) ' debug_model_iblk = ', debug_model_iblk
+         write(nu_diag,1021) ' debug_model_task = ', debug_model_task
          write(nu_diag,1031) ' bfbflag          = ', trim(bfbflag)
          write(nu_diag,1021) ' numin            = ', numin
          write(nu_diag,1021) ' numax            = ', numax
@@ -1761,7 +1788,8 @@ subroutine input_data
           grid_type  /= 'rectangular' .and. &
           grid_type  /= 'cpom_grid'   .and. &
           grid_type  /= 'regional'    .and. &
-          grid_type  /= 'latlon' ) then
+          grid_type  /= 'latlon'      .and. &
+          grid_type  /= 'setmask' ) then
          if (my_task == master_task) write(nu_diag,*) subname//' ERROR: unknown grid_type=',trim(grid_type)
          abort_list = trim(abort_list)//":20"
       endif
@@ -1808,7 +1836,7 @@ subroutine input_data
          wave_spec_type_in = wave_spec_type, &
          wave_spec_in=wave_spec, nfreq_in=nfreq, &
          tfrz_option_in=tfrz_option, kalg_in=kalg, fbot_xfer_type_in=fbot_xfer_type, &
-         Pstar_in=Pstar, Cstar_in=Cstar, iceruf_in=iceruf, &
+         Pstar_in=Pstar, Cstar_in=Cstar, iceruf_in=iceruf, iceruf_ocn_in=iceruf_ocn, calc_dragio_in=calc_dragio, &
          sw_redist_in=sw_redist, sw_frac_in=sw_frac, sw_dtemp_in=sw_dtemp)
       call icepack_init_tracer_flags(tr_iage_in=tr_iage, tr_FY_in=tr_FY, &
         tr_lvl_in=tr_lvl, tr_iso_in=tr_iso, tr_aero_in=tr_aero, &
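The new options above land in two existing namelist groups, both visible in the declarations earlier in this file's hunks. An illustrative sketch of a debugging setup (the point location and step number are arbitrary examples, not recommendations; iceruf_ocn simply echoes the default set above):

    &setup_nml
       debug_model      = .true.   ! enable per-point print_state output
       debug_model_step = 10       ! begin printing at istep1 = 10 (example)
       debug_model_i    = 5        ! local i index of the debug point (example)
       debug_model_j    = 12       ! local j index of the debug point (example)
       debug_model_iblk = 1        ! local block number (example)
       debug_model_task = 0        ! task that owns the debug point (example)
    /
    &forcing_nml
       calc_dragio = .true.        ! derive dragio from iceruf_ocn
       iceruf_ocn  = 0.03          ! under-ice roughness length (m), the default above
    /

Leaving the four debug_model_[i,j,iblk,task] values at -1 falls back to the first diagnostic point, as implemented in the debug_ice hunk of ice_diagnostics.F90 above.
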
diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_broadcast.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_broadcast.F90
index 87c78f9df..7d221c65e 100644
--- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_broadcast.F90
+++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_broadcast.F90
@@ -8,9 +8,15 @@ module ice_broadcast
 ! author: Phil Jones, LANL
 ! Oct. 2004: Adapted from POP version by William H. Lipscomb, LANL

+#ifndef SERIAL_REMOVE_MPI
    use mpi   ! MPI Fortran module
+#endif
    use ice_kinds_mod
+#ifdef SERIAL_REMOVE_MPI
+   use ice_communicate, only: MPI_COMM_ICE
+#else
    use ice_communicate, only: mpiR8, mpir4, MPI_COMM_ICE
+#endif
    use ice_exit, only: abort_ice
    use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted

@@ -78,8 +84,12 @@ subroutine broadcast_scalar_dbl(scalar, root_pe)

!-----------------------------------------------------------------------

+#ifdef SERIAL_REMOVE_MPI
+   ! nothing to do
+#else
    call MPI_BCAST(scalar, 1, mpiR8, root_pe, MPI_COMM_ICE, ierr)
    call MPI_BARRIER(MPI_COMM_ICE, ierr)
+#endif

!-----------------------------------------------------------------------

@@ -110,8 +120,12 @@ subroutine broadcast_scalar_real(scalar, root_pe)

!-----------------------------------------------------------------------

+#ifdef SERIAL_REMOVE_MPI
+   ! nothing to do
+#else
    call MPI_BCAST(scalar, 1, mpiR4, root_pe, MPI_COMM_ICE, ierr)
    call MPI_BARRIER(MPI_COMM_ICE, ierr)
+#endif

!-----------------------------------------------------------------------

@@ -142,8 +156,12 @@ subroutine broadcast_scalar_int(scalar, root_pe)

!-----------------------------------------------------------------------

+#ifdef SERIAL_REMOVE_MPI
+   ! nothing to do
+#else
    call MPI_BCAST(scalar, 1, MPI_INTEGER, root_pe, MPI_COMM_ICE,ierr)
    call MPI_BARRIER(MPI_COMM_ICE, ierr)
+#endif

!-----------------------------------------------------------------------

@@ -176,6 +194,9 @@ subroutine broadcast_scalar_log(scalar, root_pe)

!-----------------------------------------------------------------------

+#ifdef SERIAL_REMOVE_MPI
+   ! nothing to do
+#else
    if (scalar) then
      itmp = 1
    else
      itmp = 0
    endif

    call MPI_BCAST(itmp, 1, MPI_INTEGER, root_pe, MPI_COMM_ICE, ierr)
    call MPI_BARRIER(MPI_COMM_ICE, ierr)

    if (itmp == 1) then
      scalar = .true.
    else
      scalar = .false.
    endif
+#endif

!-----------------------------------------------------------------------

@@ -222,10 +244,14 @@ subroutine broadcast_scalar_char(scalar, root_pe)

!-----------------------------------------------------------------------

+#ifdef SERIAL_REMOVE_MPI
+   !
nothing to do +#else clength = len(scalar) call MPI_BCAST(scalar, clength, MPI_CHARACTER, root_pe, MPI_COMM_ICE, ierr) call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif !-------------------------------------------------------------------- @@ -258,10 +284,14 @@ subroutine broadcast_array_dbl_1d(array, root_pe) !----------------------------------------------------------------------- +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else nelements = size(array) call MPI_BCAST(array, nelements, mpiR8, root_pe, MPI_COMM_ICE, ierr) call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif !----------------------------------------------------------------------- @@ -294,10 +324,14 @@ subroutine broadcast_array_real_1d(array, root_pe) !----------------------------------------------------------------------- +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else nelements = size(array) call MPI_BCAST(array, nelements, mpiR4, root_pe, MPI_COMM_ICE, ierr) call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif !----------------------------------------------------------------------- @@ -330,10 +364,14 @@ subroutine broadcast_array_int_1d(array, root_pe) !----------------------------------------------------------------------- +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else nelements = size(array) call MPI_BCAST(array, nelements, MPI_INTEGER, root_pe, MPI_COMM_ICE, ierr) call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif !----------------------------------------------------------------------- @@ -370,6 +408,9 @@ subroutine broadcast_array_log_1d(array, root_pe) !----------------------------------------------------------------------- +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else nelements = size(array) allocate(array_int(nelements)) @@ -390,6 +431,7 @@ subroutine broadcast_array_log_1d(array, root_pe) end where deallocate(array_int) +#endif !----------------------------------------------------------------------- @@ -422,10 +464,14 @@ subroutine broadcast_array_dbl_2d(array, root_pe) !----------------------------------------------------------------------- +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else nelements = size(array) call MPI_BCAST(array, nelements, mpiR8, root_pe, MPI_COMM_ICE, ierr) call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif !----------------------------------------------------------------------- @@ -458,10 +504,14 @@ subroutine broadcast_array_real_2d(array, root_pe) !----------------------------------------------------------------------- +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else nelements = size(array) call MPI_BCAST(array, nelements, mpiR4, root_pe, MPI_COMM_ICE, ierr) call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif !----------------------------------------------------------------------- @@ -494,10 +544,14 @@ subroutine broadcast_array_int_2d(array, root_pe) !----------------------------------------------------------------------- +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else nelements = size(array) call MPI_BCAST(array, nelements, MPI_INTEGER, root_pe, MPI_COMM_ICE, ierr) call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif !----------------------------------------------------------------------- @@ -534,6 +588,9 @@ subroutine broadcast_array_log_2d(array, root_pe) !----------------------------------------------------------------------- +#ifdef SERIAL_REMOVE_MPI + ! 
nothing to do +#else nelements = size(array) allocate(array_int(size(array,dim=1),size(array,dim=2))) @@ -554,6 +611,7 @@ subroutine broadcast_array_log_2d(array, root_pe) end where deallocate(array_int) +#endif !----------------------------------------------------------------------- @@ -586,10 +644,14 @@ subroutine broadcast_array_dbl_3d(array, root_pe) !----------------------------------------------------------------------- +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else nelements = size(array) call MPI_BCAST(array, nelements, mpiR8, root_pe, MPI_COMM_ICE, ierr) call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif !----------------------------------------------------------------------- @@ -622,10 +684,14 @@ subroutine broadcast_array_real_3d(array, root_pe) !----------------------------------------------------------------------- +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else nelements = size(array) call MPI_BCAST(array, nelements, mpiR4, root_pe, MPI_COMM_ICE, ierr) call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif !----------------------------------------------------------------------- @@ -658,10 +724,14 @@ subroutine broadcast_array_int_3d(array, root_pe) !----------------------------------------------------------------------- +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else nelements = size(array) call MPI_BCAST(array, nelements, MPI_INTEGER, root_pe, MPI_COMM_ICE, ierr) call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif !----------------------------------------------------------------------- @@ -698,6 +768,9 @@ subroutine broadcast_array_log_3d(array, root_pe) !----------------------------------------------------------------------- +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else nelements = size(array) allocate(array_int(size(array,dim=1), & size(array,dim=2), & @@ -720,6 +793,7 @@ subroutine broadcast_array_log_3d(array, root_pe) end where deallocate(array_int) +#endif !----------------------------------------------------------------------- diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 index 1c369ef93..00f427144 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 @@ -18,6 +18,7 @@ module ice_communicate public :: init_communicate, & get_num_procs, & + get_rank, & ice_barrier, & create_communicator @@ -121,6 +122,32 @@ function get_num_procs() end function get_num_procs +!*********************************************************************** + + function get_rank() + +! This function returns the number of processor assigned to +! MPI_COMM_ICE + + integer (int_kind) :: get_rank + +!----------------------------------------------------------------------- +! +! local variables +! 
+!----------------------------------------------------------------------- + + integer (int_kind) :: ierr + character(len=*), parameter :: subname = '(get_rank)' + +!----------------------------------------------------------------------- + + call MPI_COMM_RANK(MPI_COMM_ICE, get_rank, ierr) + +!----------------------------------------------------------------------- + + end function get_rank + !*********************************************************************** subroutine ice_barrier() diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 index 1d724fb39..0728ac105 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 @@ -74,7 +74,8 @@ module ice_global_reductions global_maxval_int, & global_maxval_scalar_dbl, & global_maxval_scalar_real, & - global_maxval_scalar_int + global_maxval_scalar_int, & + global_maxval_scalar_int_nodist end interface interface global_minval @@ -83,7 +84,8 @@ module ice_global_reductions global_minval_int, & global_minval_scalar_dbl, & global_minval_scalar_real, & - global_minval_scalar_int + global_minval_scalar_int, & + global_minval_scalar_int_nodist end interface !*********************************************************************** @@ -1683,6 +1685,56 @@ function global_maxval_scalar_int (scalar, dist) & end function global_maxval_scalar_int +!*********************************************************************** + + function global_maxval_scalar_int_nodist (scalar, communicator) & + result(globalMaxval) + +! Computes the global maximum value of a scalar integer across +! a communicator. This method supports testing. +! +! This is actually the specific interface for the generic global_maxval +! function corresponding to integer scalars. + + integer (int_kind), intent(in) :: & + scalar ! scalar for which max value needed + + integer (int_kind), intent(in) :: & + communicator ! mpi communicator + + integer (int_kind) :: & + globalMaxval ! resulting maximum value + +!----------------------------------------------------------------------- +! +! local variables +! +!----------------------------------------------------------------------- + + integer (int_kind) :: & + ierr ! mpi error flag + + character(len=*), parameter :: subname = '(global_maxval_scalar_int_nodist)' + +!----------------------------------------------------------------------- + +!----------------------------------------------------------------------- +! +! now use MPI global reduction to reduce local maxval to global maxval +! +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + globalMaxval = scalar +#else + call MPI_ALLREDUCE(scalar, globalMaxval, 1, & + MPI_INTEGER, MPI_MAX, communicator, ierr) +#endif + +!----------------------------------------------------------------------- + + end function global_maxval_scalar_int_nodist + !*********************************************************************** function global_minval_dbl (array, dist, lMask) & @@ -2179,6 +2231,55 @@ function global_minval_scalar_int (scalar, dist) & end function global_minval_scalar_int !*********************************************************************** + + function global_minval_scalar_int_nodist (scalar, communicator) & + result(globalMinval) + +! Computes the global minimum value of a scalar integer across +! a communicator. This method supports testing. +! +! 
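Unlike the existing scalar reductions, the new *_nodist specifics take a bare MPI communicator rather than a distrb decomposition, so they can be exercised from tests before any block distribution exists. A hedged usage sketch (the test subroutine is illustrative, not part of the PR; the generic names resolve to the _nodist specifics because the second argument is an integer, not a distrb):

      subroutine test_nodist_reductions
        use ice_kinds_mod, only: int_kind
        use ice_communicate, only: my_task, MPI_COMM_ICE
        use ice_global_reductions, only: global_maxval, global_minval
        integer (int_kind) :: gmax, gmin
        ! with N tasks, every rank should obtain gmax = N-1 and gmin = 0
        gmax = global_maxval(my_task, MPI_COMM_ICE)
        gmin = global_minval(my_task, MPI_COMM_ICE)
      end subroutine test_nodist_reductions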
This is actually the specific interface for the generic global_minval +! function corresponding to integer scalars. + + integer (int_kind), intent(in) :: & + scalar ! scalar for which min value needed + + integer (int_kind), intent(in) :: & + communicator ! mpi communicator + + integer (int_kind) :: & + globalMinval ! resulting minimum value + +!----------------------------------------------------------------------- +! +! local variables +! +!----------------------------------------------------------------------- + + integer (int_kind) :: & + ierr ! mpi error flag + + character(len=*), parameter :: subname = '(global_minval_scalar_int_nodist)' + +!----------------------------------------------------------------------- + +!----------------------------------------------------------------------- +! +! now use MPI global reduction to reduce local minval to global minval +! +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + globalMinval = scalar +#else + call MPI_ALLREDUCE(scalar, globalMinval, 1, & + MPI_INTEGER, MPI_MIN, communicator, ierr) +#endif + +!----------------------------------------------------------------------- + + end function global_minval_scalar_int_nodist + !*********************************************************************** subroutine compute_sums_dbl(array2,sums8,mpicomm,numprocs) @@ -2192,7 +2293,7 @@ subroutine compute_sums_dbl(array2,sums8,mpicomm,numprocs) ! lsum16 = local sum with real*16 and scalar mpi allreduce, likely to be bfb ! WARNING: this does not work in several compilers and mpi ! implementations due to support for quad precision and consistency -! between underlying datatype in fortran and c. The source code +! between underlying datatypes in fortran and c. The source code ! can be turned off with a cpp NO_R16. Otherwise, it is recommended ! that the results be validated on any platform where it might be used. ! reprosum = fixed point method based on ordered double integer sums. @@ -2226,10 +2327,9 @@ subroutine compute_sums_dbl(array2,sums8,mpicomm,numprocs) real (real_kind), allocatable :: psums4(:) real (real_kind), allocatable :: sums4(:) real (dbl_kind) , allocatable :: psums8(:) -#ifndef NO_R16 + ! if r16 is not available (NO_R16), then r16 reverts to double precision (r8) real (r16_kind) , allocatable :: psums16(:) real (r16_kind) , allocatable :: sums16(:) -#endif integer (int_kind) :: ns,nf,i,j, ierr @@ -2261,7 +2361,7 @@ subroutine compute_sums_dbl(array2,sums8,mpicomm,numprocs) deallocate(psums8) -#ifndef NO_R16 + ! if no_r16 is set, this will revert to a double precision calculation like lsum8 elseif (bfbflag == 'lsum16') then allocate(psums16(nf)) psums16(:) = 0._r16_kind @@ -2284,7 +2384,6 @@ subroutine compute_sums_dbl(array2,sums8,mpicomm,numprocs) sums8 = real(sums16,dbl_kind) deallocate(psums16,sums16) -#endif elseif (bfbflag == 'lsum4') then allocate(psums4(nf)) diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_broadcast.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_broadcast.F90 index 8532f23b7..75d0be4ca 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_broadcast.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_broadcast.F90 @@ -1,16 +1,23 @@ !||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||| +#define SERIAL_REMOVE_MPI module ice_broadcast ! This module contains all the broadcast routines. This -! particular version contains serial versions of these routines -! 
which typically perform no operations since there is no need -! to broadcast what is already known. +! particular version contains MPI versions of these routines; the MPI calls are compiled out when SERIAL_REMOVE_MPI is defined. ! ! author: Phil Jones, LANL ! Oct. 2004: Adapted from POP version by William H. Lipscomb, LANL +#ifndef SERIAL_REMOVE_MPI + use mpi ! MPI Fortran module +#endif use ice_kinds_mod +#ifdef SERIAL_REMOVE_MPI + use ice_communicate, only: MPI_COMM_ICE +#else + use ice_communicate, only: mpiR8, mpiR4, MPI_COMM_ICE +#endif use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted @@ -67,19 +74,31 @@ subroutine broadcast_scalar_dbl(scalar, root_pe) real (dbl_kind), intent(inout) :: & scalar ! scalar to be broadcast - character(len=*), parameter :: subname = '(broadcast_scalar_dbl)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! !----------------------------------------------------------------------- - - end subroutine broadcast_scalar_dbl + + integer (int_kind) :: ierr ! local MPI error flag + character(len=*), parameter :: subname = '(broadcast_scalar_dbl)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else + call MPI_BCAST(scalar, 1, mpiR8, root_pe, MPI_COMM_ICE, ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif + +!----------------------------------------------------------------------- + +end subroutine broadcast_scalar_dbl !*********************************************************************** - subroutine broadcast_scalar_real(scalar, root_pe) +subroutine broadcast_scalar_real(scalar, root_pe) ! Broadcasts a scalar real variable from one processor (root_pe) ! to all other processors. This is a specific instance of the generic @@ -91,19 +110,31 @@ subroutine broadcast_scalar_real(scalar, root_pe) real (real_kind), intent(inout) :: & scalar ! scalar to be broadcast - character(len=*), parameter :: subname = '(broadcast_scalar_real)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! +!----------------------------------------------------------------------- + + integer (int_kind) :: ierr ! local MPI error flag + character(len=*), parameter :: subname = '(broadcast_scalar_real)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else + call MPI_BCAST(scalar, 1, mpiR4, root_pe, MPI_COMM_ICE, ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif + !----------------------------------------------------------------------- end subroutine broadcast_scalar_real !*********************************************************************** - subroutine broadcast_scalar_int(scalar, root_pe) +subroutine broadcast_scalar_int(scalar, root_pe) ! Broadcasts a scalar integer variable from one processor (root_pe) ! to all other processors. This is a specific instance of the generic @@ -115,19 +146,31 @@ subroutine broadcast_scalar_int(scalar, root_pe) integer (int_kind), intent(inout) :: & scalar ! scalar to be broadcast - character(len=*), parameter :: subname = '(broadcast_scalar_int)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! +!----------------------------------------------------------------------- + + integer (int_kind) :: ierr ! 
local MPI error flag + character(len=*), parameter :: subname = '(broadcast_scalar_int)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else + call MPI_BCAST(scalar, 1, MPI_INTEGER, root_pe, MPI_COMM_ICE,ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif + !----------------------------------------------------------------------- end subroutine broadcast_scalar_int !*********************************************************************** - subroutine broadcast_scalar_log(scalar, root_pe) +subroutine broadcast_scalar_log(scalar, root_pe) ! Broadcasts a scalar logical variable from one processor (root_pe) ! to all other processors. This is a specific instance of the generic @@ -139,19 +182,45 @@ subroutine broadcast_scalar_log(scalar, root_pe) logical (log_kind), intent(inout) :: & scalar ! scalar to be broadcast - character(len=*), parameter :: subname = '(broadcast_scalar_log)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! +!----------------------------------------------------------------------- + + integer (int_kind) :: & + itmp, &! local temporary + ierr ! MPI error flag + character(len=*), parameter :: subname = '(broadcast_scalar_log)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else + if (scalar) then + itmp = 1 + else + itmp = 0 + endif + + call MPI_BCAST(itmp, 1, MPI_INTEGER, root_pe, MPI_COMM_ICE, ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) + + if (itmp == 1) then + scalar = .true. + else + scalar = .false. + endif +#endif + !----------------------------------------------------------------------- end subroutine broadcast_scalar_log !*********************************************************************** - subroutine broadcast_scalar_char(scalar, root_pe) +subroutine broadcast_scalar_char(scalar, root_pe) ! Broadcasts a scalar character variable from one processor (root_pe) ! to all other processors. This is a specific instance of the generic @@ -163,19 +232,35 @@ subroutine broadcast_scalar_char(scalar, root_pe) character (*), intent(inout) :: & scalar ! scalar to be broadcast - character(len=*), parameter :: subname = '(broadcast_scalar_char)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! !----------------------------------------------------------------------- + integer (int_kind) :: & + clength, &! length of character + ierr ! MPI error flag + character(len=*), parameter :: subname = '(broadcast_scalar_char)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else + clength = len(scalar) + + call MPI_BCAST(scalar, clength, MPI_CHARACTER, root_pe, MPI_COMM_ICE, ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif + +!-------------------------------------------------------------------- + end subroutine broadcast_scalar_char !*********************************************************************** - subroutine broadcast_array_dbl_1d(array, root_pe) +subroutine broadcast_array_dbl_1d(array, root_pe) ! Broadcasts a vector dbl variable from one processor (root_pe) ! to all other processors. This is a specific instance of the generic @@ -187,19 +272,35 @@ subroutine broadcast_array_dbl_1d(array, root_pe) real (dbl_kind), dimension(:), intent(inout) :: & array ! 
array to be broadcast - character(len=*), parameter :: subname = '(broadcast_array_dbl_1d)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! +!----------------------------------------------------------------------- + + integer (int_kind) :: & + nelements, &! size of array + ierr ! local MPI error flag + character(len=*), parameter :: subname = '(broadcast_array_dbl_1d)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else + nelements = size(array) + + call MPI_BCAST(array, nelements, mpiR8, root_pe, MPI_COMM_ICE, ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif + !----------------------------------------------------------------------- end subroutine broadcast_array_dbl_1d !*********************************************************************** - subroutine broadcast_array_real_1d(array, root_pe) +subroutine broadcast_array_real_1d(array, root_pe) ! Broadcasts a real vector from one processor (root_pe) ! to all other processors. This is a specific instance of the generic @@ -211,19 +312,35 @@ subroutine broadcast_array_real_1d(array, root_pe) real (real_kind), dimension(:), intent(inout) :: & array ! array to be broadcast - character(len=*), parameter :: subname = '(broadcast_array_real_1d)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! +!----------------------------------------------------------------------- + + integer (int_kind) :: & + nelements, &! size of array to be broadcast + ierr ! local MPI error flag + character(len=*), parameter :: subname = '(broadcast_array_real_1d)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else + nelements = size(array) + + call MPI_BCAST(array, nelements, mpiR4, root_pe, MPI_COMM_ICE, ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif + !----------------------------------------------------------------------- end subroutine broadcast_array_real_1d !*********************************************************************** - subroutine broadcast_array_int_1d(array, root_pe) +subroutine broadcast_array_int_1d(array, root_pe) ! Broadcasts an integer vector from one processor (root_pe) ! to all other processors. This is a specific instance of the generic @@ -235,19 +352,35 @@ subroutine broadcast_array_int_1d(array, root_pe) integer (int_kind), dimension(:), intent(inout) :: & array ! array to be broadcast - character(len=*), parameter :: subname = '(broadcast_array_int_1d)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! +!----------------------------------------------------------------------- + + integer (int_kind) :: & + nelements, &! size of array to be broadcast + ierr ! local MPI error flag + character(len=*), parameter :: subname = '(broadcast_array_int_1d)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! 
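For serial builds, defining SERIAL_REMOVE_MPI makes the preprocessor reduce each of these wrappers to an empty routine with an unchanged interface. Roughly what the compiler then sees for the double-precision scalar case (a sketch of the preprocessed result, renamed here so it does not clash with the real routine):

      subroutine broadcast_scalar_dbl_sketch(scalar, root_pe)
        use ice_kinds_mod, only: dbl_kind, int_kind
        integer (int_kind), intent(in) :: root_pe ! unused in serial builds
        real (dbl_kind), intent(inout) :: scalar
        ! nothing to do: the single task already holds the value
      end subroutine broadcast_scalar_dbl_sketch

Keeping the MPI and serial variants in one source file removes the risk of the two copies drifting apart, at the cost of the cpp guards seen throughout these hunks.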
nothing to do +#else + nelements = size(array) + + call MPI_BCAST(array, nelements, MPI_INTEGER, root_pe, MPI_COMM_ICE, ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif + !----------------------------------------------------------------------- end subroutine broadcast_array_int_1d !*********************************************************************** - subroutine broadcast_array_log_1d(array, root_pe) +subroutine broadcast_array_log_1d(array, root_pe) ! Broadcasts a logical vector from one processor (root_pe) ! to all other processors. This is a specific instance of the generic @@ -259,12 +392,48 @@ subroutine broadcast_array_log_1d(array, root_pe) logical (log_kind), dimension(:), intent(inout) :: & array ! array to be broadcast - character(len=*), parameter :: subname = '(broadcast_array_log_1d)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! +!----------------------------------------------------------------------- + + integer (int_kind), dimension(:), allocatable :: & + array_int ! temporary array for MPI bcast + + integer (int_kind) :: & + nelements, &! size of array to be broadcast + ierr ! local MPI error flag + + character(len=*), parameter :: subname = '(broadcast_array_log_1d)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else + nelements = size(array) + allocate(array_int(nelements)) + + where (array) + array_int = 1 + elsewhere + array_int = 0 + end where + + call MPI_BCAST(array_int, nelements, MPI_INTEGER, root_pe, & + MPI_COMM_ICE, ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) + + where (array_int == 1) + array = .true. + elsewhere + array = .false. + end where + + deallocate(array_int) +#endif + !----------------------------------------------------------------------- end subroutine broadcast_array_log_1d @@ -283,12 +452,28 @@ subroutine broadcast_array_dbl_2d(array, root_pe) real (dbl_kind), dimension(:,:), intent(inout) :: & array ! array to be broadcast - character(len=*), parameter :: subname = '(broadcast_array_dbl_2d)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! +!----------------------------------------------------------------------- + + integer (int_kind) :: & + nelements, &! size of array + ierr ! local MPI error flag + character(len=*), parameter :: subname = '(broadcast_array_dbl_2d)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else + nelements = size(array) + + call MPI_BCAST(array, nelements, mpiR8, root_pe, MPI_COMM_ICE, ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif + !----------------------------------------------------------------------- end subroutine broadcast_array_dbl_2d @@ -307,12 +492,28 @@ subroutine broadcast_array_real_2d(array, root_pe) real (real_kind), dimension(:,:), intent(inout) :: & array ! array to be broadcast - character(len=*), parameter :: subname = '(broadcast_array_real_2d)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! +!----------------------------------------------------------------------- + + integer (int_kind) :: & + nelements, &! size of array to be broadcast + ierr ! 
local MPI error flag + character(len=*), parameter :: subname = '(broadcast_array_real_2d)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else + nelements = size(array) + + call MPI_BCAST(array, nelements, mpiR4, root_pe, MPI_COMM_ICE, ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif + !----------------------------------------------------------------------- end subroutine broadcast_array_real_2d @@ -331,12 +532,28 @@ subroutine broadcast_array_int_2d(array, root_pe) integer (int_kind), dimension(:,:), intent(inout) :: & array ! array to be broadcast - character(len=*), parameter :: subname = '(broadcast_array_int_2d)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! +!----------------------------------------------------------------------- + + integer (int_kind) :: & + nelements, &! size of array to be broadcast + ierr ! local MPI error flag + character(len=*), parameter :: subname = '(broadcast_array_int_2d)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else + nelements = size(array) + + call MPI_BCAST(array, nelements, MPI_INTEGER, root_pe, MPI_COMM_ICE, ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif + !----------------------------------------------------------------------- end subroutine broadcast_array_int_2d @@ -355,12 +572,48 @@ subroutine broadcast_array_log_2d(array, root_pe) logical (log_kind), dimension(:,:), intent(inout) :: & array ! array to be broadcast - character(len=*), parameter :: subname = '(broadcast_array_log_2d)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! +!----------------------------------------------------------------------- + + integer (int_kind), dimension(:,:), allocatable :: & + array_int ! temporary array for MPI bcast + + integer (int_kind) :: & + nelements, &! size of array to be broadcast + ierr ! local MPI error flag + + character(len=*), parameter :: subname = '(broadcast_array_log_2d)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else + nelements = size(array) + allocate(array_int(size(array,dim=1),size(array,dim=2))) + + where (array) + array_int = 1 + elsewhere + array_int = 0 + end where + + call MPI_BCAST(array_int, nelements, MPI_INTEGER, root_pe, & + MPI_COMM_ICE, ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) + + where (array_int == 1) + array = .true. + elsewhere + array = .false. + end where + + deallocate(array_int) +#endif + !----------------------------------------------------------------------- end subroutine broadcast_array_log_2d @@ -379,12 +632,28 @@ subroutine broadcast_array_dbl_3d(array, root_pe) real (dbl_kind), dimension(:,:,:), intent(inout) :: & array ! array to be broadcast - character(len=*), parameter :: subname = '(broadcast_array_dbl_3d)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! +!----------------------------------------------------------------------- + + integer (int_kind) :: & + nelements, &! size of array + ierr ! 
local MPI error flag + character(len=*), parameter :: subname = '(broadcast_array_dbl_3d)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else + nelements = size(array) + + call MPI_BCAST(array, nelements, mpiR8, root_pe, MPI_COMM_ICE, ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif + !----------------------------------------------------------------------- end subroutine broadcast_array_dbl_3d @@ -403,12 +672,28 @@ subroutine broadcast_array_real_3d(array, root_pe) real (real_kind), dimension(:,:,:), intent(inout) :: & array ! array to be broadcast - character(len=*), parameter :: subname = '(broadcast_array_real_3d)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! +!----------------------------------------------------------------------- + + integer (int_kind) :: & + nelements, &! size of array to be broadcast + ierr ! local MPI error flag + character(len=*), parameter :: subname = '(broadcast_array_real_3d)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else + nelements = size(array) + + call MPI_BCAST(array, nelements, mpiR4, root_pe, MPI_COMM_ICE, ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif + !----------------------------------------------------------------------- end subroutine broadcast_array_real_3d @@ -427,12 +712,28 @@ subroutine broadcast_array_int_3d(array, root_pe) integer (int_kind), dimension(:,:,:), intent(inout) :: & array ! array to be broadcast - character(len=*), parameter :: subname = '(broadcast_array_int_3d)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! +!----------------------------------------------------------------------- + + integer (int_kind) :: & + nelements, &! size of array to be broadcast + ierr ! local MPI error flag + character(len=*), parameter :: subname = '(broadcast_array_int_3d)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! nothing to do +#else + nelements = size(array) + + call MPI_BCAST(array, nelements, MPI_INTEGER, root_pe, MPI_COMM_ICE, ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) +#endif + !----------------------------------------------------------------------- end subroutine broadcast_array_int_3d @@ -451,12 +752,50 @@ subroutine broadcast_array_log_3d(array, root_pe) logical (log_kind), dimension(:,:,:), intent(inout) :: & array ! array to be broadcast - character(len=*), parameter :: subname = '(broadcast_array_log_3d)' - !----------------------------------------------------------------------- ! -! for serial codes, nothing is required +! local variables ! +!----------------------------------------------------------------------- + + integer (int_kind), dimension(:,:,:), allocatable :: & + array_int ! temporary array for MPI bcast + + integer (int_kind) :: & + nelements, &! size of array to be broadcast + ierr ! local MPI error flag + + character(len=*), parameter :: subname = '(broadcast_array_log_3d)' + +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + ! 
nothing to do +#else + nelements = size(array) + allocate(array_int(size(array,dim=1), & + size(array,dim=2), & + size(array,dim=3))) + + where (array) + array_int = 1 + elsewhere + array_int = 0 + end where + + call MPI_BCAST(array_int, nelements, MPI_INTEGER, root_pe, & + MPI_COMM_ICE, ierr) + call MPI_BARRIER(MPI_COMM_ICE, ierr) + + where (array_int == 1) + array = .true. + elsewhere + array = .false. + end where + + deallocate(array_int) +#endif + !----------------------------------------------------------------------- end subroutine broadcast_array_log_3d diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 index c9df264dd..ed11aafec 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 @@ -18,6 +18,7 @@ module ice_communicate public :: init_communicate, & get_num_procs, & + get_rank, & ice_barrier, & create_communicator @@ -85,6 +86,29 @@ function get_num_procs() end function get_num_procs +!*********************************************************************** + + function get_rank() + +! This function returns the rank of the local task, which is +! always zero for serial runs. + + integer (int_kind) :: get_rank + + character(len=*), parameter :: subname = '(get_rank)' + +!----------------------------------------------------------------------- +! +! serial execution, rank is always 0 +! +!----------------------------------------------------------------------- + + get_rank = 0 + +!----------------------------------------------------------------------- + + end function get_rank + !*********************************************************************** subroutine ice_barrier() diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 index 4d53e873e..a024698d5 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 @@ -75,7 +75,8 @@ module ice_global_reductions global_maxval_int, & global_maxval_scalar_dbl, & global_maxval_scalar_real, & - global_maxval_scalar_int + global_maxval_scalar_int, & + global_maxval_scalar_int_nodist end interface interface global_minval @@ -84,7 +85,8 @@ module ice_global_reductions global_minval_int, & global_minval_scalar_dbl, & global_minval_scalar_real, & - global_minval_scalar_int + global_minval_scalar_int, & + global_minval_scalar_int_nodist end interface !*********************************************************************** @@ -1684,6 +1686,56 @@ function global_maxval_scalar_int (scalar, dist) & end function global_maxval_scalar_int +!*********************************************************************** + + function global_maxval_scalar_int_nodist (scalar, communicator) & + result(globalMaxval) + +! Computes the global maximum value of a scalar integer across +! a communicator. This method supports testing. +! +! This is actually the specific interface for the generic global_maxval +! function corresponding to integer scalars. + + integer (int_kind), intent(in) :: & + scalar ! scalar for which max value needed + + integer (int_kind), intent(in) :: & + communicator ! mpi communicator + + integer (int_kind) :: & + globalMaxval ! resulting maximum value + +!----------------------------------------------------------------------- +! +! local variables +! 
+!----------------------------------------------------------------------- + + integer (int_kind) :: & + ierr ! mpi error flag + + character(len=*), parameter :: subname = '(global_maxval_scalar_int_nodist)' + +!----------------------------------------------------------------------- + +!----------------------------------------------------------------------- +! +! now use MPI global reduction to reduce local maxval to global maxval +! +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + globalMaxval = scalar +#else + call MPI_ALLREDUCE(scalar, globalMaxval, 1, & + MPI_INTEGER, MPI_MAX, communicator, ierr) +#endif + +!----------------------------------------------------------------------- + + end function global_maxval_scalar_int_nodist + !*********************************************************************** function global_minval_dbl (array, dist, lMask) & @@ -2180,6 +2232,55 @@ function global_minval_scalar_int (scalar, dist) & end function global_minval_scalar_int !*********************************************************************** + + function global_minval_scalar_int_nodist (scalar, communicator) & + result(globalMinval) + +! Computes the global minimum value of a scalar integer across +! a communicator. This method supports testing. + +! +! This is actually the specific interface for the generic global_minval +! function corresponding to integer scalars. + + integer (int_kind), intent(in) :: & + scalar ! scalar for which min value needed + + integer (int_kind), intent(in) :: & + communicator ! mpi communicator + + integer (int_kind) :: & + globalMinval ! resulting minimum value + +!----------------------------------------------------------------------- +! +! local variables +! +!----------------------------------------------------------------------- + + integer (int_kind) :: & + ierr ! mpi error flag + + character(len=*), parameter :: subname = '(global_minval_scalar_int_nodist)' + +!----------------------------------------------------------------------- + +!----------------------------------------------------------------------- +! +! now use MPI global reduction to reduce local minval to global minval +! +!----------------------------------------------------------------------- + +#ifdef SERIAL_REMOVE_MPI + globalMinval = scalar +#else + call MPI_ALLREDUCE(scalar, globalMinval, 1, & + MPI_INTEGER, MPI_MIN, communicator, ierr) +#endif + +!----------------------------------------------------------------------- + + end function global_minval_scalar_int_nodist + !*********************************************************************** subroutine compute_sums_dbl(array2,sums8,mpicomm,numprocs) @@ -2193,7 +2294,7 @@ subroutine compute_sums_dbl(array2,sums8,mpicomm,numprocs) ! lsum16 = local sum with real*16 and scalar mpi allreduce, likely to be bfb ! WARNING: this does not work in several compilers and mpi ! implementations due to support for quad precision and consistency -! between underlying datatype in fortran and c. The source code +! between underlying datatypes in fortran and c. The source code ! can be turned off with a cpp NO_R16. Otherwise, it is recommended ! that the results be validated on any platform where it might be used. ! reprosum = fixed point method based on ordered double integer sums. @@ -2227,10 +2328,9 @@ subroutine compute_sums_dbl(array2,sums8,mpicomm,numprocs) real (real_kind), allocatable :: psums4(:) real (real_kind), allocatable :: sums4(:) real (dbl_kind) , allocatable :: psums8(:) -#ifndef NO_R16 + ! 
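The "r16 reverts to double precision" behavior referenced in the surrounding comments is not implemented in this routine; it has to come from the kinds module that defines r16_kind. A minimal sketch of how such a fallback can be wired (the module name and kind values are assumptions, not verbatim CICE code):

      module kinds_fallback_sketch
        implicit none
#ifdef NO_R16
        ! no usable quad precision: r16_kind degrades to double (r8)
        integer, parameter :: r16_kind = selected_real_kind(13)
#else
        integer, parameter :: r16_kind = selected_real_kind(26)
#endif
      end module kinds_fallback_sketch

With that arrangement, the lsum16 branch below compiles unconditionally and simply reproduces lsum8 arithmetic when NO_R16 is set, which is why the cpp guards around the psums16/sums16 declarations could be removed.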
if r16 is not available (NO_R16), then r16 reverts to double precision (r8) real (r16_kind) , allocatable :: psums16(:) real (r16_kind) , allocatable :: sums16(:) -#endif integer (int_kind) :: ns,nf,i,j, ierr @@ -2262,7 +2362,7 @@ subroutine compute_sums_dbl(array2,sums8,mpicomm,numprocs) deallocate(psums8) -#ifndef NO_R16 + ! if no_r16 is set, this will revert to a double precision calculation like lsum8 elseif (bfbflag == 'lsum16') then allocate(psums16(nf)) psums16(:) = 0._r16_kind @@ -2285,7 +2385,6 @@ subroutine compute_sums_dbl(array2,sums8,mpicomm,numprocs) sums8 = real(sums16,dbl_kind) deallocate(psums16,sums16) -#endif elseif (bfbflag == 'lsum4') then allocate(psums4(nf)) diff --git a/cicecore/cicedynB/infrastructure/ice_blocks.F90 b/cicecore/cicedynB/infrastructure/ice_blocks.F90 index 2768a40c3..74aba9cb5 100644 --- a/cicecore/cicedynB/infrastructure/ice_blocks.F90 +++ b/cicecore/cicedynB/infrastructure/ice_blocks.F90 @@ -316,10 +316,11 @@ subroutine create_blocks(nx_global, ny_global, ew_boundary_type, & if (my_task == master_task) then write(nu_diag,*) 'block i,j locations' do n = 1, nblocks_tot - write(nu_diag,*) 'block id, iblock, jblock:', & + write(nu_diag,*) 'block id, iblock, jblock, tripole:', & all_blocks(n)%block_id, & all_blocks(n)%iblock, & - all_blocks(n)%jblock + all_blocks(n)%jblock, & + all_blocks(n)%tripole enddo endif endif diff --git a/cicecore/cicedynB/infrastructure/ice_grid.F90 b/cicecore/cicedynB/infrastructure/ice_grid.F90 index 2304877d2..2124bbebe 100644 --- a/cicecore/cicedynB/infrastructure/ice_grid.F90 +++ b/cicecore/cicedynB/infrastructure/ice_grid.F90 @@ -34,13 +34,13 @@ module ice_grid use ice_exit, only: abort_ice use ice_global_reductions, only: global_minval, global_maxval use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_query_parameters + use icepack_intfc, only: icepack_query_parameters, icepack_init_parameters implicit none private public :: init_grid1, init_grid2, & t2ugrid_vector, u2tgrid_vector, & - to_ugrid, to_tgrid, alloc_grid + to_ugrid, to_tgrid, alloc_grid, makemask character (len=char_len_long), public :: & grid_format , & ! file format ('bin'=binary or 'nc'=netcdf) @@ -247,6 +247,16 @@ subroutine init_grid1 allocate(work_g1(nx_global,ny_global)) allocate(work_g2(nx_global,ny_global)) + ! check tripole flags here + ! can't check in init_data because ns_boundary_type is not yet read + ! can't check in init_domain_blocks because grid_type is not accessible due to circular logic + + if (grid_type == 'tripole' .and. ns_boundary_type /= 'tripole' .and. & + ns_boundary_type /= 'tripoleT') then + call abort_ice(subname//'ERROR: grid_type tripole needs tripole ns_boundary_type', & + file=__FILE__, line=__LINE__) + endif + if (trim(grid_type) == 'displaced_pole' .or. & trim(grid_type) == 'tripole' .or. & trim(grid_type) == 'regional' ) then @@ -1160,7 +1170,6 @@ subroutine latlongrid end subroutine latlongrid #endif - !======================================================================= ! Regular rectangular grid and mask @@ -2362,6 +2371,9 @@ subroutine get_bathymetry real (kind=dbl_kind) :: & puny + logical (kind=log_kind) :: & + calc_dragio + real (kind=dbl_kind), dimension(nlevel), parameter :: & thick = (/ & ! 
ocean layer thickness, m 10.01244_dbl_kind, 10.11258_dbl_kind, 10.31682_dbl_kind, & @@ -2381,7 +2393,7 @@ subroutine get_bathymetry character(len=*), parameter :: subname = '(get_bathymetry)' - call icepack_query_parameters(puny_out=puny) + call icepack_query_parameters(puny_out=puny, calc_dragio_out=calc_dragio) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -2408,6 +2420,14 @@ subroutine get_bathymetry enddo enddo + ! For consistency, set thickness_ocn_layer1 in Icepack if 'calc_dragio' is active + if (calc_dragio) then + call icepack_init_parameters(thickness_ocn_layer1_in=thick(1)) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + endif + endif ! bathymetry_file end subroutine get_bathymetry @@ -2431,6 +2451,9 @@ subroutine get_bathymetry_popfile depth , & ! total depth, m thick ! layer thickness, cm -> m + logical (kind=log_kind) :: & + calc_dragio + character(len=*), parameter :: subname = '(get_bathymetry_popfile)' ntmp = maxval(nint(KMT)) @@ -2500,6 +2523,15 @@ subroutine get_bathymetry_popfile enddo enddo + ! For consistency, set thickness_ocn_layer1 in Icepack if 'calc_dragio' is active + call icepack_query_parameters(calc_dragio_out=calc_dragio) + if (calc_dragio) then + call icepack_init_parameters(thickness_ocn_layer1_in=thick(1)) + endif + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + deallocate(depth,thick) end subroutine get_bathymetry_popfile diff --git a/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 index 2fdb170f1..a246ed036 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 @@ -39,7 +39,7 @@ subroutine CICE_Finalize !------------------------------------------------------------------- call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message="subname", & + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__,line= __LINE__) call ice_timer_stop(timer_total) ! 
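Both bathymetry paths in the ice_grid hunks above now push the first ocean level thickness into Icepack whenever calc_dragio is active, using the same query / conditional init / warning-flush handshake. That handshake, condensed into a self-contained sketch built from the calls shown above (the wrapper subroutine itself is illustrative):

      subroutine set_ocn_layer1_sketch(thick1)
        use ice_kinds_mod, only: dbl_kind, log_kind
        use ice_fileunits, only: nu_diag
        use ice_exit, only: abort_ice
        use icepack_intfc, only: icepack_query_parameters, icepack_init_parameters, &
            icepack_warnings_flush, icepack_warnings_aborted
        real (kind=dbl_kind), intent(in) :: thick1 ! first ocean level thickness, m
        logical (kind=log_kind) :: calc_dragio
        character(len=*), parameter :: subname = '(set_ocn_layer1_sketch)'
        ! query the flag, conditionally push the thickness, then flush warnings
        call icepack_query_parameters(calc_dragio_out=calc_dragio)
        if (calc_dragio) then
           call icepack_init_parameters(thickness_ocn_layer1_in=thick1)
        endif
        call icepack_warnings_flush(nu_diag)
        if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
           file=__FILE__, line=__LINE__)
      end subroutine set_ocn_layer1_sketch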
stop timing entire run diff --git a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 index 5f91ed584..b2a0e3cd1 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 @@ -64,8 +64,8 @@ subroutine cice_init ocean_bio_all, ice_bio_net, snow_bio_net, alloc_arrays_column use ice_arrays_column, only: floe_rad_l, floe_rad_c, & floe_binwidth, c_fsd_range - use ice_calendar, only: dt, dt_dyn, time, istep, istep1, write_ic, & - init_calendar, calendar + use ice_calendar, only: dt, dt_dyn, write_ic, & + init_calendar, advance_timestep, calc_timesteps use ice_communicate, only: init_communicate, my_task, master_task use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks @@ -83,17 +83,18 @@ subroutine cice_init use ice_history, only: init_hist, accum_hist use ice_restart_shared, only: restart, runid, runtype use ice_init, only: input_data, init_state - use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc + use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers use ice_kinds_mod use ice_restoring, only: ice_HaloRestore_init use ice_timers, only: timer_total, init_ice_timers, ice_timer_start use ice_transport_driver, only: init_transport + use lib_mpp, only: mpi_comm_opa ! NEMO MPI communicator logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & tr_fsd, wave_spec character(len=*),parameter :: subname = '(cice_init)' - call init_communicate ! initial setup for message passing + call init_communicate(mpi_comm_opa) ! initial setup for message passing call init_fileunits ! unit numbers call icepack_configure() ! initialize icepack @@ -102,8 +103,8 @@ subroutine cice_init file=__FILE__,line= __LINE__) call input_data ! namelist variables - - if (trim(runid) == 'bering') call check_finished_file + call input_zbgc ! vertical biogeochemistry namelist + call count_tracers ! count tracers call init_domain_blocks ! set up block decomposition call init_grid1 ! domain distribution @@ -150,11 +151,9 @@ subroutine cice_init write_diags=(my_task == master_task)) ! write diag on master only call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message="subname", & + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - call calendar(time) ! determine the initial date - #ifndef CICE_IN_NEMO call init_forcing_ocn(dt) ! initialize sss and sst from data #endif @@ -170,6 +169,7 @@ subroutine cice_init call init_diags ! initialize diagnostic output points call init_history_therm ! initialize thermo history variables call init_history_dyn ! initialize dynamic history variables + call calc_timesteps ! update timestep counter if not using npt_unit="1" call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) call icepack_warnings_flush(nu_diag) @@ -185,10 +185,8 @@ subroutine cice_init if (trim(runtype) == 'continue' .or. restart) & call init_shortwave ! initialize radiative transfer - istep = istep + 1 ! update time step counters - istep1 = istep1 + 1 - time = time + dt ! determine the time and date - call calendar(time) ! at the end of the first timestep + ! determine the time and date at the end of the first timestep + call advance_timestep() !-------------------------------------------------------------------- ! 
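The hand-rolled counter updates deleted from cice_init above (istep, istep1, time, then a calendar call) are now centralized in ice_calendar. In terms of the code being removed, advance_timestep is roughly equivalent to the sketch below (simplified; the real routine is assumed to do additional end-of-run and unit bookkeeping beyond this):

      subroutine advance_timestep_sketch()
        use ice_calendar, only: istep, istep1, time, dt, calendar
        ! the counters the deleted driver code used to bump by hand
        istep  = istep  + 1
        istep1 = istep1 + 1
        time   = time + dt
        call calendar()   ! refresh the model date from the new time
      end subroutine advance_timestep_sketch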
coupler communication or forcing data initialization @@ -227,7 +225,7 @@ subroutine init_restart use ice_arrays_column, only: dhsn use ice_blocks, only: nx_block, ny_block - use ice_calendar, only: time, calendar + use ice_calendar, only: calendar use ice_constants, only: c0 use ice_domain, only: nblocks use ice_domain_size, only: ncat, n_aero, nfsd @@ -265,6 +263,8 @@ subroutine init_restart nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & nt_iage, nt_FY, nt_aero, nt_fsd + character(len=*),parameter :: subname = '(init_restart)' + call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -280,13 +280,13 @@ subroutine init_restart nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd) call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message="subname", & + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in - call calendar(time) ! update time parameters + call calendar() ! update time parameters if (kdyn == 2) call read_restart_eap ! EAP else if (restart) then ! ice_ic = core restart file call restartfile (ice_ic) ! or 'default' or 'none' @@ -327,7 +327,7 @@ subroutine init_restart call read_restart_lvl else do iblk = 1, nblocks - call init_lvl(trcrn(:,:,nt_alvl,:,iblk), & + call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & trcrn(:,:,nt_vlvl,:,iblk)) enddo ! iblk endif @@ -452,39 +452,11 @@ subroutine init_restart !$OMP END PARALLEL DO call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message="subname", & + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) end subroutine init_restart -!======================================================================= -! -! Check whether a file indicating that the previous run finished cleanly -! If so, then do not continue the current restart. This is needed only -! for runs on machine 'bering' (set using runid = 'bering'). -! -! author: Adrian Turner, LANL - - subroutine check_finished_file() - - use ice_communicate, only: my_task, master_task - use ice_restart_shared, only: restart_dir - - character(len=char_len_long) :: filename - logical :: lexist = .false. 
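The cice_init hunk above hands NEMO's communicator (mpi_comm_opa, from lib_mpp) straight to init_communicate, which is how this driver joins the host model's MPI environment; standalone drivers call init_communicate with no argument. A sketch of the two call patterns, assuming the communicator argument is optional as the bare standalone calls suggest (the CPP guard here is illustrative, not from this PR):

      subroutine start_messaging_sketch
        use ice_communicate, only: init_communicate
#ifdef CICE_IN_NEMO
        use lib_mpp, only: mpi_comm_opa  ! NEMO's MPI communicator
        call init_communicate(mpi_comm_opa) ! adopt the host model's communicator
#else
        call init_communicate()             ! standalone: default communicator
#endif
      end subroutine start_messaging_sketch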
- - if (my_task == master_task) then - - filename = trim(restart_dir)//"finished" - inquire(file=filename, exist=lexist) - if (lexist) then - call abort_ice("subname"//"ERROR: Found already finished file - quitting") - end if - - endif - - end subroutine check_finished_file - !======================================================================= end module CICE_InitMod diff --git a/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 index e8c809d9e..cd81de879 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 @@ -43,7 +43,7 @@ module CICE_RunMod subroutine CICE_Run - use ice_calendar, only: istep, istep1, time, dt, stop_now, calendar + use ice_calendar, only: stop_now, advance_timestep use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, fzaero_data, & @@ -53,6 +53,8 @@ subroutine CICE_Run timer_couple, timer_step logical (kind=log_kind) :: & tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd + + character(len=*), parameter :: subname = '(CICE_Run)' !-------------------------------------------------------------------- ! initialize error code and step timer @@ -67,7 +69,7 @@ subroutine CICE_Run tr_zaero_out=tr_zaero, & tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message="subname", & + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) #ifndef CICE_IN_NEMO @@ -80,11 +82,7 @@ subroutine CICE_Run call ice_step - istep = istep + 1 ! update time step counters - istep1 = istep1 + 1 - time = time + dt ! determine the time and date - - call calendar(time) ! at the end of the timestep + call advance_timestep() ! advance time #ifndef CICE_IN_NEMO if (stop_now >= 1) exit timeLoop @@ -173,6 +171,8 @@ subroutine ice_step tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec + character(len=*), parameter :: subname = '(ice_step)' + call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & wave_spec_out=wave_spec) @@ -181,7 +181,7 @@ subroutine ice_step tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message="subname", & + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) #ifdef ICE_DA @@ -198,7 +198,7 @@ subroutine ice_step if (restore_ice) call ice_HaloRestore !----------------------------------------------------------------- - ! initialize diagnostics + ! initialize diagnostics and save initial state values !----------------------------------------------------------------- call ice_timer_start(timer_diags) ! diagnostics/history @@ -210,6 +210,8 @@ subroutine ice_step call ice_timer_start(timer_column) ! column physics call ice_timer_start(timer_thermo) ! thermodynamics + call save_init + !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks @@ -219,7 +221,7 @@ subroutine ice_step ! Scale radiation fields !----------------------------------------------------------------- - if (calc_Tsfc) call prep_radiation (dt, iblk) + if (calc_Tsfc) call prep_radiation (iblk) !----------------------------------------------------------------- ! 
thermodynamics and biogeochemistry @@ -248,7 +250,7 @@ subroutine ice_step ! wave fracture of the floe size distribution ! note this is called outside of the dynamics subcycling loop if (tr_fsd .and. wave_spec) call step_dyn_wave(dt) -+ + do k = 1, ndtd ! momentum, stress, transport @@ -303,9 +305,9 @@ subroutine ice_step call ice_timer_start(timer_diags) ! diagnostics if (mod(istep,diagfreq) == 0) then call runtime_diags(dt) ! log file - if (solve_zsal) call zsal_diags(dt) - if (skl_bgc .or. z_tracers) call bgc_diags (dt) - if (tr_brine) call hbrine_diags(dt) + if (solve_zsal) call zsal_diags + if (skl_bgc .or. z_tracers) call bgc_diags + if (tr_brine) call hbrine_diags endif call ice_timer_stop(timer_diags) ! diagnostics @@ -345,7 +347,8 @@ subroutine coupling_prep (iblk) use ice_arrays_column, only: alvdfn, alidfn, alvdrn, alidrn, & albicen, albsnon, albpndn, apeffn, fzsal_g, fzsal, snowfracn - use ice_blocks, only: block, nx_block, ny_block + use ice_blocks, only: nx_block, ny_block, get_block, block + use ice_domain, only: blocks_ice use ice_calendar, only: dt, nstreams use ice_domain_size, only: ncat use ice_flux, only: alvdf, alidf, alvdr, alidr, albice, albsno, & @@ -369,11 +372,15 @@ subroutine coupling_prep (iblk) ! local variables integer (kind=int_kind) :: & + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain n , & ! thickness category index i,j , & ! horizontal indices k , & ! tracer index nbtrcr ! + type (block) :: & + this_block ! block information for current block + logical (kind=log_kind) :: & calc_Tsfc ! @@ -383,11 +390,13 @@ subroutine coupling_prep (iblk) rhofresh , & ! netsw ! flag for shortwave radiation presence + character(len=*), parameter :: subname = '(coupling_prep)' + call icepack_query_parameters(puny_out=puny, rhofresh_out=rhofresh) call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr) call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc) call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message="subname", & + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) !----------------------------------------------------------------- @@ -432,9 +441,16 @@ subroutine coupling_prep (iblk) enddo enddo enddo + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do n = 1, ncat - do j = 1, ny_block - do i = 1, nx_block + do j = jlo, jhi + do i = ilo, ihi if (aicen(i,j,n,iblk) > puny) then alvdf(i,j,iblk) = alvdf(i,j,iblk) & @@ -602,11 +618,14 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & real (kind=dbl_kind) :: & puny, & ! + Lsub, & ! rLsub ! 1/Lsub - call icepack_query_parameters(puny_out=puny) + character(len=*), parameter :: subname = '(sfcflux_to_ocn)' + + call icepack_query_parameters(puny_out=puny, Lsub_out=Lsub) call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message="subname", & + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) rLsub = c1 / Lsub @@ -627,288 +646,6 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & end subroutine sfcflux_to_ocn -!======================================================================= -! -! Update the ice state variables using the ice concentration increment rate -! calculated in the NEMO data assimilation (DA) scheme. -! Ice area is added by adding ADDITIONAL ice with thickness hi_da_new. -! 
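The coupling_prep change above narrows the albedo accumulation from the full padded block (1:nx_block, 1:ny_block) to physical cells only, so halo and padding cells never contribute. The general pattern, as a self-contained sketch (the subroutine name is illustrative; block, get_block, blocks_ice, and nblocks are the same names imported in the hunk above):

      subroutine loop_physical_cells_sketch
        use ice_kinds_mod, only: int_kind
        use ice_blocks, only: block, get_block
        use ice_domain, only: blocks_ice, nblocks
        type (block) :: this_block
        integer (kind=int_kind) :: i, j, iblk
        do iblk = 1, nblocks
           this_block = get_block(blocks_ice(iblk), iblk)
           do j = this_block%jlo, this_block%jhi     ! physical rows only
              do i = this_block%ilo, this_block%ihi  ! physical columns only
                 ! ... operate on interior (non-ghost) cells ...
              enddo
           enddo
        enddo
      end subroutine loop_physical_cells_sketch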
This implies the ADDITIONAL volume added is hi_da_new*daice, where -! daice is the change in ice area due to DA. -! Ice area is subtracted by removing ice area with the current category -! thickness. Ice area is first removed from the lowest category, and then -! removed from higher categories as needed. -! -! authors: D. Peterson, Met Office -! A. McLaren, Met Office - - subroutine da_state_update - - use ice_constants, only: c1, puny - -#ifdef ICE_DA - - integer (kind=int_kind) :: & - i, j, ij , & ! horizontal indices - iblk , & ! block index - ilo,ihi,jlo,jhi, & ! beginning and end of physical domain - n ! thickness category index - - integer (kind=int_kind) :: & - nelevate ! number of elevations of increments to higher - ! category (diagnostic) - - integer (kind=int_kind) :: & - icells ! number of ocean cells - - integer (kind=int_kind), dimension(nx_block*ny_block) :: & - indxi, indxj ! indirect indices for cells with aicen > puny - - type (block) :: & - this_block ! block information for current block - - real (kind=dbl_kind) :: & - hi_da_new , & ! specified ice thickness for new ice created by DA - hicen , & ! ice thickness - hsnon , & ! snow thickness - daice , & ! change in ice concentration (for first category) - dvice , & ! change in ice volume - dvsno ! change in snow volume - - real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks) :: & - vsno_init , & ! initial snow volume - vice_init ! initial ice volume - - !---------------------------------------------------------------- - ! This routine will only work under certain circumstances!! - ! Note, if any optional tracers are used in the run, they will not - ! be conserved here. - !---------------------------------------------------------------- - - if (nilyr /= 1 .or. nslyr /= 1 .or. ntrcr /= 1) & - call abort_ice("subname"// & - 'ERROR: da_state_update: only works for 1 cat, 1 layer, 1 tracer runs') - - !------------------------------------------------------------------ - ! Set thickness for new ice - ! (Currently using value of 0.5m, which was value thin ice was - ! incremented toward in LIM ice model). - !----------------------------------------------------------------- - - hi_da_new = 0.50_dbl_kind ! if ncat>1, this has to be less than - ! the 1st category thickness limit - - ! Initialise various fields - vsno_init(:,:,:) = c0 - vice_init(:,:,:) = c0 - fresh_da(:,:,:) = c0 - fsalt_da(:,:,:) = c0 - - !---------------------------------------------------------------- - ! Update category state variables - !---------------------------------------------------------------- - - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk), iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - - !---------------------------------------------------------------- - ! Find ocean points where data assimilation abs(increment) > puny - ! (Note, daice_da is the RATE of change of ice concentration due - ! to DA) - !---------------------------------------------------------------- - - icells = 0 - do j = jlo, jhi - do i = ilo, ihi - if (tmask(i,j,iblk) .and. abs(daice_da(i,j,iblk)*dt) > puny) then - icells = icells + 1 - indxi(icells) = i - indxj(icells) = j - endif - - vsno_init(i,j,iblk) = vsno(i,j,iblk) ! used for salinity changes - vice_init(i,j,iblk) = vice(i,j,iblk) ! used for salinity changes - - enddo ! i - enddo ! j - - if (icells > 0) then - - n = 1 ! 
only ever add increment to 1st category - do ij = 1, icells - i = indxi(ij) - j = indxj(ij) - - !--------------------------------------------------- - ! Apply concentration increment and associated - ! volume change - !--------------------------------------------------- - - if (aicen(i,j,n,iblk) > puny) then - ! if decreasing concentration, subtract ice volume at - ! current thickness - hicen = vicen(i,j,n,iblk) / aicen(i,j,n,iblk) - - ! if increasing concentration, add ice volume at hi_da_new - ! thickness - if ( daice_da(i,j,iblk)*dt > puny) hicen = hi_da_new - - ! whether in/decreasing concentration, add/subtract snow - ! volume at current thickness - hsnon = vsnon(i,j,n,iblk) / aicen(i,j,n,iblk) - - daice = & - min( ( c1 - aice(i,j,iblk) ), ( daice_da(i,j,iblk)*dt ) ) - aicen(i,j,n,iblk) = aicen(i,j,n,iblk) + daice - vicen(i,j,n,iblk) = vicen(i,j,n,iblk) + hicen*daice - vsnon(i,j,n,iblk) = aicen(i,j,n,iblk) * hsnon - - !--------------------------------------------------- - ! Create new ice points with specified thickness - !--------------------------------------------------- - - else - aicen(i,j,n,iblk) = & - min( ( c1 - aice(i,j,iblk) ), ( daice_da(i,j,iblk)*dt ) ) - ! note aicen/vicen < c0 taken care below - vicen(i,j,n,iblk) = aicen(i,j,n,iblk) * hi_da_new - vsnon(i,j,n,iblk) = c0 - - endif - - enddo ! ij - - do n = 1,ncat - nelevate=0 - do ij = 1,icells - i = indxi(ij) - j = indxj(ij) - - !---------------------------------------------------- - ! Check is aicen < puny - ! - remove from next category if necessary - ! - otherwise just remove it - ! Ignoring conservation issues here - !---------------------------------------------------- - - if (aicen(i,j,n,iblk) < puny) then - if ( n < ncat ) then - if (aicen(i,j,n,iblk) < -1.0*puny ) then - nelevate=nelevate+1 - endif - ! take concentration from next category -- constant thickness - if ( aicen(i,j,n+1,iblk) > puny ) then - hicen = vicen(i,j,n+1,iblk)/aicen(i,j,n+1,iblk) - hsnon = vsnon(i,j,n+1,iblk)/aicen(i,j,n+1,iblk) - else - hicen = c0 - hsnon = c0 - endif ! aicen(n+1) > puny - aicen(i,j,n+1,iblk) = aicen(i,j,n+1,iblk) + aicen(i,j,n,iblk) - vicen(i,j,n+1,iblk) = aicen(i,j,n+1,iblk) * hicen - vsnon(i,j,n+1,iblk) = aicen(i,j,n+1,iblk) * hsnon - endif ! n < ncat - aicen(i,j,n,iblk) = c0 - vicen(i,j,n,iblk) = c0 - vsnon(i,j,n,iblk) = c0 - eicen(i,j,n,iblk) = c0 - esnon(i,j,n,iblk) = c0 - endif ! aicen(n) < puny - - !--------------------------------------------------- - ! Update energies - !--------------------------------------------------- - - ! Would need vertical layers here in the future - if (aicen(i,j,n,iblk) > puny) then - esnon(i,j,n,iblk) = -rhos*Lfresh*vsnon(i,j,n,iblk) - eicen(i,j,n,iblk) = -rhoi*Lfresh*vicen(i,j,n,iblk) - endif - - enddo ! ij - !write(nu_diag,*) 'Elevated ', nelevate, ' incs to category ', n+1 - enddo ! n - endif ! icells - enddo ! nblocks - - - !------------------------------------------------------------------- - ! Ghost cell updates for state variables. (Can't be called within - ! block do loop). - !------------------------------------------------------------------- - - call bound_state (aicen, trcrn, vicen, vsnon, eicen, esnon) - - do iblk = 1, nblocks - - this_block = get_block(blocks_ice(iblk), iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - - !----------------------------------------------------------- - ! 
Find data assimilation points again
-    !-----------------------------------------------------------
-
-    icells = 0
-    do j = jlo, jhi
-    do i = ilo, ihi
-       if (tmask(i,j,iblk) .and. abs(daice_da(i,j,iblk)*dt) > puny) then
-          icells = icells + 1
-          indxi(icells) = i
-          indxj(icells) = j
-       endif
-
-       !-------------------------------------------------------------
-       ! Update aggregate values
-       !-------------------------------------------------------------
-
-       if (tmask(i,j,iblk)) &
-          call aggregate (ncat, &
-                          aicen(i,j,:,iblk), &
-                          trcrn(i,j,:,:,iblk), &
-                          vicen(i,j,:,iblk), vsnon(i,j, :,iblk), &
-                          aice (i,j, iblk), &
-                          trcr (i,j,:, iblk), &
-                          vice (i,j, iblk), vsno (i,j, iblk), &
-                          aice0(i,j, iblk), &
-                          ntrcr, &
-                          trcr_depend(:), &
-                          trcr_base (:,:), &
-                          n_trcr_strata(:), &
-                          nt_strata (:,:))
-
-    enddo ! i
-    enddo ! j
-
-    !-------------------------------------------------------------
-    ! Calculate implied freshwater and salt fluxes
-    !-------------------------------------------------------------
-
-    if (icells > 0) then
-
-       do ij = 1, icells
-          i = indxi(ij)
-          j = indxj(ij)
-
-          dvice = vice(i,j,iblk) - vice_init(i,j,iblk)
-          dvsno = vsno(i,j,iblk) - vsno_init(i,j,iblk)
-
-          fresh_da(i,j,iblk) = - (rhoi * dvice + rhos * dvsno)/dt
-          fsalt_da(i,j,iblk) = - rhoi*ice_ref_salinity*p001*dvice/dt
-
-       enddo ! ij
-    endif ! icells
-
- enddo ! iblk
-
-
-#endif
- end subroutine da_state_update
-
 !=======================================================================
 end module CICE_RunMod
diff --git a/cicecore/drivers/direct/nemo_concepts/CICE_FinalMod.F90 b/cicecore/drivers/direct/nemo_concepts/CICE_FinalMod.F90
new file mode 100644
index 000000000..a246ed036
--- /dev/null
+++ b/cicecore/drivers/direct/nemo_concepts/CICE_FinalMod.F90
@@ -0,0 +1,64 @@
+!=======================================================================
+!
+! This module contains routines for the final exit of the CICE model,
+! including final output and clean exit from any message passing
+! environments and frameworks.
+!
+! authors: Philip W. Jones, LANL
+! 2006: Converted to free source form (F90) by Elizabeth Hunke
+! 2008: E. Hunke moved ESMF code to its own driver
+
+      module CICE_FinalMod
+
+      use ice_kinds_mod
+      use ice_exit, only: abort_ice, end_run
+      use ice_fileunits, only: nu_diag, release_all_fileunits
+      use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted
+
+      implicit none
+      private
+      public :: CICE_Finalize
+
+!=======================================================================
+
+      contains
+
+!=======================================================================
+!
+! This routine shuts down CICE by exiting all relevant environments.
+
+      subroutine CICE_Finalize
+
+      use ice_restart_shared, only: runid
+      use ice_timers, only: ice_timer_stop, ice_timer_print_all, timer_total
+
+      character(len=*), parameter :: subname='(CICE_Finalize)'
+
+      !-------------------------------------------------------------------
+      ! stop timers and print timer info
+      !-------------------------------------------------------------------
+
+      call icepack_warnings_flush(nu_diag)
+      if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
+         file=__FILE__,line= __LINE__)
+
+      call ice_timer_stop(timer_total)        ! stop timing entire run
+      call ice_timer_print_all(stats=.false.) ! print timing information
+
+!echmod if (nu_diag /= 6) close (nu_diag) ! diagnostic output
+      call release_all_fileunits
+
+      !-------------------------------------------------------------------
+      ! quit MPI
+      !-------------------------------------------------------------------
+
+! standalone
+!      call end_run       ! quit MPI
+
+      end subroutine CICE_Finalize
+
+!=======================================================================
+
+      end module CICE_FinalMod
+
+!=======================================================================
diff --git a/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 b/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90
new file mode 100644
index 000000000..b2a0e3cd1
--- /dev/null
+++ b/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90
@@ -0,0 +1,464 @@
+!=======================================================================
+!
+! This module contains the CICE initialization routine that sets model
+! parameters and initializes the grid and CICE state variables.
+!
+! authors Elizabeth C. Hunke, LANL
+!         William H. Lipscomb, LANL
+!         Philip W. Jones, LANL
+!
+! 2006: Converted to free form source (F90) by Elizabeth Hunke
+! 2008: E. Hunke moved ESMF code to its own driver
+
+      module CICE_InitMod
+
+      use ice_kinds_mod
+      use ice_exit, only: abort_ice
+      use ice_fileunits, only: init_fileunits, nu_diag
+      use icepack_intfc, only: icepack_aggregate
+      use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist
+      use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave
+      use icepack_intfc, only: icepack_configure
+      use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted
+      use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_flags, &
+          icepack_query_tracer_indices, icepack_query_tracer_sizes
+
+      implicit none
+      private
+      public :: CICE_Initialize, cice_init
+
+!=======================================================================
+
+      contains
+
+!=======================================================================
+
+! Initialize the basic state, grid and all necessary parameters for
+! running the CICE model. Return the initial state in the routine's
+! export state.
+! Note: This initialization driver is designed for standalone and
+!       NEMO-coupled applications. For other
+!       applications (e.g., standalone CAM), this driver would be
+!       replaced by a different driver that calls subroutine cice_init,
+!       where most of the work is done.
+
+      subroutine CICE_Initialize
+
+      !--------------------------------------------------------------------
+      ! model initialization
+      !--------------------------------------------------------------------
+
+      call cice_init
+
+      end subroutine CICE_Initialize
+
+!=======================================================================
+!
+! Initialize CICE model.
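+!
+! For this direct NEMO driver, message passing is set up on the ocean
+! model's communicator: cice_init passes mpi_comm_opa (from NEMO's
+! lib_mpp) to init_communicate, so CICE joins NEMO's MPI environment
+! rather than creating its own. A minimal calling sequence from the
+! ocean side might look like the sketch below; the surrounding NEMO
+! calls are assumptions, not part of this file:
+!
+!      call CICE_Initialize     ! wraps cice_init
+!      ...                      ! NEMO time loop calls CICE_Run each step
+!      call CICE_Finalize       ! stop timers, release file units
+!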
+ + subroutine cice_init + + use ice_state, only: alloc_state + use ice_flux_bgc, only: alloc_flux_bgc + use ice_arrays_column, only: hin_max, c_hi_range, zfswin, trcrn_sw, & + ocean_bio_all, ice_bio_net, snow_bio_net, alloc_arrays_column + use ice_arrays_column, only: floe_rad_l, floe_rad_c, & + floe_binwidth, c_fsd_range + use ice_calendar, only: dt, dt_dyn, write_ic, & + init_calendar, advance_timestep, calc_timesteps + use ice_communicate, only: init_communicate, my_task, master_task + use ice_diagnostics, only: init_diags + use ice_domain, only: init_domain_blocks + use ice_domain_size, only: ncat, nfsd + use ice_dyn_eap, only: init_eap, alloc_dyn_eap + use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_vp, only: init_vp + use ice_flux, only: init_coupler_flux, init_history_therm, & + init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux + use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & + get_forcing_atmo, get_forcing_ocn, alloc_forcing, get_wave_spec + use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & + faero_data, faero_default, faero_optics, alloc_forcing_bgc + use ice_grid, only: init_grid1, init_grid2, alloc_grid + use ice_history, only: init_hist, accum_hist + use ice_restart_shared, only: restart, runid, runtype + use ice_init, only: input_data, init_state + use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers + use ice_kinds_mod + use ice_restoring, only: ice_HaloRestore_init + use ice_timers, only: timer_total, init_ice_timers, ice_timer_start + use ice_transport_driver, only: init_transport + use lib_mpp, only: mpi_comm_opa ! NEMO MPI communicator + + logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & + tr_fsd, wave_spec + character(len=*),parameter :: subname = '(cice_init)' + + call init_communicate(mpi_comm_opa) ! initial setup for message passing + call init_fileunits ! unit numbers + + call icepack_configure() ! initialize icepack + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(subname, & + file=__FILE__,line= __LINE__) + + call input_data ! namelist variables + call input_zbgc ! vertical biogeochemistry namelist + call count_tracers ! count tracers + + call init_domain_blocks ! set up block decomposition + call init_grid1 ! domain distribution + call alloc_grid ! allocate grid + call alloc_arrays_column ! allocate column arrays + call alloc_state ! allocate state + call alloc_dyn_shared ! allocate dyn shared (init_uvel,init_vvel) + call alloc_flux_bgc ! allocate flux_bgc + call alloc_flux ! allocate flux + call init_ice_timers ! initialize all timers + call ice_timer_start(timer_total) ! start timing entire run + call init_grid2 ! grid variables + call init_zbgc ! vertical biogeochemistry namelist + + call init_calendar ! initialize some calendar stuff + call init_hist (dt) ! initialize output history file + + call init_dyn (dt_dyn) ! define dynamics parameters, variables + if (kdyn == 2) then + call alloc_dyn_eap ! allocate dyn_eap arrays + call init_eap ! define eap dynamics parameters, variables + else if (kdyn == 3) then + call init_vp ! define vp dynamics parameters, variables + endif + + call init_coupler_flux ! initialize fluxes exchanged with coupler + call init_thermo_vertical ! initialize vertical thermodynamics + + call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! 
ice thickness distribution + if (my_task == master_task) then + call icepack_init_itd_hist(ncat=ncat, hin_max=hin_max, c_hi_range=c_hi_range) ! output + endif + + call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (tr_fsd) call icepack_init_fsd_bounds (nfsd, & ! floe size distribution + floe_rad_l, & ! fsd size lower bound in m (radius) + floe_rad_c, & ! fsd size bin centre in m (radius) + floe_binwidth, & ! fsd size bin width in m (radius) + c_fsd_range, & ! string for history output + write_diags=(my_task == master_task)) ! write diag on master only + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + +#ifndef CICE_IN_NEMO + call init_forcing_ocn(dt) ! initialize sss and sst from data +#endif + call init_state ! initialize the ice state + call init_transport ! initialize horizontal transport + call ice_HaloRestore_init ! restored boundary conditions + + call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers, & + wave_spec_out=wave_spec) + if (skl_bgc .or. z_tracers) call alloc_forcing_bgc ! allocate biogeochemistry arrays + call init_restart ! initialize restart variables + + call init_diags ! initialize diagnostic output points + call init_history_therm ! initialize thermo history variables + call init_history_dyn ! initialize dynamic history variables + call calc_timesteps ! update timestep counter if not using npt_unit="1" + + call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(subname, & + file=__FILE__,line= __LINE__) + + if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical + !property tables + + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing + ! in prep_radiation. + if (trim(runtype) == 'continue' .or. restart) & + call init_shortwave ! initialize radiative transfer + + ! determine the time and date at the end of the first timestep + call advance_timestep() + + !-------------------------------------------------------------------- + ! coupler communication or forcing data initialization + !-------------------------------------------------------------------- + +#ifndef CICE_IN_NEMO + call init_forcing_atmo ! initialize atmospheric forcing (standalone) +#endif + +! standalone +! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice +! call get_forcing_atmo ! atmospheric forcing from data +! call get_forcing_ocn(dt) ! ocean forcing from data + +! ! aerosols +! ! if (tr_aero) call faero_data ! data file +! ! if (tr_zaero) call fzaero_data ! data file (gx1) +! if (tr_aero .or. tr_zaero) call faero_default ! default values +! if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry + + if (z_tracers) call get_atm_bgc ! biogeochemistry + + if (runtype == 'initial' .and. .not. restart) & + call init_shortwave ! initialize radiative transfer using current swdn + + call init_flux_atm ! initialize atmosphere fluxes sent to coupler + call init_flux_ocn ! initialize ocean fluxes sent to coupler + + if (write_ic) call accum_hist(dt) ! 
write initial conditions + + end subroutine cice_init + +!======================================================================= + + subroutine init_restart + + use ice_arrays_column, only: dhsn + use ice_blocks, only: nx_block, ny_block + use ice_calendar, only: calendar + use ice_constants, only: c0 + use ice_domain, only: nblocks + use ice_domain_size, only: ncat, n_aero, nfsd + use ice_dyn_eap, only: read_restart_eap + use ice_dyn_shared, only: kdyn + use ice_flux, only: sss + use ice_grid, only: tmask + use ice_init, only: ice_ic + use ice_init_column, only: init_age, init_FY, init_lvl, & + init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & + init_aerosol, init_hbrine, init_bgc, init_fsd + use ice_restart_column, only: restart_age, read_restart_age, & + restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & + restart_pond_cesm, read_restart_pond_cesm, & + restart_pond_lvl, read_restart_pond_lvl, & + restart_pond_topo, read_restart_pond_topo, & + restart_fsd, read_restart_fsd, & + restart_aero, read_restart_aero, & + restart_hbrine, read_restart_hbrine, & + restart_zsal, restart_bgc + use ice_restart_driver, only: restartfile, restartfile_v4 + use ice_restart_shared, only: runtype, restart + use ice_state ! almost everything + + integer(kind=int_kind) :: & + i, j , & ! horizontal indices + iblk ! block index + logical(kind=log_kind) :: & + tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & + tr_pond_topo, tr_fsd, tr_aero, tr_brine, & + skl_bgc, z_tracers, solve_zsal + integer(kind=int_kind) :: & + ntrcr + integer(kind=int_kind) :: & + nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & + nt_iage, nt_FY, nt_aero, nt_fsd + + character(len=*),parameter :: subname = '(init_restart)' + + call icepack_query_tracer_sizes(ntrcr_out=ntrcr) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call icepack_query_parameters(skl_bgc_out=skl_bgc, & + z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) + call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & + tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & + tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & + tr_fsd_out=tr_fsd) + call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & + nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & + nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (trim(runtype) == 'continue') then + ! start from core restart file + call restartfile() ! given by pointer in ice_in + call calendar() ! update time parameters + if (kdyn == 2) call read_restart_eap ! EAP + else if (restart) then ! ice_ic = core restart file + call restartfile (ice_ic) ! or 'default' or 'none' + !!! uncomment to create netcdf + ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file + !!! uncomment if EAP restart data exists + ! if (kdyn == 2) call read_restart_eap + endif + + ! tracers + ! ice age tracer + if (tr_iage) then + if (trim(runtype) == 'continue') & + restart_age = .true. + if (restart_age) then + call read_restart_age + else + do iblk = 1, nblocks + call init_age(trcrn(:,:,nt_iage,:,iblk)) + enddo ! iblk + endif + endif + ! first-year area tracer + if (tr_FY) then + if (trim(runtype) == 'continue') restart_FY = .true. 
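+         ! The same pattern recurs for each tracer below: on a 'continue'
+         ! run the tracer's restart flag is forced on so the field is read
+         ! from the restart file; otherwise it is initialized block by block.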
+ if (restart_FY) then + call read_restart_FY + else + do iblk = 1, nblocks + call init_FY(trcrn(:,:,nt_FY,:,iblk)) + enddo ! iblk + endif + endif + ! level ice tracer + if (tr_lvl) then + if (trim(runtype) == 'continue') restart_lvl = .true. + if (restart_lvl) then + call read_restart_lvl + else + do iblk = 1, nblocks + call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & + trcrn(:,:,nt_vlvl,:,iblk)) + enddo ! iblk + endif + endif + ! CESM melt ponds + if (tr_pond_cesm) then + if (trim(runtype) == 'continue') & + restart_pond_cesm = .true. + if (restart_pond_cesm) then + call read_restart_pond_cesm + else + do iblk = 1, nblocks + call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk)) + enddo ! iblk + endif + endif + ! level-ice melt ponds + if (tr_pond_lvl) then + if (trim(runtype) == 'continue') & + restart_pond_lvl = .true. + if (restart_pond_lvl) then + call read_restart_pond_lvl + else + do iblk = 1, nblocks + call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk), & + trcrn(:,:,nt_ipnd,:,iblk), & + dhsn(:,:,:,iblk)) + enddo ! iblk + endif + endif + ! topographic melt ponds + if (tr_pond_topo) then + if (trim(runtype) == 'continue') & + restart_pond_topo = .true. + if (restart_pond_topo) then + call read_restart_pond_topo + else + do iblk = 1, nblocks + call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk), & + trcrn(:,:,nt_ipnd,:,iblk)) + enddo ! iblk + endif ! .not. restart_pond + endif + ! floe size distribution + if (tr_fsd) then + if (trim(runtype) == 'continue') restart_fsd = .true. + if (restart_fsd) then + call read_restart_fsd + else + call init_fsd(trcrn(:,:,nt_fsd:nt_fsd+nfsd-1,:,:)) + endif + endif + if (tr_aero) then ! ice aerosol + if (trim(runtype) == 'continue') restart_aero = .true. + if (restart_aero) then + call read_restart_aero + else + do iblk = 1, nblocks + call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) + enddo ! iblk + endif ! .not. restart_aero + endif + + if (trim(runtype) == 'continue') then + if (tr_brine) & + restart_hbrine = .true. + if (solve_zsal) & + restart_zsal = .true. + if (skl_bgc .or. z_tracers) & + restart_bgc = .true. + endif + + if (tr_brine .or. skl_bgc) then ! brine height tracer + call init_hbrine + if (tr_brine .and. restart_hbrine) call read_restart_hbrine + endif + + if (solve_zsal .or. skl_bgc .or. z_tracers) then ! biogeochemistry + if (tr_fsd) then + write (nu_diag,*) 'FSD implementation incomplete for use with BGC' + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + endif + call init_bgc + endif + + !----------------------------------------------------------------- + ! aggregate tracers + !----------------------------------------------------------------- + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + if (tmask(i,j,iblk)) then + call icepack_aggregate(ncat = ncat, & + aicen = aicen(i,j,:,iblk), & + trcrn = trcrn(i,j,:,:,iblk), & + vicen = vicen(i,j,:,iblk), & + vsnon = vsnon(i,j,:,iblk), & + aice = aice (i,j, iblk), & + trcr = trcr (i,j,:,iblk), & + vice = vice (i,j, iblk), & + vsno = vsno (i,j, iblk), & + aice0 = aice0(i,j, iblk), & + ntrcr = ntrcr, & + trcr_depend = trcr_depend, & + trcr_base = trcr_base, & + n_trcr_strata = n_trcr_strata, & + nt_strata = nt_strata) + else + ! 
tcraig, reset all tracer values on land to zero + trcrn(i,j,:,:,iblk) = c0 + endif + enddo + enddo + enddo + !$OMP END PARALLEL DO + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + end subroutine init_restart + +!======================================================================= + + end module CICE_InitMod + +!======================================================================= diff --git a/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 b/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 new file mode 100644 index 000000000..ecd95e3c3 --- /dev/null +++ b/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 @@ -0,0 +1,655 @@ +!======================================================================= +! +! Main driver for time stepping of CICE. +! +! authors Elizabeth C. Hunke, LANL +! Philip W. Jones, LANL +! William H. Lipscomb, LANL +! +! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep +! 2006 ECH: Streamlined for efficiency +! 2006 ECH: Converted to free source form (F90) +! 2007 BPB: Modified Delta-Eddington shortwave interface +! 2008 ECH: moved ESMF code to its own driver + + module CICE_RunMod + + use ice_kinds_mod + use ice_fileunits, only: nu_diag + use ice_arrays_column, only: oceanmixed_ice + use ice_constants, only: c0, c1 + use ice_constants, only: field_loc_center, field_type_scalar + use ice_exit, only: abort_ice + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_max_aero + use icepack_intfc, only: icepack_query_parameters + use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_sizes + + implicit none + private + public :: CICE_Run, ice_step + +!======================================================================= + + contains + +!======================================================================= +! +! This is the main driver routine for advancing CICE forward in time. +! +! author Elizabeth C. Hunke, LANL +! Philip W. Jones, LANL +! William H. Lipscomb, LANL + + subroutine CICE_Run + + use ice_calendar, only: stop_now, advance_timestep + use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & + get_wave_spec + use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, fzaero_data, & + faero_default + use ice_flux, only: init_flux_atm, init_flux_ocn + use ice_timers, only: ice_timer_start, ice_timer_stop, & + timer_couple, timer_step + logical (kind=log_kind) :: & + tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd + + character(len=*), parameter :: subname = '(CICE_Run)' + + !-------------------------------------------------------------------- + ! initialize error code and step timer + !-------------------------------------------------------------------- + + call ice_timer_start(timer_step) ! start timing entire run + + call icepack_query_parameters(skl_bgc_out=skl_bgc, & + z_tracers_out=z_tracers, & + wave_spec_out=wave_spec) + call icepack_query_tracer_flags(tr_aero_out=tr_aero, & + tr_zaero_out=tr_zaero, & + tr_fsd_out=tr_fsd) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + +#ifndef CICE_IN_NEMO + !-------------------------------------------------------------------- + ! timestep loop + !-------------------------------------------------------------------- + + timeLoop: do +#endif + + call ice_step + + call advance_timestep() ! 
advance time + +#ifndef CICE_IN_NEMO + if (stop_now >= 1) exit timeLoop +#endif + + call ice_timer_start(timer_couple) ! atm/ocn coupling + +! standalone +! for now, wave_spectrum is constant in time +! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice +! call get_forcing_atmo ! atmospheric forcing from data +! call get_forcing_ocn(dt) ! ocean forcing from data + +! ! aerosols +! ! if (tr_aero) call faero_data ! data file +! ! if (tr_zaero) call fzaero_data ! data file (gx1) +! if (tr_aero .or. tr_zaero) call faero_default ! default values + +! if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry + + if (z_tracers) call get_atm_bgc ! biogeochemistry + + call init_flux_atm ! initialize atmosphere fluxes sent to coupler + call init_flux_ocn ! initialize ocean fluxes sent to coupler + + call ice_timer_stop(timer_couple) ! atm/ocn coupling + +#ifndef CICE_IN_NEMO + enddo timeLoop +#endif + + !-------------------------------------------------------------------- + ! end of timestep loop + !-------------------------------------------------------------------- + + call ice_timer_stop(timer_step) ! end timestepping loop timer + + end subroutine CICE_Run + +!======================================================================= +! +! Calls drivers for physics components, some initialization, and output +! +! author Elizabeth C. Hunke, LANL +! William H. Lipscomb, LANL + + subroutine ice_step + + use ice_boundary, only: ice_HaloUpdate + use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep + use ice_diagnostics, only: init_mass_diags, runtime_diags + use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags + use ice_domain, only: halo_info, nblocks + use ice_domain_size, only: nslyr + use ice_dyn_eap, only: write_restart_eap + use ice_dyn_shared, only: kdyn + use ice_flux, only: scale_factor, init_history_therm, & + daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd + use ice_history, only: accum_hist + use ice_history_bgc, only: init_history_bgc + use ice_restart, only: final_restart + use ice_restart_column, only: write_restart_age, write_restart_FY, & + write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & + write_restart_pond_topo, write_restart_aero, write_restart_fsd, & + write_restart_bgc, write_restart_hbrine + use ice_restart_driver, only: dumpfile + use ice_restoring, only: restore_ice, ice_HaloRestore + use ice_state, only: trcrn + use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & + update_state, step_dyn_horiz, step_dyn_ridge, step_radiation, & + biogeochemistry, save_init, step_dyn_wave + use ice_timers, only: ice_timer_start, ice_timer_stop, & + timer_diags, timer_column, timer_thermo, timer_bound, & + timer_hist, timer_readwrite + + integer (kind=int_kind) :: & + iblk , & ! block index + k , & ! dynamics supercycling index + ktherm ! thermodynamics is off when ktherm = -1 + + real (kind=dbl_kind) :: & + offset ! 
d(age)/dt time offset + + logical (kind=log_kind) :: & + tr_iage, tr_FY, tr_lvl, tr_fsd, & + tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & + calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec + + character(len=*), parameter :: subname = '(ice_step)' + + call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & + solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & + wave_spec_out=wave_spec) + call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & + tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & + tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & + tr_fsd_out=tr_fsd) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + +#ifdef ICE_DA + !--------------------------------------------------------------- + ! Update CICE state variables using data assimilation increments + !--------------------------------------------------------------- + call da_state_update +#endif + + !----------------------------------------------------------------- + ! restoring on grid boundaries + !----------------------------------------------------------------- + + if (restore_ice) call ice_HaloRestore + + !----------------------------------------------------------------- + ! initialize diagnostics and save initial state values + !----------------------------------------------------------------- + + call ice_timer_start(timer_diags) ! diagnostics/history + call init_mass_diags ! diagnostics per timestep + call init_history_therm + call init_history_bgc + call ice_timer_stop(timer_diags) ! diagnostics/history + + call ice_timer_start(timer_column) ! column physics + call ice_timer_start(timer_thermo) ! thermodynamics + + call save_init + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + + if (ktherm >= 0) then + + !----------------------------------------------------------------- + ! Scale radiation fields + !----------------------------------------------------------------- + + if (calc_Tsfc) call prep_radiation (iblk) + + !----------------------------------------------------------------- + ! thermodynamics and biogeochemistry + !----------------------------------------------------------------- + + call step_therm1 (dt, iblk) ! vertical thermodynamics + call biogeochemistry (dt, iblk) ! biogeochemistry + call step_therm2 (dt, iblk) ! ice thickness distribution thermo + + endif + + enddo ! iblk + !$OMP END PARALLEL DO + + ! clean up, update tendency diagnostics + offset = dt + call update_state (dt, daidtt, dvidtt, dagedtt, offset) + + call ice_timer_stop(timer_thermo) ! thermodynamics + call ice_timer_stop(timer_column) ! column physics + + !----------------------------------------------------------------- + ! dynamics, transport, ridging + !----------------------------------------------------------------- + + ! wave fracture of the floe size distribution + ! note this is called outside of the dynamics subcycling loop + if (tr_fsd .and. wave_spec) call step_dyn_wave(dt) + + do k = 1, ndtd + + ! momentum, stress, transport + call step_dyn_horiz (dt_dyn) + + ! ridging + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + call step_dyn_ridge (dt_dyn, ndtd, iblk) + enddo + !$OMP END PARALLEL DO + + ! 
clean up, update tendency diagnostics + offset = c0 + call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) + + enddo + + !----------------------------------------------------------------- + ! albedo, shortwave radiation + !----------------------------------------------------------------- + + call ice_timer_start(timer_column) ! column physics + call ice_timer_start(timer_thermo) ! thermodynamics + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + + if (ktherm >= 0) call step_radiation (dt, iblk) + + !----------------------------------------------------------------- + ! get ready for coupling and the next time step + !----------------------------------------------------------------- + + call coupling_prep (iblk) + + enddo ! iblk + !$OMP END PARALLEL DO + + call ice_timer_start(timer_bound) + call ice_HaloUpdate (scale_factor, halo_info, & + field_loc_center, field_type_scalar) + call ice_timer_stop(timer_bound) + + call ice_timer_stop(timer_thermo) ! thermodynamics + call ice_timer_stop(timer_column) ! column physics + + !----------------------------------------------------------------- + ! write data + !----------------------------------------------------------------- + + call ice_timer_start(timer_diags) ! diagnostics + if (mod(istep,diagfreq) == 0) then + call runtime_diags(dt) ! log file + if (solve_zsal) call zsal_diags + if (skl_bgc .or. z_tracers) call bgc_diags + if (tr_brine) call hbrine_diags + endif + call ice_timer_stop(timer_diags) ! diagnostics + + call ice_timer_start(timer_hist) ! history + call accum_hist (dt) ! history file + call ice_timer_stop(timer_hist) ! history + + call ice_timer_start(timer_readwrite) ! reading/writing + if (write_restart == 1) then + call dumpfile ! core variables for restarting + if (tr_iage) call write_restart_age + if (tr_FY) call write_restart_FY + if (tr_lvl) call write_restart_lvl + if (tr_pond_cesm) call write_restart_pond_cesm + if (tr_pond_lvl) call write_restart_pond_lvl + if (tr_pond_topo) call write_restart_pond_topo + if (tr_fsd) call write_restart_fsd + if (tr_aero) call write_restart_aero + if (solve_zsal .or. skl_bgc .or. z_tracers) & + call write_restart_bgc + if (tr_brine) call write_restart_hbrine + if (kdyn == 2) call write_restart_eap + call final_restart + endif + + call ice_timer_stop(timer_readwrite) ! reading/writing + + end subroutine ice_step + +!======================================================================= +! +! Prepare for coupling +! +! authors: Elizabeth C. 
Hunke, LANL + + subroutine coupling_prep (iblk) + + use ice_arrays_column, only: alvdfn, alidfn, alvdrn, alidrn, & + albicen, albsnon, albpndn, apeffn, fzsal_g, fzsal, snowfracn + use ice_blocks, only: nx_block, ny_block, get_block, block + use ice_domain, only: blocks_ice + use ice_calendar, only: dt, nstreams + use ice_domain_size, only: ncat + use ice_flux, only: alvdf, alidf, alvdr, alidr, albice, albsno, & + albpnd, albcnt, apeff_ai, coszen, fpond, fresh, l_mpond_fresh, & + alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & + fresh_ai, fsalt_ai, fsalt, & + fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & + fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & + swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyt, & + fsens, flat, fswabs, flwout, evap, Tref, Qref, & + fsurfn_f, flatn_f, scale_fluxes, frzmlt_init, frzmlt + use ice_flux_bgc, only: faero_ocn, fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai + use ice_grid, only: tmask + use ice_state, only: aicen, aice, aice_init + use ice_step_mod, only: ocean_mixed_layer + use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop + + integer (kind=int_kind), intent(in) :: & + iblk ! block index + + ! local variables + + integer (kind=int_kind) :: & + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + n , & ! thickness category index + i,j , & ! horizontal indices + k , & ! tracer index + nbtrcr ! + + type (block) :: & + this_block ! block information for current block + + logical (kind=log_kind) :: & + calc_Tsfc ! + + real (kind=dbl_kind) :: & + cszn , & ! counter for history averaging + puny , & ! + rhofresh , & ! + netsw ! flag for shortwave radiation presence + + character(len=*), parameter :: subname = '(coupling_prep)' + + call icepack_query_parameters(puny_out=puny, rhofresh_out=rhofresh) + call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr) + call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !----------------------------------------------------------------- + ! Save current value of frzmlt for diagnostics. + ! Update mixed layer with heat and radiation from ice. + !----------------------------------------------------------------- + + do j = 1, ny_block + do i = 1, nx_block + frzmlt_init (i,j,iblk) = frzmlt(i,j,iblk) + enddo + enddo + + call ice_timer_start(timer_couple) ! atm/ocn coupling + + if (oceanmixed_ice) & + call ocean_mixed_layer (dt,iblk) ! ocean surface fluxes and sst + + !----------------------------------------------------------------- + ! Aggregate albedos + !----------------------------------------------------------------- + + do j = 1, ny_block + do i = 1, nx_block + alvdf(i,j,iblk) = c0 + alidf(i,j,iblk) = c0 + alvdr(i,j,iblk) = c0 + alidr(i,j,iblk) = c0 + + albice(i,j,iblk) = c0 + albsno(i,j,iblk) = c0 + albpnd(i,j,iblk) = c0 + apeff_ai(i,j,iblk) = c0 + snowfrac(i,j,iblk) = c0 + + ! 
for history averaging + cszn = c0 + netsw = swvdr(i,j,iblk)+swidr(i,j,iblk)+swvdf(i,j,iblk)+swidf(i,j,iblk) + if (netsw > puny) cszn = c1 + do n = 1, nstreams + albcnt(i,j,iblk,n) = albcnt(i,j,iblk,n) + cszn + enddo + enddo + enddo + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do n = 1, ncat + do j = jlo, jhi + do i = ilo, ihi + if (aicen(i,j,n,iblk) > puny) then + + alvdf(i,j,iblk) = alvdf(i,j,iblk) & + + alvdfn(i,j,n,iblk)*aicen(i,j,n,iblk) + alidf(i,j,iblk) = alidf(i,j,iblk) & + + alidfn(i,j,n,iblk)*aicen(i,j,n,iblk) + alvdr(i,j,iblk) = alvdr(i,j,iblk) & + + alvdrn(i,j,n,iblk)*aicen(i,j,n,iblk) + alidr(i,j,iblk) = alidr(i,j,iblk) & + + alidrn(i,j,n,iblk)*aicen(i,j,n,iblk) + + netsw = swvdr(i,j,iblk) + swidr(i,j,iblk) & + + swvdf(i,j,iblk) + swidf(i,j,iblk) + if (netsw > puny) then ! sun above horizon + albice(i,j,iblk) = albice(i,j,iblk) & + + albicen(i,j,n,iblk)*aicen(i,j,n,iblk) + albsno(i,j,iblk) = albsno(i,j,iblk) & + + albsnon(i,j,n,iblk)*aicen(i,j,n,iblk) + albpnd(i,j,iblk) = albpnd(i,j,iblk) & + + albpndn(i,j,n,iblk)*aicen(i,j,n,iblk) + endif + + apeff_ai(i,j,iblk) = apeff_ai(i,j,iblk) & ! for history + + apeffn(i,j,n,iblk)*aicen(i,j,n,iblk) + snowfrac(i,j,iblk) = snowfrac(i,j,iblk) & ! for history + + snowfracn(i,j,n,iblk)*aicen(i,j,n,iblk) + + endif ! aicen > puny + enddo + enddo + enddo + + do j = 1, ny_block + do i = 1, nx_block + + !----------------------------------------------------------------- + ! reduce fresh by fpond for coupling + !----------------------------------------------------------------- + + if (l_mpond_fresh) then + fpond(i,j,iblk) = fpond(i,j,iblk) * rhofresh/dt + fresh(i,j,iblk) = fresh(i,j,iblk) - fpond(i,j,iblk) + endif + + !---------------------------------------------------------------- + ! Store grid box mean albedos and fluxes before scaling by aice + !---------------------------------------------------------------- + + alvdf_ai (i,j,iblk) = alvdf (i,j,iblk) + alidf_ai (i,j,iblk) = alidf (i,j,iblk) + alvdr_ai (i,j,iblk) = alvdr (i,j,iblk) + alidr_ai (i,j,iblk) = alidr (i,j,iblk) + fresh_ai (i,j,iblk) = fresh (i,j,iblk) + fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) + fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) + fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) + fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) + fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) + + if (nbtrcr > 0) then + do k = 1, nbtrcr + flux_bio_ai (i,j,k,iblk) = flux_bio (i,j,k,iblk) + enddo + endif + + !----------------------------------------------------------------- + ! Save net shortwave for scaling factor in scale_factor + !----------------------------------------------------------------- + scale_factor(i,j,iblk) = & + swvdr(i,j,iblk)*(c1 - alvdr_ai(i,j,iblk)) & + + swvdf(i,j,iblk)*(c1 - alvdf_ai(i,j,iblk)) & + + swidr(i,j,iblk)*(c1 - alidr_ai(i,j,iblk)) & + + swidf(i,j,iblk)*(c1 - alidf_ai(i,j,iblk)) + + enddo + enddo + + !----------------------------------------------------------------- + ! Divide fluxes by ice area + ! - the CESM coupler assumes fluxes are per unit ice area + ! - also needed for global budget in diagnostics + !----------------------------------------------------------------- + +! RM and froy +! 
Now use aice_init, more consistent, see merge_fluxes
+      call scale_fluxes (nx_block,            ny_block,             &
+                         tmask    (:,:,iblk), nbtrcr, icepack_max_aero, &
+                         aice_init(:,:,iblk), Tf      (:,:,iblk),   &
+                         Tair     (:,:,iblk), Qa      (:,:,iblk),   &
+                         strairxT (:,:,iblk), strairyT(:,:,iblk),   &
+                         fsens    (:,:,iblk), flat    (:,:,iblk),   &
+                         fswabs   (:,:,iblk), flwout  (:,:,iblk),   &
+                         evap     (:,:,iblk),                       &
+                         Tref     (:,:,iblk), Qref    (:,:,iblk),   &
+                         fresh    (:,:,iblk), fsalt   (:,:,iblk),   &
+                         fhocn    (:,:,iblk), fswthru (:,:,iblk),   &
+                         fswthru_vdr(:,:,iblk),                     &
+                         fswthru_vdf(:,:,iblk),                     &
+                         fswthru_idr(:,:,iblk),                     &
+                         fswthru_idf(:,:,iblk),                     &
+                         faero_ocn(:,:,:,iblk),                     &
+                         alvdr    (:,:,iblk), alidr   (:,:,iblk),   &
+                         alvdf    (:,:,iblk), alidf   (:,:,iblk),   &
+                         fzsal    (:,:,iblk), fzsal_g (:,:,iblk),   &
+                         flux_bio(:,:,1:nbtrcr,iblk))
+
+!echmod - comment this out for efficiency, if .not. calc_Tsfc
+      if (.not. calc_Tsfc) then
+
+       !---------------------------------------------------------------
+       ! If surface fluxes were provided, conserve these fluxes at ice
+       ! free points by passing to ocean.
+       !---------------------------------------------------------------
+
+         call sfcflux_to_ocn &
+                      (nx_block,             ny_block,             &
+                       tmask    (:,:,iblk),  aice_init(:,:,iblk),  &
+                       fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), &
+                       fresh    (:,:,iblk),  fhocn    (:,:,iblk))
+      endif
+!echmod
+
+      call ice_timer_stop(timer_couple)   ! atm/ocn coupling
+
+      end subroutine coupling_prep
+
+!=======================================================================
+!
+! If surface heat fluxes are provided to CICE instead of CICE calculating
+! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can
+! be provided at points which do not have ice. (This could be due to
+! the heat fluxes being calculated on a lower resolution grid or the
+! heat fluxes not being recalculated at every CICE timestep.) At ice free
+! points, conserve energy and water by passing these fluxes to the ocean.
+!
+! author: A. McLaren, Met Office
+
+      subroutine sfcflux_to_ocn(nx_block,   ny_block,     &
+                                tmask,      aice,         &
+                                fsurfn_f,   flatn_f,      &
+                                fresh,      fhocn)
+
+      use ice_domain_size, only: ncat
+
+      integer (kind=int_kind), intent(in) :: &
+          nx_block, ny_block  ! block dimensions
+
+      logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: &
+          tmask       ! land/boundary mask, thickness (T-cell)
+
+      real (kind=dbl_kind), dimension(nx_block,ny_block), intent(in):: &
+          aice        ! initial ice concentration
+
+      real (kind=dbl_kind), dimension(nx_block,ny_block,ncat), intent(in) :: &
+          fsurfn_f, & ! net surface heat flux (provided as forcing)
+          flatn_f     ! latent heat flux (provided as forcing)
+
+      real (kind=dbl_kind), dimension(nx_block,ny_block), intent(inout):: &
+          fresh   , & ! fresh water flux to ocean (kg/m2/s)
+          fhocn       ! actual ocn/ice heat flx (W/m**2)
+
+#ifdef CICE_IN_NEMO
+
+      ! local variables
+      integer (kind=int_kind) :: &
+          i, j, n     ! horizontal indices
+
+      real (kind=dbl_kind) :: &
+          puny, &     !
+          Lsub, &     !
+          rLsub       ! 1/Lsub
+
+      character(len=*), parameter :: subname = '(sfcflux_to_ocn)'
+
+      call icepack_query_parameters(puny_out=puny, Lsub_out=Lsub)
+      call icepack_warnings_flush(nu_diag)
+      if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
+         file=__FILE__, line=__LINE__)
+      rLsub = c1 / Lsub
+
+      do n = 1, ncat
+         do j = 1, ny_block
+            do i = 1, nx_block
+               if (tmask(i,j) .and. aice(i,j) <= puny) then
+                  fhocn(i,j) = fhocn(i,j) &
+                             + fsurfn_f(i,j,n) + flatn_f(i,j,n)
+                  fresh(i,j) = fresh(i,j) &
+                             + flatn_f(i,j,n) * rLsub
+               endif
+            enddo   ! i
+         enddo      ! j
+      enddo         ! 
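+      ! Rough magnitude check, with assumed values (not from this code):
+      ! for flatn_f = -10 W/m2 at an ice-free point and Lsub = 2.835e6 J/kg,
+      ! the freshwater term flatn_f*rLsub above is about -3.5e-6 kg/m2/s
+      ! for that category, so sublimation-scale forcing reaches the ocean
+      ! rather than being discarded.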
n + +#endif + + end subroutine sfcflux_to_ocn + +!======================================================================= + + end module CICE_RunMod + +!======================================================================= diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index a57f8aef8..cfca994c3 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -1,446 +1,451 @@ -!======================================================================= -! -! This module contains the CICE initialization routine that sets model -! parameters and initializes the grid and CICE state variables. -! -! authors Elizabeth C. Hunke, LANL -! William H. Lipscomb, LANL -! Philip W. Jones, LANL -! -! 2006: Converted to free form source (F90) by Elizabeth Hunke -! 2008: E. Hunke moved ESMF code to its own driver - - module CICE_InitMod - - use ice_kinds_mod - use ice_exit, only: abort_ice - use ice_fileunits, only: init_fileunits, nu_diag - use icepack_intfc, only: icepack_aggregate - use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist - use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave - use icepack_intfc, only: icepack_configure - use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_flags, & - icepack_query_tracer_indices, icepack_query_tracer_sizes - - implicit none - private - public :: cice_init +module CICE_InitMod -!======================================================================= + ! Initialize CICE model. + + use ice_kinds_mod + use ice_exit , only: abort_ice + use ice_fileunits, only: init_fileunits, nu_diag + use icepack_intfc, only: icepack_aggregate + use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist + use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave + use icepack_intfc, only: icepack_configure + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_flags + use icepack_intfc, only: icepack_query_tracer_indices, icepack_query_tracer_sizes + + implicit none + private + public :: cice_init1 + public :: cice_init2 - contains + private :: init_restart !======================================================================= -! -! Initialize CICE model. - - subroutine cice_init - - ! Initialize the basic state, grid and all necessary parameters for - ! running the CICE model. 
- - use ice_arrays_column, only: hin_max, c_hi_range, alloc_arrays_column - use ice_arrays_column, only: floe_rad_l, floe_rad_c, & - floe_binwidth, c_fsd_range - use ice_state, only: alloc_state - use ice_flux_bgc, only: alloc_flux_bgc - use ice_calendar, only: dt, dt_dyn, istep, istep1, write_ic, & - init_calendar, advance_timestep, calc_timesteps - use ice_calendar, only: calendar - use ice_communicate, only: my_task, master_task - use ice_diagnostics, only: init_diags - use ice_domain, only: init_domain_blocks - use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared - use ice_dyn_vp, only: init_vp - use ice_flux, only: init_coupler_flux, init_history_therm, & - init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux - use ice_forcing, only: init_forcing_ocn - use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - faero_default, faero_optics, alloc_forcing_bgc, fiso_default - use ice_grid, only: init_grid1, init_grid2, alloc_grid - use ice_history, only: init_hist, accum_hist - use ice_restart_shared, only: restart, runtype - use ice_init, only: input_data, init_state - use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers - use ice_kinds_mod - use ice_restoring, only: ice_HaloRestore_init - use ice_timers, only: timer_total, init_ice_timers, ice_timer_start - use ice_transport_driver, only: init_transport - - logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & - tr_iso, tr_fsd, wave_spec - character(len=*), parameter :: subname = '(cice_init)' - - call init_fileunits ! unit numbers - - call icepack_configure() ! initialize icepack - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(trim(subname), & - file=__FILE__,line= __LINE__) - - call input_data ! namelist variables - call input_zbgc ! vertical biogeochemistry namelist - call count_tracers ! count tracers - - call init_domain_blocks ! set up block decomposition - call init_grid1 ! domain distribution - call alloc_grid ! allocate grid arrays - call alloc_arrays_column ! allocate column arrays - call alloc_state ! allocate state arrays - call alloc_dyn_shared ! allocate dyn shared arrays - call alloc_flux_bgc ! allocate flux_bgc arrays - call alloc_flux ! allocate flux arrays - call init_ice_timers ! initialize all timers - call ice_timer_start(timer_total) ! start timing entire run - call init_grid2 ! grid variables - call init_zbgc ! vertical biogeochemistry initialization - call init_calendar ! initialize some calendar stuff - call init_hist (dt) ! initialize output history file - - call init_dyn (dt_dyn) ! define dynamics parameters, variables - if (kdyn == 2) then - call alloc_dyn_eap ! allocate dyn_eap arrays - call init_eap ! define eap dynamics parameters, variables - else if (kdyn == 3) then - call init_vp ! define vp dynamics parameters, variables - endif - - call init_coupler_flux ! initialize fluxes exchanged with coupler - call init_thermo_vertical ! initialize vertical thermodynamics - - call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! ice thickness distribution - if (my_task == master_task) then - call icepack_init_itd_hist(ncat=ncat, hin_max=hin_max, c_hi_range=c_hi_range) ! 
output - endif - - call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(trim(subname), & - file=__FILE__,line= __LINE__) - - if (tr_fsd) call icepack_init_fsd_bounds (nfsd, & ! floe size distribution +contains +!======================================================================= + + subroutine cice_init1() + + ! Initialize the basic state, grid and all necessary parameters for + ! running the CICE model. + + use ice_init , only: input_data + use ice_init_column , only: input_zbgc, count_tracers + use ice_grid , only: init_grid1, alloc_grid + use ice_domain , only: init_domain_blocks + use ice_arrays_column , only: alloc_arrays_column + use ice_state , only: alloc_state + use ice_dyn_shared , only: alloc_dyn_shared + use ice_flux_bgc , only: alloc_flux_bgc + use ice_flux , only: alloc_flux + use ice_timers , only: timer_total, init_ice_timers, ice_timer_start + + character(len=*), parameter :: subname = '(cice_init1)' + !---------------------------------------------------- + + call init_fileunits ! unit numbers + call icepack_configure() ! initialize icepack + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + call input_data ! namelist variables + call input_zbgc ! vertical biogeochemistry namelist + call count_tracers ! count tracers + + call init_domain_blocks ! set up block decomposition + call init_grid1 ! domain distribution + call alloc_grid ! allocate grid arrays + call alloc_arrays_column ! allocate column arrays + call alloc_state ! allocate state arrays + call alloc_dyn_shared ! allocate dyn shared arrays + call alloc_flux_bgc ! allocate flux_bgc arrays + call alloc_flux ! allocate flux arrays + call init_ice_timers ! initialize all timers + call ice_timer_start(timer_total) ! start timing entire run + + end subroutine cice_init1 + + !======================================================================= + subroutine cice_init2() + + ! Initialize the basic state, and all necessary parameters for + ! running the CICE model. 
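+    ! cice_init1 covers decomposition and allocation only; cice_init2
+    ! completes the setup (calendar, dynamics, thermodynamics, tracers,
+    ! forcing, restart). The old monolithic cice_init was split, presumably
+    ! so the NUOPC/CMEPS cap can do its own mesh, clock and communicator
+    ! setup between the two phases. A typical cap sequence (surrounding
+    ! calls assumed, not shown in this file):
+    !
+    !    call cice_init1()   ! blocks, decomposition, array allocation
+    !    ...                 ! cap: mesh, clock, run-time configuration
+    !    call cice_init2()   ! physics, tracers, forcing, restart
+    !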
+ + use ice_arrays_column , only: hin_max, c_hi_range + use ice_arrays_column , only: floe_rad_l, floe_rad_c, floe_binwidth, c_fsd_range + use ice_calendar , only: dt, dt_dyn, istep, istep1, write_ic, init_calendar, calendar + use ice_communicate , only: my_task, master_task + use ice_diagnostics , only: init_diags + use ice_domain_size , only: ncat, nfsd + use ice_dyn_eap , only: init_eap, alloc_dyn_eap + use ice_dyn_shared , only: kdyn, init_dyn + use ice_dyn_vp , only: init_vp + use ice_flux , only: init_coupler_flux, init_history_therm + use ice_flux , only: init_history_dyn, init_flux_atm, init_flux_ocn + use ice_forcing , only: init_forcing_ocn + use ice_forcing_bgc , only: get_forcing_bgc, get_atm_bgc + use ice_forcing_bgc , only: faero_default, faero_optics, alloc_forcing_bgc, fiso_default + use ice_history , only: init_hist, accum_hist + use ice_restart_shared , only: restart, runtype + use ice_init , only: input_data, init_state + use ice_init_column , only: init_thermo_vertical, init_shortwave, init_zbgc + use ice_restoring , only: ice_HaloRestore_init + use ice_timers , only: timer_total, init_ice_timers, ice_timer_start + use ice_transport_driver , only: init_transport + + logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers + logical(kind=log_kind) :: tr_iso, tr_fsd, wave_spec + character(len=*), parameter :: subname = '(cice_init2)' + !---------------------------------------------------- + + call init_zbgc ! vertical biogeochemistry initialization + call init_calendar ! initialize some calendar stuff + call init_hist (dt) ! initialize output history file + + call init_dyn (dt_dyn) ! define dynamics parameters, variables + if (kdyn == 2) then + call alloc_dyn_eap ! allocate dyn_eap arrays + call init_eap ! define eap dynamics parameters, variables + else if (kdyn == 3) then + call init_vp ! define vp dynamics parameters, variables + endif + + call init_coupler_flux ! initialize fluxes exchanged with coupler + call init_thermo_vertical ! initialize vertical thermodynamics + + call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! ice thickness distribution + if (my_task == master_task) then + call icepack_init_itd_hist(ncat=ncat, hin_max=hin_max, c_hi_range=c_hi_range) ! output + endif + + call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (tr_fsd) call icepack_init_fsd_bounds (nfsd, & ! floe size distribution floe_rad_l, & ! fsd size lower bound in m (radius) floe_rad_c, & ! fsd size bin centre in m (radius) floe_binwidth, & ! fsd size bin width in m (radius) c_fsd_range, & ! string for history output write_diags=(my_task == master_task)) ! write diag on master only - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - call calendar() ! determine the initial date - - ! TODO: - why is this being called when you are using CMEPS? - call init_forcing_ocn(dt) ! initialize sss and sst from data - - call init_state ! initialize the ice state - call init_transport ! initialize horizontal transport - call ice_HaloRestore_init ! 
restored boundary conditions - - call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers, & - wave_spec_out=wave_spec) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(trim(subname), & - file=__FILE__,line= __LINE__) - - if (skl_bgc .or. z_tracers) call alloc_forcing_bgc ! allocate biogeochemistry arrays - - call init_restart ! initialize restart variables - call init_diags ! initialize diagnostic output points - call init_history_therm ! initialize thermo history variables - call init_history_dyn ! initialize dynamic history variables - - call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) - call icepack_query_tracer_flags(tr_iso_out=tr_iso) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(trim(subname), & - file=__FILE__,line= __LINE__) - - if (tr_aero .or. tr_zaero) then - call faero_optics !initialize aerosol optical property tables - end if - ! Initialize shortwave components using swdn from previous timestep - ! if restarting. These components will be scaled to current forcing - ! in prep_radiation. - - if (trim(runtype) == 'continue' .or. restart) then - call init_shortwave ! initialize radiative transfer - end if - - !-------------------------------------------------------------------- - ! coupler communication or forcing data initialization - !-------------------------------------------------------------------- - - if (z_tracers) call get_atm_bgc ! biogeochemistry - - if (runtype == 'initial' .and. .not. restart) then - call init_shortwave ! initialize radiative transfer using current swdn - end if - - call init_flux_atm ! initialize atmosphere fluxes sent to coupler - call init_flux_ocn ! initialize ocean fluxes sent to coupler - - end subroutine cice_init - -!======================================================================= - - subroutine init_restart - - use ice_arrays_column, only: dhsn - use ice_blocks, only: nx_block, ny_block - use ice_calendar, only: calendar - use ice_constants, only: c0 - use ice_domain, only: nblocks - use ice_domain_size, only: ncat, n_iso, n_aero, nfsd - use ice_dyn_eap, only: read_restart_eap - use ice_dyn_shared, only: kdyn - use ice_grid, only: tmask - use ice_init, only: ice_ic - use ice_init_column, only: init_age, init_FY, init_lvl, & - init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & - init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd - use ice_restart_column, only: restart_age, read_restart_age, & - restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & - restart_pond_cesm, read_restart_pond_cesm, & - restart_pond_lvl, read_restart_pond_lvl, & - restart_pond_topo, read_restart_pond_topo, & - restart_fsd, read_restart_fsd, & - restart_iso, read_restart_iso, & - restart_aero, read_restart_aero, & - restart_hbrine, read_restart_hbrine, & - restart_zsal, restart_bgc - use ice_restart_driver, only: restartfile - use ice_restart_shared, only: runtype, restart - use ice_state ! almost everything - - integer(kind=int_kind) :: & + call calendar() ! determine the initial date + + !TODO: - why is this being called when you are using CMEPS? + call init_forcing_ocn(dt) ! initialize sss and sst from data + + call init_state ! initialize the ice state + call init_transport ! initialize horizontal transport + call ice_HaloRestore_init ! 
restored boundary conditions + + call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers, & + wave_spec_out=wave_spec) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (skl_bgc .or. z_tracers) call alloc_forcing_bgc ! allocate biogeochemistry arrays + + call init_restart ! initialize restart variables + call init_diags ! initialize diagnostic output points + call init_history_therm ! initialize thermo history variables + call init_history_dyn ! initialize dynamic history variables + + call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) + call icepack_query_tracer_flags(tr_iso_out=tr_iso) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (tr_aero .or. tr_zaero) then + call faero_optics !initialize aerosol optical property tables + end if + + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing + ! in prep_radiation. + + if (trim(runtype) == 'continue' .or. restart) then + call init_shortwave ! initialize radiative transfer + end if + + !-------------------------------------------------------------------- + ! coupler communication or forcing data initialization + !-------------------------------------------------------------------- + + if (z_tracers) call get_atm_bgc ! biogeochemistry + + if (runtype == 'initial' .and. .not. restart) then + call init_shortwave ! initialize radiative transfer using current swdn + end if + + call init_flux_atm ! initialize atmosphere fluxes sent to coupler + call init_flux_ocn ! initialize ocean fluxes sent to coupler + + end subroutine cice_init2 + + !======================================================================= + + subroutine init_restart() + + use ice_arrays_column, only: dhsn + use ice_blocks, only: nx_block, ny_block + use ice_calendar, only: calendar + use ice_constants, only: c0 + use ice_domain, only: nblocks + use ice_domain_size, only: ncat, n_iso, n_aero, nfsd + use ice_dyn_eap, only: read_restart_eap + use ice_dyn_shared, only: kdyn + use ice_grid, only: tmask + use ice_init, only: ice_ic + use ice_init_column, only: init_age, init_FY, init_lvl, & + init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & + init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd + use ice_restart_column, only: restart_age, read_restart_age, & + restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & + restart_pond_cesm, read_restart_pond_cesm, & + restart_pond_lvl, read_restart_pond_lvl, & + restart_pond_topo, read_restart_pond_topo, & + restart_fsd, read_restart_fsd, & + restart_iso, read_restart_iso, & + restart_aero, read_restart_aero, & + restart_hbrine, read_restart_hbrine, & + restart_zsal, restart_bgc + use ice_restart_driver, only: restartfile + use ice_restart_shared, only: runtype, restart + use ice_state ! almost everything + + integer(kind=int_kind) :: & i, j , & ! horizontal indices iblk ! 
block index - logical(kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & - tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & - skl_bgc, z_tracers, solve_zsal - integer(kind=int_kind) :: & - ntrcr - integer(kind=int_kind) :: & - nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & - nt_iage, nt_FY, nt_aero, nt_fsd, nt_isosno, nt_isoice - - character(len=*), parameter :: subname = '(init_restart)' - - call icepack_query_tracer_sizes(ntrcr_out=ntrcr) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - call icepack_query_parameters(skl_bgc_out=skl_bgc, & - z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) - call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & - tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & - tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) - call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & - nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & - nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & - nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + logical(kind=log_kind) :: & + tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & + tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & + skl_bgc, z_tracers, solve_zsal + integer(kind=int_kind) :: & + ntrcr + integer(kind=int_kind) :: & + nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & + nt_iage, nt_FY, nt_aero, nt_fsd, nt_isosno, nt_isoice + + character(len=*), parameter :: subname = '(init_restart)' + !---------------------------------------------------- + + call icepack_query_tracer_sizes(ntrcr_out=ntrcr) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (trim(runtype) == 'continue') then - ! start from core restart file - call restartfile() ! given by pointer in ice_in - call calendar() ! update time parameters - if (kdyn == 2) call read_restart_eap ! EAP - else if (restart) then ! ice_ic = core restart file - call restartfile (ice_ic) ! or 'default' or 'none' - !!! uncomment to create netcdf - ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file - !!! uncomment if EAP restart data exists - ! if (kdyn == 2) call read_restart_eap - endif - - ! tracers - ! ice age tracer - if (tr_iage) then - if (trim(runtype) == 'continue') & - restart_age = .true. - if (restart_age) then - call read_restart_age - else - do iblk = 1, nblocks - call init_age(trcrn(:,:,nt_iage,:,iblk)) - enddo ! iblk - endif - endif - ! first-year area tracer - if (tr_FY) then - if (trim(runtype) == 'continue') restart_FY = .true. - if (restart_FY) then - call read_restart_FY - else - do iblk = 1, nblocks - call init_FY(trcrn(:,:,nt_FY,:,iblk)) - enddo ! iblk - endif - endif - ! level ice tracer - if (tr_lvl) then - if (trim(runtype) == 'continue') restart_lvl = .true. - if (restart_lvl) then - call read_restart_lvl - else - do iblk = 1, nblocks - call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & - trcrn(:,:,nt_vlvl,:,iblk)) - enddo ! iblk - endif - endif - ! CESM melt ponds - if (tr_pond_cesm) then - if (trim(runtype) == 'continue') & - restart_pond_cesm = .true. 
- if (restart_pond_cesm) then - call read_restart_pond_cesm - else - do iblk = 1, nblocks - call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & - trcrn(:,:,nt_hpnd,:,iblk)) - enddo ! iblk - endif - endif - ! level-ice melt ponds - if (tr_pond_lvl) then - if (trim(runtype) == 'continue') & - restart_pond_lvl = .true. - if (restart_pond_lvl) then - call read_restart_pond_lvl - else - do iblk = 1, nblocks - call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & - trcrn(:,:,nt_hpnd,:,iblk), & - trcrn(:,:,nt_ipnd,:,iblk), & - dhsn(:,:,:,iblk)) - enddo ! iblk - endif - endif - ! topographic melt ponds - if (tr_pond_topo) then - if (trim(runtype) == 'continue') & - restart_pond_topo = .true. - if (restart_pond_topo) then - call read_restart_pond_topo - else - do iblk = 1, nblocks - call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & - trcrn(:,:,nt_hpnd,:,iblk), & - trcrn(:,:,nt_ipnd,:,iblk)) - enddo ! iblk - endif ! .not. restart_pond - endif - ! floe size distribution - if (tr_fsd) then - if (trim(runtype) == 'continue') restart_fsd = .true. - if (restart_fsd) then - call read_restart_fsd - else - call init_fsd(trcrn(:,:,nt_fsd:nt_fsd+nfsd-1,:,:)) - endif - endif - - ! isotopes - if (tr_iso) then - if (trim(runtype) == 'continue') restart_iso = .true. - if (restart_iso) then - call read_restart_iso - else - do iblk = 1, nblocks - call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & - trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) - enddo ! iblk - endif - endif - - if (tr_aero) then ! ice aerosol - if (trim(runtype) == 'continue') restart_aero = .true. - if (restart_aero) then - call read_restart_aero - else - do iblk = 1, nblocks - call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) - enddo ! iblk - endif ! .not. restart_aero - endif - - if (trim(runtype) == 'continue') then - if (tr_brine) & - restart_hbrine = .true. - if (solve_zsal) & - restart_zsal = .true. - if (skl_bgc .or. z_tracers) & - restart_bgc = .true. - endif - - if (tr_brine .or. skl_bgc) then ! brine height tracer - call init_hbrine - if (tr_brine .and. restart_hbrine) call read_restart_hbrine - endif - - if (solve_zsal .or. skl_bgc .or. z_tracers) then ! biogeochemistry - if (tr_fsd) then - write (nu_diag,*) 'FSD implementation incomplete for use with BGC' - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + call icepack_query_parameters(skl_bgc_out=skl_bgc, & + z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) + call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & + tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & + tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & + tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) + call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & + nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & + nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (trim(runtype) == 'continue') then + ! start from core restart file + call restartfile() ! given by pointer in ice_in + call calendar() ! update time parameters + if (kdyn == 2) call read_restart_eap ! EAP + else if (restart) then ! ice_ic = core restart file + call restartfile (ice_ic) ! or 'default' or 'none' +!!! 
uncomment to create netcdf + ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file +!!! uncomment if EAP restart data exists + ! if (kdyn == 2) call read_restart_eap + endif + + ! tracers + ! ice age tracer + if (tr_iage) then + if (trim(runtype) == 'continue') & + restart_age = .true. + if (restart_age) then + call read_restart_age + else + do iblk = 1, nblocks + call init_age(trcrn(:,:,nt_iage,:,iblk)) + enddo ! iblk + endif + endif + ! first-year area tracer + if (tr_FY) then + if (trim(runtype) == 'continue') restart_FY = .true. + if (restart_FY) then + call read_restart_FY + else + do iblk = 1, nblocks + call init_FY(trcrn(:,:,nt_FY,:,iblk)) + enddo ! iblk + endif + endif + ! level ice tracer + if (tr_lvl) then + if (trim(runtype) == 'continue') restart_lvl = .true. + if (restart_lvl) then + call read_restart_lvl + else + do iblk = 1, nblocks + call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & + trcrn(:,:,nt_vlvl,:,iblk)) + enddo ! iblk + endif + endif + ! CESM melt ponds + if (tr_pond_cesm) then + if (trim(runtype) == 'continue') & + restart_pond_cesm = .true. + if (restart_pond_cesm) then + call read_restart_pond_cesm + else + do iblk = 1, nblocks + call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk)) + enddo ! iblk + endif + endif + ! level-ice melt ponds + if (tr_pond_lvl) then + if (trim(runtype) == 'continue') & + restart_pond_lvl = .true. + if (restart_pond_lvl) then + call read_restart_pond_lvl + else + do iblk = 1, nblocks + call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk), & + trcrn(:,:,nt_ipnd,:,iblk), & + dhsn(:,:,:,iblk)) + enddo ! iblk + endif + endif + ! topographic melt ponds + if (tr_pond_topo) then + if (trim(runtype) == 'continue') & + restart_pond_topo = .true. + if (restart_pond_topo) then + call read_restart_pond_topo + else + do iblk = 1, nblocks + call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk), & + trcrn(:,:,nt_ipnd,:,iblk)) + enddo ! iblk + endif ! .not. restart_pond + endif + ! floe size distribution + if (tr_fsd) then + if (trim(runtype) == 'continue') restart_fsd = .true. + if (restart_fsd) then + call read_restart_fsd + else + call init_fsd(trcrn(:,:,nt_fsd:nt_fsd+nfsd-1,:,:)) + endif + endif + + ! isotopes + if (tr_iso) then + if (trim(runtype) == 'continue') restart_iso = .true. + if (restart_iso) then + call read_restart_iso + else + do iblk = 1, nblocks + call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & + trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) + enddo ! iblk + endif + endif + + if (tr_aero) then ! ice aerosol + if (trim(runtype) == 'continue') restart_aero = .true. + if (restart_aero) then + call read_restart_aero + else + do iblk = 1, nblocks + call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) + enddo ! iblk + endif ! .not. restart_aero + endif + + if (trim(runtype) == 'continue') then + if (tr_brine) & + restart_hbrine = .true. + if (solve_zsal) & + restart_zsal = .true. + if (skl_bgc .or. z_tracers) & + restart_bgc = .true. + endif + + if (tr_brine .or. skl_bgc) then ! brine height tracer + call init_hbrine + if (tr_brine .and. restart_hbrine) call read_restart_hbrine + endif + + if (solve_zsal .or. skl_bgc .or. z_tracers) then ! 
biogeochemistry + if (tr_fsd) then + write (nu_diag,*) 'FSD implementation incomplete for use with BGC' + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - endif - call init_bgc - endif - - !----------------------------------------------------------------- - ! aggregate tracers - !----------------------------------------------------------------- - - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - if (tmask(i,j,iblk)) then - call icepack_aggregate(ncat = ncat, & - aicen = aicen(i,j,:,iblk), & - trcrn = trcrn(i,j,:,:,iblk), & - vicen = vicen(i,j,:,iblk), & - vsnon = vsnon(i,j,:,iblk), & - aice = aice (i,j, iblk), & - trcr = trcr (i,j,:,iblk), & - vice = vice (i,j, iblk), & - vsno = vsno (i,j, iblk), & - aice0 = aice0(i,j, iblk), & - ntrcr = ntrcr, & - trcr_depend = trcr_depend, & - trcr_base = trcr_base, & - n_trcr_strata = n_trcr_strata, & - nt_strata = nt_strata) - else - ! tcraig, reset all tracer values on land to zero - trcrn(i,j,:,:,iblk) = c0 - endif - enddo - enddo - enddo - !$OMP END PARALLEL DO - - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + endif + call init_bgc + endif + + !----------------------------------------------------------------- + ! aggregate tracers + !----------------------------------------------------------------- + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + if (tmask(i,j,iblk)) then + call icepack_aggregate(ncat = ncat, & + aicen = aicen(i,j,:,iblk), & + trcrn = trcrn(i,j,:,:,iblk), & + vicen = vicen(i,j,:,iblk), & + vsnon = vsnon(i,j,:,iblk), & + aice = aice (i,j, iblk), & + trcr = trcr (i,j,:,iblk), & + vice = vice (i,j, iblk), & + vsno = vsno (i,j, iblk), & + aice0 = aice0(i,j, iblk), & + ntrcr = ntrcr, & + trcr_depend = trcr_depend, & + trcr_base = trcr_base, & + n_trcr_strata = n_trcr_strata, & + nt_strata = nt_strata) + else + ! tcraig, reset all tracer values on land to zero + trcrn(i,j,:,:,iblk) = c0 + endif + enddo + enddo + enddo + !$OMP END PARALLEL DO + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - end subroutine init_restart + end subroutine init_restart -!======================================================================= + !======================================================================= - end module CICE_InitMod +end module CICE_InitMod !======================================================================= diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index 3daa7e192..81fa367c1 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -7,7 +7,7 @@ ! William H. Lipscomb, LANL ! ! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep -! 2006 ECH: Streamlined for efficiency +! 2006 ECH: Streamlined for efficiency ! 2006 ECH: Converted to free source form (F90) ! 2007 BPB: Modified Delta-Eddington shortwave interface ! 2008 ECH: moved ESMF code to its own driver @@ -86,8 +86,6 @@ subroutine CICE_Run call init_flux_atm ! Initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler - call calendar() ! at the end of the timestep - call ice_timer_stop(timer_couple) ! 
atm/ocn coupling

         call ice_step

@@ -96,7 +94,7 @@ subroutine CICE_Run
    ! end of timestep loop
    !--------------------------------------------------------------------

-      call ice_timer_stop(timer_step)   ! end timestepping loop timer
+      call ice_timer_stop(timer_step) ! end timestepping loop timer

       end subroutine CICE_Run

@@ -138,7 +136,7 @@ subroutine ice_step
       use ice_prescribed_mod

       integer (kind=int_kind) :: &
-         iblk        , & ! block index
+         iblk        , & ! block index
          k           , & ! dynamics supercycling index
          ktherm          ! thermodynamics is off when ktherm = -1

@@ -315,7 +313,7 @@ subroutine ice_step
          if (tr_iso)      call write_restart_iso
          if (tr_aero)     call write_restart_aero
          if (solve_zsal .or. skl_bgc .or. z_tracers) &
-                          call write_restart_bgc
+                          call write_restart_bgc
          if (tr_brine)    call write_restart_hbrine
          if (kdyn == 2)   call write_restart_eap
          call final_restart
@@ -359,12 +357,12 @@ subroutine coupling_prep (iblk)
       use ice_step_mod, only: ocean_mixed_layer
       use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop

-      integer (kind=int_kind), intent(in) :: &
-         iblk            ! block index
+      integer (kind=int_kind), intent(in) :: &
+         iblk            ! block index

       ! local variables

-      integer (kind=int_kind) :: &
+      integer (kind=int_kind) :: &
          ilo,ihi,jlo,jhi, & ! beginning and end of physical domain
          n           , & ! thickness category index
          i,j         , & ! horizontal indices
@@ -504,8 +502,8 @@ subroutine coupling_prep (iblk)
             fsalt_ai  (i,j,iblk) = fsalt  (i,j,iblk)
             fhocn_ai  (i,j,iblk) = fhocn  (i,j,iblk)
             fswthru_ai(i,j,iblk) = fswthru(i,j,iblk)
-            fzsal_ai  (i,j,iblk) = fzsal  (i,j,iblk)
-            fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk)
+            fzsal_ai  (i,j,iblk) = fzsal  (i,j,iblk)
+            fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk)

             if (nbtrcr > 0) then
                do k = 1, nbtrcr
@@ -526,7 +524,7 @@ subroutine coupling_prep (iblk)
          enddo

       !-----------------------------------------------------------------
-      ! Divide fluxes by ice area
+      ! Divide fluxes by ice area
       !  - the CESM coupler assumes fluxes are per unit ice area
       !  - also needed for global budget in diagnostics
       !-----------------------------------------------------------------
@@ -578,16 +576,16 @@ subroutine coupling_prep (iblk)
       if (.not. calc_Tsfc) then

       !---------------------------------------------------------------
-      ! If surface fluxes were provided, conserve these fluxes at ice
-      ! free points by passing to ocean.
+      ! If surface fluxes were provided, conserve these fluxes at ice
+      ! free points by passing to ocean.
       !---------------------------------------------------------------

-         call sfcflux_to_ocn &
+         call sfcflux_to_ocn &
             (nx_block,              ny_block,             &
              tmask   (:,:,iblk),    aice_init(:,:,iblk),  &
              fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk),  &
              fresh    (:,:,iblk),   fhocn    (:,:,iblk))
-      endif
+      endif !echmod

       call ice_timer_stop(timer_couple,iblk)   ! atm/ocn coupling

@@ -596,10 +594,10 @@ end subroutine coupling_prep
 !=======================================================================
 !
 ! If surface heat fluxes are provided to CICE instead of CICE calculating
-! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can
+! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can
 ! be provided at points which do not have ice. (This could be due to
 ! the heat fluxes being calculated on a lower resolution grid or the
-! heat fluxes not recalculated at every CICE timestep.)  At ice free points,
+! heat fluxes not being recalculated at every CICE timestep.)  At ice free points,
 ! conserve energy and water by passing these fluxes to the ocean.
 !
 ! author: A. McLaren, Met Office
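(For orientation, a minimal sketch of the ice-free handoff that the comment above describes; it is illustrative only and not part of this patch. The puny threshold, the Lsub conversion of the latent heat flux into a freshwater flux, and the loop structure are assumptions inferred from the sfcflux_to_ocn call in coupling_prep, not a copy of the routine body.)

   ! Hedged sketch only -- not the actual routine body. Array names follow the
   ! sfcflux_to_ocn call above; puny and Lsub are assumed icepack-style constants.
   do n = 1, ncat
      do j = 1, ny_block
      do i = 1, nx_block
         if (tmask(i,j) .and. aice_init(i,j) <= puny) then
            ! no ice present: hand the prescribed surface and latent heat
            ! fluxes for this category to the ocean to conserve energy
            fhocn(i,j) = fhocn(i,j) + fsurfn_f(i,j,n) + flatn_f(i,j,n)
            ! the implied sublimation/condensation becomes a water flux
            fresh(i,j) = fresh(i,j) + flatn_f(i,j,n)/Lsub
         endif
      enddo
      enddo
   enddo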
diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90
index ebfc3d674..a832e7bdf 100644
--- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90
+++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90
@@ -15,30 +15,21 @@ module ice_comp_nuopc
   use NUOPC_Model        , only : model_label_SetRunClock => label_SetRunClock
   use NUOPC_Model        , only : model_label_Finalize    => label_Finalize
   use NUOPC_Model        , only : NUOPC_ModelGet, SetVM
-  use ice_constants      , only : ice_init_constants
+  use ice_constants      , only : ice_init_constants, c0
   use ice_shr_methods    , only : chkerr, state_setscalar, state_getscalar, state_diagnose, alarmInit
-  use ice_shr_methods    , only : set_component_logging, get_component_instance
-  use ice_shr_methods    , only : state_flddebug
-  use ice_import_export  , only : ice_import, ice_export
-  use ice_import_export  , only : ice_advertise_fields, ice_realize_fields
+  use ice_shr_methods    , only : set_component_logging, get_component_instance, state_flddebug
+  use ice_import_export  , only : ice_import, ice_export, ice_advertise_fields, ice_realize_fields
   use ice_domain_size    , only : nx_global, ny_global
-  use ice_domain         , only : nblocks, blocks_ice, distrb_info
-  use ice_blocks         , only : block, get_block, nx_block, ny_block, nblocks_x, nblocks_y
-  use ice_blocks         , only : nblocks_tot, get_block_parameter
-  use ice_distribution   , only : ice_distributiongetblockloc
-  use ice_grid           , only : tlon, tlat, hm, tarea, ULON, ULAT
+  use ice_grid           , only : grid_type, init_grid2
   use ice_communicate    , only : init_communicate, my_task, master_task, mpi_comm_ice
   use ice_calendar       , only : force_restart_now, write_ic
-  use ice_calendar       , only : idate, mday, mmonth, year_init, timesecs
+  use ice_calendar       , only : idate, mday, mmonth, myear, year_init
   use ice_calendar       , only : msec, dt, calendar, calendar_type, nextsw_cday, istep
   use ice_kinds_mod      , only : dbl_kind, int_kind, char_len, char_len_long
-  use ice_scam           , only : scmlat, scmlon, single_column
   use ice_fileunits      , only : nu_diag, nu_diag_set, inst_index, inst_name
   use ice_fileunits      , only : inst_suffix, release_all_fileunits, flush_fileunit
   use ice_restart_shared , only : runid, runtype, restart, use_restart_time, restart_dir, restart_file
   use ice_history        , only : accum_hist
-  use CICE_InitMod       , only : cice_init
-  use CICE_RunMod        , only : cice_run
   use ice_exit           , only : abort_ice
   use icepack_intfc      , only : icepack_warnings_flush, icepack_warnings_aborted
   use icepack_intfc      , only : icepack_init_orbit, icepack_init_parameters, icepack_query_orbit
@@ -48,9 +39,15 @@ module ice_comp_nuopc
 #ifdef CESMCOUPLED
   use shr_const_mod
   use shr_orb_mod        , only : shr_orb_decl, shr_orb_params, SHR_ORB_UNDEF_REAL, SHR_ORB_UNDEF_INT
+  use ice_scam           , only : scmlat, scmlon, scol_mask, scol_frac, scol_ni, scol_nj
 #endif
   use ice_timers
+  use CICE_InitMod       , only : cice_init1, cice_init2
+  use CICE_RunMod        , only : cice_run
+  use ice_mesh_mod       , only : ice_mesh_set_distgrid, ice_mesh_setmask_from_maskfile, ice_mesh_check
+  use ice_mesh_mod       , only : ice_mesh_init_tlon_tlat_area_hm, ice_mesh_create_scolumn
   use ice_prescribed_mod , only : ice_prescribed_init
+  use ice_scam           , only : scol_valid, single_column

   implicit none
   private
@@ -86,6 +83,10 @@ module ice_comp_nuopc
   character(len=*),parameter :: shr_cal_noleap    = 'NO_LEAP'
   character(len=*),parameter :: shr_cal_gregorian = 'GREGORIAN'

+  type(ESMF_Mesh)  :: ice_mesh
+
+  integer          :: nthrds ! 
Number of threads to use in this component + integer :: dbug = 0 integer , parameter :: debug_import = 0 ! internal debug level integer , parameter :: debug_export = 0 ! internal debug level @@ -179,8 +180,50 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) ! Local variables character(len=char_len_long) :: cvalue - character(len=char_len_long) :: logmsg + character(len=char_len_long) :: ice_meshfile + character(len=char_len_long) :: ice_maskfile + character(len=char_len_long) :: errmsg logical :: isPresent, isSet + real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp + type(ESMF_DistGrid) :: ice_distGrid + real(kind=dbl_kind) :: atmiter_conv + real(kind=dbl_kind) :: atmiter_conv_driver + integer (kind=int_kind) :: natmiter + integer (kind=int_kind) :: natmiter_driver + character(len=char_len) :: tfrz_option_driver ! tfrz_option from driver attributes + character(len=char_len) :: tfrz_option ! tfrz_option from cice namelist + integer(int_kind) :: ktherm + integer :: localPet + integer :: npes + logical :: mastertask + type(ESMF_VM) :: vm + integer :: lmpicom ! local communicator + type(ESMF_Time) :: currTime ! Current time + type(ESMF_Time) :: startTime ! Start time + type(ESMF_Time) :: stopTime ! Stop time + type(ESMF_Time) :: refTime ! Ref time + type(ESMF_TimeInterval) :: timeStep ! Model timestep + type(ESMF_Calendar) :: esmf_calendar ! esmf calendar + type(ESMF_CalKind_Flag) :: esmf_caltype ! esmf calendar type + integer :: start_ymd ! Start date (YYYYMMDD) + integer :: start_tod ! start time of day (s) + integer :: curr_ymd ! Current date (YYYYMMDD) + integer :: curr_tod ! Current time of day (s) + integer :: stop_ymd ! stop date (YYYYMMDD) + integer :: stop_tod ! stop time of day (sec) + integer :: ref_ymd ! Reference date (YYYYMMDD) + integer :: ref_tod ! reference time of day (s) + integer :: yy,mm,dd ! Temporaries for time query + integer :: dtime ! time step + integer :: shrlogunit ! original log unit + character(len=char_len) :: starttype ! infodata start type + integer :: lsize ! local size of coupling array + integer :: n,c,g,i,j,m ! indices + integer :: iblk, jblk ! indices + integer :: ig, jg ! indices + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + character(len=char_len_long) :: diag_filename = 'unset' + character(len=char_len_long) :: logmsg character(len=*), parameter :: subname=trim(modName)//':(InitializeAdvertise) ' !-------------------------------- @@ -244,102 +287,26 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) write(logmsg,'(i6)') dbug call ESMF_LogWrite('CICE_cap: dbug = '//trim(logmsg), ESMF_LOGMSG_INFO) - call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - - end subroutine InitializeAdvertise - - !=============================================================================== - - subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) - - ! Arguments - type(ESMF_GridComp) :: gcomp - type(ESMF_State) :: importState - type(ESMF_State) :: exportState - type(ESMF_Clock) :: clock - integer, intent(out) :: rc - - ! 
Local variables - real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp - type(ESMF_DistGrid) :: distGrid - type(ESMF_Mesh) :: Emesh, EmeshTemp - integer :: spatialDim - integer :: numOwnedElements - real(dbl_kind), pointer :: ownedElemCoords(:) - real(dbl_kind), pointer :: lat(:), latMesh(:) - real(dbl_kind), pointer :: lon(:), lonMesh(:) - integer , allocatable :: gindex_ice(:) - integer , allocatable :: gindex_elim(:) - integer , allocatable :: gindex(:) - integer :: globalID - character(ESMF_MAXSTR) :: cvalue - character(len=char_len) :: tfrz_option - character(ESMF_MAXSTR) :: convCIM, purpComp - type(ESMF_VM) :: vm - type(ESMF_Time) :: currTime ! Current time - type(ESMF_Time) :: startTime ! Start time - type(ESMF_Time) :: stopTime ! Stop time - type(ESMF_Time) :: refTime ! Ref time - type(ESMF_TimeInterval) :: timeStep ! Model timestep - type(ESMF_Calendar) :: esmf_calendar ! esmf calendar - type(ESMF_CalKind_Flag) :: esmf_caltype ! esmf calendar type - integer :: start_ymd ! Start date (YYYYMMDD) - integer :: start_tod ! start time of day (s) - integer :: curr_ymd ! Current date (YYYYMMDD) - integer :: curr_tod ! Current time of day (s) - integer :: stop_ymd ! stop date (YYYYMMDD) - integer :: stop_tod ! stop time of day (sec) - integer :: ref_ymd ! Reference date (YYYYMMDD) - integer :: ref_tod ! reference time of day (s) - integer :: yy,mm,dd ! Temporaries for time query - integer :: iyear ! yyyy - integer :: dtime ! time step - integer :: lmpicom - integer :: shrlogunit ! original log unit - character(len=char_len) :: starttype ! infodata start type - integer :: lsize ! local size of coupling array - logical :: isPresent - logical :: isSet - integer :: localPet - integer :: n,c,g,i,j,m ! indices - integer :: iblk, jblk ! indices - integer :: ig, jg ! indices - integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain - type(block) :: this_block ! block information for current block - integer :: compid ! component id - character(len=char_len_long) :: tempc1,tempc2 - real(dbl_kind) :: diff_lon - integer :: npes - integer :: num_elim_global - integer :: num_elim_local - integer :: num_elim - integer :: num_ice - integer :: num_elim_gcells ! local number of eliminated gridcells - integer :: num_elim_blocks ! local number of eliminated blocks - integer :: num_total_blocks - integer :: my_elim_start, my_elim_end - real(dbl_kind) :: rad_to_deg - integer(int_kind) :: ktherm - logical :: mastertask - character(len=char_len_long) :: diag_filename = 'unset' - character(len=*), parameter :: F00 = "('(ice_comp_nuopc) ',2a,1x,d21.14)" - character(len=*), parameter :: subname=trim(modName)//':(InitializeRealize) ' - !-------------------------------- - - rc = ESMF_SUCCESS - if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) - !---------------------------------------------------------------------------- ! 
generate local mpi comm !---------------------------------------------------------------------------- call ESMF_GridCompGet(gcomp, vm=vm, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call ESMF_VMGet(vm, mpiCommunicator=lmpicom, localPet=localPet, PetCount=npes, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return +#ifdef CESMCOUPLED + call ESMF_VMGet(vm, pet=localPet, peCount=nthrds, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (nthrds==1) then + call NUOPC_CompAttributeGet(gcomp, "nthreads", value=cvalue, rc=rc) + if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=u_FILE_u)) return + read(cvalue,*) nthrds + endif +!$ call omp_set_num_threads(nthrds) +#endif + !---------------------------------------------------------------------------- ! Initialize cice communicators !---------------------------------------------------------------------------- @@ -371,6 +338,8 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call ice_init_constants(omega_in=SHR_CONST_OMEGA, radius_in=SHR_CONST_REARTH, & spval_dbl_in=SHR_CONST_SPVAL) + ! TODO: get tfrz_option from driver + call icepack_init_parameters( & secday_in = SHR_CONST_CDAY, & rhoi_in = SHR_CONST_RHOICE, & @@ -447,23 +416,6 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) runtype = 'initial' ! determined from the namelist in ice_init if CESMCOUPLED is not defined end if - ! Determine if single column - call NUOPC_CompAttributeGet(gcomp, name='single_column', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent .and. isSet) then - read(cvalue,*) single_column - if (single_column) then - call NUOPC_CompAttributeGet(gcomp, name='scmlon', value=cvalue, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scmlon - call NUOPC_CompAttributeGet(gcomp, name='scmlat', value=cvalue, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scmlat - end if - else - single_column = .false. - end if - ! Determine runid call NUOPC_CompAttributeGet(gcomp, name='case_name', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (isPresent .and. isSet) then @@ -538,15 +490,124 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) end if !---------------------------------------------------------------------------- - ! Initialize cice + ! First cice initialization phase - before initializing grid info !---------------------------------------------------------------------------- - ! Note that cice_init also sets time manager info as well as mpi communicator info, - ! including master_task and my_task + ! Read the cice namelist as part of the call to cice_init1 + call t_startf ('cice_init1') + call cice_init1 + call t_stopf ('cice_init1') - call t_startf ('cice_init') - call cice_init - call t_stopf ('cice_init') +#ifdef CESMCOUPLED + ! Form of ocean freezing temperature + ! 'minus1p8' = -1.8 C + ! 'linear_salt' = -depressT * sss + ! 'mushy' conforms with ktherm=2 + call NUOPC_CompAttributeGet(gcomp, name="tfreeze_option", value=tfrz_option_driver, & + isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (.not. 
isPresent) then
+       tfrz_option_driver = 'linear_salt'
+    end if
+    call icepack_query_parameters( tfrz_option_out=tfrz_option)
+    if (tfrz_option_driver /= tfrz_option) then
+       write(errmsg,'(a)') trim(subname)//'error: tfrz_option from driver '//trim(tfrz_option_driver)//&
+            ' must be the same as tfrz_option from cice namelist '//trim(tfrz_option)
+       call abort_ice(trim(errmsg))
+    endif
+
+    ! Flux convergence tolerance - always use the driver attribute value
+    call NUOPC_CompAttributeGet(gcomp, name="flux_convergence", value=cvalue, &
+         isPresent=isPresent, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+    if (isPresent) then
+       read(cvalue,*) atmiter_conv_driver
+       call icepack_query_parameters( atmiter_conv_out=atmiter_conv)
+       if (atmiter_conv_driver /= atmiter_conv) then
+          write(errmsg,'(a,d13.5,a,d13.5)') trim(subname)//'warning: atmiter_conv from driver ',&
+               atmiter_conv_driver,' is overwriting atmiter_conv from cice namelist ',atmiter_conv
+          write(nu_diag,*) trim(errmsg)
+          call icepack_warnings_flush(nu_diag)
+          call icepack_init_parameters(atmiter_conv_in=atmiter_conv_driver)
+       end if
+    end if
+
+    ! Number of iterations for boundary layer calculations
+    call NUOPC_CompAttributeGet(gcomp, name="flux_max_iteration", value=cvalue, isPresent=isPresent, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+    if (isPresent) then
+       read(cvalue,*) natmiter_driver
+    else
+       natmiter_driver = 5
+    end if
+    call icepack_query_parameters( natmiter_out=natmiter)
+    if (natmiter_driver /= natmiter) then
+       write(errmsg,'(a,i8,a,i8)') trim(subname)//'error: natmiter_driver ',natmiter_driver, &
+            ' must be the same as natmiter from cice namelist ',natmiter
+       call abort_ice(trim(errmsg))
+    endif
+#endif
+    !----------------------------------------------------------------------------
+    ! Initialize grid info
+    !----------------------------------------------------------------------------
+
+    ! Initialize cice mesh and mask if appropriate
+
+    if (single_column .and. scol_valid) then
+       call ice_mesh_init_tlon_tlat_area_hm()
+    else
+       ! Determine mesh input file
+       call NUOPC_CompAttributeGet(gcomp, name='mesh_ice', value=ice_meshfile, rc=rc)
+       if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+       ! Determine mask input file
+       call NUOPC_CompAttributeGet(gcomp, name='mesh_mask', value=cvalue, isPresent=isPresent, rc=rc)
+       if (ChkErr(rc,__LINE__,u_FILE_u)) return
+       if (isPresent .and. isSet) then
+          ice_maskfile = trim(cvalue)
+       else
+          ice_maskfile = ice_meshfile
+       end if
+       if (my_task == master_task) then
+          write(nu_diag,*)'mesh file for cice domain is ',trim(ice_meshfile)
+          write(nu_diag,*)'mask file for cice domain is ',trim(ice_maskfile)
+       end if
+
+       ! Determine the model distgrid using the decomposition obtained in
+       ! call to init_grid1 called from cice_init1
+       call ice_mesh_set_distgrid(localpet, npes, ice_distgrid, rc)
+       if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+       ! Read in the ice mesh on the cice distribution
+       ice_mesh = ESMF_MeshCreate(filename=trim(ice_meshfile), fileformat=ESMF_FILEFORMAT_ESMFMESH, &
+            elementDistGrid=ice_distgrid, rc=rc)
+       if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+       ! Initialize the cice mesh and the cice mask
+       if (trim(grid_type) == 'setmask') then
+          ! In this case cap code determines the mask file
+          call ice_mesh_setmask_from_maskfile(ice_maskfile, ice_mesh, rc=rc)
+          if (ChkErr(rc,__LINE__,u_FILE_u)) return
+          call ice_mesh_init_tlon_tlat_area_hm()
+          if (ChkErr(rc,__LINE__,u_FILE_u)) return
+       else
+          ! 
In this case init_grid2 will initialize tlon, tlat, area and hm
+          call init_grid2()
+          call ice_mesh_check(gcomp,ice_mesh, rc=rc)
+          if (ChkErr(rc,__LINE__,u_FILE_u)) return
+       end if
+    end if
+
+    !----------------------------------------------------------------------------
+    ! Second cice initialization phase - after initializing grid info
+    !----------------------------------------------------------------------------
+    ! Note that cice_init2 also sets time manager info as well as mpi communicator info,
+    ! including master_task and my_task
+    ! Note that cice_init2 calls ice_init() which in turn calls icepack_init_parameters
+    ! which sets the tfrz_option
+    call t_startf ('cice_init2')
+    call cice_init2()
+    call t_stopf ('cice_init2')

    !----------------------------------------------------------------------------
    ! reset shr logging to my log file
@@ -560,14 +621,14 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc)

    ! Now write output to nu_diag - this must happen AFTER call to cice_init
    if (mastertask) then
-      write(nu_diag,F00) trim(subname),' cice init nextsw_cday = ',nextsw_cday
-      write(nu_diag,*) trim(subname),' tfrz_option = ',trim(tfrz_option)
+      write(nu_diag,'(a,d21.14)') trim(subname)//' cice init nextsw_cday = ',nextsw_cday
+      write(nu_diag,'(a)') trim(subname)//' tfrz_option = '//trim(tfrz_option)
       if (ktherm == 2 .and. trim(tfrz_option) /= 'mushy') then
          write(nu_diag,*) trim(subname),' Warning: Using ktherm = 2 and tfrz_option = ', trim(tfrz_option)
       endif
-      write(nu_diag,*) trim(subname),' inst_name   = ',trim(inst_name)
-      write(nu_diag,*) trim(subname),' inst_index  = ',inst_index
-      write(nu_diag,*) trim(subname),' inst_suffix = ',trim(inst_suffix)
+      write(nu_diag,'(a   )') trim(subname)//' inst_name   = '//trim(inst_name)
+      write(nu_diag,'(a,i8)') trim(subname)//' inst_index  = ',inst_index
+      write(nu_diag,'(a   )') trim(subname)//' inst_suffix = '//trim(inst_suffix)
    endif

@@ -576,7 +637,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc)
    ! - on initial run
    !   - iyear, month and mday obtained from sync clock
-   !   - time determined from iyear, month and mday
+   !   - time determined from myear, month and mday
    !   - istep0 and istep1 are set to 0
    ! - on restart run
    !   - istep0, time and time_forc are read from restart file
@@ -605,28 +666,18 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc)
          end if
          call abort_ice(subname//' :: ERROR idate lt zero')
       endif
-      iyear = (idate/10000)                ! integer year of basedate
-      mmonth= (idate-iyear*10000)/100      ! integer month of basedate
-      mday  = idate-iyear*10000-mmonth*100 ! day of month of basedate
+      myear = (idate/10000)                ! integer year of basedate
+      mmonth= (idate-myear*10000)/100      ! integer month of basedate
+      mday  = idate-myear*10000-mmonth*100 ! day of month of basedate

       if (my_task == master_task) then
          write(nu_diag,*) trim(subname),' curr_ymd           = ',curr_ymd
          write(nu_diag,*) trim(subname),' cice year_init     = ',year_init
          write(nu_diag,*) trim(subname),' cice start date    = ',idate
-         write(nu_diag,*) trim(subname),' cice start ymds    = ',iyear,mmonth,mday,start_tod
+         write(nu_diag,*) trim(subname),' cice start ymds    = ',myear,mmonth,mday,start_tod
          write(nu_diag,*) trim(subname),' cice calendar_type = ',trim(calendar_type)
       endif
-#ifdef CESMCOUPLED
-      if (calendar_type == "GREGORIAN"   .or. &
-          calendar_type == "Gregorian"   .or. 
& - calendar_type == "gregorian") then - call time2sec(iyear-(year_init-1),mmonth,mday,time) - else - call time2sec(iyear-year_init,mmonth,mday,time) - endif -#endif - timesecs = timesecs+start_tod end if call calendar() ! update calendar info @@ -634,239 +685,145 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call accum_hist(dt) ! write initial conditions end if - !--------------------------------------------------------------------------- - ! Determine the global index space needed for the distgrid - !--------------------------------------------------------------------------- - - ! number the local grid to get allocation size for gindex_ice - lsize = 0 - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi - lsize = lsize + 1 - enddo - enddo - enddo - - ! set global index array - allocate(gindex_ice(lsize)) - n = 0 - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi - n = n+1 - ig = this_block%i_glob(i) - jg = this_block%j_glob(j) - gindex_ice(n) = (jg-1)*nx_global + ig - enddo - enddo - enddo - - ! Determine total number of eliminated blocks globally - globalID = 0 - num_elim_global = 0 ! number of eliminated blocks - num_total_blocks = 0 - do jblk=1,nblocks_y - do iblk=1,nblocks_x - globalID = globalID + 1 - num_total_blocks = num_total_blocks + 1 - if (distrb_info%blockLocation(globalID) == 0) then - num_elim_global = num_elim_global + 1 - end if - end do - end do + !----------------------------------------------------------------- + ! Prescribed ice initialization + !----------------------------------------------------------------- - if (num_elim_global > 0) then + call ice_prescribed_init(clock, ice_mesh, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! Distribute the eliminated blocks in a round robin fashion amoung processors - num_elim_local = num_elim_global / npes - my_elim_start = num_elim_local*localPet + min(localPet, mod(num_elim_global, npes)) + 1 - if (localPet < mod(num_elim_global, npes)) then - num_elim_local = num_elim_local + 1 - end if - my_elim_end = my_elim_start + num_elim_local - 1 - - ! Determine the number of eliminated gridcells locally - globalID = 0 - num_elim_blocks = 0 ! local number of eliminated blocks - num_elim_gcells = 0 - do jblk=1,nblocks_y - do iblk=1,nblocks_x - globalID = globalID + 1 - if (distrb_info%blockLocation(globalID) == 0) then - num_elim_blocks = num_elim_blocks + 1 - if (num_elim_blocks >= my_elim_start .and. num_elim_blocks <= my_elim_end) then - this_block = get_block(globalID, globalID) - num_elim_gcells = num_elim_gcells + & - (this_block%jhi-this_block%jlo+1) * (this_block%ihi-this_block%ilo+1) - end if - end if - end do - end do - - ! Determine the global index space of the eliminated gridcells - allocate(gindex_elim(num_elim_gcells)) - globalID = 0 - num_elim_gcells = 0 ! local number of eliminated gridcells - num_elim_blocks = 0 ! local number of eliminated blocks - do jblk=1,nblocks_y - do iblk=1,nblocks_x - globalID = globalID + 1 - if (distrb_info%blockLocation(globalID) == 0) then - this_block = get_block(globalID, globalID) - num_elim_blocks = num_elim_blocks + 1 - if (num_elim_blocks >= my_elim_start .and. 
num_elim_blocks <= my_elim_end) then - do j=this_block%jlo,this_block%jhi - do i=this_block%ilo,this_block%ihi - num_elim_gcells = num_elim_gcells + 1 - ig = this_block%i_glob(i) - jg = this_block%j_glob(j) - gindex_elim(num_elim_gcells) = (jg-1)*nx_global + ig - end do - end do - end if - end if - end do - end do - - ! create a global index that includes both active and eliminated gridcells - num_ice = size(gindex_ice) - num_elim = size(gindex_elim) - allocate(gindex(num_elim + num_ice)) - do n = 1,num_ice - gindex(n) = gindex_ice(n) - end do - do n = num_ice+1,num_ice+num_elim - gindex(n) = gindex_elim(n-num_ice) - end do - - deallocate(gindex_elim) + !----------------------------------------------------------------- + ! Advertise fields + !----------------------------------------------------------------- - else + ! NOTE: the advertise phase needs to be called after the ice + ! initialization since the number of ice categories is needed for + ! ice_fraction_n and mean_sw_pen_to_ocn_ifrac_n + call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! No eliminated land blocks - num_ice = size(gindex_ice) - allocate(gindex(num_ice)) - do n = 1,num_ice - gindex(n) = gindex_ice(n) - end do + call t_stopf ('cice_init_total') - end if + end subroutine InitializeAdvertise - !--------------------------------------------------------------------------- - ! Create distGrid from global index array - !--------------------------------------------------------------------------- + !=============================================================================== - DistGrid = ESMF_DistGridCreate(arbSeqIndexList=gindex, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) - !--------------------------------------------------------------------------- - ! Create the CICE mesh - !--------------------------------------------------------------------------- + ! Arguments + type(ESMF_GridComp) :: gcomp + type(ESMF_State) :: importState + type(ESMF_State) :: exportState + type(ESMF_Clock) :: clock + integer, intent(out) :: rc - ! read in the mesh - call NUOPC_CompAttributeGet(gcomp, name='mesh_ice', value=cvalue, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + ! Local variables + integer :: n + integer :: fieldcount + type(ESMF_Field) :: lfield + character(len=char_len_long) :: cvalue + real(dbl_kind) :: scol_lon + real(dbl_kind) :: scol_lat + real(dbl_kind) :: scol_spval + real(dbl_kind), pointer :: fldptr1d(:) + real(dbl_kind), pointer :: fldptr2d(:,:) + integer :: rank + character(len=char_len_long) :: single_column_lnd_domainfile + character(len=char_len_long) , pointer :: lfieldnamelist(:) => null() + character(len=*), parameter :: subname=trim(modName)//':(InitializeRealize) ' + !-------------------------------- - EMeshTemp = ESMF_MeshCreate(filename=trim(cvalue), fileformat=ESMF_FILEFORMAT_ESMFMESH, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (my_task == master_task) then - write(nu_diag,*)'mesh file for cice domain is ',trim(cvalue) - end if + rc = ESMF_SUCCESS + if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) - ! recreate the mesh using the above distGrid - EMesh = ESMF_MeshCreate(EMeshTemp, elementDistgrid=Distgrid, rc=rc) +#ifdef CESMCOUPLED + call NUOPC_CompAttributeGet(gcomp, name='scol_lon', value=cvalue, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - - ! 
obtain mesh lats and lons - call ESMF_MeshGet(Emesh, spatialDim=spatialDim, numOwnedElements=numOwnedElements, rc=rc) + read(cvalue,*) scmlon + call NUOPC_CompAttributeGet(gcomp, name='scol_lat', value=cvalue, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - allocate(ownedElemCoords(spatialDim*numOwnedElements)) - allocate(lonMesh(numOwnedElements), latMesh(numOwnedElements)) - call ESMF_MeshGet(Emesh, ownedElemCoords=ownedElemCoords) + read(cvalue,*) scmlat + call NUOPC_CompAttributeGet(gcomp, name='scol_spval', value=cvalue, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_spval - do n = 1,numOwnedElements - lonMesh(n) = ownedElemCoords(2*n-1) - latMesh(n) = ownedElemCoords(2*n) - end do + if (scmlon > scol_spval .and. scmlat > scol_spval) then + call NUOPC_CompAttributeGet(gcomp, name='single_column_lnd_domainfile', & + value=single_column_lnd_domainfile, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (trim(single_column_lnd_domainfile) /= 'UNSET') then + single_column = .true. + else + call abort_ice('single_column_domainfile cannot be null for single column mode') + end if + call NUOPC_CompAttributeGet(gcomp, name='scol_ocnmask', value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_mask + call NUOPC_CompAttributeGet(gcomp, name='scol_ocnfrac', value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_frac + call NUOPC_CompAttributeGet(gcomp, name='scol_ni', value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_ni + call NUOPC_CompAttributeGet(gcomp, name='scol_nj', value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_nj - call icepack_query_parameters(rad_to_deg_out=rad_to_deg) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) + call ice_mesh_create_scolumn(scmlon, scmlat, ice_mesh, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! obtain internally generated cice lats and lons for error checks - allocate(lon(lsize)) - allocate(lat(lsize)) - n = 0 - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi - n = n+1 - lon(n) = tlon(i,j,iblk)*rad_to_deg - lat(n) = tlat(i,j,iblk)*rad_to_deg + scol_valid = (scol_mask == 1) + if (.not. scol_valid) then + ! 
if single column is not valid - set all export state fields to zero and return + write(nu_diag,'(a)')' (ice_comp_nuopc) single column mode point does not contain any ocn/ice '& + //' - setting all export data to 0' + call ice_realize_fields(gcomp, mesh=ice_mesh, & + flds_scalar_name=flds_scalar_name, flds_scalar_num=flds_scalar_num, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_StateGet(exportState, itemCount=fieldCount, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + allocate(lfieldnamelist(fieldCount)) + call ESMF_StateGet(exportState, itemNameList=lfieldnamelist, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + do n = 1, fieldCount + if (trim(lfieldnamelist(n)) /= flds_scalar_name) then + call ESMF_StateGet(exportState, itemName=trim(lfieldnamelist(n)), field=lfield, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldGet(lfield, rank=rank, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + if (rank == 2) then + call ESMF_FieldGet(lfield, farrayPtr=fldptr2d, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + fldptr2d(:,:) = 0._dbl_kind + else + call ESMF_FieldGet(lfield, farrayPtr=fldptr1d, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + fldptr1d(:) = 0._dbl_kind + end if + end if enddo - enddo - enddo - - ! error check differences between internally generated lons and those read in - do n = 1,lsize - diff_lon = abs(lonMesh(n) - lon(n)) - if ( (diff_lon > 1.e2 .and. abs(diff_lon - 360_dbl_kind) > 1.e-1) .or.& - (diff_lon > 1.e-3 .and. diff_lon < 1._dbl_kind) ) then - !write(6,100)n,lonMesh(n),lon(n), diff_lon -100 format('ERROR: CICE n, lonmesh(n), lon(n), diff_lon = ',i6,2(f21.13,3x),d21.5) - !call abort_ice() - end if - if (abs(latMesh(n) - lat(n)) > 1.e-1) then - !write(6,101)n,latMesh(n),lat(n), abs(latMesh(n)-lat(n)) -101 format('ERROR: CICE n, latmesh(n), lat(n), diff_lat = ',i6,2(f21.13,3x),d21.5) - !call abort_ice() + deallocate(lfieldnamelist) + ! ******************* + ! *** RETURN HERE *** + ! ******************* + RETURN + else + write(nu_diag,'(a,3(f10.5,2x))')' (ice_comp_nuopc) single column mode lon/lat/frac is ',& + scmlon,scmlat,scol_frac end if - end do - - ! deallocate memory - deallocate(ownedElemCoords) - deallocate(lon, lonMesh) - deallocate(lat, latMesh) + else + single_column = .false. + end if +#endif !----------------------------------------------------------------- ! Realize the actively coupled fields !----------------------------------------------------------------- - call ice_realize_fields(gcomp, mesh=Emesh, & + call ice_realize_fields(gcomp, mesh=ice_mesh, & flds_scalar_name=flds_scalar_name, flds_scalar_num=flds_scalar_num, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - !----------------------------------------------------------------- - ! Prescribed ice initialization - first get compid - !----------------------------------------------------------------- - - call NUOPC_CompAttributeGet(gcomp, name='MCTID', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent .and. isSet) then - read(cvalue,*) compid ! convert from string to integer - else - compid = 0 - end if - call ice_prescribed_init(lmpicom, compid, gindex_ice) - !----------------------------------------------------------------- ! 
Create cice export state !----------------------------------------------------------------- @@ -881,16 +838,16 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) flds_scalar_name, flds_scalar_num, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return + !-------------------------------- + ! diagnostics + !-------------------------------- + ! TODO (mvertens, 2018-12-21): fill in iceberg_prognostic as .false. if (debug_export > 0 .and. my_task==master_task) then call State_fldDebug(exportState, flds_scalar_name, 'cice_export:', & idate, msec, nu_diag, rc=rc) end if - !-------------------------------- - ! diagnostics - !-------------------------------- - if (dbug > 0) then call state_diagnose(exportState,subname//':ES',rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -898,11 +855,6 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) - call t_stopf ('cice_init_total') - - deallocate(gindex_ice) - deallocate(gindex) - call flush_fileunit(nu_diag) end subroutine InitializeRealize @@ -945,7 +897,6 @@ subroutine ModelAdvance(gcomp, rc) character(char_len_long) :: restart_date character(char_len_long) :: restart_filename logical :: isPresent, isSet - character(*) , parameter :: F00 = "('(ice_comp_nuopc) ',2a,i8,d21.14)" character(len=*),parameter :: subname=trim(modName)//':(ModelAdvance) ' character(char_len_long) :: msgString !-------------------------------- @@ -1011,7 +962,7 @@ subroutine ModelAdvance(gcomp, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if if (my_task == master_task) then - write(nu_diag,F00) trim(subname),' cice istep, nextsw_cday = ',istep, nextsw_cday + write(nu_diag,'(a,2x,i8,2x,d24.14)') trim(subname)//' cice istep, nextsw_cday = ',istep, nextsw_cday end if !-------------------------------- @@ -1289,28 +1240,26 @@ end subroutine ModelSetRunClock !=============================================================================== subroutine ModelFinalize(gcomp, rc) + + !-------------------------------- + ! Finalize routine + !-------------------------------- + type(ESMF_GridComp) :: gcomp integer, intent(out) :: rc ! local variables - character(*), parameter :: F00 = "('(ice_comp_nuopc) ',8a)" - character(*), parameter :: F91 = "('(ice_comp_nuopc) ',73('-'))" + character(*), parameter :: F91 = "('(ice_comp_nuopc) ',73('-'))" character(len=*),parameter :: subname=trim(modName)//':(ModelFinalize) ' !-------------------------------- - !-------------------------------- - ! 
Finalize routine - !-------------------------------- - rc = ESMF_SUCCESS if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) - if (my_task == master_task) then write(nu_diag,F91) - write(nu_diag,F00) 'CICE: end of main integration loop' + write(nu_diag,'(a)') 'CICE: end of main integration loop' write(nu_diag,F91) end if - if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) end subroutine ModelFinalize @@ -1475,7 +1424,4 @@ subroutine ice_cal_ymd2date(year, month, day, date) end subroutine ice_cal_ymd2date - !=============================================================================== - - end module ice_comp_nuopc diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index b32085143..62ff2727d 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -4,7 +4,7 @@ module ice_import_export use NUOPC use NUOPC_Model use ice_kinds_mod , only : int_kind, dbl_kind, char_len, log_kind - use ice_constants , only : c0, c1, spval_dbl + use ice_constants , only : c0, c1, spval_dbl, radius use ice_constants , only : field_loc_center, field_type_scalar, field_type_vector use ice_blocks , only : block, get_block, nx_block, ny_block use ice_domain , only : nblocks, blocks_ice, halo_info, distrb_info @@ -21,10 +21,12 @@ module ice_import_export use ice_flux , only : fresh, fsalt, zlvl, uatm, vatm, potT, Tair, Qa use ice_flux , only : rhoa, swvdr, swvdf, swidr, swidf, flw, frain use ice_flux , only : fsnow, uocn, vocn, sst, ss_tltx, ss_tlty, frzmlt + use ice_flux , only : send_i2x_per_cat use ice_flux , only : sss, Tf, wind, fsw use ice_state , only : vice, vsno, aice, aicen_init, trcr - use ice_grid , only : tlon, tlat, tarea, tmask, anglet, hm, ocn_gridcell_frac + use ice_grid , only : tlon, tlat, tarea, tmask, anglet, hm use ice_grid , only : grid_type, t2ugrid_vector + use ice_mesh_mod , only : ocn_gridcell_frac use ice_boundary , only : ice_HaloUpdate use ice_fileunits , only : nu_diag, flush_fileunit use ice_communicate , only : my_task, master_task, MPI_COMM_ICE @@ -34,9 +36,10 @@ module ice_import_export use icepack_intfc , only : icepack_query_parameters, icepack_query_tracer_flags use icepack_intfc , only : icepack_liquidus_temperature use icepack_intfc , only : icepack_sea_freezing_temperature - use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf + use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf #ifdef CESMCOUPLED use shr_frz_mod , only : shr_frz_freezetemp + use shr_mpi_mod , only : shr_mpi_min, shr_mpi_max #endif implicit none @@ -54,20 +57,18 @@ module ice_import_export interface state_getfldptr module procedure state_getfldptr_1d module procedure state_getfldptr_2d - module procedure state_getfldptr_3d - module procedure state_getfldptr_4d end interface state_getfldptr private :: state_getfldptr interface state_getimport - module procedure state_getimport_4d_output - module procedure state_getimport_3d_output + module procedure state_getimport_4d + module procedure state_getimport_3d end interface state_getimport private :: state_getimport interface state_setexport - module procedure state_setexport_4d_input - module procedure state_setexport_3d_input + module procedure state_setexport_4d + module procedure state_setexport_3d end interface state_setexport private :: state_setexport @@ -79,12 +80,15 @@ module ice_import_export integer :: ungridded_ubound = 0 end type fld_list_type + ! 
area correction factors for fluxes sent to and received from the mediator
+  real(dbl_kind), allocatable :: mod2med_areacor(:) ! ratios of model areas to input mesh areas
+  real(dbl_kind), allocatable :: med2mod_areacor(:) ! ratios of input mesh areas to model areas
+
   integer , parameter :: fldsMax = 100
   integer :: fldsToIce_num = 0
   integer :: fldsFrIce_num = 0
   type (fld_list_type) :: fldsToIce(fldsMax)
   type (fld_list_type) :: fldsFrIce(fldsMax)
-  type(ESMF_GeomType_Flag) :: geomtype

   integer     , parameter :: io_dbug = 10 ! i/o debug messages
   character(*), parameter :: u_FILE_u = &
@@ -108,7 +112,6 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam
    character(char_len) :: stdname
    character(char_len) :: cvalue
    logical             :: flds_wiso        ! use case
-   logical             :: flds_i2o_per_cat ! .true. => select per ice thickness category
    logical             :: isPresent, isSet
    character(len=*), parameter :: subname='(ice_import_export:ice_advertise_fields)'
    !-------------------------------------------------------------------------------
@@ -116,21 +119,31 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam
    rc = ESMF_SUCCESS
    if (io_dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO)

-   ! Determine if the following attributes are sent by the driver and if so read them in
-   flds_wiso = .false.
-   call NUOPC_CompAttributeGet(gcomp, name='flds_wiso', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc)
+   ! Determine if ice sends multiple ice category info back to mediator
+   send_i2x_per_cat = .false.
+   call NUOPC_CompAttributeGet(gcomp, name='flds_i2o_per_cat', value=cvalue, &
+        isPresent=isPresent, isSet=isSet, rc=rc)
    if (ChkErr(rc,__LINE__,u_FILE_u)) return
    if (isPresent .and. isSet) then
-      read(cvalue,*) flds_wiso
-      call ESMF_LogWrite('flds_wiso = '// trim(cvalue), ESMF_LOGMSG_INFO)
+      read(cvalue,*) send_i2x_per_cat
+   end if
+   if (my_task == master_task) then
+      write(nu_diag,*)'send_i2x_per_cat = ',send_i2x_per_cat
+   end if
+   if (.not.send_i2x_per_cat) then
+      deallocate(fswthrun_ai)
    end if

-   flds_i2o_per_cat = .false.
-   call NUOPC_CompAttributeGet(gcomp, name='flds_i2o_per_cat', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc)
+   ! Determine if the following attributes are sent by the driver and if so read them in
+   flds_wiso = .false.
+   call NUOPC_CompAttributeGet(gcomp, name='flds_wiso', value=cvalue, &
+        isPresent=isPresent, isSet=isSet, rc=rc)
    if (ChkErr(rc,__LINE__,u_FILE_u)) return
    if (isPresent .and. isSet) then
-      read(cvalue,*) send_i2x_per_cat
-      call ESMF_LogWrite('flds_i2o_per_cat = '// trim(cvalue), ESMF_LOGMSG_INFO)
+      read(cvalue,*) flds_wiso
+   end if
+   if (my_task == master_task) then
+      write(nu_diag,*)'flds_wiso = ',flds_wiso
    end if

    !-----------------
@@ -262,21 +275,35 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam

   end subroutine ice_advertise_fields

-!==============================================================================
-
-  subroutine ice_realize_fields(gcomp, mesh, grid, flds_scalar_name, flds_scalar_num, rc)
+  !==============================================================================
+  subroutine ice_realize_fields(gcomp, mesh, flds_scalar_name, flds_scalar_num, rc)

    ! 
input/output variables - type(ESMF_GridComp) :: gcomp - type(ESMF_Mesh) , optional , intent(in) :: mesh - type(ESMF_Grid) , optional , intent(in) :: grid - character(len=*) , intent(in) :: flds_scalar_name - integer , intent(in) :: flds_scalar_num - integer , intent(out) :: rc + type(ESMF_GridComp) :: gcomp + type(ESMF_Mesh) , intent(in) :: mesh + character(len=*) , intent(in) :: flds_scalar_name + integer , intent(in) :: flds_scalar_num + integer , intent(out) :: rc ! local variables - type(ESMF_State) :: importState - type(ESMF_State) :: exportState + type(ESMF_State) :: importState + type(ESMF_State) :: exportState + type(ESMF_Field) :: lfield + integer :: numOwnedElements + integer :: i, j, iblk, n + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + type(block) :: this_block ! block information for current block + real(dbl_kind), allocatable :: mesh_areas(:) + real(dbl_kind), allocatable :: model_areas(:) + real(dbl_kind), pointer :: dataptr(:) + real(dbl_kind) :: max_mod2med_areacor + real(dbl_kind) :: max_med2mod_areacor + real(dbl_kind) :: min_mod2med_areacor + real(dbl_kind) :: min_med2mod_areacor + real(dbl_kind) :: max_mod2med_areacor_glob + real(dbl_kind) :: max_med2mod_areacor_glob + real(dbl_kind) :: min_mod2med_areacor_glob + real(dbl_kind) :: min_med2mod_areacor_glob character(len=*), parameter :: subname='(ice_import_export:realize_fields)' !--------------------------------------------------------------------------- @@ -285,60 +312,86 @@ subroutine ice_realize_fields(gcomp, mesh, grid, flds_scalar_name, flds_scalar_n call NUOPC_ModelGet(gcomp, importState=importState, exportState=exportState, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (present(mesh)) then - - geomtype = ESMF_GEOMTYPE_MESH - - call fldlist_realize( & - state=ExportState, & - fldList=fldsFrIce, & - numflds=fldsFrIce_num, & - flds_scalar_name=flds_scalar_name, & - flds_scalar_num=flds_scalar_num, & - tag=subname//':CICE_Export',& - mesh=mesh, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - - call fldlist_realize( & - state=importState, & - fldList=fldsToIce, & - numflds=fldsToIce_num, & - flds_scalar_name=flds_scalar_name, & - flds_scalar_num=flds_scalar_num, & - tag=subname//':CICE_Import',& - mesh=mesh, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - - else if (present(grid)) then - - geomtype = ESMF_GEOMTYPE_GRID + call fldlist_realize( & + state=ExportState, & + fldList=fldsFrIce, & + numflds=fldsFrIce_num, & + flds_scalar_name=flds_scalar_name, & + flds_scalar_num=flds_scalar_num, & + tag=subname//':CICE_Export',& + mesh=mesh, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return - call fldlist_realize( & - state=ExportState, & - fldList=fldsFrIce, & - numflds=fldsFrIce_num, & - flds_scalar_name=flds_scalar_name, & - flds_scalar_num=flds_scalar_num, & - tag=subname//':CICE_Export',& - grid=grid, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + call fldlist_realize( & + state=importState, & + fldList=fldsToIce, & + numflds=fldsToIce_num, & + flds_scalar_name=flds_scalar_name, & + flds_scalar_num=flds_scalar_num, & + tag=subname//':CICE_Import',& + mesh=mesh, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return - call fldlist_realize( & - state=importState, & - fldList=fldsToIce, & - numflds=fldsToIce_num, & - flds_scalar_name=flds_scalar_name, & - flds_scalar_num=flds_scalar_num, & - tag=subname//':CICE_Import',& - grid=grid, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return +#ifdef CESMCOUPLED + ! Get mesh areas from second field - using second field since the + ! 
first field is the scalar field + call ESMF_MeshGet(mesh, numOwnedElements=numOwnedElements, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_StateGet(exportState, itemName=trim(fldsFrIce(2)%stdname), field=lfield, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldRegridGetArea(lfield, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldGet(lfield, farrayPtr=dataptr, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + allocate(mesh_areas(numOwnedElements)) + mesh_areas(:) = dataptr(:) + + ! Determine flux correction factors (module variables) + allocate(model_areas(numOwnedElements)) + allocate(mod2med_areacor(numOwnedElements)) + allocate(med2mod_areacor(numOwnedElements)) + mod2med_areacor(:) = 1._dbl_kind + med2mod_areacor(:) = 1._dbl_kind + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + model_areas(n) = tarea(i,j,iblk)/(radius*radius) + mod2med_areacor(n) = model_areas(n) / mesh_areas(n) + med2mod_areacor(n) = mesh_areas(n) / model_areas(n) + enddo + enddo + enddo + deallocate(model_areas) + deallocate(mesh_areas) + + min_mod2med_areacor = minval(mod2med_areacor) + max_mod2med_areacor = maxval(mod2med_areacor) + min_med2mod_areacor = minval(med2mod_areacor) + max_med2mod_areacor = maxval(med2mod_areacor) + call shr_mpi_max(max_mod2med_areacor, max_mod2med_areacor_glob, mpi_comm_ice) + call shr_mpi_min(min_mod2med_areacor, min_mod2med_areacor_glob, mpi_comm_ice) + call shr_mpi_max(max_med2mod_areacor, max_med2mod_areacor_glob, mpi_comm_ice) + call shr_mpi_min(min_med2mod_areacor, min_med2mod_areacor_glob, mpi_comm_ice) + + if (my_task == master_task) then + write(nu_diag,'(2A,2g23.15,A )') trim(subname),' : min_mod2med_areacor, max_mod2med_areacor ',& + min_mod2med_areacor_glob, max_mod2med_areacor_glob, 'CICE6' + write(nu_diag,'(2A,2g23.15,A )') trim(subname),' : min_med2mod_areacor, max_med2mod_areacor ',& + min_med2mod_areacor_glob, max_med2mod_areacor_glob, 'CICE6' end if +#endif end subroutine ice_realize_fields !============================================================================== - subroutine ice_import( importState, rc ) ! input/output variables @@ -355,7 +408,11 @@ subroutine ice_import( importState, rc ) real (kind=dbl_kind) :: workx, worky real (kind=dbl_kind) :: MIN_RAIN_TEMP, MAX_SNOW_TEMP real (kind=dbl_kind) :: Tffresh - real (kind=dbl_kind) :: inst_pres_height_lowest + real (kind=dbl_kind) :: inst_pres_height_lowest + real (kind=dbl_kind), pointer :: dataptr2d(:,:) + real (kind=dbl_kind), pointer :: dataptr1d(:) + real (kind=dbl_kind), pointer :: dataptr2d_dstwet(:,:) + real (kind=dbl_kind), pointer :: dataptr2d_dstdry(:,:) character(len=char_len) :: tfrz_option integer(int_kind) :: ktherm character(len=*), parameter :: subname = 'ice_import' @@ -365,17 +422,20 @@ subroutine ice_import( importState, rc ) call icepack_query_parameters(Tffresh_out=Tffresh) call icepack_query_parameters(tfrz_option_out=tfrz_option) call icepack_query_parameters(ktherm_out=ktherm) - if (io_dbug > 5) then - write(msgString,'(A,i8)')trim(subname)//' tfrz_option = ' & - // trim(tfrz_option)//', ktherm = ',ktherm - call ESMF_LogWrite(trim(msgString), ESMF_LOGMSG_INFO) + + if (io_dbug > 5) then + write(msgString,'(A,i8)')trim(subname)//' tfrz_option = ' & + // trim(tfrz_option)//', ktherm = ',ktherm + call ESMF_LogWrite(trim(msgString), ESMF_LOGMSG_INFO) end if -! 
call icepack_query_parameters(tfrz_option_out=tfrz_option, & -! modal_aero_out=modal_aero, z_tracers_out=z_tracers, skl_bgc_out=skl_bgc, & -! Tffresh_out=Tffresh) -! call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_iage_out=tr_iage, & -! tr_FY_out=tr_FY, tr_pond_out=tr_pond, tr_lvl_out=tr_lvl, & -! tr_zaero_out=tr_zaero, tr_bgc_Nit_out=tr_bgc_Nit) + + ! call icepack_query_parameters(tfrz_option_out=tfrz_option, & + ! modal_aero_out=modal_aero, z_tracers_out=z_tracers, skl_bgc_out=skl_bgc, & + ! Tffresh_out=Tffresh) + ! call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_iage_out=tr_iage, & + ! tr_FY_out=tr_FY, tr_pond_out=tr_pond, tr_lvl_out=tr_lvl, & + ! tr_zaero_out=tr_zaero, tr_bgc_Nit_out=tr_bgc_Nit) + call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=u_FILE_u, line=__LINE__) @@ -429,30 +489,38 @@ subroutine ice_import( importState, rc ) ! import ocn/ice fluxes - call state_getimport(importState, 'freezing_melting_potential', output=aflds, index=9, rc=rc) + call state_getimport(importState, 'freezing_melting_potential', output=aflds, index=9, & + areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! import atm fluxes - call state_getimport(importState, 'mean_down_sw_vis_dir_flx', output=aflds, index=10, rc=rc) + call state_getimport(importState, 'mean_down_sw_vis_dir_flx', output=aflds, index=10, & + areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_down_sw_ir_dir_flx', output=aflds, index=11, rc=rc) + call state_getimport(importState, 'mean_down_sw_ir_dir_flx', output=aflds, index=11, & + areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_down_sw_vis_dif_flx', output=aflds, index=12, rc=rc) + call state_getimport(importState, 'mean_down_sw_vis_dif_flx', output=aflds, index=12, & + areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_down_sw_ir_dif_flx', output=aflds, index=13, rc=rc) + call state_getimport(importState, 'mean_down_sw_ir_dif_flx', output=aflds, index=13, & + areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_down_lw_flx', output=aflds, index=14, rc=rc) + call state_getimport(importState, 'mean_down_lw_flx', output=aflds, index=14, & + areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_prec_rate', output=aflds, index=15, rc=rc) + call state_getimport(importState, 'mean_prec_rate', output=aflds, index=15, & + areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_fprec_rate', output=aflds, index=16, rc=rc) + call state_getimport(importState, 'mean_fprec_rate', output=aflds, index=16, & + areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! perform a halo update @@ -488,7 +556,7 @@ subroutine ice_import( importState, rc ) end do !$OMP END PARALLEL DO - if ( State_fldChk(importState, 'Sa_ptem') .and. State_fldchk(importState,'air_density_height_lowest')) then + if ( State_fldChk(importState, 'Sa_ptem') .and. 
State_fldchk(importState,'air_density_height_lowest')) then
!$OMP PARALLEL DO PRIVATE(iblk,i,j)
do iblk = 1, nblocks
do j = 1,ny_block
@@ -518,7 +586,7 @@
endif
end do !i
end do !j
- end do !iblk
+ end do !iblk
!$OMP END PARALLEL DO
end if
@@ -577,34 +645,45 @@
! bcphodry ungridded_index=2
! bcphiwet ungridded_index=3
- ! bcphodry
- call state_getimport(importState, 'Faxa_bcph', output=faero_atm, index=1, ungridded_index=2, rc=rc)
- if (ChkErr(rc,__LINE__,u_FILE_u)) return
- ! bcphidry + bcphiwet
- call state_getimport(importState, 'Faxa_bcph', output=faero_atm, index=2, ungridded_index=1, rc=rc)
- if (ChkErr(rc,__LINE__,u_FILE_u)) return
- call state_getimport(importState, 'Faxa_bcph', output=faero_atm, index=2, do_sum=.true., ungridded_index=3, rc=rc)
+ call state_getfldptr(importState, 'Faxa_bcph', fldptr=dataPtr2d, rc=rc)
if (ChkErr(rc,__LINE__,u_FILE_u)) return
+ n = 0
+ do iblk = 1, nblocks
+    this_block = get_block(blocks_ice(iblk),iblk)
+    ilo = this_block%ilo; ihi = this_block%ihi
+    jlo = this_block%jlo; jhi = this_block%jhi
+    do j = jlo, jhi
+       do i = ilo, ihi
+          n = n+1
+          faero_atm(i,j,1,iblk) = dataPtr2d(2,n) * med2mod_areacor(n) ! bcphodry
+          faero_atm(i,j,2,iblk) = (dataptr2d(1,n) + dataPtr2d(3,n)) * med2mod_areacor(n) ! bcphidry + bcphiwet
+       end do
+    end do
+ end do
end if
! Sum over all dry and wet dust fluxes from the atmosphere
if (State_FldChk(importState, 'Faxa_dstwet') .and. State_FldChk(importState, 'Faxa_dstdry')) then
- call state_getimport(importState, 'Faxa_dstwet', output=faero_atm, index=3, ungridded_index=1, rc=rc)
- if (ChkErr(rc,__LINE__,u_FILE_u)) return
- call state_getimport(importState, 'Faxa_dstdry', output=faero_atm, index=3, do_sum=.true., ungridded_index=1, rc=rc)
- if (ChkErr(rc,__LINE__,u_FILE_u)) return
- call state_getimport(importState, 'Faxa_dstwet', output=faero_atm, index=3, do_sum=.true., ungridded_index=2, rc=rc)
+ call state_getfldptr(importState, 'Faxa_dstwet', fldptr=dataPtr2d_dstwet, rc=rc)
if (ChkErr(rc,__LINE__,u_FILE_u)) return
- call state_getimport(importState, 'Faxa_dstdry', output=faero_atm, index=3, do_sum=.true., ungridded_index=2, rc=rc)
- if (ChkErr(rc,__LINE__,u_FILE_u)) return
- call state_getimport(importState, 'Faxa_dstwet', output=faero_atm, index=3, do_sum=.true., ungridded_index=3, rc=rc)
- if (ChkErr(rc,__LINE__,u_FILE_u)) return
- call state_getimport(importState, 'Faxa_dstdry', output=faero_atm, index=3, do_sum=.true., ungridded_index=3, rc=rc)
- if (ChkErr(rc,__LINE__,u_FILE_u)) return
- call state_getimport(importState, 'Faxa_dstwet', output=faero_atm, index=3, do_sum=.true., ungridded_index=4, rc=rc)
- if (ChkErr(rc,__LINE__,u_FILE_u)) return
- call state_getimport(importState, 'Faxa_dstdry', output=faero_atm, index=3, do_sum=.true., ungridded_index=4, rc=rc)
+ call state_getfldptr(importState, 'Faxa_dstdry', fldptr=dataPtr2d_dstdry, rc=rc)
if (ChkErr(rc,__LINE__,u_FILE_u)) return
+ n = 0
+ do iblk = 1, nblocks
+    this_block = get_block(blocks_ice(iblk),iblk)
+    ilo = this_block%ilo; ihi = this_block%ihi
+    jlo = this_block%jlo; jhi = this_block%jhi
+    do j = jlo, jhi
+       do i = ilo, ihi
+          n = n+1
+          faero_atm(i,j,3,iblk) = dataPtr2d_dstwet(1,n) + dataptr2d_dstdry(1,n) + &
+                                  dataPtr2d_dstwet(2,n) + dataptr2d_dstdry(2,n) + &
+                                  dataPtr2d_dstwet(3,n) + dataptr2d_dstdry(3,n) + &
+                                  dataPtr2d_dstwet(4,n) + dataptr2d_dstdry(4,n)
+          faero_atm(i,j,3,iblk) = faero_atm(i,j,3,iblk) * med2mod_areacor(n)
+       end do
+    end do
+ end do
end if
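+
+ ! Note on the med2mod_areacor factors applied to the flux imports above
+ ! (a short summary of what ice_realize_fields sets up, not a new code
+ ! path): the mediator maps fluxes conservatively with respect to the
+ ! ESMF mesh element areas, while CICE integrates fluxes over its own
+ ! cell areas (tarea). For the mesh element n that owns cell (i,j,iblk),
+ !
+ !    med2mod_areacor(n) = mesh_area(n) / (tarea(i,j,iblk)/(radius*radius))
+ !
+ ! so after scaling, flux_model * model_area == flux_mediator * mesh_area
+ ! and the globally integrated flux is preserved. State fields (e.g. sst,
+ ! Tair) carry no area weighting and are imported without the factor.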
!------------------------------------------------------- @@ -623,12 +702,15 @@ subroutine ice_import( importState, rc ) call state_getimport(importState, 'inst_spec_humid_height_lowest_wiso', output=Qa_iso, index=3, ungridded_index=2, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=1, ungridded_index=3, rc=rc) -! if (ChkErr(rc,__LINE__,u_FILE_u)) return -! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=2, ungridded_index=1, rc=rc) -! if (ChkErr(rc,__LINE__,u_FILE_u)) return -! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=3, ungridded_index=2, rc=rc) -! if (ChkErr(rc,__LINE__,u_FILE_u)) return + ! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=1, ungridded_index=3, & + ! areacor=med2mod_areacor, rc=rc) + ! if (ChkErr(rc,__LINE__,u_FILE_u)) return + ! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=2, ungridded_index=1, & + ! areacor=med2mod_areacor, rc=rc) + ! if (ChkErr(rc,__LINE__,u_FILE_u)) return + ! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=3, ungridded_index=2, & + ! areacor=med2mod_areacor, rc=rc) + ! if (ChkErr(rc,__LINE__,u_FILE_u)) return call state_getimport(importState, 'mean_fprec_rate_wiso', output=fiso_atm, index=1, ungridded_index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -637,11 +719,14 @@ subroutine ice_import( importState, rc ) call state_getimport(importState, 'mean_fprec_rate_wiso', output=fiso_atm, index=3, ungridded_index=2, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'So_roce_wiso', output=HDO_ocn , ungridded_index=3, rc=rc) + call state_getimport(importState, 'So_roce_wiso', output=HDO_ocn , ungridded_index=3, & + areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'So_roce_wiso', output=H2_16O_ocn, ungridded_index=1, rc=rc) + call state_getimport(importState, 'So_roce_wiso', output=H2_16O_ocn, ungridded_index=1, & + areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'So_roce_wiso', output=H2_18O_ocn, ungridded_index=2, rc=rc) + call state_getimport(importState, 'So_roce_wiso', output=H2_18O_ocn, ungridded_index=2, & + areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if @@ -690,9 +775,11 @@ subroutine ice_import( importState, rc ) #ifdef CESMCOUPLED ! Use shr_frz_mod for this - Tf(:,:,iblk) = shr_frz_freezetemp(sss(:,:,iblk)) -#else - !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky) + do iblk = 1, nblocks + Tf(:,:,iblk) = shr_frz_freezetemp(sss(:,:,iblk)) + end do +#else + !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1,ny_block do i = 1,nx_block @@ -747,7 +834,6 @@ subroutine ice_import( importState, rc ) end subroutine ice_import !=============================================================================== - subroutine ice_export( exportState, rc ) ! input/output variables @@ -770,8 +856,10 @@ subroutine ice_export( exportState, rc ) real (kind=dbl_kind) :: tauxo (nx_block,ny_block,max_blocks) ! ice/ocean stress real (kind=dbl_kind) :: tauyo (nx_block,ny_block,max_blocks) ! ice/ocean stress real (kind=dbl_kind) :: ailohi(nx_block,ny_block,max_blocks) ! 
fractional ice area - real (kind=dbl_kind), allocatable :: tempfld(:,:,:) real (kind=dbl_kind) :: Tffresh + real (kind=dbl_kind), allocatable :: tempfld(:,:,:) + real (kind=dbl_kind), pointer :: dataptr_ifrac_n(:,:) + real (kind=dbl_kind), pointer :: dataptr_swpen_n(:,:) character(len=*),parameter :: subname = 'ice_export' !----------------------------------------------------- @@ -779,12 +867,13 @@ subroutine ice_export( exportState, rc ) if (io_dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) call icepack_query_parameters(Tffresh_out=Tffresh) -! call icepack_query_parameters(tfrz_option_out=tfrz_option, & -! modal_aero_out=modal_aero, z_tracers_out=z_tracers, skl_bgc_out=skl_bgc, & -! Tffresh_out=Tffresh) -! call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_iage_out=tr_iage, & -! tr_FY_out=tr_FY, tr_pond_out=tr_pond, tr_lvl_out=tr_lvl, & -! tr_zaero_out=tr_zaero, tr_bgc_Nit_out=tr_bgc_Nit) + ! call icepack_query_parameters(tfrz_option_out=tfrz_option, & + ! modal_aero_out=modal_aero, z_tracers_out=z_tracers, skl_bgc_out=skl_bgc, & + ! Tffresh_out=Tffresh) + ! call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_iage_out=tr_iage, & + ! tr_FY_out=tr_FY, tr_pond_out=tr_pond, tr_lvl_out=tr_lvl, & + ! tr_zaero_out=tr_zaero, tr_bgc_Nit_out=tr_bgc_Nit) + call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=u_FILE_u, line=__LINE__) @@ -880,7 +969,7 @@ subroutine ice_export( exportState, rc ) call state_setexport(exportState, 'ice_fraction', input=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (trim(grid_type) == 'latlon') then + if (trim(grid_type) == 'setmask') then call state_setexport(exportState, 'ice_mask', input=ocn_gridcell_frac, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return else @@ -967,31 +1056,38 @@ subroutine ice_export( exportState, rc ) ! ------ ! Zonal air/ice stress - call state_setexport(exportState, 'stress_on_air_ice_zonal' , input=tauxa, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'stress_on_air_ice_zonal' , input=tauxa, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Meridional air/ice stress - call state_setexport(exportState, 'stress_on_air_ice_merid' , input=tauya, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'stress_on_air_ice_merid' , input=tauya, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Latent heat flux (atm into ice) - call state_setexport(exportState, 'mean_laten_heat_flx_atm_into_ice' , input=flat, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_laten_heat_flx_atm_into_ice' , input=flat, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Sensible heat flux (atm into ice) - call state_setexport(exportState, 'mean_sensi_heat_flx_atm_into_ice' , input=fsens, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_sensi_heat_flx_atm_into_ice' , input=fsens, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! 
longwave outgoing (upward), average over ice fraction only - call state_setexport(exportState, 'mean_up_lw_flx_ice' , input=flwout, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_up_lw_flx_ice' , input=flwout, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Evaporative water flux (kg/m^2/s) - call state_setexport(exportState, 'mean_evap_rate_atm_into_ice' , input=evap, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_evap_rate_atm_into_ice' , input=evap, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Shortwave flux absorbed in ice and ocean (W/m^2) - call state_setexport(exportState, 'Faii_swnet' , input=fswabs, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'Faii_swnet' , input=fswabs, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! ------ @@ -999,43 +1095,53 @@ subroutine ice_export( exportState, rc ) ! ------ ! flux of shortwave through ice to ocean - call state_setexport(exportState, 'mean_sw_pen_to_ocn' , input=fswthru, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn' , input=fswthru, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! flux of vis dir shortwave through ice to ocean - call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dir_flx' , input=fswthru_vdr, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dir_flx' , input=fswthru_vdr, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! flux of vis dif shortwave through ice to ocean - call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dif_flx' , input=fswthru_vdf, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dif_flx' , input=fswthru_vdf, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! flux of ir dir shortwave through ice to ocean - call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dir_flx' , input=fswthru_idr, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dir_flx' , input=fswthru_idr, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! flux of ir dif shortwave through ice to ocean - call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dif_flx' , input=fswthru_idf, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dif_flx' , input=fswthru_idf, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! heat exchange with ocean - call state_setexport(exportState, 'net_heat_flx_to_ocn' , input=fhocn, lmask=tmask, ifrac=ailohi, rc=rc) + ! flux of heat exchange with ocean + call state_setexport(exportState, 'net_heat_flx_to_ocn' , input=fhocn, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! fresh water to ocean (h2o flux from melting) - call state_setexport(exportState, 'mean_fresh_water_to_ocean_rate' , input=fresh, lmask=tmask, ifrac=ailohi, rc=rc) + ! 
flux of fresh water to ocean (h2o flux from melting)
+ call state_setexport(exportState, 'mean_fresh_water_to_ocean_rate' , input=fresh, lmask=tmask, ifrac=ailohi, &
+      areacor=mod2med_areacor, rc=rc)
if (ChkErr(rc,__LINE__,u_FILE_u)) return
- ! salt to ocean (salt flux from melting)
- call state_setexport(exportState, 'mean_salt_rate' , input=fsalt, lmask=tmask, ifrac=ailohi, rc=rc)
+ ! flux of salt to ocean (salt flux from melting)
+ call state_setexport(exportState, 'mean_salt_rate' , input=fsalt, lmask=tmask, ifrac=ailohi, &
+      areacor=mod2med_areacor, rc=rc)
if (ChkErr(rc,__LINE__,u_FILE_u)) return
! stress n i/o zonal
- call state_setexport(exportState, 'stress_on_ocn_ice_zonal' , input=tauxo, lmask=tmask, ifrac=ailohi, rc=rc)
+ call state_setexport(exportState, 'stress_on_ocn_ice_zonal' , input=tauxo, lmask=tmask, ifrac=ailohi, &
+      areacor=mod2med_areacor, rc=rc)
if (ChkErr(rc,__LINE__,u_FILE_u)) return
! stress n i/o meridional
- call state_setexport(exportState, 'stress_on_ocn_ice_merid' , input=tauyo, lmask=tmask, ifrac=ailohi, rc=rc)
+ call state_setexport(exportState, 'stress_on_ocn_ice_merid' , input=tauyo, lmask=tmask, ifrac=ailohi, &
+      areacor=mod2med_areacor, rc=rc)
if (ChkErr(rc,__LINE__,u_FILE_u)) return
! ------
@@ -1044,19 +1150,22 @@
! hydrophobic bc
if (State_FldChk(exportState, 'Fioi_bcpho')) then
-    call state_setexport(exportState, 'Fioi_bcpho' , input=faero_ocn, index=1, lmask=tmask, ifrac=ailohi, rc=rc)
+    call state_setexport(exportState, 'Fioi_bcpho' , input=faero_ocn, index=1, lmask=tmask, ifrac=ailohi, &
+         areacor=mod2med_areacor, rc=rc)
if (ChkErr(rc,__LINE__,u_FILE_u)) return
end if
! hydrophilic bc
if (State_FldChk(exportState, 'Fioi_bcphi')) then
-    call state_setexport(exportState, 'Fioi_bcphi' , input=faero_ocn, index=2, lmask=tmask, ifrac=ailohi, rc=rc)
+    call state_setexport(exportState, 'Fioi_bcphi' , input=faero_ocn, index=2, lmask=tmask, ifrac=ailohi, &
+         areacor=mod2med_areacor, rc=rc)
if (ChkErr(rc,__LINE__,u_FILE_u)) return
end if
! dust
if (State_FldChk(exportState, 'Fioi_flxdst')) then
-    call state_setexport(exportState, 'Fioi_flxdst' , input=faero_ocn, index=3, lmask=tmask, ifrac=ailohi, rc=rc)
+    call state_setexport(exportState, 'Fioi_flxdst' , input=faero_ocn, index=3, lmask=tmask, ifrac=ailohi, &
+         areacor=mod2med_areacor, rc=rc)
if (ChkErr(rc,__LINE__,u_FILE_u)) return
end if
@@ -1070,13 +1179,13 @@
! HDO => ungridded_index=3
call state_setexport(exportState, 'mean_fresh_water_to_ocean_rate_wiso' , input=fiso_ocn, index=1, &
-    lmask=tmask, ifrac=ailohi, ungridded_index=3, rc=rc)
+    lmask=tmask, ifrac=ailohi, ungridded_index=3, areacor=mod2med_areacor, rc=rc)
if (ChkErr(rc,__LINE__,u_FILE_u)) return
call state_setexport(exportState, 'mean_fresh_water_to_ocean_rate_wiso' , input=fiso_ocn, index=2, &
-    lmask=tmask, ifrac=ailohi, ungridded_index=1, rc=rc)
+    lmask=tmask, ifrac=ailohi, ungridded_index=1, areacor=mod2med_areacor, rc=rc)
if (ChkErr(rc,__LINE__,u_FILE_u)) return
call state_setexport(exportState, 'mean_fresh_water_to_ocean_rate_wiso' , input=fiso_ocn, index=3, &
-    lmask=tmask, ifrac=ailohi, ungridded_index=2, rc=rc)
+    lmask=tmask, ifrac=ailohi, ungridded_index=2, areacor=mod2med_areacor, rc=rc)
if (ChkErr(rc,__LINE__,u_FILE_u)) return
end if
@@ -1087,16 +1196,16 @@
if (State_FldChk(exportState, 'mean_evap_rate_atm_into_ice_wiso')) then
!
Isotope evap to atm call state_setexport(exportState, 'mean_evap_rate_atm_into_ice_wiso' , input=fiso_evap, index=1, & - lmask=tmask, ifrac=ailohi, ungridded_index=3, rc=rc) + lmask=tmask, ifrac=ailohi, ungridded_index=3, areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return call state_setexport(exportState, 'mean_evap_rate_atm_into_ice_wiso' , input=fiso_evap, index=2, & - lmask=tmask, ifrac=ailohi, ungridded_index=1, rc=rc) + lmask=tmask, ifrac=ailohi, ungridded_index=1, areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return call state_setexport(exportState, 'mean_evap_rate_atm_into_ice_wiso' , input=fiso_evap, index=3, & - lmask=tmask, ifrac=ailohi, ungridded_index=2, rc=rc) + lmask=tmask, ifrac=ailohi, ungridded_index=2, areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! Isotope evap to atm + ! qref to atm call state_setexport(exportState, 'Si_qref_wiso' , input=Qref_iso, index=1, & lmask=tmask, ifrac=ailohi, ungridded_index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -1124,7 +1233,7 @@ subroutine ice_export( exportState, rc ) ! Note: no need zero out pass-through fields over land for benefit of x2oacc fields in cpl hist files since ! the export state has been zeroed out at the beginning call state_setexport(exportState, 'mean_sw_pen_to_ocn_ifrac_n', input=fswthrun_ai, index=n, & - lmask=tmask, ifrac=ailohi, ungridded_index=n, rc=rc) + lmask=tmask, ifrac=ailohi, ungridded_index=n, areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end do end if @@ -1132,7 +1241,6 @@ subroutine ice_export( exportState, rc ) end subroutine ice_export !=============================================================================== - subroutine fldlist_add(num, fldlist, stdname, ungridded_lbound, ungridded_ubound) ! input/output variables @@ -1162,7 +1270,6 @@ subroutine fldlist_add(num, fldlist, stdname, ungridded_lbound, ungridded_ubound end subroutine fldlist_add !=============================================================================== - subroutine fldlist_realize(state, fldList, numflds, flds_scalar_name, flds_scalar_num, mesh, grid, tag, rc) use NUOPC, only : NUOPC_IsConnected, NUOPC_Realize @@ -1187,6 +1294,7 @@ subroutine fldlist_realize(state, fldList, numflds, flds_scalar_name, flds_scala integer :: n type(ESMF_Field) :: field character(len=80) :: stdname + character(ESMF_MAXSTR) :: msg character(len=*),parameter :: subname='(ice_import_export:fld_list_realize)' ! ---------------------------------------------- @@ -1203,8 +1311,6 @@ subroutine fldlist_realize(state, fldList, numflds, flds_scalar_name, flds_scala if (ChkErr(rc,__LINE__,u_FILE_u)) return else if (present(mesh)) then - call ESMF_LogWrite(trim(subname)//trim(tag)//" Field = "//trim(stdname)//" is connected using mesh", & - ESMF_LOGMSG_INFO) ! Create the field if (fldlist(n)%ungridded_lbound > 0 .and. 
fldlist(n)%ungridded_ubound > 0) then
field = ESMF_FieldCreate(mesh, ESMF_TYPEKIND_R8, name=stdname, meshloc=ESMF_MESHLOC_ELEMENT, &
@@ -1212,9 +1318,16 @@
ungriddedUbound=(/fldlist(n)%ungridded_ubound/), &
gridToFieldMap=(/2/), rc=rc)
if (ChkErr(rc,__LINE__,u_FILE_u)) return
+ write(msg, '(a,i4,2x,i4)') trim(subname)//trim(tag)//" Field = "//trim(stdname)//&
+      " is connected using mesh with lbound, ubound = ",&
+      fldlist(n)%ungridded_lbound,fldlist(n)%ungridded_ubound
+ call ESMF_LogWrite(msg, ESMF_LOGMSG_INFO)
else
field = ESMF_FieldCreate(mesh, ESMF_TYPEKIND_R8, name=stdname, meshloc=ESMF_MESHLOC_ELEMENT, rc=rc)
if (ChkErr(rc,__LINE__,u_FILE_u)) return
+ write(msg, '(a)') trim(subname)//trim(tag)//" Field = "//trim(stdname)//&
+      " is connected using mesh without ungridded dimension"
+ call ESMF_LogWrite(msg, ESMF_LOGMSG_INFO)
end if
else if (present(grid)) then
call ESMF_LogWrite(trim(subname)//trim(tag)//" Field = "//trim(stdname)//" is connected using grid", &
@@ -1287,7 +1400,6 @@ end subroutine SetScalarField
end subroutine fldlist_realize
!===============================================================================
-
logical function State_FldChk(State, fldname)
! ----------------------------------------------
! Determine if field is in state
@@ -1302,27 +1414,25 @@ logical function State_FldChk(State, fldname)
! ----------------------------------------------
call ESMF_StateGet(State, trim(fldname), itemType)
-
State_FldChk = (itemType /= ESMF_STATEITEM_NOTFOUND)
end function State_FldChk
!===============================================================================
-
- subroutine state_getimport_4d_output(state, fldname, output, index, do_sum, ungridded_index, rc)
+ subroutine state_getimport_4d(state, fldname, output, index, ungridded_index, areacor, rc)
! ----------------------------------------------
! Map import state field to output array
! ----------------------------------------------
! input/output variables
- type(ESMF_State)     , intent(in)    :: state
- character(len=*)     , intent(in)    :: fldname
- real (kind=dbl_kind) , intent(inout) :: output(:,:,:,:)
- integer              , intent(in)    :: index
- logical, optional    , intent(in)    :: do_sum
- integer, optional    , intent(in)    :: ungridded_index
- integer              , intent(out)   :: rc
+ type(ESMF_State)              , intent(in)    :: state
+ character(len=*)              , intent(in)    :: fldname
+ real (kind=dbl_kind)          , intent(inout) :: output(:,:,:,:)
+ integer                       , intent(in)    :: index
+ integer, optional             , intent(in)    :: ungridded_index
+ real(kind=dbl_kind), optional , intent(in)    :: areacor(:)
+ integer                       , intent(out)   :: rc
! local variables
type(block) :: this_block ! block information for current block
@@ -1330,9 +1440,7 @@
integer :: i, j, iblk, n, i1, j1 ! incides
real(kind=dbl_kind), pointer :: dataPtr1d(:) ! mesh
real(kind=dbl_kind), pointer :: dataPtr2d(:,:) ! mesh
- real(kind=dbl_kind), pointer :: dataPtr3d(:,:,:) ! grid
- real(kind=dbl_kind), pointer :: dataPtr4d(:,:,:,:) ! grid
- character(len=*), parameter :: subname='(ice_import_export:state_getimport)'
+ character(len=*), parameter :: subname='(ice_import_export:state_getimport_4d)'
! ----------------------------------------------
rc = ESMF_SUCCESS
@@ -1340,103 +1448,65 @@ subroutine state_getimport_4d_output(state, fldname, output, index, do_sum, ungr
! check that fieldname exists
if (.not.
State_FldChk(state, trim(fldname))) return - if (geomtype == ESMF_GEOMTYPE_MESH) then - - ! get field pointer - if (present(ungridded_index)) then - call state_getfldptr(state, trim(fldname), dataPtr2d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - else - call state_getfldptr(state, trim(fldname), dataPtr1d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - end if + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataPtr2d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataPtr1d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if - ! set values of output array - n=0 + ! set values of output array + n=0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + if (present(ungridded_index)) then + output(i,j,index,iblk) = dataPtr2d(ungridded_index,n) + else + output(i,j,index,iblk) = dataPtr1d(n) + end if + end do + end do + end do + if (present(areacor)) then + n = 0 do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi + ilo = this_block%ilo; ihi = this_block%ihi + jlo = this_block%jlo; jhi = this_block%jhi do j = jlo, jhi do i = ilo, ihi - n = n+1 - if (present(do_sum)) then ! do sum - if (present(ungridded_index)) then - output(i,j,index,iblk) = output(i,j,index, iblk) + dataPtr2d(ungridded_index,n) - else - output(i,j,index,iblk) = output(i,j,index, iblk) + dataPtr1d(n) - end if - else ! do not do sum - if (present(ungridded_index)) then - output(i,j,index,iblk) = dataPtr2d(ungridded_index,n) - else - output(i,j,index,iblk) = dataPtr1d(n) - end if - end if + n = n + 1 + output(i,j,index,iblk) = output(i,j,index,iblk) * areacor(n) end do end do end do - - else if (geomtype == ESMF_GEOMTYPE_GRID) then - - ! get field pointer - if (present(ungridded_index)) then - call state_getfldptr(state, trim(fldname), dataptr4d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - else - call state_getfldptr(state, trim(fldname), dataptr3d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - end if - - ! set values of output array - do iblk = 1,nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - do j = jlo,jhi - do i = ilo,ihi - i1 = i - ilo + 1 - j1 = j - jlo + 1 - if (present(do_sum)) then - if (present(ungridded_index)) then - output(i,j,index,iblk) = output(i,j,index,iblk) + dataPtr4d(i1,j1,iblk,ungridded_index) - else - output(i,j,index,iblk) = output(i,j,index,iblk) + dataPtr3d(i1,j1,iblk) - end if - else - if (present(ungridded_index)) then - output(i,j,index,iblk) = dataPtr4d(i1,j1,iblk,ungridded_index) - else - output(i,j,index,iblk) = dataPtr3d(i1,j1,iblk) - end if - end if - end do - end do - end do - end if - end subroutine state_getimport_4d_output + end subroutine state_getimport_4d !=============================================================================== - - subroutine state_getimport_3d_output(state, fldname, output, do_sum, ungridded_index, rc) + subroutine state_getimport_3d(state, fldname, output, ungridded_index, areacor, rc) ! ---------------------------------------------- ! Map import state field to output array ! ---------------------------------------------- ! 
input/output variables - type(ESMF_State) , intent(in) :: state - character(len=*) , intent(in) :: fldname - real (kind=dbl_kind) , intent(inout) :: output(:,:,:) - logical, optional , intent(in) :: do_sum - integer, optional , intent(in) :: ungridded_index - integer , intent(out) :: rc + type(ESMF_State) , intent(in) :: state + character(len=*) , intent(in) :: fldname + real (kind=dbl_kind) , intent(inout) :: output(:,:,:) + integer, optional , intent(in) :: ungridded_index + real(kind=dbl_kind), optional , intent(in) :: areacor(:) + integer , intent(out) :: rc ! local variables type(block) :: this_block ! block information for current block @@ -1444,9 +1514,7 @@ subroutine state_getimport_3d_output(state, fldname, output, do_sum, ungridded_i integer :: i, j, iblk, n, i1, j1 ! incides real(kind=dbl_kind), pointer :: dataPtr1d(:) ! mesh real(kind=dbl_kind), pointer :: dataPtr2d(:,:) ! mesh - real(kind=dbl_kind), pointer :: dataPtr3d(:,:,:) ! grid - real(kind=dbl_kind), pointer :: dataPtr4d(:,:,:,:) ! grid - character(len=*) , parameter :: subname='(ice_import_export:state_getimport)' + character(len=*) , parameter :: subname='(ice_import_export:state_getimport_3d)' ! ---------------------------------------------- rc = ESMF_SUCCESS @@ -1454,83 +1522,53 @@ subroutine state_getimport_3d_output(state, fldname, output, do_sum, ungridded_i ! check that fieldname exists if (.not. State_FldChk(state, trim(fldname))) return - if (geomtype == ESMF_GEOMTYPE_MESH) then - - ! get field pointer - if (present(ungridded_index)) then - call state_getfldptr(state, trim(fldname), dataPtr2d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - else - call state_getfldptr(state, trim(fldname), dataPtr1d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - end if + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataPtr2d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataPtr1d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if - ! determine output array - n=0 + ! determine output array + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + if (present(ungridded_index)) then + output(i,j,iblk) = dataPtr2d(ungridded_index,n) + else + output(i,j,iblk) = dataPtr1d(n) + end if + end do + end do + end do + if (present(areacor)) then + n = 0 do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi + ilo = this_block%ilo; ihi = this_block%ihi + jlo = this_block%jlo; jhi = this_block%jhi do j = jlo, jhi do i = ilo, ihi - n = n+1 - if (present(do_sum) .and. present(ungridded_index)) then - output(i,j,iblk) = output(i,j,iblk) + dataPtr2d(ungridded_index,n) - else if (present(do_sum)) then - output(i,j,iblk) = output(i,j,iblk) + dataPtr1d(n) - else if (present(ungridded_index)) then - output(i,j,iblk) = dataPtr2d(ungridded_index,n) - else - output(i,j,iblk) = dataPtr1d(n) - end if + n = n + 1 + output(i,j,iblk) = output(i,j,iblk) * areacor(n) end do end do end do - - else if (geomtype == ESMF_GEOMTYPE_GRID) then - - ! 
get field pointer - if (present(ungridded_index)) then - call state_getfldptr(state, trim(fldname), dataptr4d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - else - call state_getfldptr(state, trim(fldname), dataptr3d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - end if - - ! set values of output array - do iblk = 1,nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - do j = jlo,jhi - do i = ilo,ihi - i1 = i - ilo + 1 - j1 = j - jlo + 1 - if (present(do_sum) .and. present(ungridded_index)) then - output(i,j,iblk) = output(i,j,iblk) + dataPtr4d(i1,j1,iblk,ungridded_index) - else if (present(do_sum)) then - output(i,j,iblk) = output(i,j,iblk) + dataPtr3d(i1,j1,iblk) - else if (present(ungridded_index)) then - output(i,j,iblk) = dataPtr4d(i1,j1,iblk, ungridded_index) - else - output(i,j,iblk) = dataPtr3d(i1,j1,iblk) - end if - end do - end do - end do - end if - end subroutine state_getimport_3d_output + end subroutine state_getimport_3d !=============================================================================== - - subroutine state_setexport_4d_input(state, fldname, input, index, lmask, ifrac, ungridded_index, rc) + subroutine state_setexport_4d(state, fldname, input, index, lmask, ifrac, ungridded_index, areacor, rc) ! ---------------------------------------------- ! Map 4d input array to export state field @@ -1544,6 +1582,7 @@ subroutine state_setexport_4d_input(state, fldname, input, index, lmask, ifrac, logical , optional, intent(in) :: lmask(:,:,:) real(kind=dbl_kind) , optional, intent(in) :: ifrac(:,:,:) integer , optional, intent(in) :: ungridded_index + real(kind=dbl_kind) , optional, intent(in) :: areacor(:) integer , intent(out) :: rc ! local variables @@ -1552,9 +1591,8 @@ subroutine state_setexport_4d_input(state, fldname, input, index, lmask, ifrac, integer :: i, j, iblk, n, i1, j1 ! indices real(kind=dbl_kind), pointer :: dataPtr1d(:) ! mesh real(kind=dbl_kind), pointer :: dataPtr2d(:,:) ! mesh - real(kind=dbl_kind), pointer :: dataPtr3d(:,:,:) ! grid - real(kind=dbl_kind), pointer :: dataPtr4d(:,:,:,:) ! grid - character(len=*), parameter :: subname='(ice_import_export:state_setexport)' + integer :: ice_num + character(len=*), parameter :: subname='(ice_import_export:state_setexport_4d)' ! ---------------------------------------------- rc = ESMF_SUCCESS @@ -1562,93 +1600,81 @@ subroutine state_setexport_4d_input(state, fldname, input, index, lmask, ifrac, ! check that fieldname exists if (.not. State_FldChk(state, trim(fldname))) return - if (geomtype == ESMF_GEOMTYPE_MESH) then - - ! get field pointer - if (present(ungridded_index)) then - call state_getfldptr(state, trim(fldname), dataPtr2d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - else - call state_getfldptr(state, trim(fldname), dataPtr1d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataPtr2d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (ungridded_index == 1) then + dataptr2d(:,:) = c0 end if - - ! set values of field pointer n = 0 do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi - n = n+1 - if (present(lmask) .and. present(ifrac)) then + ilo = this_block%ilo; ihi = this_block%ihi + jlo = this_block%jlo; jhi = this_block%jhi + if (present(lmask) .and. 
present(ifrac)) then
+    do j = jlo, jhi
+       do i = ilo, ihi
+          n = n+1
if ( lmask(i,j,iblk) .and. ifrac(i,j,iblk) > c0 ) then
-    if (present(ungridded_index)) then
-       dataPtr2d(ungridded_index,n) = input(i,j,index,iblk)
-    else
-       dataPtr1d(n) = input(i,j,index,iblk)
-    end if
- end if
- else
-    if (present(ungridded_index)) then
dataPtr2d(ungridded_index,n) = input(i,j,index,iblk)
else
-    dataPtr1d(n) = input(i,j,index,iblk)
+    dataPtr2d(ungridded_index,n) = c0
end if
+       end do
end do
- end do
+ else
+    do j = jlo, jhi
+       do i = ilo, ihi
+          n = n+1
+          dataPtr2d(ungridded_index,n) = input(i,j,index,iblk)
+       end do
+    end do
+ end if
end do
-
- else if (geomtype == ESMF_GEOMTYPE_GRID) then
-
-    if (present(ungridded_index)) then
-       call state_getfldptr(state, trim(fldname), dataptr4d, rc)
-       if (ChkErr(rc,__LINE__,u_FILE_u)) return
-    else
-       call state_getfldptr(state, trim(fldname), dataptr3d, rc)
-       if (ChkErr(rc,__LINE__,u_FILE_u)) return
+ ice_num = n
+ if (present(areacor)) then
+    do n = 1,ice_num
+       dataPtr2d(ungridded_index,n) = dataPtr2d(ungridded_index,n) * areacor(n)
+    end do
end if
-
-    do iblk = 1,nblocks
+ else
+    call state_getfldptr(state, trim(fldname), dataPtr1d, rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+    dataptr1d(:) = c0
+    n = 0
+    do iblk = 1, nblocks
this_block = get_block(blocks_ice(iblk),iblk)
-       ilo = this_block%ilo
-       ihi = this_block%ihi
-       jlo = this_block%jlo
-       jhi = this_block%jhi
-       do j = jlo,jhi
-          do i = ilo,ihi
-             i1 = i - ilo + 1
-             j1 = j - jlo + 1
-             if (present(lmask) .and. present(ifrac)) then
+       ilo = this_block%ilo; ihi = this_block%ihi
+       jlo = this_block%jlo; jhi = this_block%jhi
+       if (present(lmask) .and. present(ifrac)) then
+          do j = jlo, jhi
+             do i = ilo, ihi
+                n = n+1
if ( lmask(i,j,iblk) .and. ifrac(i,j,iblk) > c0 ) then
-                   if (present(ungridded_index)) then
-                      dataPtr4d(i1,j1,iblk,ungridded_index) = input(i,j,index,iblk)
-                   end if
-                else
-                   dataPtr3d(i1,j1,iblk) = input(i,j,index,iblk)
-                end if
-             else
-                if (present(ungridded_index)) then
-                   dataPtr4d(i1,j1,iblk,ungridded_index) = input(i,j,index,iblk)
-                else
-                   dataPtr3d(i1,j1,iblk) = input(i,j,index,iblk)
+                   dataPtr1d(n) = input(i,j,index,iblk)
end if
-             end if
+             end do
end do
-          end do
+       else
+          do j = jlo, jhi
+             do i = ilo, ihi
+                n = n+1
+                dataPtr1d(n) = input(i,j,index,iblk)
+             end do
+          end do
+       end if
end do
-
+    ice_num = n
+    if (present(areacor)) then
+       do n = 1,ice_num
+          dataPtr1d(n) = dataPtr1d(n) * areacor(n)
+       end do
+    end if
end if
- end subroutine state_setexport_4d_input
+ end subroutine state_setexport_4d
!===============================================================================
-
- subroutine state_setexport_3d_input(state, fldname, input, lmask, ifrac, ungridded_index, rc)
+ subroutine state_setexport_3d(state, fldname, input, lmask, ifrac, ungridded_index, areacor, rc)
! ----------------------------------------------
! Map 3d input array to export state field
@@ -1661,6 +1687,7 @@
logical             , optional , intent(in) :: lmask(:,:,:)
real(kind=dbl_kind) , optional , intent(in) :: ifrac(:,:,:)
integer             , optional , intent(in) :: ungridded_index
+ real(kind=dbl_kind) , optional , intent(in) :: areacor(:)
integer             , intent(out) :: rc
! local variables
@@ -1669,9 +1696,8 @@
integer :: i, j, iblk, n, i1, j1 ! incides
real(kind=dbl_kind), pointer :: dataPtr1d(:) ! mesh
real(kind=dbl_kind), pointer :: dataPtr2d(:,:) ! mesh
- real(kind=dbl_kind), pointer :: dataPtr3d(:,:,:) !
grid - real(kind=dbl_kind), pointer :: dataPtr4d(:,:,:,:) ! grid - character(len=*), parameter :: subname='(ice_import_export:state_setexport)' + integer :: num_ice + character(len=*), parameter :: subname='(ice_import_export:state_setexport_3d)' ! ---------------------------------------------- rc = ESMF_SUCCESS @@ -1679,92 +1705,59 @@ subroutine state_setexport_3d_input(state, fldname, input, lmask, ifrac, ungridd ! check that fieldname exists if (.not. State_FldChk(state, trim(fldname))) return - if (geomtype == ESMF_GEOMTYPE_MESH) then - - ! get field pointer - if (present(ungridded_index)) then - call state_getfldptr(state, trim(fldname), dataPtr2d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - else - call state_getfldptr(state, trim(fldname), dataPtr1d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - end if + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataPtr2d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataPtr1d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if - n = 0 - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi - n = n+1 - if (present(lmask) .and. present(ifrac)) then - if ( lmask(i,j,iblk) .and. ifrac(i,j,iblk) > c0 ) then - if (present(ungridded_index)) then - dataPtr2d(ungridded_index,n) = input(i,j,iblk) - else - dataPtr1d(n) = input(i,j,iblk) - end if - end if - else + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + if (present(lmask) .and. present(ifrac)) then + if ( lmask(i,j,iblk) .and. ifrac(i,j,iblk) > c0 ) then if (present(ungridded_index)) then dataPtr2d(ungridded_index,n) = input(i,j,iblk) else dataPtr1d(n) = input(i,j,iblk) end if end if - end do + else + if (present(ungridded_index)) then + dataPtr2d(ungridded_index,n) = input(i,j,iblk) + else + dataPtr1d(n) = input(i,j,iblk) + end if + end if end do end do - - else if (geomtype == ESMF_GEOMTYPE_GRID) then - - ! get field pointer + end do + num_ice = n + if (present(areacor)) then if (present(ungridded_index)) then - call state_getfldptr(state, trim(fldname), dataptr4d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + do n = 1,num_ice + dataPtr2d(:,n) = dataPtr2d(:,n) * areacor(n) + end do else - call state_getfldptr(state, trim(fldname), dataptr3d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - end if - - do iblk = 1,nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - do j = jlo,jhi - do i = ilo,ihi - i1 = i - ilo + 1 - j1 = j - jlo + 1 - if (present(lmask) .and. present(ifrac)) then - if ( lmask(i,j,iblk) .and. 
ifrac(i,j,iblk) > c0 ) then - if (present(ungridded_index)) then - dataPtr4d(i1,j1,iblk,ungridded_index) = input(i,j,iblk) - else - dataPtr3d(i1,j1,iblk) = input(i,j,iblk) - end if - end if - else - if (present(ungridded_index)) then - dataPtr4d(i1,j1,iblk,ungridded_index) = input(i,j,iblk) - else - dataPtr3d(i1,j1,iblk) = input(i,j,iblk) - end if - end if - end do + do n = 1,num_ice + dataPtr1d(n) = dataPtr1d(n) * areacor(n) end do - end do - + end if end if - end subroutine state_setexport_3d_input + end subroutine state_setexport_3d !=============================================================================== - subroutine State_GetFldPtr_1d(State, fldname, fldptr, rc) ! ---------------------------------------------- ! Get pointer to a state field @@ -1788,10 +1781,10 @@ subroutine State_GetFldPtr_1d(State, fldname, fldptr, rc) call ESMF_FieldGet(lfield, farrayPtr=fldptr, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return + end subroutine State_GetFldPtr_1d !=============================================================================== - subroutine State_GetFldPtr_2d(State, fldname, fldptr, rc) ! ---------------------------------------------- ! Get pointer to a state field @@ -1815,60 +1808,7 @@ subroutine State_GetFldPtr_2d(State, fldname, fldptr, rc) call ESMF_FieldGet(lfield, farrayPtr=fldptr, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - end subroutine State_GetFldPtr_2d - - !=============================================================================== - - subroutine State_GetFldPtr_3d(State, fldname, fldptr, rc) - ! ---------------------------------------------- - ! Get pointer to a state field - ! ---------------------------------------------- - - ! input/output variables - type(ESMF_State) , intent(in) :: State - character(len=*) , intent(in) :: fldname - real(kind=dbl_kind) , pointer , intent(inout) :: fldptr(:,:,:) - integer , optional , intent(out) :: rc - - ! local variables - type(ESMF_Field) :: lfield - character(len=*),parameter :: subname='(ice_import_export:State_GetFldPtr_3d)' - ! ---------------------------------------------- - - rc = ESMF_SUCCESS - - call ESMF_StateGet(State, itemName=trim(fldname), field=lfield, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - - call ESMF_FieldGet(lfield, farrayPtr=fldptr, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - end subroutine State_GetFldPtr_3d - !=============================================================================== - - subroutine State_GetFldPtr_4d(State, fldname, fldptr, rc) - ! ---------------------------------------------- - ! Get pointer to a state field - ! ---------------------------------------------- - - ! input/output variables - type(ESMF_State) , intent(in) :: State - character(len=*) , intent(in) :: fldname - real(kind=dbl_kind) , pointer , intent(inout) :: fldptr(:,:,:,:) - integer , optional , intent(out) :: rc - - ! local variables - type(ESMF_Field) :: lfield - character(len=*),parameter :: subname='(ice_import_export:State_GetFldPtr_3d)' - ! 
----------------------------------------------
-
- rc = ESMF_SUCCESS
-
- call ESMF_StateGet(State, itemName=trim(fldname), field=lfield, rc=rc)
- if (ChkErr(rc,__LINE__,u_FILE_u)) return
-
- call ESMF_FieldGet(lfield, farrayPtr=fldptr, rc=rc)
- if (ChkErr(rc,__LINE__,u_FILE_u)) return
- end subroutine State_GetFldPtr_4d
+ end subroutine State_GetFldPtr_2d
end module ice_import_export
diff --git a/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90
new file mode 100644
index 000000000..17941435d
--- /dev/null
+++ b/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90
@@ -0,0 +1,666 @@
+module ice_mesh_mod
+
+ use ESMF
+ use NUOPC , only : NUOPC_CompAttributeGet
+ use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long
+ use ice_domain_size , only : nx_global, ny_global, max_blocks
+ use ice_domain , only : nblocks, blocks_ice, distrb_info
+ use ice_blocks , only : block, get_block, nx_block, ny_block, nblocks_x, nblocks_y
+ use ice_shr_methods , only : chkerr
+ use ice_fileunits , only : nu_diag
+ use ice_communicate , only : my_task, master_task
+ use ice_exit , only : abort_ice
+ use icepack_intfc , only : icepack_query_parameters
+ use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted
+
+ implicit none
+ private
+
+ public :: ice_mesh_set_distgrid
+ public :: ice_mesh_setmask_from_maskfile
+ public :: ice_mesh_create_scolumn
+ public :: ice_mesh_init_tlon_tlat_area_hm
+ public :: ice_mesh_check
+
+ ! Only relevant for lat-lon grids: gridcell value of [1 - (land fraction)] (T-cell)
+ real (dbl_kind), allocatable, public :: ocn_gridcell_frac(:,:,:)
+
+ character(*), parameter :: u_FILE_u = &
+ __FILE__
+
+!=======================================================================
contains
!=======================================================================
+
+ subroutine ice_mesh_set_distgrid(localpet, npes, distgrid, rc)
+
+ ! Determine the global index space needed for the distgrid
+
+ ! input/output variables
+ integer , intent(in) :: localpet
+ integer , intent(in) :: npes
+ type(ESMF_DistGrid) , intent(inout) :: distgrid
+ integer , intent(out) :: rc
+
+ ! local variables
+ integer :: n,c,g,i,j,m ! indices
+ integer :: iblk, jblk ! indices
+ integer :: ig, jg ! indices
+ integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain
+ integer :: lsize ! local size of coupling array
+ type(block) :: this_block ! block information for current block
+ integer :: num_elim_global
+ integer :: num_elim_local
+ integer :: num_elim
+ integer :: num_ice
+ integer :: num_elim_gcells ! local number of eliminated gridcells
+ integer :: num_elim_blocks ! local number of eliminated blocks
+ integer :: num_total_blocks
+ integer :: my_elim_start, my_elim_end
+ integer , allocatable :: gindex(:)
+ integer , allocatable :: gindex_ice(:)
+ integer , allocatable :: gindex_elim(:)
+ integer :: globalID
+ character(len=*), parameter :: subname = ' ice_mesh_set_distgrid: '
!----------------------------------------------------------------
+
+ rc = ESMF_SUCCESS
+
+ ! number the local grid to get allocation size for gindex_ice
+ lsize = 0
+ do iblk = 1, nblocks
+    this_block = get_block(blocks_ice(iblk),iblk)
+    ilo = this_block%ilo
+    ihi = this_block%ihi
+    jlo = this_block%jlo
+    jhi = this_block%jhi
+    do j = jlo, jhi
+       do i = ilo, ihi
+          lsize = lsize + 1
+       enddo
+    enddo
+ enddo
+
+ !
set global index array
+    allocate(gindex_ice(lsize))
+    n = 0
+    do iblk = 1, nblocks
+       this_block = get_block(blocks_ice(iblk),iblk)
+       ilo = this_block%ilo
+       ihi = this_block%ihi
+       jlo = this_block%jlo
+       jhi = this_block%jhi
+       do j = jlo, jhi
+          do i = ilo, ihi
+             n = n+1
+             ig = this_block%i_glob(i)
+             jg = this_block%j_glob(j)
+             gindex_ice(n) = (jg-1)*nx_global + ig
+          enddo
+       enddo
+    enddo
+
+    ! Determine total number of eliminated blocks globally
+    globalID = 0
+    num_elim_global  = 0   ! number of eliminated blocks
+    num_total_blocks = 0
+    do jblk=1,nblocks_y
+       do iblk=1,nblocks_x
+          globalID = globalID + 1
+          num_total_blocks = num_total_blocks + 1
+          if (distrb_info%blockLocation(globalID) == 0) then
+             num_elim_global = num_elim_global + 1
+          end if
+       end do
+    end do
+
+    if (num_elim_global > 0) then
+
+       ! Distribute the eliminated blocks in a round-robin fashion among processors
+       num_elim_local = num_elim_global / npes
+       my_elim_start = num_elim_local*localPet + min(localPet, mod(num_elim_global, npes)) + 1
+       if (localPet < mod(num_elim_global, npes)) then
+          num_elim_local = num_elim_local + 1
+       end if
+       my_elim_end = my_elim_start + num_elim_local - 1
+
+       ! Determine the number of eliminated gridcells locally
+       globalID = 0
+       num_elim_blocks = 0   ! local number of eliminated blocks
+       num_elim_gcells = 0
+       do jblk=1,nblocks_y
+          do iblk=1,nblocks_x
+             globalID = globalID + 1
+             if (distrb_info%blockLocation(globalID) == 0) then
+                num_elim_blocks = num_elim_blocks + 1
+                if (num_elim_blocks >= my_elim_start .and. num_elim_blocks <= my_elim_end) then
+                   this_block = get_block(globalID, globalID)
+                   num_elim_gcells = num_elim_gcells + &
+                        (this_block%jhi-this_block%jlo+1) * (this_block%ihi-this_block%ilo+1)
+                end if
+             end if
+          end do
+       end do
+
+       ! Determine the global index space of the eliminated gridcells
+       allocate(gindex_elim(num_elim_gcells))
+       globalID = 0
+       num_elim_gcells = 0   ! local number of eliminated gridcells
+       num_elim_blocks = 0   ! local number of eliminated blocks
+       do jblk=1,nblocks_y
+          do iblk=1,nblocks_x
+             globalID = globalID + 1
+             if (distrb_info%blockLocation(globalID) == 0) then
+                this_block = get_block(globalID, globalID)
+                num_elim_blocks = num_elim_blocks + 1
+                if (num_elim_blocks >= my_elim_start .and. num_elim_blocks <= my_elim_end) then
+                   do j=this_block%jlo,this_block%jhi
+                      do i=this_block%ilo,this_block%ihi
+                         num_elim_gcells = num_elim_gcells + 1
+                         ig = this_block%i_glob(i)
+                         jg = this_block%j_glob(j)
+                         gindex_elim(num_elim_gcells) = (jg-1)*nx_global + ig
+                      end do
+                   end do
+                end if
+             end if
+          end do
+       end do
+
+       ! create a global index that includes both active and eliminated gridcells
+       num_ice  = size(gindex_ice)
+       num_elim = size(gindex_elim)
+       allocate(gindex(num_elim + num_ice))
+       do n = 1,num_ice
+          gindex(n) = gindex_ice(n)
+       end do
+       do n = num_ice+1,num_ice+num_elim
+          gindex(n) = gindex_elim(n-num_ice)
+       end do
+
+       deallocate(gindex_elim)
+
+    else
+
+       ! No eliminated land blocks
+       num_ice = size(gindex_ice)
+       allocate(gindex(num_ice))
+       do n = 1,num_ice
+          gindex(n) = gindex_ice(n)
+       end do
+
+    end if
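
The distgrid is built from arbitrary sequence indices: gridcell (ig,jg) on an
nx_global-wide grid maps to the 1-based global index (jg-1)*nx_global + ig,
row-major from the lower-left corner. A minimal sketch of the mapping (the
5-wide grid here is hypothetical, for illustration only):

    program gidx_demo
       implicit none
       integer, parameter :: nx_global = 5      ! assumed grid width, illustration only
       integer :: ig, jg, gidx
       ig = 2 ; jg = 3
       gidx = (jg-1)*nx_global + ig             ! = 12: the 2nd point of the 3rd row
       print *, gidx
    end program gidx_demo

+    !---------------------------------------------------------------------------
+    ! 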
Create distGrid from global index array + !--------------------------------------------------------------------------- + + DistGrid = ESMF_DistGridCreate(arbSeqIndexList=gindex, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + deallocate(gindex_ice) + deallocate(gindex) + + end subroutine ice_mesh_set_distgrid + + !======================================================================= + subroutine ice_mesh_setmask_from_maskfile(ice_maskfile, ice_mesh, rc) + + use ice_grid , only : tlon, tlat, hm, tarea + use ice_constants , only : c0, c1, c2, p25, radius + + ! input/output variables + character(len=*) , intent(in) :: ice_maskfile + type(ESMF_Mesh) , intent(inout) :: ice_mesh + integer , intent(out) :: rc + + ! local variables + integer :: i, j, n + integer (int_kind) :: ni, nj + integer :: iblk, jblk ! indices + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + type (block) :: this_block ! block information for current block + real(dbl_kind) , pointer :: ice_frac(:) + type(ESMF_Field) :: areaField + type(ESMF_Mesh) :: mesh_mask + type(ESMF_Field) :: field_mask + type(ESMF_Field) :: field_dst + type(ESMF_RouteHandle) :: rhandle + integer :: srcMaskValue = 0 + integer :: dstMaskValue = -987987 ! spval for RH mask values + integer :: srcTermProcessing_Value = 0 + logical :: checkflag = .false. + integer, pointer :: ice_mask(:) + real(dbl_kind) , pointer :: mask_src(:) ! on mesh created from ice_maskfile + real(dbl_kind) , pointer :: dataptr1d(:) + type(ESMF_DistGrid) :: distgrid_mask + type(ESMF_Array) :: elemMaskArray + integer :: lsize_mask, lsize_dst + integer :: spatialDim + real(dbl_kind) :: fminval = 0.001_dbl_kind ! TODO: make this a share constant + real(dbl_kind) :: fmaxval = 1._dbl_kind + real(dbl_kind) :: lfrac + real(dbl_kind) , pointer :: mesh_areas(:) + integer :: numownedelements + real(dbl_kind) , pointer :: ownedElemCoords(:) + real(dbl_kind) :: pi + real(dbl_kind) :: c180 + real(dbl_kind) :: puny + real(dbl_kind) :: deg_to_rad + character(len=*), parameter :: subname = ' ice_mesh_setmask_from_maskfile' + !--------------------------------------------------- + + rc = ESMF_SUCCESS + + mesh_mask = ESMF_MeshCreate(trim(ice_maskfile), fileformat=ESMF_FILEFORMAT_ESMFMESH, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_MeshGet(ice_mesh, spatialDim=spatialDim, numOwnedElements=lsize_dst, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + allocate(ice_mask(lsize_dst)) + allocate(ice_frac(lsize_dst)) + + ! create fields on source and destination meshes + field_mask = ESMF_FieldCreate(mesh_mask, ESMF_TYPEKIND_R8, meshloc=ESMF_MESHLOC_ELEMENT, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + field_dst = ESMF_FieldCreate(ice_mesh, ESMF_TYPEKIND_R8, meshloc=ESMF_MESHLOC_ELEMENT, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! create route handle to map source mask (assume ocean) to destination mesh (assume atm/lnd) + call ESMF_FieldRegridStore(field_mask, field_dst, routehandle=rhandle, & + srcMaskValues=(/srcMaskValue/), dstMaskValues=(/dstMaskValue/), & + regridmethod=ESMF_REGRIDMETHOD_CONSERVE, normType=ESMF_NORMTYPE_DSTAREA, & + srcTermProcessing=srcTermProcessing_Value, & + ignoreDegenerate=.true., unmappedaction=ESMF_UNMAPPEDACTION_IGNORE, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + ! 
fill in values for field_mask with mask on source mesh
+    call ESMF_MeshGet(mesh_mask, elementdistGrid=distgrid_mask, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+    call ESMF_DistGridGet(distgrid_mask, localDe=0, elementCount=lsize_mask, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+    allocate(mask_src(lsize_mask))
+    elemMaskArray = ESMF_ArrayCreate(distgrid_mask, mask_src, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+    ! The following call fills in the values of mask_src
+    call ESMF_MeshGet(mesh_mask, elemMaskArray=elemMaskArray, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+    ! The following call fills in the values of field_mask
+    call ESMF_FieldGet(field_mask, farrayptr=dataptr1d, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+    dataptr1d(:) = mask_src(:)
+
+    ! map source mask to destination mesh - to obtain destination mask and frac
+    call ESMF_FieldRegrid(field_mask, field_dst, routehandle=rhandle, &
+         termorderflag=ESMF_TERMORDER_SRCSEQ, checkflag=checkflag, zeroregion=ESMF_REGION_TOTAL, rc=rc)
+    if (chkerr(rc,__LINE__,u_FILE_u)) return
+
+    call ESMF_MeshGet(ice_mesh, spatialDim=spatialDim, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+    call ESMF_FieldGet(field_dst, farrayptr=dataptr1d, rc=rc)
+    if (chkerr(rc,__LINE__,u_FILE_u)) return
+
+    ! now determine ice_mask and ice_frac
+    do n = 1,size(dataptr1d)
+       lfrac = c1 - dataptr1d(n)
+       if (lfrac > fmaxval) lfrac = c1
+       if (lfrac < fminval) lfrac = c0
+       ice_frac(n) = c1 - lfrac
+       if (ice_frac(n) == c0) then
+          ice_mask(n) = 0
+       else
+          ice_mask(n) = 1
+       end if
+    enddo
+
+    ! reset the model mesh mask
+    call ESMF_MeshSet(ice_mesh, elementMask=ice_mask, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    ! deallocate memory
+    call ESMF_RouteHandleDestroy(rhandle, rc=rc)
+    if (chkerr(rc,__LINE__,u_FILE_u)) return
+    call ESMF_FieldDestroy(field_mask, rc=rc)
+    if (chkerr(rc,__LINE__,u_FILE_u)) return
+    call ESMF_FieldDestroy(field_dst, rc=rc)
+    if (chkerr(rc,__LINE__,u_FILE_u)) return
+    deallocate(mask_src)
+
+    ! Allocate module variable ocn_gridcell_frac
+    allocate(ocn_gridcell_frac(nx_block,ny_block,max_blocks))
+
+    ! Obtain mesh areas in radians^2
+    areaField = ESMF_FieldCreate(ice_mesh, ESMF_TYPEKIND_R8, meshloc=ESMF_MESHLOC_ELEMENT, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+    call ESMF_FieldRegridGetArea(areaField, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+    call ESMF_FieldGet(areaField, farrayPtr=mesh_areas, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    ! Obtain mesh lons and lats in degrees
+    call ESMF_MeshGet(ice_mesh, spatialDim=spatialDim, numOwnedElements=numOwnedElements, rc=rc)
+    if (chkerr(rc,__LINE__,u_FILE_u)) return
+    allocate(ownedElemCoords(spatialDim*numownedelements))
+    call ESMF_MeshGet(ice_mesh, ownedElemCoords=ownedElemCoords, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    ! Get required constants
+    call icepack_query_parameters(pi_out=pi, c180_out=c180)
+    if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
+         file=__FILE__, line=__LINE__)
+    deg_to_rad = pi/c180
+
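
On the destination mesh the regridded field holds each cell's ocean weight; the
land fraction is clipped so nearly-land cells become all land and nearly-ocean
cells become all ocean before the ice fraction and integer mask are derived. A
condensed standalone sketch of that rule, with the fminval/fmaxval constants
inlined for illustration:

    program frac_demo
       implicit none
       integer, parameter :: dbl_kind = kind(1.d0)
       real(dbl_kind) :: ocn_w, lfrac, ifrac
       ocn_w = 0.9995_dbl_kind                            ! regridded ocean weight of one cell
       lfrac = 1.0_dbl_kind - ocn_w                       ! raw land fraction
       if (lfrac > 1.0_dbl_kind)   lfrac = 1.0_dbl_kind   ! fmaxval clamp (regridding overshoot)
       if (lfrac < 0.001_dbl_kind) lfrac = 0.0_dbl_kind   ! fminval clamp (noise -> all ocean)
       ifrac = 1.0_dbl_kind - lfrac                       ! here 1.0, so ice_mask would be 1
       print *, ifrac
    end program frac_demo

+    ! Set tlon, tlat, tarea, hm
+    ! Convert mesh areas from radians^2 to m^2 (tarea is in m^2)
+    ! 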
Convert lons and lats from degrees to radians
+    n = 0
+    do iblk = 1, nblocks
+       this_block = get_block(blocks_ice(iblk),iblk)
+       ilo = this_block%ilo
+       ihi = this_block%ihi
+       jlo = this_block%jlo
+       jhi = this_block%jhi
+       do j = jlo, jhi
+          do i = ilo, ihi
+             n = n + 1
+             tlon(i,j,iblk)  = ownedElemCoords(2*n-1) * deg_to_rad
+             tlat(i,j,iblk)  = ownedElemCoords(2*n) * deg_to_rad
+             tarea(i,j,iblk) = mesh_areas(n) * (radius*radius)
+             hm(i,j,iblk)    = real(ice_mask(n),kind=dbl_kind)
+             ocn_gridcell_frac(i,j,iblk) = ice_frac(n)
+          enddo
+       enddo
+    enddo
+
+    ! Deallocate memory
+    deallocate(ownedElemCoords)
+    call ESMF_FieldDestroy(areaField)
+
+  end subroutine ice_mesh_setmask_from_maskfile
+
+  !===============================================================================
+  subroutine ice_mesh_create_scolumn(scol_lon, scol_lat, ice_mesh, rc)
+
+    use ice_constants , only : c0, c1
+    use ice_scam      , only : scmlat, scmlon, scol_area, scol_mask, scol_frac, scol_nj
+    use netcdf
+
+    ! Create the single column model mesh: a one-gridcell mesh
+    ! centered on (scol_lon, scol_lat)
+
+    ! input/output variables
+    real(dbl_kind)  , intent(in)    :: scol_lon
+    real(dbl_kind)  , intent(in)    :: scol_lat
+    type(ESMF_Mesh) , intent(inout) :: ice_mesh
+    integer         , intent(out)   :: rc
+
+    ! local variables
+    type(ESMF_Grid) :: lgrid
+    integer         :: maxIndex(2)
+    real(dbl_kind)  :: mincornerCoord(2)
+    real(dbl_kind)  :: maxcornerCoord(2)
+    integer         :: i, j, iblk, jblk     ! indices
+    integer         :: ilo, ihi, jlo, jhi   ! beginning and end of physical domain
+    type (block)    :: this_block           ! block information for current block
+    character(len=*), parameter :: subname = ' ice_mesh_create_scolumn'
+    ! ----------------------------------------------
+
+    rc = ESMF_SUCCESS
+
+    ! Use the center point and an arbitrary delta of 0.1 degree in lon and lat
+    ! to define the corners of the single gridcell
+    maxIndex(1)       = 1                        ! number of lons
+    maxIndex(2)       = 1                        ! number of lats
+    mincornerCoord(1) = scol_lon - .1_dbl_kind   ! min lon
+    mincornerCoord(2) = scol_lat - .1_dbl_kind   ! min lat
+    maxcornerCoord(1) = scol_lon + .1_dbl_kind   ! max lon
+    maxcornerCoord(2) = scol_lat + .1_dbl_kind   ! max lat
+
+    ! create the ESMF grid
+    lgrid = ESMF_GridCreateNoPeriDimUfrm (maxindex=maxindex, &
+         mincornercoord=mincornercoord, maxcornercoord= maxcornercoord, &
+         staggerloclist=(/ESMF_STAGGERLOC_CENTER, ESMF_STAGGERLOC_CORNER/), rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    ! create the mesh from the lgrid
+    ice_mesh = ESMF_MeshCreate(lgrid, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    ! Allocate module variable ocn_gridcell_frac
+    allocate(ocn_gridcell_frac(nx_block,ny_block,max_blocks))
+    ocn_gridcell_frac(:,:,:) = scol_frac
+
+  end subroutine ice_mesh_create_scolumn
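
With a 0.1 degree delta on each side, the single cell spans 0.2 degree in both
lon and lat. For a hypothetical point (scol_lon, scol_lat) = (300.0, -60.0),
illustrative values only:

    ! mincornerCoord = (/ 299.9_dbl_kind, -60.1_dbl_kind /)   ! center - 0.1 degree
    ! maxcornerCoord = (/ 300.1_dbl_kind, -59.9_dbl_kind /)   ! center + 0.1 degree
    ! giving one 0.2 x 0.2 degree cell centered on the requested point

+
+  !===============================================================================
+  subroutine ice_mesh_init_tlon_tlat_area_hm()
+
+    use ice_grid      , only : tlon, tlat, hm, tarea, ULON, ULAT, HTN, HTE, ANGLE, ANGLET
+    use ice_grid      , only : uarea, uarear, tarear, tinyarea
+    use ice_grid      , only : dxt, dyt, dxu, dyu, dyhx, dxhy, cyp, cxp, cym, cxm
+    use ice_grid      , only : makemask
+    use ice_boundary  , only : ice_HaloUpdate
+    use ice_domain    , only : blocks_ice, nblocks, halo_info, distrb_info
+    use ice_constants , only : c0, c1, p25
+    use ice_constants , only : field_loc_center, field_type_scalar
+    use ice_scam      , only : scmlat, scmlon, scol_area, scol_mask, scol_frac, scol_nj, single_column
+
+    ! local variables
+    integer :: i,j,n
+    integer :: iblk, jblk            ! indices
+    integer :: ilo, ihi, jlo, jhi    ! 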
beginning and end of physical domain
+    type (block) :: this_block       ! block information for current block
+    real(dbl_kind) :: puny
+    real(dbl_kind) :: pi
+    character(len=*), parameter :: subname = ' ice_mesh_init_tlon_tlat_area_hm'
+    ! ----------------------------------------------
+
+    ! Get required constants
+    call icepack_query_parameters(pi_out=pi, puny_out=puny)
+    if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
+         file=__FILE__, line=__LINE__)
+
+    ! Check for consistency
+    if (single_column) then
+       if ((nx_global /= 1) .or. (ny_global /= 1)) then
+          write(nu_diag,*) 'nx_global = ',nx_global
+          write(nu_diag,*) 'ny_global = ',ny_global
+          write(nu_diag,*) 'single column mode (single_column=.true.) requires'
+          write(nu_diag,*) 'nx_global=ny_global=1 in ice_domain_size'
+          call abort_ice(' ice_mesh_init_tlon_tlat_area_hm: nx_global and ny_global need to be 1 for single column')
+       else
+          write(nu_diag,'(a,f10.5)')' single column mode lon/lat point contains ocn, ocn fraction = ',scol_frac
+       end if
+
+       TLON  = scmlon
+       TLAT  = scmlat
+       tarea = scol_area
+       hm    = scol_mask
+       ULAT  = TLAT + pi/scol_nj
+    end if
+
+    call ice_HaloUpdate (TLON , halo_info, field_loc_center, field_type_scalar, fillValue=c1)
+    call ice_HaloUpdate (TLAT , halo_info, field_loc_center, field_type_scalar, fillValue=c1)
+    call ice_HaloUpdate (tarea, halo_info, field_loc_center, field_type_scalar, fillValue=c1)
+    call ice_HaloUpdate (hm   , halo_info, field_loc_center, field_type_scalar, fillValue=c1)
+
+    !-----------------------------------------------------------------
+    ! CALCULATE various geometric 2d arrays
+    ! The U grid (velocity) is not used when run with sequential CAM
+    ! because we only use thermodynamic sea ice. However, ULAT is used
+    ! in the default initialization of CICE so we calculate it here as
+    ! a "dummy" so that CICE will initialize with ice. If a no ice
+    ! initialization is OK (or desired) this can be commented out and
+    ! ULAT will remain 0 as specified above. ULAT is located at the
+    ! NE corner of the grid cell, TLAT at the center, so here ULAT is
+    ! hacked by adding half the latitudinal spacing (in radians) to TLAT.
+    !-----------------------------------------------------------------
+
+    ANGLET(:,:,:) = c0
+
+    do iblk = 1, nblocks
+       this_block = get_block(blocks_ice(iblk),iblk)
+       ilo = this_block%ilo
+       ihi = this_block%ihi
+       jlo = this_block%jlo
+       jhi = this_block%jhi
+
+       do j = jlo, jhi
+          do i = ilo, ihi
+
+             if (ny_global == 1) then
+                uarea(i,j,iblk) = tarea(i,j, iblk)
+             else
+                uarea(i,j,iblk) = p25* &
+                     (tarea(i,j,  iblk) + tarea(i+1,j,  iblk) &
+                    + tarea(i,j+1,iblk) + tarea(i+1,j+1,iblk))
+             endif
+             tarear(i,j,iblk)   = c1/tarea(i,j,iblk)
+             uarear(i,j,iblk)   = c1/uarea(i,j,iblk)
+             tinyarea(i,j,iblk) = puny*tarea(i,j,iblk)
+
+             if (.not. single_column) then
+                if (ny_global == 1) then
+                   ULAT(i,j,iblk) = TLAT(i,j,iblk)
+                else
+                   ULAT(i,j,iblk) = TLAT(i,j,iblk)+(pi/ny_global)
+                endif
+             endif
+             ULON  (i,j,iblk) = c0
+             ANGLE (i,j,iblk) = c0
+
+             HTN   (i,j,iblk) = 1.e36_dbl_kind
+             HTE   (i,j,iblk) = 1.e36_dbl_kind
+             dxt   (i,j,iblk) = 1.e36_dbl_kind
+             dyt   (i,j,iblk) = 1.e36_dbl_kind
+             dxu   (i,j,iblk) = 1.e36_dbl_kind
+             dyu   (i,j,iblk) = 1.e36_dbl_kind
+             dxhy  (i,j,iblk) = 1.e36_dbl_kind
+             dyhx  (i,j,iblk) = 1.e36_dbl_kind
+             cyp   (i,j,iblk) = 1.e36_dbl_kind
+             cxp   (i,j,iblk) = 1.e36_dbl_kind
+             cym   (i,j,iblk) = 1.e36_dbl_kind
+             cxm   (i,j,iblk) = 1.e36_dbl_kind
+          enddo
+       enddo
+    enddo
+
+    call ice_HaloUpdate (ULAT, halo_info, field_loc_center, field_type_scalar, fillValue=c1)
+
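
The ULAT offset only needs to place U points poleward of T points so the default
initialization finds ice; its exact size is not critical. For a sense of scale
(illustrative numbers, not from the PR):

    ! ny_global = 180  =>  pi/ny_global ~ 0.01745 rad ~ 1.0 degree,
    ! i.e. ULAT sits roughly one grid row poleward of TLAT

+
+    ! 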
Set the boundary values for the T cell land mask (hm) and + ! make the logical land masks for T and U cells (tmask, umask). + ! Also create hemisphere masks (mask-n northern, mask-s southern) + call makemask() + + end subroutine ice_mesh_init_tlon_tlat_area_hm + + !=============================================================================== + subroutine ice_mesh_check(gcomp, ice_mesh, rc) + + ! Check CICE mesh + + use ice_constants, only : c1,c0,c360 + use ice_grid , only : tlon, tlat + + ! input/output parameters + type(ESMF_GridComp) , intent(inout) :: gcomp + type(ESMF_Mesh) , intent(inout) :: ice_mesh + integer , intent(out) :: rc + + ! local variables + type(ESMF_DistGrid) :: distGrid + integer :: n,c,g,i,j,m ! indices + integer :: iblk, jblk ! indices + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + type(block) :: this_block ! block information for current block + integer :: spatialDim + integer :: numOwnedElements + real(dbl_kind), pointer :: ownedElemCoords(:) + real(dbl_kind), pointer :: lat(:), latMesh(:) + real(dbl_kind), pointer :: lon(:), lonMesh(:) + real(dbl_kind) :: diff_lon + real(dbl_kind) :: diff_lat + real(dbl_kind) :: rad_to_deg + real(dbl_kind) :: tmplon, eps_imesh + logical :: isPresent, isSet + character(len=char_len_long) :: cvalue + character(len=char_len_long) :: logmsg + character(len=*), parameter :: subname = ' ice_mesh_check: ' + !--------------------------------------------------- + + ! Determine allowed mesh error + call NUOPC_CompAttributeGet(gcomp, name='eps_imesh', value=cvalue, & + isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + read(cvalue,*) eps_imesh + else + eps_imesh = 1.0e-1_dbl_kind + end if + write(logmsg,*) eps_imesh + call ESMF_LogWrite(trim(subname)//' eps_imesh = '//trim(logmsg), ESMF_LOGMSG_INFO) + + ! error check differences between internally generated lons and those read in + call ESMF_MeshGet(ice_mesh, spatialDim=spatialDim, numOwnedElements=numOwnedElements, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + allocate(ownedElemCoords(spatialDim*numownedelements)) + allocate(lonmesh(numOwnedElements)) + allocate(latmesh(numOwnedElements)) + call ESMF_MeshGet(ice_mesh, ownedElemCoords=ownedElemCoords) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + do n = 1,numOwnedElements + lonMesh(n) = ownedElemCoords(2*n-1) + latMesh(n) = ownedElemCoords(2*n) + end do + + ! obtain internally generated cice lats and lons for error checks + call icepack_query_parameters(rad_to_deg_out=rad_to_deg) + allocate(lon(numOwnedElements)) + allocate(lat(numOwnedElements)) + lon(:) = 0. + lat(:) = 0. + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n + 1 + lon(n) = tlon(i,j,iblk)*rad_to_deg + lat(n) = tlat(i,j,iblk)*rad_to_deg + + tmplon = lon(n) + if(tmplon < c0)tmplon = tmplon + c360 + + ! 
error check differences between internally generated lons and those read in + diff_lon = abs(mod(lonMesh(n) - tmplon,360.0)) + if (diff_lon > eps_imesh ) then + write(6,100)n,lonMesh(n),tmplon, diff_lon + call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + end if + diff_lat = abs(latMesh(n) - lat(n)) + if (diff_lat > eps_imesh) then + write(6,101)n,latMesh(n),lat(n), diff_lat + call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + end if + + enddo + enddo + enddo + +100 format('ERROR: CICE n, lonmesh, lon, diff_lon = ',i6,2(f21.13,3x),d21.5) +101 format('ERROR: CICE n, latmesh, lat, diff_lat = ',i6,2(f21.13,3x),d21.5) + + ! deallocate memory + deallocate(ownedElemCoords) + deallocate(lon, lonMesh) + deallocate(lat, latMesh) + + end subroutine ice_mesh_check + +end module ice_mesh_mod diff --git a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 index 6eca4f2b4..dc40177d8 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 @@ -7,39 +7,33 @@ module ice_prescribed_mod ! Ice/ocean fluxes are set to zero, and ice dynamics are not calculated. ! Regridding and data cycling capabilities are included. + use ESMF + #ifndef CESMCOUPLED use ice_kinds_mod - implicit none private ! except - public :: ice_prescribed_init ! initialize input data stream logical(kind=log_kind), parameter, public :: prescribed_ice = .false. ! true if prescribed ice - contains ! This is a stub routine for now - subroutine ice_prescribed_init(mpicom, compid, gindex) - integer(kind=int_kind), intent(in) :: mpicom - integer(kind=int_kind), intent(in) :: compid - integer(kind=int_kind), intent(in) :: gindex(:) + subroutine ice_prescribed_init(clock, mesh, rc) + type(ESMF_Clock) , intent(in) :: clock + type(ESMF_Mesh) , intent(in) :: mesh + integer , intent(out) :: rc ! do nothing end subroutine ice_prescribed_init -#else - - use shr_nl_mod , only : shr_nl_find_group_name - use shr_strdata_mod - use shr_dmodel_mod - use shr_string_mod - use shr_ncread_mod - use shr_sys_mod - use shr_mct_mod - use mct_mod - use pio +#else + + use ice_kinds_mod + use shr_nl_mod , only : shr_nl_find_group_name + use dshr_strdata_mod , only : shr_strdata_type, shr_strdata_print + use dshr_strdata_mod , only : shr_strdata_init_from_inline, shr_strdata_advance + use dshr_methods_mod , only : dshr_fldbun_getfldptr use ice_broadcast use ice_communicate , only : my_task, master_task, MPI_COMM_ICE - use ice_kinds_mod use ice_fileunits use ice_exit , only : abort_ice use ice_domain_size , only : nx_global, ny_global, ncat, nilyr, nslyr, max_blocks @@ -54,306 +48,288 @@ end subroutine ice_prescribed_init use icepack_intfc , only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc , only: icepack_query_tracer_indices, icepack_query_tracer_sizes use icepack_intfc , only: icepack_query_parameters + use ice_shr_methods , only: chkerr implicit none private ! except - ! MEMBER FUNCTIONS: - public :: ice_prescribed_init ! initialize input data stream - public :: ice_prescribed_run ! get time slices and time interp - public :: ice_prescribed_phys ! set prescribed ice state and fluxes - - ! !PUBLIC DATA MEMBERS: - logical(kind=log_kind), public :: prescribed_ice ! true if prescribed ice - integer(kind=int_kind),parameter :: nFilesMaximum = 400 ! max number of files - integer(kind=int_kind) :: stream_year_first ! first year in stream to use - integer(kind=int_kind) :: stream_year_last ! 
last year in stream to use
-  integer(kind=int_kind)       :: model_year_align ! align stream_year_first with this model year
-  character(len=char_len_long) :: stream_fldVarName
-  character(len=char_len_long) :: stream_fldFileName(nFilesMaximum)
-  character(len=char_len_long) :: stream_domTvarName
-  character(len=char_len_long) :: stream_domXvarName
-  character(len=char_len_long) :: stream_domYvarName
-  character(len=char_len_long) :: stream_domAreaName
-  character(len=char_len_long) :: stream_domMaskName
-  character(len=char_len_long) :: stream_domFileName
-  character(len=char_len_long) :: stream_mapread
-  logical(kind=log_kind)       :: prescribed_ice_fill ! true if data fill required
-  type(shr_strdata_type)       :: sdat    ! prescribed data stream
-  character(len=char_len_long) :: fldList ! list of fields in data stream
-  real(kind=dbl_kind),allocatable :: ice_cov(:,:,:) ! ice cover
+  ! public member functions:
+  public :: ice_prescribed_init ! initialize input data stream
+  public :: ice_prescribed_run  ! get time slices and time interp
+  public :: ice_prescribed_phys ! set prescribed ice state and fluxes
 
-contains
+  ! public data members:
-  subroutine ice_prescribed_init(mpicom, compid, gindex)
+  logical(kind=log_kind), public :: prescribed_ice ! true if prescribed ice
 
-    ! Prescribed ice initialization - needed to
-    ! work with new shr_strdata module derived type
+  ! private data members:
+  type(shr_strdata_type) :: sdat ! prescribed data stream
+  real(kind=dbl_kind),allocatable :: ice_cov(:,:,:) ! ice cover
 
-    use shr_pio_mod, only : shr_pio_getiotype, shr_pio_getiosys, shr_pio_getioformat
+  character(*), parameter :: u_FILE_u = &
+       __FILE__
 
-    implicit none
     include 'mpif.h'
 
+!=======================================================================
+contains
+!===============================================================================
+
+  subroutine ice_prescribed_init(clock, mesh, rc)
+
+    ! Prescribed ice initialization
 
-    ! !nput/output parameters:
-    integer(kind=int_kind), intent(in) :: mpicom
-    integer(kind=int_kind), intent(in) :: compid
-    integer(kind=int_kind), intent(in) :: gindex(:)
+    ! input/output parameters
+    type(ESMF_Clock) , intent(in)  :: clock
+    type(ESMF_Mesh)  , intent(in)  :: mesh
+    integer          , intent(out) :: rc
+
+    ! local parameters
+    integer(kind=int_kind),parameter :: nFilesMaximum = 400 ! max number of files
+    integer(kind=int_kind) :: n, nFile, ierr
+    integer(kind=int_kind) :: nml_error ! namelist i/o error flag
+    character(len=char_len_long) :: stream_meshFile
+    character(len=char_len_long) :: stream_dataFiles(nFilesMaximum)
+    character(len=char_len_long) :: stream_varname
+    character(len=char_len_long) :: stream_mapalgo
+    integer(kind=int_kind) :: stream_yearfirst ! first year in stream to use
+    integer(kind=int_kind) :: stream_yearlast  ! last year in stream to use
+    integer(kind=int_kind) :: stream_yearalign ! align stream_yearfirst with this model year
+    integer(kind=int_kind) :: nu_nml
+    logical :: prescribed_ice_mode
+    character(*),parameter :: subName = "('ice_prescribed_init')"
+    character(*),parameter :: F00 = "('(ice_prescribed_init) ',4a)"
+    character(*),parameter :: F01 = "('(ice_prescribed_init) ',a,i0)"
+    character(*),parameter :: F02 = "('(ice_prescribed_init) ',2a,i0)"
+    !--------------------------------
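
The F-style parameters are ordinary Fortran format strings; F01, for example,
prints the routine tag, a caller-supplied label, then an integer with i0
(minimum width). A usage sketch with an illustrative year:

    write(nu_diag,F01) ' stream_yearfirst  = ', 1980
    ! prints: (ice_prescribed_init)  stream_yearfirst  = 1980

-    !----- Local ------
-    type(mct_gsMap) :: gsmap_ice
-    type(mct_gGrid) :: dom_ice
-    integer(kind=int_kind) :: lsize
-    integer(kind=int_kind) :: gsize
-    integer(kind=int_kind) :: nml_error ! 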
namelist i/o error flag - integer(kind=int_kind) :: n, nFile, ierr - character(len=8) :: fillalgo - character(*),parameter :: subName = '(ice_prescribed_init)' - - namelist /ice_prescribed_nml/ & - prescribed_ice, & - model_year_align, & - stream_year_first , & - stream_year_last , & - stream_fldVarName , & - stream_fldFileName, & - stream_domTvarName, & - stream_domXvarName, & - stream_domYvarName, & - stream_domAreaName, & - stream_domMaskName, & - stream_domFileName, & - stream_mapread, & - prescribed_ice_fill + namelist /ice_prescribed_nml/ & + prescribed_ice_mode, & + stream_meshfile, & + stream_varname , & + stream_datafiles, & + stream_mapalgo, & + stream_yearalign, & + stream_yearfirst , & + stream_yearlast + + rc = ESMF_SUCCESS ! default values for namelist - prescribed_ice = .false. ! if true, prescribe ice - stream_year_first = 1 ! first year in pice stream to use - stream_year_last = 1 ! last year in pice stream to use - model_year_align = 1 ! align stream_year_first with this model year - stream_fldVarName = 'ice_cov' - stream_fldFileName(:) = ' ' - stream_domTvarName = 'time' - stream_domXvarName = 'lon' - stream_domYvarName = 'lat' - stream_domAreaName = 'area' - stream_domMaskName = 'mask' - stream_domFileName = ' ' - stream_mapread = 'NOT_SET' - prescribed_ice_fill = .false. ! true if pice data fill required - - ! read from input file - call get_fileunit(nu_nml) + prescribed_ice_mode = .false. ! if true, prescribe ice + stream_yearfirst = 1 ! first year in pice stream to use + stream_yearlast = 1 ! last year in pice stream to use + stream_yearalign = 1 ! align stream_year_first with this model year + stream_varname = 'ice_cov' + stream_meshfile = ' ' + stream_datafiles(:) = ' ' + stream_mapalgo = 'bilinear' + + ! read namelist on master task if (my_task == master_task) then - open (nu_nml, file=nml_filename, status='old',iostat=nml_error) + open (newunit=nu_nml, file=nml_filename, status='old',iostat=nml_error) call shr_nl_find_group_name(nu_nml, 'ice_prescribed_nml', status=nml_error) - if (nml_error == 0) then - read(nu_nml, ice_prescribed_nml, iostat=nml_error) - if (nml_error > 0) then - call shr_sys_abort( 'problem on read of ice_prescribed namelist in ice_prescribed_mod' ) - endif + if (nml_error /= 0) then + write(nu_diag,F00) "ERROR: problem on read of ice_prescribed_nml namelist" + call abort_ice(subName) endif + read(nu_nml, ice_prescribed_nml, iostat=nml_error) + close(nu_nml) end if - call release_fileunit(nu_nml) - call broadcast_scalar(prescribed_ice, master_task) - - ! *** If not prescribed ice then return *** - if (.not. prescribed_ice) RETURN - - call broadcast_scalar(model_year_align,master_task) - call broadcast_scalar(stream_year_first,master_task) - call broadcast_scalar(stream_year_last,master_task) - call broadcast_scalar(stream_fldVarName,master_task) - call broadcast_scalar(stream_domTvarName,master_task) - call broadcast_scalar(stream_domXvarName,master_task) - call broadcast_scalar(stream_domYvarName,master_task) - call broadcast_scalar(stream_domAreaName,master_task) - call broadcast_scalar(stream_domMaskName,master_task) - call broadcast_scalar(stream_domFileName,master_task) - call broadcast_scalar(stream_mapread,master_task) - call broadcast_scalar(prescribed_ice_fill,master_task) - call mpi_bcast(stream_fldFileName, len(stream_fldFileName(1))*NFilesMaximum, & - MPI_CHARACTER, 0, MPI_COMM_ICE, ierr) - - nFile = 0 - do n=1,nFilesMaximum - if (stream_fldFileName(n) /= ' ') nFile = nFile + 1 - end do - ! 
Read shr_strdata_nml namelist - if (prescribed_ice_fill) then - fillalgo='nn' - else - fillalgo='none' - endif + ! broadcast namelist input + call broadcast_scalar(prescribed_ice_mode, master_task) - if (my_task == master_task) then - write(nu_diag,*) ' ' - write(nu_diag,*) 'This is the prescribed ice coverage option.' - write(nu_diag,*) ' stream_year_first = ',stream_year_first - write(nu_diag,*) ' stream_year_last = ',stream_year_last - write(nu_diag,*) ' model_year_align = ',model_year_align - write(nu_diag,*) ' stream_fldVarName = ',trim(stream_fldVarName) - do n = 1,nFile - write(nu_diag,*) ' stream_fldFileName = ',trim(stream_fldFileName(n)),n + ! set module variable 'prescribed_ice' + prescribed_ice = prescribed_ice_mode + + ! -------------------------------------------------- + ! only do the following if prescribed ice mode is on + ! -------------------------------------------------- + + if (prescribed_ice_mode) then + + call broadcast_scalar(stream_yearalign , master_task) + call broadcast_scalar(stream_yearfirst , master_task) + call broadcast_scalar(stream_yearlast , master_task) + call broadcast_scalar(stream_meshfile , master_task) + call broadcast_scalar(stream_mapalgo , master_task) + call broadcast_scalar(stream_varname , master_task) + call mpi_bcast(stream_dataFiles, len(stream_datafiles(1))*NFilesMaximum, MPI_CHARACTER, 0, MPI_COMM_ICE, ierr) + + nFile = 0 + do n = 1,nFilesMaximum + if (stream_datafiles(n) /= ' ') nFile = nFile + 1 end do - write(nu_diag,*) ' stream_domTvarName = ',trim(stream_domTvarName) - write(nu_diag,*) ' stream_domXvarName = ',trim(stream_domXvarName) - write(nu_diag,*) ' stream_domYvarName = ',trim(stream_domYvarName) - write(nu_diag,*) ' stream_domFileName = ',trim(stream_domFileName) - write(nu_diag,*) ' stream_mapread = ',trim(stream_mapread) - write(nu_diag,*) ' stream_fillalgo = ',trim(fillalgo) - write(nu_diag,*) ' ' - endif - - gsize = nx_global*ny_global - lsize = size(gindex) - call mct_gsMap_init( gsmap_ice, gindex, MPI_COMM_ICE, compid, lsize, gsize) - call ice_prescribed_set_domain( lsize, MPI_COMM_ICE, gsmap_ice, dom_ice ) - - call shr_strdata_create(sdat,name="prescribed_ice", & - mpicom=MPI_COMM_ICE, compid=compid, & - gsmap=gsmap_ice, ggrid=dom_ice, & - nxg=nx_global,nyg=ny_global, & - yearFirst=stream_year_first, & - yearLast=stream_year_last, & - yearAlign=model_year_align, & - offset=0, & - domFilePath='', & - domFileName=trim(stream_domFileName), & - domTvarName=stream_domTvarName, & - domXvarName=stream_domXvarName, & - domYvarName=stream_domYvarName, & - domAreaName=stream_domAreaName, & - domMaskName=stream_domMaskName, & - filePath='', & - filename=stream_fldFileName(1:nFile), & - fldListFile=stream_fldVarName, & - fldListModel=stream_fldVarName, & - fillalgo=trim(fillalgo), & - calendar=trim(calendar_type), & - mapread=trim(stream_mapread)) - if (my_task == master_task) then - call shr_strdata_print(sdat,'SPRESICE data') - endif + if (my_task == master_task) then + write(nu_diag,*) ' ' + write(nu_diag,F00) 'This is the prescribed ice coverage option.' 
+          write(nu_diag,F01) ' stream_yearfirst = ',stream_yearfirst
+          write(nu_diag,F01) ' stream_yearlast  = ',stream_yearlast
+          write(nu_diag,F01) ' stream_yearalign = ',stream_yearalign
+          write(nu_diag,F00) ' stream_meshfile  = ',trim(stream_meshfile)
+          write(nu_diag,F00) ' stream_varname   = ',trim(stream_varname)
+          write(nu_diag,F00) ' stream_mapalgo   = ',trim(stream_mapalgo)
+          do n = 1,nFile
+             write(nu_diag,F00) ' stream_datafiles = ',trim(stream_dataFiles(n))
+          end do
+          write(nu_diag,*) ' '
+       endif
+
+       ! initialize sdat
+       call shr_strdata_init_from_inline(sdat,                  &
+            my_task             = my_task,                      &
+            logunit             = nu_diag,                      &
+            compname            = 'ICE',                        &
+            model_clock         = clock,                        &
+            model_mesh          = mesh,                         &
+            stream_meshfile     = stream_meshfile,              &
+            stream_lev_dimname  = 'null',                       &
+            stream_mapalgo      = trim(stream_mapalgo),         &
+            stream_filenames    = stream_datafiles(1:nfile),    &
+            stream_fldlistFile  = (/'ice_cov'/),                &
+            stream_fldListModel = (/'ice_cov'/),                &
+            stream_yearFirst    = stream_yearFirst,             &
+            stream_yearLast     = stream_yearLast,              &
+            stream_yearAlign    = stream_yearAlign ,            &
+            stream_offset       = 0,                            &
+            stream_taxmode      = 'cycle',                      &
+            stream_dtlimit      = 1.5_dbl_kind,                 &
+            stream_tintalgo     = 'linear',                     &
+            rc                  = rc)
+       if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+       ! print out sdat info
+       if (my_task == master_task) then
+          call shr_strdata_print(sdat,'ice coverage prescribed data')
+       endif
+
+       ! For one ice category, set hin_max(1) to something big
+       if (ncat == 1) then
+          hin_max(1) = 999._dbl_kind
+       end if
+
+    end if ! end of if prescribed ice mode
 
-    !-----------------------------------------------------------------
-    ! For one ice category, set hin_max(1) to something big
-    !-----------------------------------------------------------------
-    if (ncat == 1) then
-       hin_max(1) = 999._dbl_kind
-    end if
 
  end subroutine ice_prescribed_init
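
The namelist above is read on the master task only and then broadcast. A
minimal self-contained sketch of that pattern, reusing the same helpers
(the file name 'ice_in' and group 'example_nml' are hypothetical):

    integer :: unit, ios
    logical :: example_flag                        ! hypothetical namelist variable
    namelist /example_nml/ example_flag
    open(newunit=unit, file='ice_in', status='old', iostat=ios)
    call shr_nl_find_group_name(unit, 'example_nml', status=ios)   ! seek '&example_nml'
    if (ios == 0) read(unit, nml=example_nml, iostat=ios)
    close(unit)
    call broadcast_scalar(example_flag, master_task)               ! share with all tasks

 
 !=======================================================================
 subroutine ice_prescribed_run(mDateIn, secIn)
 
-    ! !DESCRIPTION:
-    ! Finds two time slices bounding current model time, remaps if necessary
-
-    implicit none
+    ! Finds two time slices bounding current model time, remaps if necessary
+    ! Interpolate to new ice coverage
 
-    ! !INPUT/OUTPUT PARAMETERS:
-    integer(kind=int_kind), intent(in) :: mDateIn ! Current model date (yyyymmdd)
-    integer(kind=int_kind), intent(in) :: secIn   ! Elapsed seconds on model date
+    ! input/output parameters:
+    integer(kind=int_kind), intent(in) :: mDateIn ! Current model date (yyyymmdd)
+    integer(kind=int_kind), intent(in) :: secIn   ! Elapsed seconds on model date
+
+    ! local variables
+    integer(kind=int_kind) :: i,j,n,iblk      ! loop indices and counter
+    integer(kind=int_kind) :: ilo,ihi,jlo,jhi ! beginning and end of physical domain
+    type (block) :: this_block
+    real(kind=dbl_kind) :: aice_max           ! maximum ice concentration
+    real(kind=dbl_kind), pointer :: dataptr(:)
+    integer :: rc                             ! ESMF return code
+    character(*),parameter :: subName = "('ice_prescribed_run')"
+    character(*),parameter :: F00 = "('(ice_prescribed_run) ',a,2g20.13)"
+    logical :: first_time = .true.
+    !------------------------------------------------------------------------
-    ! local varaibles
-    integer(kind=int_kind) :: i,j,n,iblk ! loop indices and counter
-    integer(kind=int_kind) :: ilo,ihi,jlo,jhi ! beginning and end of physical domain
-    type (block) :: this_block
-    real(kind=dbl_kind) :: aice_max ! maximun ice concentration
-    logical, save :: first_time = .true.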
- character(*),parameter :: subName = '(ice_prescribed_run)' - character(*),parameter :: F00 = "(a,2g20.13)" + rc = ESMF_SUCCESS - !------------------------------------------------------------------------ - ! Interpolate to new ice coverage - !------------------------------------------------------------------------ + ! Advance sdat stream + call shr_strdata_advance(sdat, ymd=mDateIn, tod=SecIn, logunit=nu_diag, istr='cice_pice', rc=rc) + if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=__FILE__)) then + call ESMF_Finalize(endflag=ESMF_END_ABORT) + end if - call shr_strdata_advance(sdat,mDateIn,SecIn,MPI_COMM_ICE,'cice_pice') + ! Get pointer for stream data that is time and spatially interpolate to model time and grid + call dshr_fldbun_getFldPtr(sdat%pstrm(1)%fldbun_model, 'ice_cov', dataptr, rc=rc) + if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=__FILE__)) then + call ESMF_Finalize(endflag=ESMF_END_ABORT) + end if - if (first_time) then + ! Fill in module ice_cov array + if (.not. allocated(ice_cov)) then allocate(ice_cov(nx_block,ny_block,max_blocks)) - endif - + end if ice_cov(:,:,:) = c0 ! This initializes ghost cells as well - - n=0 + n = 0 do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - do j = jlo, jhi do i = ilo, ihi n = n+1 - ice_cov(i,j,iblk) = sdat%avs(1)%rAttr(1,n) + ice_cov(i,j,iblk) = dataptr(n) end do end do end do - !-------------------------------------------------------------------- ! Check to see that ice concentration is in fraction, not percent - !-------------------------------------------------------------------- if (first_time) then aice_max = maxval(ice_cov) - if (aice_max > c10) then - write(nu_diag,F00) subname//" ERROR: Ice conc data must be in fraction, aice_max= ",& - aice_max - call abort_ice(subName) + write(nu_diag,F00) "ERROR: Ice conc data must be in fraction, aice_max= ", aice_max + rc = ESMF_FAILURE + if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=__FILE__)) then + call ESMF_Finalize(endflag=ESMF_END_ABORT) + end if end if first_time = .false. end if - !----------------------------------------------------------------- ! Set prescribed ice state and fluxes - !----------------------------------------------------------------- - call ice_prescribed_phys() end subroutine ice_prescribed_run - !=============================================================================== - subroutine ice_prescribed_phys + !======================================================================= + subroutine ice_prescribed_phys() ! Set prescribed ice state using input ice concentration; ! set surface ice temperature to atmospheric value; use ! linear temperature gradient in ice to ocean temperature. - ! !USES: use ice_flux use ice_state use icepack_intfc, only : icepack_aggregate use ice_dyn_evp - implicit none !----- Local ------ integer(kind=int_kind) :: layer ! level index integer(kind=int_kind) :: nc ! ice category index integer(kind=int_kind) :: i,j,k ! longitude, latitude and level indices integer(kind=int_kind) :: iblk - integer(kind=int_kind) :: nt_Tsfc, nt_sice, nt_qice, nt_qsno, ntrcr - - real(kind=dbl_kind) :: slope ! diff in underlying ocean tmp and ice surface tmp - real(kind=dbl_kind) :: Ti ! ice level temperature - real(kind=dbl_kind) :: Tmlt ! 
ice level melt temperature - real(kind=dbl_kind) :: qin_save(nilyr) - real(kind=dbl_kind) :: qsn_save(nslyr) - real(kind=dbl_kind) :: hi ! ice prescribed (hemispheric) ice thickness - real(kind=dbl_kind) :: hs ! snow thickness - real(kind=dbl_kind) :: zn ! normalized ice thickness - real(kind=dbl_kind) :: salin(nilyr) ! salinity (ppt) - real(kind=dbl_kind) :: rad_to_deg, pi, puny - real(kind=dbl_kind) :: rhoi, rhos, cp_ice, cp_ocn, lfresh, depressT - + integer(kind=int_kind) :: nt_Tsfc + integer(kind=int_kind) :: nt_sice + integer(kind=int_kind) :: nt_qice + integer(kind=int_kind) :: nt_qsno + integer(kind=int_kind) :: ntrcr + real(kind=dbl_kind) :: slope ! diff in underlying ocean tmp and ice surface tmp + real(kind=dbl_kind) :: Ti ! ice level temperature + real(kind=dbl_kind) :: Tmlt ! ice level melt temperature + real(kind=dbl_kind) :: qin_save(nilyr) + real(kind=dbl_kind) :: qsn_save(nslyr) + real(kind=dbl_kind) :: hi ! ice prescribed (hemispheric) ice thickness + real(kind=dbl_kind) :: hs ! snow thickness + real(kind=dbl_kind) :: zn ! normalized ice thickness + real(kind=dbl_kind) :: salin(nilyr) ! salinity (ppt) + real(kind=dbl_kind) :: rad_to_deg, pi, puny + real(kind=dbl_kind) :: rhoi + real(kind=dbl_kind) :: rhos + real(kind=dbl_kind) :: cp_ice + real(kind=dbl_kind) :: cp_ocn + real(kind=dbl_kind) :: lfresh + real(kind=dbl_kind) :: depressT real(kind=dbl_kind), parameter :: nsal = 0.407_dbl_kind real(kind=dbl_kind), parameter :: msal = 0.573_dbl_kind real(kind=dbl_kind), parameter :: saltmax = 3.2_dbl_kind ! max salinity at ice base (ppm) character(*),parameter :: subName = '(ice_prescribed_phys)' + !----------------------------------------------------------------- call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, & nt_qice_out=nt_qice, nt_qsno_out=nt_qsno) @@ -458,7 +434,7 @@ subroutine ice_prescribed_phys trcrn(i,j,nt_sice:nt_sice+nilyr-1,:,iblk) = c0 trcrn(i,j,nt_qice:nt_qice+nilyr-1,:,iblk) = c0 trcrn(i,j,nt_qsno:nt_qsno+nslyr-1,:,iblk) = c0 - end if ! ice_cov >= eps04 + end if ! ice_cov >= eps04 !-------------------------------------------------------------------- ! compute aggregate ice state and open water area @@ -478,10 +454,11 @@ subroutine ice_prescribed_phys trcr_base = trcr_base(1:ntrcr,:), & n_trcr_strata = n_trcr_strata(1:ntrcr), & nt_strata = nt_strata(1:ntrcr,:)) - end if ! tmask - enddo ! i - enddo ! j - enddo ! iblk + + end if ! tmask + enddo ! i + enddo ! j + enddo ! iblk do iblk = 1, nblocks do j = 1, ny_block @@ -509,105 +486,6 @@ subroutine ice_prescribed_phys end subroutine ice_prescribed_phys - !=============================================================================== - subroutine ice_prescribed_set_domain( lsize, mpicom, gsmap_i, dom_i ) - - ! Arguments - integer , intent(in) :: lsize - integer , intent(in) :: mpicom - type(mct_gsMap), intent(in) :: gsMap_i - type(mct_ggrid), intent(inout) :: dom_i - - ! Local Variables - integer :: i, j, iblk, n ! indices - integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain - real(dbl_kind), pointer :: data1(:) ! temporary - real(dbl_kind), pointer :: data2(:) ! temporary - real(dbl_kind), pointer :: data3(:) ! temporary - real(dbl_kind), pointer :: data4(:) ! temporary - real(dbl_kind), pointer :: data5(:) ! temporary - real(dbl_kind), pointer :: data6(:) ! temporary - integer , pointer :: idata(:) ! temporary - real(kind=dbl_kind) :: rad_to_deg - type(block) :: this_block ! 
block information for current block - character(*),parameter :: subName = '(ice_prescribed_set_domain)' - !-------------------------------- - - call icepack_query_parameters(rad_to_deg_out=rad_to_deg) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - ! Initialize mct domain type - call mct_gGrid_init(GGrid=dom_i, & - CoordChars='lat:lon:hgt', OtherChars='area:aream:mask:frac', lsize=lsize ) - call mct_aVect_zero(dom_i%data) - - ! Determine global gridpoint number attribute, GlobGridNum, which is set automatically by MCT - call mct_gsMap_orderedPoints(gsMap_i, my_task, idata) - call mct_gGrid_importIAttr(dom_i,'GlobGridNum',idata,lsize) - deallocate(idata) - - ! Determine domain (numbering scheme is: West to East and South to North to South pole) - ! Initialize attribute vector with special value - - allocate(data1(lsize)) - allocate(data2(lsize)) - allocate(data3(lsize)) - allocate(data4(lsize)) - allocate(data5(lsize)) - allocate(data6(lsize)) - - data1(:) = -9999.0_dbl_kind - data2(:) = -9999.0_dbl_kind - data3(:) = -9999.0_dbl_kind - data4(:) = -9999.0_dbl_kind - call mct_gGrid_importRAttr(dom_i,"lat" ,data1,lsize) - call mct_gGrid_importRAttr(dom_i,"lon" ,data2,lsize) - call mct_gGrid_importRAttr(dom_i,"area" ,data3,lsize) - call mct_gGrid_importRAttr(dom_i,"aream",data4,lsize) - data5(:) = 0.0_dbl_kind - data6(:) = 0.0_dbl_kind - call mct_gGrid_importRAttr(dom_i,"mask" ,data5,lsize) - call mct_gGrid_importRAttr(dom_i,"frac" ,data6,lsize) - - ! Fill in correct values for domain components - ! lat/lon in degrees, area in radians^2, mask is 1 (ocean), 0 (non-ocean) - n=0 - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi - n = n+1 - - data1(n) = TLON(i,j,iblk)*rad_to_deg - data2(n) = TLAT(i,j,iblk)*rad_to_deg - data3(n) = tarea(i,j,iblk)/(radius*radius) - - data5(n) = real(nint(hm(i,j,iblk)),kind=dbl_kind) - if (trim(grid_type) == 'latlon') then - data6(n) = ocn_gridcell_frac(i,j,iblk) - else - data6(n) = real(nint(hm(i,j,iblk)),kind=dbl_kind) - end if - - enddo !i - enddo !j - enddo !iblk - call mct_gGrid_importRattr(dom_i,"lon" ,data1,lsize) - call mct_gGrid_importRattr(dom_i,"lat" ,data2,lsize) - call mct_gGrid_importRattr(dom_i,"area",data3,lsize) - call mct_gGrid_importRattr(dom_i,"mask",data5,lsize) - call mct_gGrid_importRattr(dom_i,"frac",data6,lsize) - - deallocate(data1, data2, data3, data4, data5, data6) - - end subroutine ice_prescribed_set_domain - #endif end module ice_prescribed_mod diff --git a/cicecore/drivers/nuopc/cmeps/ice_scam.F90 b/cicecore/drivers/nuopc/cmeps/ice_scam.F90 index f5280b259..b92900e4f 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_scam.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_scam.F90 @@ -6,9 +6,15 @@ module ice_scam ! single column control variables (only used for latlon grid) - logical :: single_column ! true => single column mode - real (kind=dbl_kind) scmlat ! single column latitude (degrees) - real (kind=dbl_kind) scmlon ! single column longitude (degrees) + logical :: single_column = .false. ! true => single column mode + real (kind=dbl_kind) :: scmlat ! single column latitude (degrees) + real (kind=dbl_kind) :: scmlon ! single column longitude (degrees) + real (kind=dbl_kind) :: scol_frac ! single column ocn fraction + real (kind=dbl_kind) :: scol_mask ! 
single column ocn mask + real (kind=dbl_kind) :: scol_area ! single column ocn area + integer :: scol_ni ! ni size of single column domain file + integer :: scol_nj ! nj size of single column domain file + logical :: scol_valid = .false. ! true => single column mask is 1 end module ice_scam diff --git a/cicecore/drivers/nuopc/cmeps/ice_shr_methods.F90 b/cicecore/drivers/nuopc/cmeps/ice_shr_methods.F90 index 323cba9a4..1144568b4 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_shr_methods.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_shr_methods.F90 @@ -11,8 +11,8 @@ module ice_shr_methods use ESMF , only : ESMF_GeomType_Flag, ESMF_FieldStatus_Flag use ESMF , only : ESMF_Mesh, ESMF_MeshGet use ESMF , only : ESMF_GEOMTYPE_MESH, ESMF_GEOMTYPE_GRID, ESMF_FIELDSTATUS_COMPLETE - use ESMF , only : ESMF_Clock, ESMF_ClockCreate, ESMF_ClockGet, ESMF_ClockSet - use ESMF , only : ESMF_ClockPrint, ESMF_ClockAdvance + use ESMF , only : ESMF_Clock, ESMF_ClockCreate, ESMF_ClockGet, ESMF_ClockSet + use ESMF , only : ESMF_ClockPrint, ESMF_ClockAdvance use ESMF , only : ESMF_Alarm, ESMF_AlarmCreate, ESMF_AlarmGet, ESMF_AlarmSet use ESMF , only : ESMF_Calendar, ESMF_CALKIND_NOLEAP, ESMF_CALKIND_GREGORIAN use ESMF , only : ESMF_Time, ESMF_TimeGet, ESMF_TimeSet @@ -38,7 +38,7 @@ module ice_shr_methods public :: state_reset public :: state_flddebug public :: state_diagnose - public :: alarmInit + public :: alarmInit public :: chkerr private :: timeInit @@ -65,7 +65,7 @@ module ice_shr_methods optMonthly = "monthly" , & optYearly = "yearly" , & optDate = "date" , & - optIfdays0 = "ifdays0" + optIfdays0 = "ifdays0" ! Module data integer, parameter :: SecPerDay = 86400 ! Seconds per day @@ -588,7 +588,7 @@ subroutine field_getfldptr(field, fldptr1, fldptr2, rank, abort, rc) call ESMF_MeshGet(lmesh, numOwnedNodes=nnodes, numOwnedElements=nelements, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return if (nnodes == 0 .and. nelements == 0) lrank = 0 - else + else call ESMF_LogWrite(trim(subname)//": ERROR geomtype not supported ", & ESMF_LOGMSG_INFO, rc=rc) rc = ESMF_FAILURE @@ -949,7 +949,7 @@ end subroutine alarmInit subroutine timeInit( Time, ymd, cal, tod, rc) - ! Create the ESMF_Time object corresponding to the given input time, + ! Create the ESMF_Time object corresponding to the given input time, ! given in YMD (Year Month Day) and TOD (Time-of-day) format. ! Set the time by an integer as YYYYMMDD and integer seconds in the day diff --git a/cicecore/drivers/unittest/bcstchk/bcstchk.F90 b/cicecore/drivers/unittest/bcstchk/bcstchk.F90 new file mode 100644 index 000000000..c0dbb026c --- /dev/null +++ b/cicecore/drivers/unittest/bcstchk/bcstchk.F90 @@ -0,0 +1,285 @@ + + program bcstchk + + ! This tests the CICE ice_broadcast infrastructure by calling the + ! methods with hardwired input and known outputs and verifying the + ! results. 
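
Each case in this driver follows the same pattern: set a baseline value on
every rank, overwrite it with the expected value only on the broadcasting
rank, broadcast, and verify everywhere. A condensed sketch of one scalar
case, using the test's own variables:

    d0 = -999.0_dbl_kind                  ! baseline on every rank
    if (my_task == bcst_pe) d0 = dval     ! expected value only on the source rank
    call broadcast_scalar(d0, bcst_pe)    ! collective: every rank receives dval
    if (d0 /= dval) iflag = 1             ! checked on all ranks via global_maxval
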
+ + use ice_kinds_mod, only: int_kind, dbl_kind, real_kind, log_kind + use ice_communicate, only: my_task, master_task, get_num_procs, get_rank, MPI_COMM_ICE + use ice_communicate, only: init_communicate, get_num_procs, ice_barrier + use ice_global_reductions, only: global_maxval + use ice_fileunits, only: flush_fileunit + use ice_exit, only: abort_ice, end_run + use ice_broadcast + + implicit none + + integer(int_kind) :: n, k, k1, k2, k3 + + integer(int_kind), parameter :: dsize = 10 + integer(int_kind) :: ival, i0, i1(dsize), i2(dsize,dsize), i3(dsize,dsize,dsize) + logical(log_kind) :: lval, l0, l1(dsize), l2(dsize,dsize), l3(dsize,dsize,dsize) + real(real_kind) :: rval, r0, r1(dsize), r2(dsize,dsize), r3(dsize,dsize,dsize) + real(dbl_kind) :: dval, d0, d1(dsize), d2(dsize,dsize), d3(dsize,dsize,dsize) + character(len=32) :: cval, c0 + + real(dbl_kind) :: xval + + integer(int_kind), parameter :: ntests1 = 17 + character(len=8) :: errorflag1(ntests1) + character(len=32) :: stringflag1(ntests1) + + integer(int_kind) :: ierr, npes, bcst_pe + integer(int_kind) :: iflag, gflag + character(len=8) :: errorflag0 + character(len=16) :: teststr + character(len=*), parameter :: & + passflag = 'PASS', & + failflag = 'FAIL' + + character(len=*), parameter :: subname = '(bcstchk)' + + ! --------------------------- + + call init_communicate() + npes = get_num_procs() + my_task = get_rank() + master_task = 0 + + if (my_task == master_task) then + write(6,*) ' ' + write(6,*) '==========================================================' + write(6,*) ' ' + write(6,*) 'Running BCSTCHK' + write(6,*) ' ' + write(6,*) ' npes = ',npes + write(6,*) ' my_task = ',my_task + write(6,*) ' ' + endif + + errorflag0 = passflag + errorflag1 = passflag + stringflag1 = ' ' + + ! --------------------------- + ! Test ice_broadcast methods + ! Test broadcast from root and from npes + ! 
--------------------------- + + do k = 1,2 + if (k == 1) then + bcst_pe = 0 + else + bcst_pe = max(npes,1) - 1 + endif + if (my_task == master_task) then + write(6,*) ' ' + write(6,*) ' bcst_pe = ',bcst_pe + endif + + xval = -999._dbl_kind + rval = 21.5_real_kind + real(bcst_pe,kind=real_kind) + dval = 17.3_dbl_kind + real(bcst_pe,kind=dbl_kind) + ival = 223 + bcst_pe + write(cval,'(a,i4.4)') 'string is passed from ',bcst_pe + lval = (k == 1) + + do n = 1,ntests1 + i0 = xval + i1 = xval + i2 = xval + i3 = xval + r0 = xval + r1 = xval + r2 = xval + r3 = xval + d0 = xval + d1 = xval + d2 = xval + d3 = xval + l0 = .not.lval + l1 = .not.lval + l2 = .not.lval + l3 = .not.lval + c0 = 'nothing to see here' + + if (my_task == bcst_pe) then + i0 = ival + i1 = ival + i2 = ival + i3 = ival + r0 = rval + r1 = rval + r2 = rval + r3 = rval + d0 = dval + d1 = dval + d2 = dval + d3 = dval + l0 = lval + l1 = lval + l2 = lval + l3 = lval + c0 = cval + endif + + iflag = 0 + gflag = -1 + write(teststr,'(a,1x,i2.2)') ' test',n + if (n == 1) then + stringflag1(n) = ' bcst_scalar_dbl' + call broadcast_scalar(d0, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),d0,dval + if (d0 /= dval) iflag=1 + elseif (n == 2) then + stringflag1(n) = ' bcst_array_dbl_1d' + call broadcast_array(d1, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),minval(d1),maxval(d1),dval + if (minval(d1) /= dval) iflag=1 + if (maxval(d1) /= dval) iflag=1 + elseif (n == 3) then + stringflag1(n) = ' bcst_array_dbl_2d' + call broadcast_array(d2, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),minval(d2),maxval(d2),dval + if (minval(d2) /= dval) iflag=1 + if (maxval(d2) /= dval) iflag=1 + elseif (n == 4) then + stringflag1(n) = ' bcst_array_dbl_3d' + call broadcast_array(d3, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),minval(d3),maxval(d3),dval + if (minval(d3) /= dval) iflag=1 + if (maxval(d3) /= dval) iflag=1 + elseif (n == 5) then + stringflag1(n) = ' bcst_scalar_real' + call broadcast_scalar(r0, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),r0,rval + if (r0 /= rval) iflag=1 + elseif (n == 6) then + stringflag1(n) = ' bcst_array_real_1d' + call broadcast_array(r1, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),minval(r1),maxval(r1),rval + if (minval(r1) /= rval) iflag=1 + if (maxval(r1) /= rval) iflag=1 + elseif (n == 7) then + stringflag1(n) = ' bcst_array_real_2d' + call broadcast_array(r2, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),minval(r2),maxval(r2),rval + if (minval(r2) /= rval) iflag=1 + if (maxval(r2) /= rval) iflag=1 + elseif (n == 8) then + stringflag1(n) = ' bcst_array_real_3d' + call broadcast_array(r3, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),minval(r3),maxval(r3),rval + if (minval(r3) /= rval) iflag=1 + if (maxval(r3) /= rval) iflag=1 + elseif (n == 9) then + stringflag1(n) = ' bcst_scalar_int' + call broadcast_scalar(i0, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),i0,ival + if (i0 /= ival) iflag=1 + elseif (n == 10) then + stringflag1(n) = ' bcst_array_int_1d' + call broadcast_array(i1, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),minval(i1),maxval(i1),ival + if (minval(i1) /= ival) iflag=1 + if (maxval(i1) /= ival) iflag=1 + elseif (n == 
11) then + stringflag1(n) = ' bcst_array_int_2d' + call broadcast_array(i2, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),minval(i2),maxval(i2),ival + if (minval(i2) /= ival) iflag=1 + if (maxval(i2) /= ival) iflag=1 + elseif (n == 12) then + stringflag1(n) = ' bcst_array_int_3d' + call broadcast_array(i3, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),minval(i3),maxval(i3),ival + if (minval(i3) /= ival) iflag=1 + if (maxval(i3) /= ival) iflag=1 + elseif (n == 13) then + stringflag1(n) = ' bcst_scalar_logical' + call broadcast_scalar(l0, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),l0,lval + if (l0 .neqv. lval) iflag=1 + elseif (n == 14) then + stringflag1(n) = ' bcst_array_logical_1d' + call broadcast_array(l1, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),l1(1),lval + do k1 = 1,dsize + if (l1(k1) .neqv. lval) iflag=1 + enddo + elseif (n == 15) then + stringflag1(n) = ' bcst_array_logical_2d' + call broadcast_array(l2, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),l2(1,1),lval + do k2 = 1,dsize + do k1 = 1,dsize + if (l2(k1,k2) .neqv. lval) iflag=1 + enddo + enddo + elseif (n == 16) then + stringflag1(n) = ' bcst_array_logical_3d' + call broadcast_array(l3, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),l3(1,1,1),lval + do k3 = 1,dsize + do k2 = 1,dsize + do k1 = 1,dsize + if (l3(k1,k2,k3) .neqv. lval) iflag=1 + enddo + enddo + enddo + elseif (n == 17) then + stringflag1(n) = ' bcst_scalar_char' + call broadcast_scalar(c0, bcst_pe) + if (my_task == master_task) write(6,*) trim(teststr),trim(stringflag1(n)),' ',trim(c0),' : ',trim(cval) + if (c0 /= cval) iflag=1 + else + call abort_ice(subname//' illegal k bcst',file=__FILE__,line=__LINE__) + endif + + gflag = global_maxval(iflag, MPI_COMM_ICE) + if (gflag /= 0) then + if (my_task == master_task) write(6,*) ' **** ERROR test ',n + errorflag1(n) = failflag + errorflag0 = failflag + endif + enddo ! n + enddo ! k + + call flush_fileunit(6) + call ice_barrier() + + ! --------------------------- + + if (my_task == master_task) then + write(6,*) ' ' + do k = 1,ntests1 + write(6,*) errorflag1(k),stringflag1(k) + enddo + write(6,*) ' ' + if (errorflag0 == passflag) then + write(6,*) 'BCSTCHK COMPLETED SUCCESSFULLY' + else + write(6,*) 'BCSTCHK FAILED' + call abort_ice(subname//' ERROR: BCSTCHK FAILED',file=__FILE__,line=__LINE__) + endif + write(6,*) ' ' + write(6,*) '==========================================================' + write(6,*) ' ' + write(6,*) 'NOTE: We are testing the abort now so you should see an abort to follow' + write(6,*) 'The BCSTCHK passed, so please ignore the abort' + write(6,*) ' ' + endif + + ! Test abort_ice, regardless of test outcome + call flush_fileunit(6) + call ice_barrier() + call abort_ice(subname//' Test abort ',file=__FILE__,line=__LINE__) + + if (my_task == master_task) write(6,*) subname,'This line should not be written' + + call end_run() + + end program bcstchk + +!======================================================================= diff --git a/cicecore/drivers/unittest/calchk/calchk.F90 b/cicecore/drivers/unittest/calchk/calchk.F90 index bbd61b63e..c8472faba 100644 --- a/cicecore/drivers/unittest/calchk/calchk.F90 +++ b/cicecore/drivers/unittest/calchk/calchk.F90 @@ -1,6 +1,9 @@ program calchk + ! This tests the CICE calendar by calling it directly from this driver + ! 
and verifies results from hardwired inputs with known outputs + use ice_kinds_mod, only: int_kind, dbl_kind use ice_calendar, only: myear, mmonth, mday, msec use ice_calendar, only: year_init, month_init, day_init, sec_init diff --git a/cicecore/drivers/unittest/sumchk/CICE_FinalMod.F90 b/cicecore/drivers/unittest/sumchk/CICE_FinalMod.F90 new file mode 100644 index 000000000..a59c210aa --- /dev/null +++ b/cicecore/drivers/unittest/sumchk/CICE_FinalMod.F90 @@ -0,0 +1,70 @@ +!======================================================================= +! +! This module contains routines for the final exit of the CICE model, +! including final output and clean exit from any message passing +! environments and frameworks. +! +! authors: Philip W. Jones, LANL +! 2006: Converted to free source form (F90) by Elizabeth Hunke +! 2008: E. Hunke moved ESMF code to its own driver + + module CICE_FinalMod + + use ice_kinds_mod + use ice_communicate, only: my_task, master_task + use ice_exit, only: end_run, abort_ice + use ice_fileunits, only: nu_diag, release_all_fileunits + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + + implicit none + private + public :: CICE_Finalize + +!======================================================================= + + contains + +!======================================================================= +! +! This routine shuts down CICE by exiting all relevent environments. + + subroutine CICE_Finalize + + use ice_restart_shared, only: runid + use ice_timers, only: ice_timer_stop, ice_timer_print_all, timer_total + + character(len=*), parameter :: subname = '(CICE_Finalize)' + + !------------------------------------------------------------------- + ! stop timers and print timer info + !------------------------------------------------------------------- + + call ice_timer_stop(timer_total) ! stop timing entire run + call ice_timer_print_all(stats=.false.) ! print timing information + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__,line= __LINE__) + + if (my_task == master_task) then + write(nu_diag, *) " " + write(nu_diag, *) "CICE COMPLETED SUCCESSFULLY " + write(nu_diag, *) " " + endif + +!echmod if (nu_diag /= 6) close (nu_diag) ! diagnostic output + call release_all_fileunits + + !------------------------------------------------------------------- + ! quit MPI + !------------------------------------------------------------------- + + call end_run ! quit MPI + + end subroutine CICE_Finalize + +!======================================================================= + + end module CICE_FinalMod + +!======================================================================= diff --git a/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 b/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 new file mode 100644 index 000000000..60f71fa8a --- /dev/null +++ b/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 @@ -0,0 +1,486 @@ +!======================================================================= +! +! This module contains the CICE initialization routine that sets model +! parameters and initializes the grid and CICE state variables. +! +! authors Elizabeth C. Hunke, LANL +! William H. Lipscomb, LANL +! Philip W. Jones, LANL +! +! 2006: Converted to free form source (F90) by Elizabeth Hunke +! 2008: E. 
Hunke moved ESMF code to its own driver + + module CICE_InitMod + + use ice_kinds_mod + use ice_exit, only: abort_ice + use ice_fileunits, only: init_fileunits, nu_diag + use icepack_intfc, only: icepack_aggregate + use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist + use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave + use icepack_intfc, only: icepack_configure + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_flags, & + icepack_query_tracer_indices, icepack_query_tracer_sizes + + implicit none + private + public :: CICE_Initialize, cice_init + +!======================================================================= + + contains + +!======================================================================= + +! Initialize the basic state, grid and all necessary parameters for +! running the CICE model. Return the initial state in routine +! export state. +! Note: This initialization driver is designed for standalone and +! CESM-coupled applications. For other +! applications (e.g., standalone CAM), this driver would be +! replaced by a different driver that calls subroutine cice_init, +! where most of the work is done. + + subroutine CICE_Initialize + + character(len=*), parameter :: subname='(CICE_Initialize)' + !-------------------------------------------------------------------- + ! model initialization + !-------------------------------------------------------------------- + + call cice_init + + end subroutine CICE_Initialize + +!======================================================================= +! +! Initialize CICE model. + + subroutine cice_init + + use ice_arrays_column, only: hin_max, c_hi_range, alloc_arrays_column + use ice_arrays_column, only: floe_rad_l, floe_rad_c, & + floe_binwidth, c_fsd_range + use ice_state, only: alloc_state + use ice_flux_bgc, only: alloc_flux_bgc + use ice_calendar, only: dt, dt_dyn, istep, istep1, write_ic, & + init_calendar, advance_timestep, calc_timesteps + use ice_communicate, only: init_communicate, my_task, master_task + use ice_diagnostics, only: init_diags + use ice_domain, only: init_domain_blocks + use ice_domain_size, only: ncat, nfsd + use ice_dyn_eap, only: init_eap, alloc_dyn_eap + use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_vp, only: init_vp + use ice_flux, only: init_coupler_flux, init_history_therm, & + init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux + use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & + get_forcing_atmo, get_forcing_ocn, get_wave_spec + use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & + faero_default, faero_optics, alloc_forcing_bgc, fiso_default + use ice_grid, only: init_grid1, init_grid2, alloc_grid + use ice_history, only: init_hist, accum_hist + use ice_restart_shared, only: restart, runtype + use ice_init, only: input_data, init_state + use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers + use ice_kinds_mod + use ice_restoring, only: ice_HaloRestore_init + use ice_timers, only: timer_total, init_ice_timers, ice_timer_start + use ice_transport_driver, only: init_transport + + logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & + tr_iso, tr_fsd, wave_spec + character(len=*), parameter :: subname = '(cice_init)' + + call init_communicate ! initial setup for message passing + call init_fileunits ! unit numbers + + ! 
tcx debug, this will create a different logfile for each pe + ! if (my_task /= master_task) nu_diag = 100+my_task + + call icepack_configure() ! initialize icepack + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + call input_data ! namelist variables + call input_zbgc ! vertical biogeochemistry namelist + call count_tracers ! count tracers + + call init_domain_blocks ! set up block decomposition + call init_grid1 ! domain distribution + call alloc_grid ! allocate grid arrays + call alloc_arrays_column ! allocate column arrays + call alloc_state ! allocate state arrays + call alloc_dyn_shared ! allocate dyn shared arrays + call alloc_flux_bgc ! allocate flux_bgc arrays + call alloc_flux ! allocate flux arrays + call init_ice_timers ! initialize all timers + call ice_timer_start(timer_total) ! start timing entire run + call init_grid2 ! grid variables + call init_zbgc ! vertical biogeochemistry initialization + call init_calendar ! initialize some calendar stuff + call init_hist (dt) ! initialize output history file + + call init_dyn (dt_dyn) ! define dynamics parameters, variables + if (kdyn == 2) then + call alloc_dyn_eap ! allocate dyn_eap arrays + call init_eap ! define eap dynamics parameters, variables + else if (kdyn == 3) then + call init_vp ! define vp dynamics parameters, variables + endif + + call init_coupler_flux ! initialize fluxes exchanged with coupler + + call init_thermo_vertical ! initialize vertical thermodynamics + + call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! ice thickness distribution + if (my_task == master_task) then + call icepack_init_itd_hist(ncat=ncat, hin_max=hin_max, c_hi_range=c_hi_range) ! output + endif + + call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (tr_fsd) call icepack_init_fsd_bounds (nfsd, & ! floe size distribution + floe_rad_l, & ! fsd size lower bound in m (radius) + floe_rad_c, & ! fsd size bin centre in m (radius) + floe_binwidth, & ! fsd size bin width in m (radius) + c_fsd_range, & ! string for history output + write_diags=(my_task == master_task)) ! write diag on master only + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call init_forcing_ocn(dt) ! initialize sss and sst from data + call init_state ! initialize the ice state + call init_transport ! initialize horizontal transport + call ice_HaloRestore_init ! restored boundary conditions + + call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers, & + wave_spec_out=wave_spec) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (skl_bgc .or. z_tracers) call alloc_forcing_bgc ! allocate biogeochemistry arrays + + call init_restart ! initialize restart variables + call init_diags ! initialize diagnostic output points + call init_history_therm ! initialize thermo history variables + call init_history_dyn ! initialize dynamic history variables + call calc_timesteps ! 
update timestep counter if not using npt_unit="1" + + call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) + call icepack_query_tracer_flags(tr_iso_out=tr_iso) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical + !property tables + + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing + ! in prep_radiation. + if (trim(runtype) == 'continue' .or. restart) & + call init_shortwave ! initialize radiative transfer + +! tcraig, use advance_timestep here +! istep = istep + 1 ! update time step counters +! istep1 = istep1 + 1 +! time = time + dt ! determine the time and date +! call calendar(time) ! at the end of the first timestep + call advance_timestep() + + !-------------------------------------------------------------------- + ! coupler communication or forcing data initialization + !-------------------------------------------------------------------- + + call init_forcing_atmo ! initialize atmospheric forcing (standalone) + + if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice + call get_forcing_atmo ! atmospheric forcing from data + call get_forcing_ocn(dt) ! ocean forcing from data + + ! isotopes + if (tr_iso) call fiso_default ! default values + ! aerosols + ! if (tr_aero) call faero_data ! data file + ! if (tr_zaero) call fzaero_data ! data file (gx1) + if (tr_aero .or. tr_zaero) call faero_default ! default values + if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry + if (z_tracers) call get_atm_bgc ! biogeochemistry + + if (runtype == 'initial' .and. .not. restart) & + call init_shortwave ! initialize radiative transfer using current swdn + + call init_flux_atm ! initialize atmosphere fluxes sent to coupler + call init_flux_ocn ! initialize ocean fluxes sent to coupler + + if (write_ic) call accum_hist(dt) ! write initial conditions + + end subroutine cice_init + +!======================================================================= + + subroutine init_restart + + use ice_arrays_column, only: dhsn + use ice_blocks, only: nx_block, ny_block + use ice_calendar, only: calendar + use ice_constants, only: c0 + use ice_domain, only: nblocks + use ice_domain_size, only: ncat, n_iso, n_aero, nfsd + use ice_dyn_eap, only: read_restart_eap + use ice_dyn_shared, only: kdyn + use ice_grid, only: tmask + use ice_init, only: ice_ic + use ice_init_column, only: init_age, init_FY, init_lvl, & + init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & + init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd + use ice_restart_column, only: restart_age, read_restart_age, & + restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & + restart_pond_cesm, read_restart_pond_cesm, & + restart_pond_lvl, read_restart_pond_lvl, & + restart_pond_topo, read_restart_pond_topo, & + restart_fsd, read_restart_fsd, & + restart_iso, read_restart_iso, & + restart_aero, read_restart_aero, & + restart_hbrine, read_restart_hbrine, & + restart_zsal, restart_bgc + use ice_restart_driver, only: restartfile + use ice_restart_shared, only: runtype, restart + use ice_state ! almost everything + + integer(kind=int_kind) :: & + i, j , & ! horizontal indices + iblk ! 
block index + logical(kind=log_kind) :: & + tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & + tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & + skl_bgc, z_tracers, solve_zsal + integer(kind=int_kind) :: & + ntrcr + integer(kind=int_kind) :: & + nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & + nt_iage, nt_FY, nt_aero, nt_fsd, nt_isosno, nt_isoice + + character(len=*), parameter :: subname = '(init_restart)' + + call icepack_query_tracer_sizes(ntrcr_out=ntrcr) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call icepack_query_parameters(skl_bgc_out=skl_bgc, & + z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) + call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & + tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & + tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & + tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) + call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & + nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & + nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (trim(runtype) == 'continue') then + ! start from core restart file + call restartfile() ! given by pointer in ice_in + call calendar() ! update time parameters + if (kdyn == 2) call read_restart_eap ! EAP + else if (restart) then ! ice_ic = core restart file + call restartfile (ice_ic) ! or 'default' or 'none' + !!! uncomment to create netcdf + ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file + !!! uncomment if EAP restart data exists + ! if (kdyn == 2) call read_restart_eap + endif + + ! tracers + ! ice age tracer + if (tr_iage) then + if (trim(runtype) == 'continue') & + restart_age = .true. + if (restart_age) then + call read_restart_age + else + do iblk = 1, nblocks + call init_age(trcrn(:,:,nt_iage,:,iblk)) + enddo ! iblk + endif + endif + ! first-year area tracer + if (tr_FY) then + if (trim(runtype) == 'continue') restart_FY = .true. + if (restart_FY) then + call read_restart_FY + else + do iblk = 1, nblocks + call init_FY(trcrn(:,:,nt_FY,:,iblk)) + enddo ! iblk + endif + endif + ! level ice tracer + if (tr_lvl) then + if (trim(runtype) == 'continue') restart_lvl = .true. + if (restart_lvl) then + call read_restart_lvl + else + do iblk = 1, nblocks + call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & + trcrn(:,:,nt_vlvl,:,iblk)) + enddo ! iblk + endif + endif + ! CESM melt ponds + if (tr_pond_cesm) then + if (trim(runtype) == 'continue') & + restart_pond_cesm = .true. + if (restart_pond_cesm) then + call read_restart_pond_cesm + else + do iblk = 1, nblocks + call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk)) + enddo ! iblk + endif + endif + ! level-ice melt ponds + if (tr_pond_lvl) then + if (trim(runtype) == 'continue') & + restart_pond_lvl = .true. + if (restart_pond_lvl) then + call read_restart_pond_lvl + else + do iblk = 1, nblocks + call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk), & + trcrn(:,:,nt_ipnd,:,iblk), & + dhsn(:,:,:,iblk)) + enddo ! iblk + endif + endif + ! topographic melt ponds + if (tr_pond_topo) then + if (trim(runtype) == 'continue') & + restart_pond_topo = .true. 
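+      ! (editorial note, not part of the patch) The tracer blocks in this
+      ! routine all share one pattern: on a 'continue' run the tracer's
+      ! restart flag is forced on so the field is read from the restart
+      ! file; otherwise the field is freshly initialized, usually block by
+      ! block, by its init_* routine.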
+ if (restart_pond_topo) then + call read_restart_pond_topo + else + do iblk = 1, nblocks + call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk), & + trcrn(:,:,nt_ipnd,:,iblk)) + enddo ! iblk + endif ! .not. restart_pond + endif + ! floe size distribution + if (tr_fsd) then + if (trim(runtype) == 'continue') restart_fsd = .true. + if (restart_fsd) then + call read_restart_fsd + else + call init_fsd(trcrn(:,:,nt_fsd:nt_fsd+nfsd-1,:,:)) + endif + endif + + ! isotopes + if (tr_iso) then + if (trim(runtype) == 'continue') restart_iso = .true. + if (restart_iso) then + call read_restart_iso + else + do iblk = 1, nblocks + call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & + trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) + enddo ! iblk + endif + endif + + if (tr_aero) then ! ice aerosol + if (trim(runtype) == 'continue') restart_aero = .true. + if (restart_aero) then + call read_restart_aero + else + do iblk = 1, nblocks + call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) + enddo ! iblk + endif ! .not. restart_aero + endif + + if (trim(runtype) == 'continue') then + if (tr_brine) & + restart_hbrine = .true. + if (solve_zsal) & + restart_zsal = .true. + if (skl_bgc .or. z_tracers) & + restart_bgc = .true. + endif + + if (tr_brine .or. skl_bgc) then ! brine height tracer + call init_hbrine + if (tr_brine .and. restart_hbrine) call read_restart_hbrine + endif + + if (solve_zsal .or. skl_bgc .or. z_tracers) then ! biogeochemistry + if (tr_fsd) then + write (nu_diag,*) 'FSD implementation incomplete for use with BGC' + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + endif + call init_bgc + endif + + !----------------------------------------------------------------- + ! aggregate tracers + !----------------------------------------------------------------- + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + if (tmask(i,j,iblk)) then + call icepack_aggregate(ncat = ncat, & + aicen = aicen(i,j,:,iblk), & + trcrn = trcrn(i,j,:,:,iblk), & + vicen = vicen(i,j,:,iblk), & + vsnon = vsnon(i,j,:,iblk), & + aice = aice (i,j, iblk), & + trcr = trcr (i,j,:,iblk), & + vice = vice (i,j, iblk), & + vsno = vsno (i,j, iblk), & + aice0 = aice0(i,j, iblk), & + ntrcr = ntrcr, & + trcr_depend = trcr_depend, & + trcr_base = trcr_base, & + n_trcr_strata = n_trcr_strata, & + nt_strata = nt_strata) + else + ! tcraig, reset all tracer values on land to zero + trcrn(i,j,:,:,iblk) = c0 + endif + enddo + enddo + enddo + !$OMP END PARALLEL DO + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + end subroutine init_restart + +!======================================================================= + + end module CICE_InitMod + +!======================================================================= diff --git a/cicecore/drivers/unittest/sumchk/sumchk.F90 b/cicecore/drivers/unittest/sumchk/sumchk.F90 new file mode 100644 index 000000000..a811f5964 --- /dev/null +++ b/cicecore/drivers/unittest/sumchk/sumchk.F90 @@ -0,0 +1,698 @@ + + program sumchk + + ! This tests the CICE ice_global_reductions infrastructure by + ! using CICE_InitMod (from the standalone model) to read/initialize + ! a CICE grid/configuration. Then methods in ice_global_reductions + ! are verified using hardwired inputs with known outputs. + ! 
A grid needs to be initialized because most of the global reduction + ! infrastructure assumes haloed and distributed arrays are passed + ! possibly with a tripole seam. These interfaces are more than just + ! layers on top of MPI. They have the CICE grid/decomposition + ! infrastructure built-in. + + use CICE_InitMod + use CICE_FinalMod + use ice_kinds_mod, only: int_kind, dbl_kind, real_kind + use ice_communicate, only: my_task, master_task, get_num_procs + use ice_domain_size, only: nx_global, ny_global + use ice_domain_size, only: block_size_x, block_size_y, max_blocks + use ice_domain, only: distrb_info + use ice_blocks, only: block, get_block, nx_block, ny_block, nblocks_tot + use ice_distribution, only: ice_distributionGetBlockID, ice_distributionGet + use ice_constants, only: field_loc_center, field_loc_Nface + use ice_fileunits, only: bfbflag + use ice_global_reductions + use ice_exit, only: abort_ice + + implicit none + + integer(int_kind) :: i, j, k, l, m, n, iblock, ib, ie, jb, je + integer(int_kind) :: blockID, numBlocks + type (block) :: this_block + + real(dbl_kind) ,allocatable :: arrayA(:,:,:),arrayB(:,:,:),arrayC(:,:,:) + integer(int_kind),allocatable :: arrayiA(:,:,:),arrayiB(:,:,:) + real(dbl_kind) ,allocatable :: array8(:,:,:),array82(:,:,:) + real(real_kind) ,allocatable :: array4(:,:,:),array42(:,:,:) + integer(int_kind),allocatable :: arrayi1(:,:,:),arrayi2(:,:,:) + real(dbl_kind) ,allocatable :: mmask8(:,:,:) + real(real_kind) ,allocatable :: mmask4(:,:,:) + integer(int_kind),allocatable :: mmaski(:,:,:) + logical ,allocatable :: lmask (:,:,:) + real(dbl_kind) ,allocatable :: vec8(:),sum8(:) + real(dbl_kind) :: locval, corval, minval, maxval ! local, correct, min, max values + real(dbl_kind) :: locval8, sumval8, minval8, maxval8 + real(real_kind) :: locval4, sumval4, minval4, maxval4 + integer(int_kind) :: iocval, locvali, sumvali, corvali, minvali, maxvali + real(dbl_kind) :: reldig,reldigchk_now + real(dbl_kind) ,allocatable :: reldigchk(:,:) + + character(len=8) :: errorflag0 + character(len=32) :: string + integer(int_kind),parameter :: ntests1 = 19 + character(len=8) :: errorflag1(ntests1) + character(len=32) :: stringflag1(ntests1) + integer(int_kind),parameter :: ntests2 = 6 + character(len=8) :: errorflag2(ntests2) + character(len=32) :: stringflag2(ntests2) + integer(int_kind),parameter :: ntests3 = 3 + character(len=8) :: errorflag3(ntests3) + character(len=32) :: stringflag3(ntests3) + integer(int_kind),parameter :: ntests4 = 1 + character(len=8) :: errorflag4(ntests4) + character(len=32) :: stringflag4(ntests4) + + integer(int_kind) :: npes, ierr, ntask + + + integer(int_kind), parameter :: mfld_loc = 2 + integer(int_kind), parameter :: field_loc(mfld_loc) = & + (/ field_loc_center, field_loc_Nface /) + character(len=16), parameter :: field_loc_string(mfld_loc) = & + (/ 'field_loc_center', 'field_loc_Nface ' /) + + integer(int_kind), parameter :: nscale = 4 + real(dbl_kind), parameter :: lscale(nscale) = & + (/ 1.0_dbl_kind, & + 1.0e8_dbl_kind, & + 1.0e16_dbl_kind, & + 1.0e32_dbl_kind /) + + integer(int_kind), parameter :: nbflags = 6 + character(len=8), parameter :: bflags(1:nbflags) = & + (/ 'off ','lsum8 ','lsum16 ','lsum4 ','ddpdd ','reprosum' /) + character(len=*), parameter :: & + passflag = 'PASS', & + failflag = 'FAIL' + character(len=*), parameter :: subname='(sumchk)' + + !----------------------------------------------------------------- + ! 
Initialize CICE + !----------------------------------------------------------------- + + call CICE_Initialize + + !----------------------------------------------------------------- + ! Testing + !----------------------------------------------------------------- + + errorflag0 = passflag + errorflag1 = passflag + errorflag2 = passflag + errorflag3 = passflag + errorflag4 = passflag + npes = get_num_procs() + + if (my_task == master_task) then + write(6,*) ' ' + write(6,*) '==========================================================' + write(6,*) ' ' + write(6,*) 'Running SUMCHK' + write(6,*) ' ' + write(6,*) ' npes = ',npes + write(6,*) ' my_task = ',my_task + write(6,*) ' nx_global = ',nx_global + write(6,*) ' ny_global = ',ny_global + write(6,*) ' block_size_x = ',block_size_x + write(6,*) ' block_size_y = ',block_size_y + write(6,*) ' nblocks_tot = ',nblocks_tot + write(6,*) ' ' + endif + + ! --------------------------- + ! TEST GLOBAL SUMS + ! --------------------------- + ! test difficult sum + ! fill array with constant value that sums to corval when 2 gridcells per block are excluded + ! fill those two gridcells per block with very large and opposite signed values + ! arrayA should sum to corval, arrayB should sum to corval when mask is applied on 2 gridcells + ! fill 2 extra gridcells with special values + ! lscale defines relative size of large values + ! arrayA has large and opposite values in upper right hand corner of block + ! arrayB has large and same size values in upper right hand corner to check masks + ! arrayC has large and opposite values in first two values of block + ! arrayA should add large values at end of a local sum (bad) + ! arrayC should add large values first then rest of values (not so bad) + + if (my_task == master_task) write(6,*) ' ' + + allocate(arrayA (nx_block,ny_block,max_blocks)) + allocate(arrayB (nx_block,ny_block,max_blocks)) + allocate(arrayC (nx_block,ny_block,max_blocks)) + allocate(arrayiA(nx_block,ny_block,max_blocks)) + allocate(arrayiB(nx_block,ny_block,max_blocks)) + allocate(array4 (nx_block,ny_block,max_blocks)) + allocate(array8 (nx_block,ny_block,max_blocks)) + allocate(array42(nx_block,ny_block,max_blocks)) + allocate(array82(nx_block,ny_block,max_blocks)) + allocate(arrayi1(nx_block,ny_block,max_blocks)) + allocate(arrayi2(nx_block,ny_block,max_blocks)) + allocate(mmask4 (nx_block,ny_block,max_blocks)) + allocate(mmask8 (nx_block,ny_block,max_blocks)) + allocate(mmaski (nx_block,ny_block,max_blocks)) + allocate(lmask (nx_block,ny_block,max_blocks)) + + call ice_distributionGet(distrb_info, numLocalBlocks = numBlocks) + + ! correct results for relative digits check in sum + allocate(reldigchk(nbflags,nscale)) +#ifdef NO_R16 + ! lsum16 will revert to a double precision calc like lsum8 + reldigchk(:,:) = 15.7 + reldigchk(1:3,1) = 14. + reldigchk(4,1) = 3.9 + reldigchk(1:3,2) = 9. + reldigchk(4,2) = 1. + reldigchk(1:3,3) = 1. + reldigchk(4,3) = 0. + reldigchk(1:3,4) = 0. + reldigchk(4,4) = 0. + reldigchk(5,4) = 15. + if (nx_global == 360 .and. ny_global == 240) then + reldigchk(1:3,1) = 13. + reldigchk(5,4) = 14. + endif +#else + reldigchk(:,:) = 15.7 + reldigchk(1:2,1) = 14. + reldigchk(4,1) = 3.9 + reldigchk(1:2,2) = 9. + reldigchk(4,2) = 1. + reldigchk(1:2,3) = 1. + reldigchk(4,3) = 0. + reldigchk(1:2,4) = 0. + reldigchk(3,4) = 3. + reldigchk(4,4) = 0. + reldigchk(5,4) = 15. + if (nx_global == 360 .and. ny_global == 240) then + reldigchk(1:2,1) = 13. + reldigchk(5,4) = 14. + endif +#endif + + ! 
test list + n = 1 ; stringflag1(n) = 'dble sum easy' + n = n + 1; stringflag1(n) = 'dble sum' + n = n + 1; stringflag1(n) = 'real sum' + n = n + 1; stringflag1(n) = 'intg sum' + n = n + 1; stringflag1(n) = 'dble sum + dble mask' + n = n + 1; stringflag1(n) = 'real sum + real mask' + n = n + 1; stringflag1(n) = 'intg sum + intg mask' + n = n + 1; stringflag1(n) = 'dble sum + logical mask' + n = n + 1; stringflag1(n) = 'real sum + logical mask' + n = n + 1; stringflag1(n) = 'intg sum + logical mask' + n = n + 1; stringflag1(n) = 'dble prod sum' + n = n + 1; stringflag1(n) = 'real prod sum' + n = n + 1; stringflag1(n) = 'intg prod sum' + n = n + 1; stringflag1(n) = 'dble prod sum + dble mask' + n = n + 1; stringflag1(n) = 'real prod sum + real mask' + n = n + 1; stringflag1(n) = 'intg prod sum + intg mask' + n = n + 1; stringflag1(n) = 'dble prod sum + logical mask' + n = n + 1; stringflag1(n) = 'real prod sum + logical mask' + n = n + 1; stringflag1(n) = 'intg prod sum + logical mask' + + do m = 1, mfld_loc + + ! set corval to something a little interesting (not 1.0 for instance which gives atypical results) + corval = 4.0_dbl_kind/3.0_dbl_kind + iocval = 8 + ! tuned for gx3 and tx1 only + if ((nx_global == 100 .and. ny_global == 116) .or. & + (nx_global == 360 .and. ny_global == 240)) then + if (field_loc(m) == field_loc_Nface .and. nx_global == 360 .and. ny_global == 240) then + ! tx1 tripole face, need to adjust local value to remove half of row at ny_global + ! or modify corval to account for different sum + locval = corval / real((nblocks_tot*(block_size_x*block_size_y-2)-nx_global/2),dbl_kind) + corvali = (nblocks_tot*(block_size_x*block_size_y-2)-nx_global/2)*iocval + else + locval = corval / real(nblocks_tot*(block_size_x*block_size_y-2),dbl_kind) + corvali = nblocks_tot*(block_size_x*block_size_y-2)*iocval + endif + else + call abort_ice(subname//' ERROR not set for this grid ') + endif + + do l = 1, nscale + if (my_task == master_task) then + write(6,*) ' ' + write(6,'(a,i4,a,i4)') 'test: m = ',m,': l = ', l + write(6,'(a,a )') 'field_loc = ',trim(field_loc_string(m)) + write(6,'(a,e11.4)') 'lscale = ',lscale(l) + write(6,*) 'local array value = ',locval + write(6,*) 'correct value = ',corval + write(6,*) 'correct value int = ',corvali + write(6,*) ' ' + write(6,'(6x,a,26x,a,10x,a,10x,a)') 'test','bfbflag','sum','digits of precision (max is 16)' + endif + + arrayA(:,:,:) = locval + arrayB(:,:,:) = locval + arrayC(:,:,:) = locval + lmask(:,:,:) = .true. + do iblock = 1,numBlocks + call ice_distributionGetBlockID(distrb_info, iblock, blockID) + this_block = get_block(blockID, blockID) + ib = this_block%ilo + ie = this_block%ihi + jb = this_block%jlo + je = this_block%jhi + + lmask(ie,je-1,iblock) = .false. + lmask(ie,je-2,iblock) = .false. 
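+         ! (editorial note, illustrative only, not part of the patch) the two
+         ! masked cells above are the same cells that receive the large
+         ! same-signed values in arrayB below, so masked sums of arrayB must
+         ! still reproduce corval exactly. The precision loss probed by
+         ! arrayA is ordinary floating point rounding and cancellation; e.g.
+         ! in double precision, with lscale = 1.0e16,
+         !    (4.0d0/3.0d0 + 1.0d16) - 1.0d16 = 2.0d0
+         ! because the spacing of representable doubles near 1.0e16 is 2.0,
+         ! whereas adding the large pair first, as arrayC arranges,
+         !    (1.0d16 - 1.0d16) + 4.0d0/3.0d0 = 4.0d0/3.0d0
+         ! is exact. The reldigchk table above encodes how many relative
+         ! digits each bfbflag summation algorithm is expected to recover.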
+ arrayA(ie,je-1,iblock) = locval * lscale(l) + arrayA(ie,je-2,iblock) = -arrayA(ie,je-1,iblock) + arrayB(ie,je-1,iblock) = locval * lscale(l) + arrayB(ie,je-2,iblock) = arrayB(ie,je-1,iblock) + arrayC(ib,jb,iblock) = locval * lscale(l) + arrayC(ib+1,jb,iblock) = -arrayC(ib,jb,iblock) + arrayiA(:,:,iblock) = iocval + arrayiB(:,:,iblock) = iocval + arrayiA(ie,je-1,iblock) = 13 * iocval + arrayiA(ie,je-2,iblock) = -arrayiA(ie,je-1,iblock) + enddo + + do k = 1,ntests1 + do n = 1,nbflags + bfbflag = bflags(n) + string = stringflag1(k) + sumval8 = 888.0e12 + sumvali = 8888888 + + if (k == 1) then + array8(:,:,:) = arrayC(:,:,:) + sumval8 = global_sum(array8, distrb_info, field_loc(m)) + elseif (k == 2) then + array8(:,:,:) = arrayA(:,:,:) + sumval8 = global_sum(array8, distrb_info, field_loc(m)) + elseif (k == 3) then + array4(:,:,:) = arrayA(:,:,:) + sumval4 = global_sum(array4, distrb_info, field_loc(m)) + sumval8 = sumval4 + elseif (k == 4) then + arrayi1 = arrayiA + sumvali = global_sum(arrayi1, distrb_info, field_loc(m)) + elseif (k == 5) then + mmask8(:,:,:) = 6.0_dbl_kind + array8(:,:,:) = arrayA(:,:,:)/mmask8(:,:,:) + sumval8 = global_sum(array8, distrb_info, field_loc(m), mmask=mmask8) + elseif (k == 6) then + mmask4(:,:,:) = 6.0_real_kind + array4(:,:,:) = arrayA(:,:,:)/mmask4(:,:,:) + sumval4 = global_sum(array4, distrb_info, field_loc(m), mmask=mmask4) + sumval8 = sumval4 + elseif (k == 7) then + mmaski(:,:,:) = 2 + arrayi1(:,:,:) = arrayiA(:,:,:)/mmaski(:,:,:) + sumvali = global_sum(arrayi1, distrb_info, field_loc(m), mmask=mmaski) + elseif (k == 8) then + array8(:,:,:) = arrayB(:,:,:) + sumval8 = global_sum(array8, distrb_info, field_loc(m), lmask=lmask) + elseif (k == 9) then + array4(:,:,:) = arrayB(:,:,:) + sumval4 = global_sum(array4, distrb_info, field_loc(m), lmask=lmask) + sumval8 = sumval4 + elseif (k == 10) then + arrayi1(:,:,:) = arrayiB(:,:,:) + sumvali = global_sum(arrayi1, distrb_info, field_loc(m), lmask=lmask) + elseif (k == 11) then + array82(:,:,:) = 7.0_dbl_kind + array8(:,:,:) = arrayA(:,:,:)/array82(:,:,:) + sumval8 = global_sum_prod(array8, array82, distrb_info, field_loc(m)) + elseif (k == 12) then + array42(:,:,:) = 7.0_real_kind + array4(:,:,:) = arrayA(:,:,:)/array42(:,:,:) + sumval4 = global_sum_prod(array4, array42, distrb_info, field_loc(m)) + sumval8 = sumval4 + elseif (k == 13) then + arrayi2(:,:,:) = 4 + arrayi1(:,:,:) = arrayiA(:,:,:)/arrayi2(:,:,:) + sumvali = global_sum_prod(arrayi1, arrayi2, distrb_info, field_loc(m)) + elseif (k == 14) then + array82(:,:,:) = 7.0_dbl_kind + mmask8(:,:,:) = 6.0_dbl_kind + array8(:,:,:) = arrayA(:,:,:)/(mmask8(:,:,:)*array82(:,:,:)) + sumval8 = global_sum_prod(array8, array82, distrb_info, field_loc(m), mmask=mmask8) + elseif (k == 15) then + array42(:,:,:) = 7.0_real_kind + mmask4(:,:,:) = 6.0_real_kind + array4(:,:,:) = arrayA(:,:,:)/(mmask4(:,:,:)*array42(:,:,:)) + sumval4 = global_sum_prod(array4, array42, distrb_info, field_loc(m), mmask=mmask4) + sumval8 = sumval4 + elseif (k == 16) then + arrayi2(:,:,:) = 2 + mmaski(:,:,:) = 2 + arrayi1(:,:,:) = arrayiA(:,:,:)/(arrayi2(:,:,:)*mmaski(:,:,:)) + sumvali = global_sum_prod(arrayi1, arrayi2, distrb_info, field_loc(m), mmask=mmaski) + elseif (k == 17) then + array82(:,:,:) = 7.0_dbl_kind + array8(:,:,:) = arrayB(:,:,:)/array82(:,:,:) + sumval8 = global_sum_prod(array8, array82, distrb_info, field_loc(m), lmask=lmask) + elseif (k == 18) then + array42(:,:,:) = 7.0_real_kind + array4(:,:,:) = arrayB(:,:,:)/array42(:,:,:) + sumval4 = global_sum_prod(array4, array42, 
distrb_info, field_loc(m), lmask=lmask) + sumval8 = sumval4 + elseif (k == 19) then + arrayi2(:,:,:) = 4 + arrayi1(:,:,:) = arrayiB(:,:,:)/(arrayi2(:,:,:)) + sumvali = global_sum_prod(arrayi1, arrayi2, distrb_info, field_loc(m), lmask=lmask) + else + call abort_ice(subname//' illegal k sum',file=__FILE__,line=__LINE__) + endif + + if (string(1:4) == 'intg') then + ! integer + if (my_task == master_task) then + write(6,'(1x,a,a10,i12)') string,trim(bfbflag), sumvali + endif + if (sumvali /= corvali) then + errorflag1(k) = failflag + errorflag0 = failflag + if (my_task == master_task) then + write(6,*) '**** ERROR ',sumvali,corvali + endif + endif + else + ! real/dbl + if (sumval8 == corval) then + reldig = 16.0_dbl_kind + elseif (sumval8 == 0._dbl_kind) then + reldig = 0 + else + reldig = -log10(abs(corval-sumval8)/corval) + endif + if (my_task == master_task) then + write(6,'(1x,a,a10,g25.17,f8.2)') string,trim(bfbflag), sumval8, reldig + endif + + ! (real*4) can't have more than 8 digits of precision + reldigchk_now = reldigchk(n,l) + if (string(1:4) == 'real') reldigchk_now = min(reldigchk(n,l),7.0) + if (reldig < reldigchk_now) then + errorflag1(k) = failflag + errorflag0 = failflag + if (my_task == master_task) then + write(6,*) '**** ERROR ',reldig,reldigchk_now + endif + endif + endif + enddo ! n + enddo ! k + enddo ! l + enddo ! m + + ! --------------------------- + ! Test Global Min/Max + ! --------------------------- + + if (my_task == master_task) write(6,*) ' ' + + n = 1 ; stringflag2(n) = 'dble min/max' + n = n + 1; stringflag2(n) = 'real min/max' + n = n + 1; stringflag2(n) = 'intg min/max' + n = n + 1; stringflag2(n) = 'dble min/max + logical mask' + n = n + 1; stringflag2(n) = 'real min/max + logical mask' + n = n + 1; stringflag2(n) = 'intg min/max + logical mask' + + minval = -17. + maxval = 37. + + ! fill arrays with large values as default + array8 = 999.0e10_dbl_kind + array4 = 999.0e10_real_kind + arrayi1 = 9999999 + + n = 1 + ! fill active part of arrays with values between 0 and 10 + do iblock = 1,numBlocks + call ice_distributionGetBlockID(distrb_info, iblock, blockID) + this_block = get_block(blockID, blockID) + ib = this_block%ilo + ie = this_block%ihi + jb = this_block%jlo + je = this_block%jhi + do j = jb,je + do i = ib,ie + n = n + 1 + array8(i,j,iblock) = real(mod(n,10),dbl_kind) + array4(i,j,iblock) = real(mod(n,8),real_kind) + arrayi1(i,j,iblock) = mod(n,9) + enddo + enddo + enddo + + ! 
fill one gridcell with a min and max value + ntask = max(npes-1,1)-1 + iblock = max(numBlocks-1,1) + call ice_distributionGetBlockID(distrb_info, iblock, blockID) + this_block = get_block(blockID, blockID) + ib = this_block%ilo + ie = this_block%ihi + jb = this_block%jlo + je = this_block%jhi + i = max(ie-3,ib) + j = max(je-4,jb) + if (my_task == ntask) then + array8(i,j,iblock) = minval + array4(i,j,iblock) = minval + arrayi1(i,j,iblock) = minval + endif + + ntask = min(npes,2)-1 + iblock = min(numBlocks,2) + call ice_distributionGetBlockID(distrb_info, iblock, blockID) + this_block = get_block(blockID, blockID) + ib = this_block%ilo + ie = this_block%ihi + jb = this_block%jlo + je = this_block%jhi + i = min(ib+1,ie) + j = min(jb+2,je) + if (my_task == ntask) then + array8(i,j,iblock) = maxval + array4(i,j,iblock) = maxval + arrayi1(i,j,iblock) = maxval + endif + + do k = 1,ntests2 + string = stringflag2(k) + minval8 = 888e12 + maxval8 = -888e12 + if (k == 1) then + minval8 = global_minval(array8, distrb_info) + maxval8 = global_maxval(array8, distrb_info) + elseif (k == 2) then + minval4 = global_minval(array4, distrb_info) + maxval4 = global_maxval(array4, distrb_info) + minval8 = minval4 + maxval8 = maxval4 + elseif (k == 3) then + minvali = global_minval(arrayi1, distrb_info) + maxvali = global_maxval(arrayi1, distrb_info) + minval8 = minvali + maxval8 = maxvali + elseif (k == 4) then + minval8 = global_minval(array8, distrb_info, lmask=lmask) + maxval8 = global_maxval(array8, distrb_info, lmask=lmask) + elseif (k == 5) then + minval4 = global_minval(array4, distrb_info, lmask=lmask) + maxval4 = global_maxval(array4, distrb_info, lmask=lmask) + minval8 = minval4 + maxval8 = maxval4 + elseif (k == 6) then + minvali = global_minval(arrayi1, distrb_info, lmask=lmask) + maxvali = global_maxval(arrayi1, distrb_info, lmask=lmask) + minval8 = minvali + maxval8 = maxvali + else + call abort_ice(subname//' illegal k minmax',file=__FILE__,line=__LINE__) + endif + + if (my_task == master_task) then + write(6,'(1x,a,2g16.8)') string, minval8, maxval8 + endif + + if (minval8 /= minval .or. maxval8 /= maxval) then + errorflag2(k) = failflag + errorflag0 = failflag + if (my_task == master_task) then + write(6,*) '**** ERROR ', minval8, minval, maxval8, maxval + endif + endif + enddo + + ! --------------------------- + ! Test Scalar Reductions + ! --------------------------- + + if (my_task == master_task) write(6,*) ' ' + + n = 1 ; stringflag3(n) = 'dble scalar min/max/sum' + n = n + 1; stringflag3(n) = 'real scalar min/max/sum' + n = n + 1; stringflag3(n) = 'intg scalar min/max/sum' + + minval = -5. + maxval = 8. + + locval8 = 1. + locval4 = 1. + locvali = 1. + + ! fill one gridcell with a min and max value + ntask = max(npes-1,1)-1 + if (my_task == ntask) then + locval8 = minval + locval4 = minval + locvali = minval + endif + ntask = min(npes,2)-1 + if (my_task == ntask) then + locval8 = maxval + locval4 = maxval + locvali = maxval + endif + + ! 
compute correct results + if (npes == 1) then + minval = maxval + corval = maxval + else + corval = (npes - 2) * 1.0 + minval + maxval + endif + + do k = 1,ntests3 + string = stringflag3(k) + minval8 = 888e12 + maxval8 = -888e12 + sumval8 = -888e12 + if (k == 1) then + minval8 = global_minval(locval8, distrb_info) + maxval8 = global_maxval(locval8, distrb_info) + sumval8 = global_sum (locval8, distrb_info) + elseif (k == 2) then + minval4 = global_minval(locval4, distrb_info) + maxval4 = global_maxval(locval4, distrb_info) + sumval4 = global_sum (locval4, distrb_info) + minval8 = minval4 + maxval8 = maxval4 + sumval8 = sumval4 + elseif (k == 3) then + minvali = global_minval(locvali, distrb_info) + maxvali = global_maxval(locvali, distrb_info) + sumvali = global_sum (locvali, distrb_info) + minval8 = minvali + maxval8 = maxvali + sumval8 = sumvali + else + call abort_ice(subname//' illegal k scalar',file=__FILE__,line=__LINE__) + endif + + if (my_task == master_task) then + write(6,'(1x,a,3g16.8)') string, minval8, maxval8, sumval8 + endif + + if (minval8 /= minval .or. maxval8 /= maxval .or. sumval8 /= corval) then + errorflag3(k) = failflag + errorflag0 = failflag + if (my_task == master_task) then + write(6,*) '**** ERROR ', minval8, minval, maxval8, maxval, sumval8, corval + endif + endif + enddo + + ! --------------------------- + ! Test Vector Reductions + ! --------------------------- + + if (my_task == master_task) write(6,*) ' ' + + n = 1 ; stringflag4(n) = 'dble sum vector' + allocate(vec8(3)) + allocate(sum8(3)) + + minval = -5. + maxval = 8. + + vec8(1) = 1. + + ! fill one gridcell with a min and max value + ntask = max(npes-1,1)-1 + if (my_task == ntask) then + vec8(1) = minval + endif + ntask = min(npes,2)-1 + if (my_task == ntask) then + vec8(1) = maxval + endif + vec8(2) = 2. * vec8(1) + vec8(3) = 3. * vec8(1) + + ! compute correct results + if (npes == 1) then + minval = maxval + corval = maxval + else + corval = (npes - 2) * 1.0 + minval + maxval + endif + + do k = 1,ntests4 + string = stringflag4(k) + sum8 = -888e12 + if (k == 1) then + sum8 = global_allreduce_sum(vec8, distrb_info) + else + call abort_ice(subname//' illegal k vector',file=__FILE__,line=__LINE__) + endif + + if (my_task == master_task) then + write(6,'(1x,a,3g16.8)') string, sum8(1),sum8(2),sum8(3) + endif + + if (sum8(1) /= corval .or. sum8(2) /= 2.*corval .or. sum8(3) /= 3.*corval) then + errorflag4(k) = failflag + errorflag0 = failflag + if (my_task == master_task) then + write(6,*) '**** ERROR ', sum8(1),sum8(2),sum8(3),corval + endif + endif + enddo + + ! --------------------------- + + if (my_task == master_task) then + write(6,*) ' ' + do k = 1,ntests1 + write(6,*) errorflag1(k),stringflag1(k) + enddo + do k = 1,ntests2 + write(6,*) errorflag2(k),stringflag2(k) + enddo + do k = 1,ntests3 + write(6,*) errorflag3(k),stringflag3(k) + enddo + do k = 1,ntests4 + write(6,*) errorflag4(k),stringflag4(k) + enddo + write(6,*) ' ' + if (errorflag0 == passflag) then + write(6,*) 'SUMCHK COMPLETED SUCCESSFULLY' + else + write(6,*) 'SUMCHK FAILED' + call abort_ice(subname//' ERROR: SUMCHK FAILED',file=__FILE__,line=__LINE__) + endif + write(6,*) ' ' + write(6,*) '==========================================================' + write(6,*) ' ' + endif + + + !----------------------------------------------------------------- + ! 
Finalize CICE + !----------------------------------------------------------------- + + call CICE_Finalize + + end program sumchk + +!======================================================================= diff --git a/cicecore/shared/ice_arrays_column.F90 b/cicecore/shared/ice_arrays_column.F90 index 06efd6e94..46ea6f62e 100644 --- a/cicecore/shared/ice_arrays_column.F90 +++ b/cicecore/shared/ice_arrays_column.F90 @@ -267,6 +267,10 @@ module ice_arrays_column character(char_len_long), public :: & bgc_data_dir ! directory for biogeochemistry data + character(char_len_long), public :: & + optics_file, & ! modal aero optics file + optics_file_fieldname ! modal aero optics file fieldname + real (kind=dbl_kind), dimension(:), allocatable, public :: & R_C2N_DON ! carbon to nitrogen mole ratio of DON pool @@ -305,12 +309,12 @@ subroutine alloc_arrays_column ! Allocate column arrays use ice_exit, only: abort_ice integer (int_kind) :: max_nbtrcr, max_algae, max_aero, & - nmodal1, nmodal2, max_don, nbtrcr_sw + nmodal1, nmodal2, max_don integer (int_kind) :: ierr, ntrcr character(len=*),parameter :: subname='(alloc_arrays_column)' - call icepack_query_tracer_sizes(ntrcr_out=ntrcr, nbtrcr_sw_out=nbtrcr_sw) + call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_query_tracer_sizes( max_nbtrcr_out=max_nbtrcr, & max_algae_out=max_algae, max_aero_out=max_aero, & nmodal1_out=nmodal1, nmodal2_out=nmodal2, max_don_out=max_don) @@ -396,8 +400,7 @@ subroutine alloc_arrays_column ocean_bio_all(nx_block,ny_block,max_nbtrcr,max_blocks), & ! fixed order, all values even for tracers false ice_bio_net (nx_block,ny_block,max_nbtrcr,max_blocks), & ! depth integrated tracer (mmol/m^2) snow_bio_net (nx_block,ny_block,max_nbtrcr,max_blocks), & ! depth integrated snow tracer (mmol/m^2) - trcrn_sw (nx_block,ny_block,nbtrcr_sw,ncat,max_blocks), & ! bgc tracers active in the delta-Eddington shortwave - algal_peak (nx_block,ny_block,max_algae,max_blocks), & ! vertical location of algal maximum, 0 if no maximum + algal_peak (nx_block,ny_block,max_algae ,max_blocks), & ! 
vertical location of algal maximum, 0 if no maximum stat=ierr) if (ierr/=0) call abort_ice(subname//': Out of Memory2') diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90 index 1362e055e..4f4641467 100644 --- a/cicecore/shared/ice_init_column.F90 +++ b/cicecore/shared/ice_init_column.F90 @@ -7,6 +7,7 @@ module ice_init_column use ice_kinds_mod + use ice_blocks, only: nx_block, ny_block use ice_constants use ice_communicate, only: my_task, master_task, ice_barrier use ice_domain_size, only: ncat, max_blocks @@ -129,7 +130,6 @@ module ice_init_column subroutine init_thermo_vertical - use ice_blocks, only: nx_block, ny_block use ice_flux, only: salinz, Tmltz integer (kind=int_kind) :: & @@ -186,7 +186,7 @@ subroutine init_shortwave fswintn, albpndn, apeffn, trcrn_sw, dhsn, ffracn, snowfracn, & kaer_tab, waer_tab, gaer_tab, kaer_bc_tab, waer_bc_tab, gaer_bc_tab, bcenh, & swgrid, igrid - use ice_blocks, only: block, get_block, nx_block, ny_block + use ice_blocks, only: block, get_block use ice_calendar, only: dt, calendar_type, & days_per_year, nextsw_cday, yday, msec use ice_diagnostics, only: npnt, print_points, pmloc, piloc, pjloc @@ -594,7 +594,6 @@ subroutine init_fsd(floesize) use ice_arrays_column, only: floe_rad_c, floe_binwidth, & wavefreq, dwavefreq, wave_sig_ht, wave_spectrum, & d_afsd_newi, d_afsd_latg, d_afsd_latm, d_afsd_wave, d_afsd_weld - use ice_blocks, only: nx_block, ny_block use ice_domain_size, only: ncat, max_blocks, nfsd use ice_init, only: ice_ic use ice_state, only: aicen @@ -1005,7 +1004,7 @@ end subroutine init_hbrine subroutine input_zbgc - use ice_arrays_column, only: restore_bgc + use ice_arrays_column, only: restore_bgc, optics_file, optics_file_fieldname use ice_broadcast, only: broadcast_scalar use ice_restart_column, only: restart_bgc, restart_zsal, & restart_hbrine @@ -1048,7 +1047,7 @@ subroutine input_zbgc restore_bgc, restart_bgc, scale_bgc, solve_zsal, restart_zsal, & tr_bgc_Nit, tr_bgc_C, tr_bgc_chl, tr_bgc_Am, tr_bgc_Sil, & tr_bgc_DMS, tr_bgc_PON, tr_bgc_hum, tr_bgc_DON, tr_bgc_Fe, & - grid_o, grid_o_t, l_sk, grid_oS, & + grid_o, grid_o_t, l_sk, grid_oS, optics_file, optics_file_fieldname, & l_skS, phi_snow, initbio_frac, frazil_scav, & ratio_Si2N_diatoms , ratio_Si2N_sp , ratio_Si2N_phaeo , & ratio_S2N_diatoms , ratio_S2N_sp , ratio_S2N_phaeo , & @@ -1105,6 +1104,8 @@ subroutine input_zbgc tr_brine = .false. ! brine height differs from ice height tr_zaero = .false. ! z aerosol tracers modal_aero = .false. ! use modal aerosol treatment of aerosols + optics_file = 'unknown_optics_file' ! modal aerosol optics file + optics_file_fieldname = 'unknown_optics_fieldname' ! modal aerosol optics file fieldname restore_bgc = .false. ! restore bgc if true solve_zsal = .false. ! update salinity tracer profile from solve_S_dt restart_bgc = .false. ! 
biogeochemistry restart @@ -1321,6 +1322,8 @@ subroutine input_zbgc call broadcast_scalar(tr_zaero, master_task) call broadcast_scalar(dEdd_algae, master_task) call broadcast_scalar(modal_aero, master_task) + call broadcast_scalar(optics_file, master_task) + call broadcast_scalar(optics_file_fieldname, master_task) call broadcast_scalar(grid_o, master_task) call broadcast_scalar(grid_o_t, master_task) call broadcast_scalar(l_sk, master_task) @@ -1690,6 +1693,8 @@ subroutine input_zbgc write(nu_diag,1010) ' solve_zbgc = ', solve_zbgc write(nu_diag,1010) ' tr_zaero = ', tr_zaero write(nu_diag,1020) ' number of aerosols = ', n_zaero + write(nu_diag,1031) ' optics_file = ', trim(optics_file) + write(nu_diag,1031) ' optics_file_fieldname = ', trim(optics_file_fieldname) ! bio parameters write(nu_diag,1000) ' grid_o = ', grid_o write(nu_diag,1000) ' grid_o_t = ', grid_o_t @@ -1747,6 +1752,7 @@ subroutine input_zbgc 1010 format (a30,2x,l6) ! logical 1020 format (a30,2x,i6) ! integer 1030 format (a30, a8) ! character + 1031 format (a30, a ) ! character end subroutine input_zbgc @@ -2280,7 +2286,7 @@ subroutine init_zbgc use ice_state, only: trcr_base, trcr_depend, n_trcr_strata, & nt_strata - use ice_arrays_column, only: R_C2N, R_chl2N, R_C2N_DON, R_Si2N + use ice_arrays_column, only: R_C2N, R_chl2N, R_C2N_DON, R_Si2N, trcrn_sw integer (kind=int_kind) :: & nbtrcr, nbtrcr_sw, nt_fbri, & @@ -2948,6 +2954,10 @@ subroutine init_zbgc endif if (.NOT. dEdd_algae) nbtrcr_sw = 1 + ! tcraig, added 6/1/21, why is nbtrcr_sw set here? + call icepack_init_tracer_sizes(nbtrcr_sw_in=nbtrcr_sw) + allocate(trcrn_sw(nx_block,ny_block,nbtrcr_sw,ncat,max_blocks)) ! bgc tracers active in the delta-Eddington shortwave + !----------------------------------------------------------------- ! 
spew !----------------------------------------------------------------- diff --git a/configuration/scripts/Makefile b/configuration/scripts/Makefile index e0b7799d6..51c36cee3 100644 --- a/configuration/scripts/Makefile +++ b/configuration/scripts/Makefile @@ -75,7 +75,7 @@ AR := ar .SUFFIXES: .SUFFIXES: .F90 .F .c .o -.PHONY: all cice libcice targets target db_files db_flags clean realclean helloworld calchk +.PHONY: all cice libcice targets target db_files db_flags clean realclean helloworld calchk sumchk bcstchk all: $(EXEC) cice: $(EXEC) @@ -94,7 +94,7 @@ targets: @echo " " @echo "Supported Makefile Targets are: cice, libcice, makdep, depends, clean, realclean" @echo " Diagnostics: targets, db_files, db_flags" - @echo " Unit Tests : helloworld, calchk" + @echo " Unit Tests : helloworld, calchk, sumchk, bcstchk" target: targets db_files: @@ -143,9 +143,15 @@ $(DEPGEN): $(OBJS_DEPGEN) # this builds all dependent source code automatically even though only a subset might actually be used # this is no different than the cice target and in fact the binary is called cice # it exists just to create separation as needed for unit tests + calchk: $(EXEC) +sumchk: $(EXEC) + +bcstchk: $(EXEC) + # this builds just a subset of source code specified explicitly and requires a separate target + HWOBJS := helloworld.o helloworld: $(HWOBJS) $(LD) -o $(EXEC) $(LDFLAGS) $(HWOBJS) $(ULIBS) $(SLIBS) diff --git a/configuration/scripts/cice.settings b/configuration/scripts/cice.settings index 3bd85f5f9..1faf2c5be 100755 --- a/configuration/scripts/cice.settings +++ b/configuration/scripts/cice.settings @@ -42,5 +42,5 @@ if (${ICE_NTASKS} == 1) setenv ICE_COMMDIR serial ### Specialty code setenv ICE_BLDDEBUG false # build debug flags -setenv ICE_COVERAGE false # build debug flags +setenv ICE_COVERAGE false # build coverage flags diff --git a/configuration/scripts/forapps/ufs/comp_ice.backend.clean b/configuration/scripts/forapps/ufs/comp_ice.backend.clean deleted file mode 100755 index d75d381b4..000000000 --- a/configuration/scripts/forapps/ufs/comp_ice.backend.clean +++ /dev/null @@ -1,46 +0,0 @@ -#! 
/bin/csh -f - -### Expect to find the following environment variables set on entry: -# MACHINE_ID -# SYSTEM_USERDIR -# SRCDIR -# EXEDIR - -setenv OBJDIR $EXEDIR/compile ; if !(-d $OBJDIR) mkdir -p $OBJDIR - -if (${MACHINE_ID} =~ cheyenne*) then - setenv ARCH cheyenne_intel -else if (${MACHINE_ID} =~ orion*) then - setenv ARCH orion_intel -else if (${MACHINE_ID} =~ hera*) then - setenv ARCH hera_intel -else if (${MACHINE_ID} =~ wcoss*) then - setenv ARCH wcoss_dell_p3_intel -else if (${MACHINE_ID} =~ stampede*) then - setenv ARCH stampede_intel -else - echo "CICE6 ${0}: ERROR in ARCH setup, ${hname}" - exit -2 -endif - -echo "CICE6 ${0}: ARCH = $ARCH" - -cd $OBJDIR - -setenv MAKENAME gmake -setenv MAKETHRDS 1 -setenv MAKEFILE ${SRCDIR}/configuration/scripts/Makefile -setenv MACROSFILE ${SRCDIR}/configuration/scripts/machines/Macros.$ARCH - -echo "CICE6 ${0}: EXEDIR = ${EXEDIR}" -echo "CICE6 ${0}: OBJDIR = ${OBJDIR}" -echo "CICE6 ${0}: MAKEFILE = ${MAKEFILE}" -echo "CICE6 ${0}: MACROSFILE = ${MACROSFILE}" -echo "CICE6 ${0}: ESMFMKFILE = ${ESMFMKFILE}" - -#clean -${MAKENAME} EXEC=${OBJDIR}/libcice6.a \ - -f ${MAKEFILE} MACFILE=${MACROSFILE} clean - -#clean install -rm -r -f ${BINDIR} diff --git a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice deleted file mode 100755 index 47985bef2..000000000 --- a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice +++ /dev/null @@ -1,149 +0,0 @@ -#! /bin/csh -f - -### Expect to find the following environment variables set on entry: -# MACHINE_ID -# SYSTEM_USERDIR -# SRCDIR -# EXEDIR - -### local variable that begin with ICE_ are needed in the Macros file -# ICE_COMMDIR -# ICE_BLDDEBUG -# ICE_THREADED -# ICE_CPPDEFS - -setenv OBJDIR $EXEDIR/compile ; if !(-d $OBJDIR) mkdir -p $OBJDIR - -setenv THRD no # set to yes for OpenMP threading - -if (${MACHINE_ID} =~ cheyenne*) then - setenv ARCH cheyenne_intel -else if (${MACHINE_ID} =~ orion*) then - setenv ARCH orion_intel -else if (${MACHINE_ID} =~ hera*) then - setenv ARCH hera_intel -else if (${MACHINE_ID} =~ wcoss*) then - setenv ARCH wcoss_dell_p3_intel -else if (${MACHINE_ID} =~ stampede*) then - setenv ARCH stampede_intel -else - echo "CICE6 ${0}: ERROR in ARCH setup, ${hname}" - exit -2 -endif - -echo "CICE6 ${0}: ARCH = $ARCH" - -cd $OBJDIR - -setenv SHRDIR csm_share # location of CCSM shared code -setenv DRVDIR nuopc/cmeps - -#if ($NTASK == 1) then -# setenv ICE_COMMDIR serial -#else - setenv ICE_COMMDIR mpi -#endif - -if ($THRD == 'yes') then - setenv ICE_THREADED true -else - setenv ICE_THREADED false -endif - -if ($?ICE_CPPDEFS) then - setenv ICE_CPPDEFS "${ICE_CPPDEFS} -Dcoupled" -else - setenv ICE_CPPDEFS "-Dcoupled" -endif - -if !($?IO_TYPE) then - setenv IO_TYPE netcdf4 # set to none if netcdf library is unavailable -endif -if ($IO_TYPE == 'netcdf3' || $IO_TYPE == 'netcdf4') then - setenv IODIR io_netcdf - setenv ICE_CPPDEFS "${ICE_CPPDEFS} -DUSE_NETCDF" -else if ($IO_TYPE == 'pio') then - setenv IODIR io_pio - setenv ICE_CPPDEFS "${ICE_CPPDEFS} -DUSE_NETCDF" -else - setenv IODIR io_binary -endif - -# Build in debug mode. If DEBUG=Y, enable DEBUG compilation. This -# flag is set in ${ROOTDIR}/coupledFV3_MOM6_CICE_debug.appBuilder file. -if (! $?DEBUG) then - setenv ICE_BLDDEBUG false -else - if ($DEBUG == "Y") then - setenv ICE_BLDDEBUG true - else - setenv ICE_BLDDEBUG false - endif -endif -echo "CICE6 ${0}: DEBUG = ${ICE_BLDDEBUG}" - -### List of source code directories (in order of importance). -cat >! 
Filepath << EOF -${SRCDIR}/cicecore/drivers/${DRVDIR} -${SRCDIR}/cicecore/cicedynB/dynamics -${SRCDIR}/cicecore/cicedynB/general -${SRCDIR}/cicecore/cicedynB/analysis -${SRCDIR}/cicecore/cicedynB/infrastructure -${SRCDIR}/cicecore/cicedynB/infrastructure/io/${IODIR} -${SRCDIR}/cicecore/cicedynB/infrastructure/comm/${ICE_COMMDIR} -${SRCDIR}/cicecore/shared -${SRCDIR}/icepack/columnphysics -${SRCDIR}/$SHRDIR -EOF - -setenv MAKENAME gmake -setenv MAKETHRDS 1 -setenv MAKEFILE ${SRCDIR}/configuration/scripts/Makefile -setenv MACROSFILE ${SRCDIR}/configuration/scripts/machines/Macros.$ARCH -setenv DEPFILE ${SRCDIR}/configuration/scripts/makdep.c - -echo "CICE6 ${0}: EXEDIR = ${EXEDIR}" -echo "CICE6 ${0}: OBJDIR = ${OBJDIR}" -echo "CICE6 ${0}: MAKEFILE = ${MAKEFILE}" -echo "CICE6 ${0}: MACROSFILE = ${MACROSFILE}" -echo "CICE6 ${0}: DEPFILE = ${DEPFILE}" -echo "CICE6 ${0}: ESMFMKFILE = ${ESMFMKFILE}" - -#diagnostics -#${MAKENAME} -j ${MAKETHRDS} VPFILE=Filepath EXEC=${OBJDIR}/cice \ -# -f ${MAKEFILE} MACFILE=${MACROSFILE} DEPFILE=${DEPFILE} db_files -#${MAKENAME} -j ${MAKETHRDS} VPFILE=Filepath EXEC=${OBJDIR}/cice \ -# -f ${MAKEFILE} MACFILE=${MACROSFILE} DEPFILE=${DEPFILE} db_flags - -#clean -#${MAKENAME} VPFILE=Filepath EXEC=${OBJDIR}/cice \ -# -f ${MAKEFILE} MACFILE=${MACROSFILE} DEPFILE=${DEPFILE} clean - -#needed to trigger a failed build to rest of system -rm ${BINDIR}/cice6.mk - -#build lib (includes dependencies) -${MAKENAME} -j ${MAKETHRDS} VPFILE=Filepath EXEC=${OBJDIR}/libcice6.a \ - -f ${MAKEFILE} MACFILE=${MACROSFILE} DEPFILE=${DEPFILE} libcice - -if ($status != 0) then - echo "CICE6 ${0}: gmake failed, exiting" - exit -2 -endif - -#install -mkdir -p ${BINDIR} -cp -f ${OBJDIR}/libcice6.a ${BINDIR}/ -cp -f ${OBJDIR}/ice_comp_nuopc.mod ${BINDIR}/ -cp -f ${OBJDIR}/ice_timers.mod ${BINDIR}/ - -cat >! ${BINDIR}/cice6.mk << EOF -# ESMF self-describing build dependency makefile fragment - -ESMF_DEP_FRONT = ice_comp_nuopc -ESMF_DEP_INCPATH = ${BINDIR} -ESMF_DEP_CMPL_OBJS = -ESMF_DEP_LINK_OBJS = ${BINDIR}/libcice6.a - -EOF - diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index e5fcb9177..47c2bf58a 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -30,8 +30,12 @@ diag_type = 'stdout' diag_file = 'ice_diag.d' debug_model = .false. - debug_model_step = 999999999 - forcing_diag = .false. + debug_model_step = 0 + debug_model_i = -1 + debug_model_j = -1 + debug_model_iblk = -1 + debug_model_task = -1 + debug_forcing = .false. print_global = .true. print_points = .true. conserv_check = .false. @@ -203,6 +207,8 @@ atmiter_conv = 0.0d0 ustar_min = 0.0005 iceruf = 0.0005 + calc_dragio = .false. + iceruf_ocn = 0.03 emissivity = 0.985 fbot_xfer_type = 'constant' update_ocn_f = .false. @@ -258,6 +264,8 @@ restart_hbrine = .false. tr_zaero = .false. modal_aero = .false. + optics_file = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/snicar_optics_5bnd_snow_and_aerosols.nc' + optics_file_fieldname = 'modalBCabsorptionParameter5band' skl_bgc = .false. z_tracers = .false. dEdd_algae = .false. 
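(Editorial note, illustrative only, not part of the patch: taken together, the new debug_model_* entries above let a run pin the per-step debug print_state diagnostics to one explicit local grid cell instead of the first diagnostic point. A minimal sketch of the relevant ice_in fragment follows, reusing the hardwired gx3 point from set_nml.diagpt1 below; the i/j/iblk/task values are decomposition-dependent and purely illustrative.)

    debug_model      = .true.
    debug_model_step = 4
    debug_model_i    = 3
    debug_model_j    = 22
    debug_model_iblk = 11
    debug_model_task = 0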
diff --git a/configuration/scripts/options/set_env.bcstchk b/configuration/scripts/options/set_env.bcstchk
new file mode 100644
index 000000000..bf6b49bd2
--- /dev/null
+++ b/configuration/scripts/options/set_env.bcstchk
@@ -0,0 +1,2 @@
+setenv ICE_DRVOPT unittest/bcstchk
+setenv ICE_TARGET bcstchk
diff --git a/configuration/scripts/options/set_env.sumchk b/configuration/scripts/options/set_env.sumchk
new file mode 100644
index 000000000..8a8495df2
--- /dev/null
+++ b/configuration/scripts/options/set_env.sumchk
@@ -0,0 +1,2 @@
+setenv ICE_DRVOPT unittest/sumchk
+setenv ICE_TARGET sumchk
diff --git a/configuration/scripts/options/set_nml.bigdiag b/configuration/scripts/options/set_nml.bigdiag
index a98bc0c2b..95d752af6 100644
--- a/configuration/scripts/options/set_nml.bigdiag
+++ b/configuration/scripts/options/set_nml.bigdiag
@@ -1,4 +1,4 @@
-forcing_diag = .true.
+debug_forcing = .true.
 debug_model = .true.
 debug_model_step = 4
 print_global = .true.
diff --git a/configuration/scripts/options/set_nml.calcdragio b/configuration/scripts/options/set_nml.calcdragio
new file mode 100644
index 000000000..cf86664bf
--- /dev/null
+++ b/configuration/scripts/options/set_nml.calcdragio
@@ -0,0 +1 @@
+calc_dragio = .true.
diff --git a/configuration/scripts/options/set_nml.diagpt1 b/configuration/scripts/options/set_nml.diagpt1
new file mode 100644
index 000000000..baaa564e6
--- /dev/null
+++ b/configuration/scripts/options/set_nml.diagpt1
@@ -0,0 +1,5 @@
+# this local point is hardwired to (85,-150) for gx3, 7x2x5x29x12 roundrobin
+debug_model_i = 3
+debug_model_j = 22
+debug_model_iblk = 11
+debug_model_task = 0
diff --git a/configuration/scripts/options/set_nml.dwghtfile b/configuration/scripts/options/set_nml.dwghtfile
index d72b0fb8a..33bb2d29f 100644
--- a/configuration/scripts/options/set_nml.dwghtfile
+++ b/configuration/scripts/options/set_nml.dwghtfile
@@ -1,3 +1,2 @@
 distribution_type = 'wghtfile'
 distribution_wght = 'file'
-distribution_wght_file = 'ICE_MACHINE_INPUTDATA/CICE_data/grid/gx1/cice62_gx1_wghtmask.nc'
diff --git a/configuration/scripts/options/set_nml.gx1 b/configuration/scripts/options/set_nml.gx1
index 2e8d4f5b7..50615e81e 100644
--- a/configuration/scripts/options/set_nml.gx1
+++ b/configuration/scripts/options/set_nml.gx1
@@ -19,3 +19,4 @@ atm_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1/JRA55'
 precip_units = 'mks'
 ocn_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1/CESM/MONTHLY'
 bgc_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1/WOA/MONTHLY'
+distribution_wght_file = 'ICE_MACHINE_INPUTDATA/CICE_data/grid/gx1/cice62_gx1_wghtmask.nc'
diff --git a/configuration/scripts/options/set_nml.tx1 b/configuration/scripts/options/set_nml.tx1
index dfdf6f19b..2ef4edd33 100644
--- a/configuration/scripts/options/set_nml.tx1
+++ b/configuration/scripts/options/set_nml.tx1
@@ -3,6 +3,7 @@ runtype = 'initial'
 ice_ic = 'default'
 grid_format = 'bin'
 grid_type = 'tripole'
+ns_boundary_type = 'tripole'
 grid_file = 'ICE_MACHINE_INPUTDATA/CICE_data/grid/tx1/grid_tx1.bin'
 kmt_file = 'ICE_MACHINE_INPUTDATA/CICE_data/grid/tx1/kmt_tx1.bin'
 atm_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/tx1/JRA55'
diff --git a/configuration/scripts/tests/base_suite.ts b/configuration/scripts/tests/base_suite.ts
index c37750a31..69252f9fb 100644
--- a/configuration/scripts/tests/base_suite.ts
+++ b/configuration/scripts/tests/base_suite.ts
@@ -5,27 +5,32 @@ smoke gx3 1x4 debug,diag1,run2day
 smoke gx3 4x1 debug,diag1,run5day
 restart gx3 8x2 debug
 smoke gx3 8x2 diag24,run1year,medium
-smoke gx3 7x2 diag1,bigdiag,run1day
+smoke gx3 7x2 diag1,bigdiag,run1day,diagpt1
 decomp gx3 4x2x25x29x5 none
 smoke gx3 4x2 diag1,run5day smoke_gx3_8x2_diag1_run5day
 smoke gx3 4x1 diag1,run5day,thread smoke_gx3_8x2_diag1_run5day
-restart gx1 40x4 droundrobin,medium
-restart tx1 40x4 dsectrobin,medium
+restart gx1 40x4 droundrobin
+restart tx1 40x4 dsectrobin
+restart tx1 60x2 droundrobin,maskhalo
 restart gx3 4x4 none
+restart gx3 10x4 maskhalo
 restart gx3 6x2 alt01
 restart gx3 8x2 alt02
 restart gx3 4x2 alt03
+restart gx3 12x2 alt03,maskhalo,droundrobin
 restart gx3 4x4 alt04
 restart gx3 4x4 alt05
 restart gx3 8x2 alt06
+restart gx3 18x2 debug,maskhalo
 restart gx3 6x2 alt01,debug,short
 restart gx3 8x2 alt02,debug,short
 restart gx3 4x2 alt03,debug,short
+smoke gx3 12x2 alt03,debug,short,maskhalo,droundrobin
 smoke gx3 4x4 alt04,debug,short
 smoke gx3 4x4 alt05,debug,short
 smoke gx3 8x2 alt06,debug,short
 smoke gx3 10x2 debug,diag1,run5day,gx3sep2
-smoke gx3 7x2 diag1,bigdiag,run1day
+smoke gx3 7x2x5x29x12 diag1,bigdiag,run1day,debug
 restart gbox128 4x2 short
 restart gbox128 4x2 boxnodyn,short
 restart gbox128 4x2 boxnodyn,short,debug
@@ -44,22 +49,23 @@ restart gx1 8x1 bgczclim,medium
 smoke gx1 24x1 medium,run90day,yi2008
 smoke gx3 8x1 medium,run90day,yi2008
 restart gx1 24x1 short
-restart gx1 16x2 seabedLKD,gx1apr,medium,debug
-restart gx1 15x2 seabedprob,medium
-restart gx1 32x1 gx1prod,medium
+restart gx1 16x2 seabedLKD,gx1apr,short,debug
+restart gx1 15x2 seabedprob
+restart gx1 32x1 gx1prod
 smoke gx3 4x2 fsd1,diag24,run5day,debug
-smoke gx3 8x2 fsd12,diag24,run5day,short
+smoke gx3 8x2 fsd12,diag24,run5day
 restart gx3 4x2 fsd12,debug,short
-smoke gx3 8x2 fsd12ww3,diag24,run1day,medium
+smoke gx3 8x2 fsd12ww3,diag24,run1day
 smoke gx3 4x1 isotope,debug
 restart gx3 8x2 isotope
 restart gx3 4x4 gx3ncarbulk,iobinary
 restart gx3 4x4 histall,precision8,cdf64
 smoke gx3 30x1 bgcz,histall
 smoke gx3 14x2 fsd12,histall
-smoke gx3 4x1 dynpicard,medium
+smoke gx3 4x1 dynpicard
 smoke gx3 8x2 diag24,run5day,zsal,debug
 restart gx3 8x2 zsal
 restart gx3 8x2 gx3ncarbulk,debug
 restart gx3 4x4 gx3ncarbulk,diag1
 restart gx1 24x1 gx1coreii,short
+smoke gx3 4x1 calcdragio
diff --git a/configuration/scripts/tests/unittest_suite.ts b/configuration/scripts/tests/unittest_suite.ts
index 2e9dcc7cf..21810a1e3 100644
--- a/configuration/scripts/tests/unittest_suite.ts
+++ b/configuration/scripts/tests/unittest_suite.ts
@@ -1,4 +1,8 @@
-# Test      Grid   PEs           Sets    BFB-compare
-unittest    gx3    1x1           helloworld
-unittest    gx3    1x1           calchk
-
+# Test      Grid   PEs           Sets    BFB-compare
+unittest    gx3    1x1           helloworld
+unittest    gx3    1x1           calchk,short
+unittest    gx3    4x1x25x29x4   sumchk
+unittest    gx3    1x1x25x29x16  sumchk
+unittest    tx1    8x1           sumchk
+unittest    gx3    4x1           bcstchk
+unittest    gx3    1x1           bcstchk
diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst
index 69222e10c..d3291dbd8 100644
--- a/doc/source/cice_index.rst
+++ b/doc/source/cice_index.rst
@@ -93,6 +93,7 @@ either Celsius or Kelvin units).
 "**C**", "", ""
 "c", "real(\ :math:`n`)", ""
 "rotate_wind", ":math:`\bullet` if true, rotate wind/stress components to computational grid", "T"
+"calc_dragio", ":math:`\bullet` if true, calculate ``dragio`` from ``iceruf_ocn`` and ``thickness_ocn_layer1``", "F"
 "calc_strair", ":math:`\bullet` if true, calculate wind stress", "T"
 "calc_Tsfc", ":math:`\bullet` if true, calculate surface temperature", "T"
 "Cdn_atm", "atmospheric drag coefficient", ""
@@ -141,8 +142,14 @@
 "days_per_year", ":math:`\bullet` number of days in one year", "365"
 "day_init", ":math:`\bullet` the initial day of the month", ""
 "dbl_kind", "definition of double precision", "selected_real_kind(13)"
+"debug_blocks", ":math:`\bullet` write extra diagnostics for blocks and decomposition", ".false."
+"debug_forcing", ":math:`\bullet` write extra diagnostics for forcing inputs", ".false."
 "debug_model", "Logical that controls extended model point debugging.", ""
-"debug_model_step", "Initial timestep for output associated with debug_model.", ""
+"debug_model_i", "Local i gridpoint that defines debug_model point output.", ""
+"debug_model_iblk", "Local iblk value that defines debug_model point output.", ""
+"debug_model_j", "Local j gridpoint that defines debug_model point output.", ""
+"debug_model_task", "Local mpi task value that defines debug_model point output.", ""
+"debug_model_step", "Initial timestep for output from the debug_model flag.", ""
 "Delta", "function of strain rates (see Section :ref:`dynam`)", "1/s"
 "default_season", "Season from which initial values of forcing are set.", "winter"
 "denom1", "combination of constants for stress equation", ""
@@ -231,7 +238,6 @@
 "flw", "incoming longwave radiation", "W/m\ :math:`^2`"
 "flwout", "outgoing longwave radiation", "W/m\ :math:`^2`"
 "fm", "Coriolis parameter * mass in U cell", "kg/s"
-"forcing_diag", ":math:`\bullet` write extra diagnostics for forcing inputs", ".false."
 "formdrag", ":math:`\bullet` calculate form drag", ""
 "fpond", "fresh water flux to ponds", "kg/m\ :math:`^2`/s"
 "fr_resp", "bgc respiration fraction", "0.05"
@@ -317,6 +323,7 @@
 "ice_ref_salinity", "reference salinity for ice–ocean exchanges", "4. ppt"
 "icells", "number of grid cells with specified property (for vectorization)", ""
 "iceruf", ":math:`\bullet` ice surface roughness at atmosphere interface", "5.\ :math:`\times`\ 10\ :math:`^{-4}` m"
+"iceruf_ocn", ":math:`\bullet` under-ice roughness (at ocean interface)", "0.03 m"
 "icetmask", "ice extent mask (T-cell)", ""
 "iceumask", "ice extent mask (U-cell)", ""
 "idate", "the date at the end of the current time step (yyyymmdd)", ""
@@ -459,6 +466,8 @@
"ocn_data_type", ":math:`\bullet` source of surface temperature, salinity data", "" "omega", "angular velocity of Earth", "7.292\ :math:`\times`\ 10\ :math:`^{-5}` rad/s" "opening", "rate of ice opening due to divergence and shear", "1/s" + "optics_file", "optics filename associated with modal aerosols", "" + "optics_file_fieldname", "optics file fieldname that is read", "" "**P**", "", "" "p001", "1/1000", "" "p01", "1/100", "" diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index 44ee6f5b0..225ab91b1 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -145,8 +145,13 @@ setup_nml "``cpl_bgc``", "logical", "couple bgc thru driver", "``.false.``" "``days_per_year``", "integer", "number of days in a model year", "365" "``day_init``", "integer", "the initial day of the month if not using restart", "1" + "``debug_forcing``", "logical", "write extra forcing diagnostics", "``.false.``" "``debug_model``", "logical", "write extended model point diagnostics", "``.false.``" - "``debug_model_step``", "logical", "initial timestep to write ``debug_model`` output", "999999999" + "``debug_model_i``", "integer", "local i index of debug_model point", "-1" + "``debug_model_iblk``", "integer", "iblk value for debug_model point", "-1" + "``debug_model_j``", "integer", "local j index of debug_model point", "-1" + "``debug_model_task``", "integer", "mpi task value for debug_model point", "-1" + "``debug_model_step``", "logical", "initial timestep to write ``debug_model`` output", "0" "``diagfreq``", "integer", "frequency of diagnostic output in timesteps", "24" "``diag_type``", "``stdout``", "write diagnostic output to stdout", "``stdout``" "", "``file``", "write diagnostic output to file", "" @@ -159,7 +164,6 @@ setup_nml "", "``1``", "write restart every ``dumpfreq_n`` time step", "" "``dumpfreq_n``", "integer", "write restart frequency with ``dumpfreq``", "1" "``dump_last``", "logical", "write restart on last time step of simulation", "``.false.``" - "``forcing_diag``", "logical", "write extra diagnostics", "``.false.``" "``hist_avg``", "logical", "write time-averaged data", "``.true.``" "``histfreq``", "``d``", "write history every ``histfreq_n`` days", "'1','h','d','m','y'" "", "``h``", "write history every ``histfreq_n`` hours", "" @@ -643,7 +647,7 @@ zbgc_nml "``l_skS``", "real", "z salinity characteristic diffusive scale in m", "7.0" "``max_dfe_doc1``", "real", "max ratio of dFe to saccharides in the ice in nm Fe / muM C", "0.2" "``max_loss``", "real", "restrict uptake to percent of remaining value", "0.9" - "``modal_aero``", "logical", "modal aersols", "``.false.``" + "``modal_aero``", "logical", "modal aerosols", "``.false.``" "``mort_pre_diatoms``", "real", "mortality diatoms", "0.007" "``mort_pre_phaeo``", "real", "mortality phaeocystis", "0.007" "``mort_pre_sp``", "real", "mortality small plankton", "0.007" @@ -654,6 +658,8 @@ zbgc_nml "``mu_max_phaeo``", "real", "maximum growth rate phaeocystis per day", "0.851" "``mu_max_sp``", "real", "maximum growth rate small plankton per day", "0.851" "``nitratetype``", "real", "mobility type between stationary and mobile nitrate", "-1.0" + "``optics_file``", "string", "optics file associated with modal aerosols", "unknown_optics_file" + "``optics_file_fieldname``", "string", "optics file fieldname to read", "unknown_optics_fieldname" "``op_dep_min``", "real", "light attenuates for optical depths exceeding min", "0.1" "``phi_snow``", "real", "snow 
 "``ratio_chl2N_diatoms``", "real", "algal chl to N in mg/mmol diatoms", "2.1"
diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst
index 566d10fbc..8a733f4cc 100644
--- a/doc/source/user_guide/ug_implementation.rst
+++ b/doc/source/user_guide/ug_implementation.rst
@@ -916,15 +916,28 @@ output is written to a log file. The log file unit to which diagnostic
 output is written is set in **ice\_fileunits.F90**. If ``diag_type`` =
 ‘stdout’, then it is written to standard out (or to **ice.log.[ID]** if
 you redirect standard out as in **cice.run**); otherwise it is written
-to the file given by ``diag_file``. In addition to the standard diagnostic
+to the file given by ``diag_file``.
+
+In addition to the standard diagnostic
 output (maximum area-averaged thickness, velocity, average albedo, total
 ice area, and total ice and snow volumes), the namelist options
 ``print_points`` and ``print_global`` cause additional diagnostic
 information to be computed and written. ``print_global`` outputs global
 sums that are useful for checking global conservation of mass and energy.
-``print_points`` writes data for two specific grid points. Currently, one
+``print_points`` writes data for two specific grid points defined by the
+namelist variables ``lonpnt`` and ``latpnt``. By default, one
 point is near the North Pole and the other is in the Weddell Sea; these
-may be changed in **ice\_in**.
+may be changed in **ice\_in**.
+
+The namelist option ``debug_model`` turns on detailed
+debug diagnostics for a single point as the model advances. The point is defined
+by the namelist variables ``debug_model_i``, ``debug_model_j``, ``debug_model_iblk``,
+and ``debug_model_task``. These are the local i, j, block, and mpi task index values
+of the point to be diagnosed. The point is specified in local index space
+and may lie in the array halo. If the local point is not set in the
+namelist, the point associated with ``lonpnt(1)`` and ``latpnt(1)`` is used.
+``debug_model`` is normally used when the model aborts and needs to be debugged
+in detail at a particular (usually failing) grid point.
 
 Timers are declared and initialized in **ice\_timers.F90**, and the code
 to be timed is wrapped with calls to *ice\_timer\_start* and
diff --git a/doc/source/user_guide/ug_testing.rst b/doc/source/user_guide/ug_testing.rst
index 5a289db6a..f2bc62656 100644
--- a/doc/source/user_guide/ug_testing.rst
+++ b/doc/source/user_guide/ug_testing.rst
@@ -667,6 +667,20 @@
 in **configuration/scripts/options**. In particular, **ICE_DRVOPT** and
 **configuration/scripts/Makefile** and create a target for the unit test.
 The unit tests calchk or helloworld can be used as examples.
+The following are brief descriptions of some of the current unit tests:
+
+ - **bcstchk** is a unit test that exercises the methods in ice_broadcast.F90. This test does not
+   depend on the CICE grid to carry out the testing. By running with both the serial and mpi
+   configurations, both sets of software are tested independently and correctness is verified.
+ - **calchk** is a unit test that exercises the CICE calendar over 100,000 years and verifies correctness.
+   This test does not depend on the CICE initialization.
+ - **helloworld** is a simple test that writes out helloworld and uses no CICE infrastructure.
+   This test exists to demonstrate how to build a unit test by specifying the object files directly
+   in the Makefile.
+ - **sumchk** is a unit test that exercises the methods in ice_global_reductions.F90. This test requires
+   that a CICE grid and decomposition be initialized, so CICE_InitMod.F90 is leveraged to initialize
+   the model prior to running a suite of unit validation tests to verify correctness.
+
 
 .. _testreporting:
diff --git a/doc/source/user_guide/ug_troubleshooting.rst b/doc/source/user_guide/ug_troubleshooting.rst
index a8a9c2c4d..f400673ac 100644
--- a/doc/source/user_guide/ug_troubleshooting.rst
+++ b/doc/source/user_guide/ug_troubleshooting.rst
@@ -135,6 +135,18 @@
 conflicts in module dependencies.
 
 `debug\_model` = true (**ice\_in**)
    Print extended diagnostics for the first point associated with `print\_points`.
+
+`debug\_model\_i` = integer (**ice\_in**)
+   Defines the local i index for the point to be diagnosed with `debug\_model`.
+
+`debug\_model\_j` = integer (**ice\_in**)
+   Defines the local j index for the point to be diagnosed with `debug\_model`.
+
+`debug\_model\_iblk` = integer (**ice\_in**)
+   Defines the local iblk value for the point to be diagnosed with `debug\_model`.
+
+`debug\_model\_task` = integer (**ice\_in**)
+   Defines the local task value for the point to be diagnosed with `debug\_model`.
+
 `debug\_model\_step` = integer (**ice\_in**)
    Timestep to start printing diagnostics associated with `debug\_model`.
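[Editor's note] Taken together, the troubleshooting entries above correspond to an **ice_in** fragment like the following sketch. It reuses the hardwired gx3 point from ``set_nml.diagpt1`` and the ``debug_model_step`` value from ``set_nml.bigdiag`` in this patch; the index values are only meaningful for that grid and the 7x2x5x29x12 roundrobin decomposition, and on any other layout they must be recomputed or left at -1 so the ``lonpnt(1)``/``latpnt(1)`` fallback applies:

    &setup_nml
      debug_model      = .true.
      debug_model_step = 4     ! begin printing at istep1 = 4
      debug_model_i    = 3     ! local i index (may lie in the halo)
      debug_model_j    = 22    ! local j index
      debug_model_iblk = 11    ! local block number
      debug_model_task = 0     ! mpi task that owns the block
    /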
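[Editor's note] Similarly, the expanded ``unittest_suite.ts`` earlier in this patch is driven through the usual ``cice.setup`` suite mechanism. A sketch of the invocation from the top-level CICE directory, with the machine, environment, and test ID as placeholders to substitute for your site:

    ./cice.setup --suite unittest_suite --mach <machine> --env <compiler> --testid utest0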