diff --git a/README.md b/README.md index 316231668..0e8029de2 100644 --- a/README.md +++ b/README.md @@ -7,33 +7,36 @@ CICE consists of a top level driver and dynamical core plus the Icepack column p ## Obtaining CICE +A list of the official CICE releases along with release notes is located here: +https://github.com/CICE-Consortium/CICE/releases + If you expect to make any changes to the code, we recommend that you first fork both the CICE and Icepack repositories. Basic instructions for working with CICE and Icepack are found in the [Git Workflow Guidance](https://github.com/CICE-Consortium/About-Us/wiki/Git-Workflow-Guidance), linked from the wikis in the primary code repositories https://github.com/CICE-Consortium/CICE/wiki https://github.com/CICE-Consortium/Icepack/wiki CICE may be obtained in several different ways: [not yet tested] 1. clone the full repository -See [Git Workflow Guidance](https://github.com/CICE-Consortium/About-Us/wiki/Git-Workflow-Guidance) +See [Git Workflow Guidance](https://github.com/CICE-Consortium/About-Us/wiki/Git-Workflow-Guidance) 2. check out only a particular branch, version or tag In the workflow for step 1 above, substitute - git clone -b branch_name --single-branch --recursive https://github.com/CICE-Consortium/CICE.git local_directory_name + git clone -b branch_name https://github.com/CICE-Consortium/CICE.git local_directory_name or use svn - svn co https://github.com/CICE-Consortium/CICE/branch_name + svn co https://github.com/CICE-Consortium/CICE/branch_name where "branch name" can also be a version name -3. download a tarball for a particular version -[how] - -## More information +3. download a tarball for a particular version from the git releases (see above) -Detailed and searchable online documentation of CICE can be found at https://cice-consortium.github.io/CICE/. In this documentation, a [“Quick Start”](https://cice-consortium.github.io/CICE/cice_1_introduction.html#quick-start-guide) subsection is available with instructions for running the model. A [“Testing”](https://cice-consortium.github.io/CICE/cice_3_user_guide.html#testing-cice) subsection with instructions for setting up standard tests (e.g. regression, restart) is also available. +## More Information -In-progress documentation (not yet merged into the main repo): https://duvivier.github.io/CICE/ - -The [wiki](https://github.com/CICE-Consortium/CICE/wiki) pages for each repository contain links to additional information, e.g. -- complete documentation +The [CICE wiki](https://github.com/CICE-Consortium/CICE/wiki) page contains links to additional information, e.g. +- complete documentation - both searchable html and pdf - larger files such as the gx1 grid, land mask, and forcing files - testing data -Test results for both CICE and Icepack can be found in the ["Test-Results" repository](https://github.com/CICE-Consortium/Test-Results). +The [Test-Results wiki](https://github.com/CICE-Consortium/Test-Results/wiki) has test results for both CICE and Icepack. + +The [About-Us repository](https://github.com/CICE-Consortium/About-Us) includes background and supporting information about the CICE Consortium, including how to interact with it. + +See also our [FAQ](https://github.com/CICE-Consortium/About-Us/wiki/FAQ). + + -The ["About-Us" repository](https://github.com/CICE-Consortium/About-Us) includes background and supporting information about the CICE Consortium, including how to interact with it. 
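A minimal sketch of the checkout commands the README section above describes, using placeholder names (branch_name can also be a version or tag name from the releases page; local_directory_name is arbitrary):

    # clone only a particular branch, version, or tag
    git clone -b branch_name https://github.com/CICE-Consortium/CICE.git local_directory_name

    # or the svn equivalent
    svn co https://github.com/CICE-Consortium/CICE/branch_name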
diff --git a/cicecore/cicedynB/analysis/ice_history.F90 b/cicecore/cicedynB/analysis/ice_history.F90 index 32dbe5823..fddba106b 100644 --- a/cicecore/cicedynB/analysis/ice_history.F90 +++ b/cicecore/cicedynB/analysis/ice_history.F90 @@ -5,7 +5,7 @@ ! ! The following variables are currently hard-wired as snapshots ! (instantaneous rather than time-averages): -! divu, shear, sig1, sig2, trsig, mlt_onset, frz_onset, hisnap, aisnap +! divu, shear, sig1, sig2, sigP, trsig, mlt_onset, frz_onset, hisnap, aisnap ! ! Options for histfreq: '1','h','d','m','y','x', where x means that ! output stream will not be used (recommended for efficiency). @@ -300,6 +300,7 @@ subroutine init_hist (dt) call broadcast_scalar (f_shear, master_task) call broadcast_scalar (f_sig1, master_task) call broadcast_scalar (f_sig2, master_task) + call broadcast_scalar (f_sigP, master_task) call broadcast_scalar (f_dvidtt, master_task) call broadcast_scalar (f_dvidtd, master_task) call broadcast_scalar (f_daidtt, master_task) @@ -776,6 +777,11 @@ subroutine init_hist (dt) "norm. principal stress 2", & "sig2 is instantaneous", c1, c0, & ns1, f_sig2) + + call define_hist_field(n_sigP,"sigP","1",ustr2D, ucstr, & + "ice pressure", & + "sigP is instantaneous", c1, c0, & + ns1, f_sigP) call define_hist_field(n_dvidtt,"dvidtt","cm/day",tstr2D, tcstr, & "volume tendency thermo", & @@ -1199,11 +1205,11 @@ subroutine accum_hist (dt) fhocn, fhocn_ai, uatm, vatm, fbot, & fswthru_ai, strairx, strairy, strtltx, strtlty, strintx, strinty, & taubx, tauby, strocnx, strocny, fm, daidtt, dvidtt, daidtd, dvidtd, fsurf, & - fcondtop, fsurfn, fcondtopn, flatn, fsensn, albcnt, prs_sig, & + fcondtop, fsurfn, fcondtopn, flatn, fsensn, albcnt, & stressp_1, stressm_1, stress12_1, & stressp_2, stressm_2, stress12_2, & stressp_3, stressm_3, stress12_3, & - stressp_4, stressm_4, stress12_4, sig1, sig2, & + stressp_4, stressm_4, stress12_4, sig1, sig2, sigP, & mlt_onset, frz_onset, dagedtt, dagedtd, fswint_ai, keffn_top, & snowfrac, alvdr_ai, alvdf_ai, alidr_ai, alidf_ai use ice_arrays_column, only: snowfracn @@ -1922,9 +1928,10 @@ subroutine accum_hist (dt) stressp_1 (:,:,iblk), & stressm_1 (:,:,iblk), & stress12_1(:,:,iblk), & - prs_sig (:,:,iblk), & + strength (:,:,iblk), & sig1 (:,:,iblk), & - sig2 (:,:,iblk)) + sig2 (:,:,iblk), & + sigP (:,:,iblk)) do j = jlo, jhi do i = ilo, ihi @@ -1933,6 +1940,7 @@ subroutine accum_hist (dt) if (n_shear (ns) /= 0) a2D(i,j,n_shear(ns), iblk) = spval if (n_sig1 (ns) /= 0) a2D(i,j,n_sig1(ns), iblk) = spval if (n_sig2 (ns) /= 0) a2D(i,j,n_sig2(ns), iblk) = spval + if (n_sigP (ns) /= 0) a2D(i,j,n_sigP(ns), iblk) = spval if (n_mlt_onset(ns) /= 0) a2D(i,j,n_mlt_onset(ns),iblk) = spval if (n_frz_onset(ns) /= 0) a2D(i,j,n_frz_onset(ns),iblk) = spval if (n_hisnap (ns) /= 0) a2D(i,j,n_hisnap(ns), iblk) = spval @@ -1961,6 +1969,8 @@ subroutine accum_hist (dt) sig1 (i,j,iblk)*avail_hist_fields(n_sig1(ns))%cona if (n_sig2 (ns) /= 0) a2D(i,j,n_sig2(ns),iblk) = & sig2 (i,j,iblk)*avail_hist_fields(n_sig2(ns))%cona + if (n_sigP (ns) /= 0) a2D(i,j,n_sigP(ns),iblk) = & + sigP (i,j,iblk)*avail_hist_fields(n_sigP(ns))%cona if (n_mlt_onset(ns) /= 0) a2D(i,j,n_mlt_onset(ns),iblk) = & mlt_onset(i,j,iblk) if (n_frz_onset(ns) /= 0) a2D(i,j,n_frz_onset(ns),iblk) = & @@ -1972,7 +1982,7 @@ subroutine accum_hist (dt) if (kdyn == 2) then ! 
for EAP dynamics different time of output if (n_trsig (ns) /= 0) a2D(i,j,n_trsig(ns),iblk ) = & - prs_sig(i,j,iblk) + strength(i,j,iblk) else if (n_trsig (ns) /= 0) a2D(i,j,n_trsig(ns),iblk ) = & p25*(stressp_1(i,j,iblk) & diff --git a/cicecore/cicedynB/analysis/ice_history_shared.F90 b/cicecore/cicedynB/analysis/ice_history_shared.F90 index f98f10044..e50c12cbe 100644 --- a/cicecore/cicedynB/analysis/ice_history_shared.F90 +++ b/cicecore/cicedynB/analysis/ice_history_shared.F90 @@ -5,7 +5,7 @@ ! ! The following variables are currently hard-wired as snapshots ! (instantaneous rather than time-averages): -! divu, shear, sig1, sig2, trsig, mlt_onset, frz_onset, hisnap, aisnap +! divu, shear, sig1, sig2, sigP, trsig, mlt_onset, frz_onset, hisnap, aisnap ! ! Options for histfreq: '1','h','d','m','y','x', where x means that ! output stream will not be used (recommended for efficiency). @@ -227,6 +227,7 @@ module ice_history_shared f_strength = 'm', & f_divu = 'm', f_shear = 'm', & f_sig1 = 'm', f_sig2 = 'm', & + f_sigP = 'm', & f_dvidtt = 'm', f_dvidtd = 'm', & f_daidtt = 'm', f_daidtd = 'm', & f_dagedtt = 'm', f_dagedtd = 'm', & @@ -314,6 +315,7 @@ module ice_history_shared f_strength, & f_divu, f_shear , & f_sig1, f_sig2 , & + f_sigP, & f_dvidtt, f_dvidtd , & f_daidtt, f_daidtd , & f_dagedtt, f_dagedtd , & @@ -419,6 +421,7 @@ module ice_history_shared n_strength , & n_divu , n_shear , & n_sig1 , n_sig2 , & + n_sigP , & n_dvidtt , n_dvidtd , & n_daidtt , n_daidtd , & n_dagedtt , n_dagedtd , & diff --git a/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 b/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 index b2a528b9d..7510ce37e 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 @@ -93,7 +93,7 @@ subroutine eap (dt) cosw, sinw, denom1, uvel_init, vvel_init, arlx1i, & evp_prep1, evp_prep2, stepu, evp_finish, & basal_stress_coeff, basalstress - use ice_flux, only: rdg_conv, rdg_shear, prs_sig, strairxT, strairyT, & + use ice_flux, only: rdg_conv, rdg_shear, strairxT, strairyT, & strairx, strairy, uocn, vocn, ss_tltx, ss_tlty, iceumask, fm, & strtltx, strtlty, strocnx, strocny, strintx, strinty, & strocnxT, strocnyT, strax, stray, & @@ -176,7 +176,6 @@ subroutine eap (dt) rdg_shear(i,j,iblk) = c0 divu (i,j,iblk) = c0 shear(i,j,iblk) = c0 - prs_sig(i,j,iblk) = c0 e11(i,j,iblk) = c0 e12(i,j,iblk) = c0 e22(i,j,iblk) = c0 @@ -394,7 +393,6 @@ subroutine eap (dt) yieldstress11 (:,:,iblk), & yieldstress12 (:,:,iblk), & yieldstress22 (:,:,iblk), & - prs_sig (:,:,iblk), & rdg_conv (:,:,iblk), rdg_shear (:,:,iblk), & strtmp (:,:,:)) ! call ice_timer_stop(timer_tmp1) ! dynamics @@ -486,8 +484,8 @@ subroutine eap (dt) if ( basalstress ) then !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks - taubx(:,:,iblk) = Cbu(:,:,iblk)*uvel(:,:,iblk) - tauby(:,:,iblk) = Cbu(:,:,iblk)*vvel(:,:,iblk) + taubx(:,:,iblk) = -Cbu(:,:,iblk)*uvel(:,:,iblk) + tauby(:,:,iblk) = -Cbu(:,:,iblk)*vvel(:,:,iblk) enddo !$OMP END PARALLEL DO endif @@ -1145,7 +1143,6 @@ subroutine stress_eap (nx_block, ny_block, & yieldstress11, & yieldstress12, & yieldstress22, & - prs_sig, & rdg_conv, rdg_shear, & strtmp) @@ -1196,7 +1193,6 @@ subroutine stress_eap (nx_block, ny_block, & real (kind=dbl_kind), dimension (nx_block,ny_block), & intent(inout) :: & - prs_sig , & ! replacement pressure, for stress calc shear , & ! strain rate II component (1/s) divu , & ! strain rate I component, velocity divergence (1/s) e11 , & ! 
components of strain rate tensor (1/s) @@ -1353,8 +1349,6 @@ subroutine stress_eap (nx_block, ny_block, & e22(i,j) = p5*p25*(divune + divunw + divuse + divusw - & tensionne - tensionnw - tensionse - tensionsw) * tarear(i,j) - prs_sig(i,j) = strength(i,j) - !----------------------------------------------------------------- ! elastic relaxation, see Eq. A12-A14 !----------------------------------------------------------------- diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 index 18abfb04f..eb211681f 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 @@ -80,7 +80,7 @@ subroutine evp (dt) use ice_blocks, only: block, get_block, nx_block, ny_block use ice_domain, only: nblocks, blocks_ice, halo_info, maskhalo_dyn use ice_domain_size, only: max_blocks, ncat - use ice_flux, only: rdg_conv, rdg_shear, prs_sig, strairxT, strairyT, & + use ice_flux, only: rdg_conv, rdg_shear, strairxT, strairyT, & strairx, strairy, uocn, vocn, ss_tltx, ss_tlty, iceumask, fm, & strtltx, strtlty, strocnx, strocny, strintx, strinty, & strocnxT, strocnyT, strax, stray, & @@ -177,7 +177,6 @@ subroutine evp (dt) rdg_shear(i,j,iblk) = c0 divu (i,j,iblk) = c0 shear(i,j,iblk) = c0 - prs_sig(i,j,iblk) = c0 enddo enddo @@ -352,7 +351,6 @@ subroutine evp (dt) stress12_1(:,:,iblk), stress12_2(:,:,iblk), & stress12_3(:,:,iblk), stress12_4(:,:,iblk), & shear (:,:,iblk), divu (:,:,iblk), & - prs_sig (:,:,iblk), & rdg_conv (:,:,iblk), rdg_shear (:,:,iblk), & strtmp (:,:,:) ) ! endif ! yield_curve @@ -420,8 +418,8 @@ subroutine evp (dt) if ( basalstress ) then !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks - taubx(:,:,iblk) = Cbu(:,:,iblk)*uvel(:,:,iblk) - tauby(:,:,iblk) = Cbu(:,:,iblk)*vvel(:,:,iblk) + taubx(:,:,iblk) = -Cbu(:,:,iblk)*uvel(:,:,iblk) + tauby(:,:,iblk) = -Cbu(:,:,iblk)*vvel(:,:,iblk) enddo !$OMP END PARALLEL DO endif @@ -551,7 +549,6 @@ subroutine stress (nx_block, ny_block, & stress12_1, stress12_2, & stress12_3, stress12_4, & shear, divu, & - prs_sig, & rdg_conv, rdg_shear, & str ) @@ -588,7 +585,6 @@ subroutine stress (nx_block, ny_block, & real (kind=dbl_kind), dimension (nx_block,ny_block), & intent(inout) :: & - prs_sig , & ! replacement pressure, for stress calc shear , & ! strain rate II component (1/s) divu , & ! strain rate I component, velocity divergence (1/s) rdg_conv , & ! convergence term for ridging (1/s) @@ -698,7 +694,6 @@ subroutine stress (nx_block, ny_block, & c0nw = strength(i,j)/max(Deltanw,tinyarea(i,j)) c0sw = strength(i,j)/max(Deltasw,tinyarea(i,j)) c0se = strength(i,j)/max(Deltase,tinyarea(i,j)) - prs_sig(i,j) = c0ne*Deltane ! northeast c1ne = c0ne*arlx1i c1nw = c0nw*arlx1i diff --git a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 index 44185ec8c..9391f6bf2 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 @@ -851,6 +851,8 @@ subroutine basal_stress_coeff (nx_block, ny_block, icellu, & uold, vold, & Cbu) + use ice_constants, only: c0, c1 + integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions icellu ! no. of cells where icetmask = 1 @@ -900,11 +902,9 @@ subroutine basal_stress_coeff (nx_block, ny_block, icellu, & hcu = au * hwu / k1 ! 2- calculate stress factor - if (au > p01 .and. hu > hcu ) then - ! 
endif - Cbu(i,j) = ( k2 / (sqrt(uold(i,j)**2 + vold(i,j)**2) + u0) ) & - * (hu - hcu) * exp(-CC * (1 - au)) - endif + + Cbu(i,j) = ( k2 / (sqrt(uold(i,j)**2 + vold(i,j)**2) + u0) ) & + * max(c0,(hu - hcu)) * exp(-CC * (c1 - au)) enddo ! ij @@ -919,8 +919,9 @@ end subroutine basal_stress_coeff subroutine principal_stress(nx_block, ny_block, & stressp_1, stressm_1, & - stress12_1, prs_sig, & - sig1, sig2) + stress12_1, strength, & + sig1, sig2, & + sigP) use ice_constants, only: spval_dbl, p5, c4 @@ -931,11 +932,12 @@ subroutine principal_stress(nx_block, ny_block, & stressp_1 , & ! sigma11 + sigma22 stressm_1 , & ! sigma11 - sigma22 stress12_1, & ! sigma12 - prs_sig ! replacement pressure, for stress calc + strength ! for normalization of sig1 and sig2 real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out):: & - sig1 , & ! principal stress component - sig2 ! principal stress component + sig1 , & ! normalized principal stress component + sig2 , & ! normalized principal stress component + sigP ! internal ice pressure (N/m) ! local variables @@ -950,16 +952,21 @@ subroutine principal_stress(nx_block, ny_block, & do j = 1, ny_block do i = 1, nx_block - if (prs_sig(i,j) > puny) then + if (strength(i,j) > puny) then + ! ice internal pressure + sigP(i,j) = -p5*stressp_1(i,j) + + ! normalized principal stresses sig1(i,j) = (p5*(stressp_1(i,j) & + sqrt(stressm_1(i,j)**2+c4*stress12_1(i,j)**2))) & - / prs_sig(i,j) + / strength(i,j) sig2(i,j) = (p5*(stressp_1(i,j) & - sqrt(stressm_1(i,j)**2+c4*stress12_1(i,j)**2))) & - / prs_sig(i,j) + / strength(i,j) else sig1(i,j) = spval_dbl sig2(i,j) = spval_dbl + sigP(i,j) = spval_dbl endif enddo enddo diff --git a/cicecore/cicedynB/general/ice_flux.F90 b/cicecore/cicedynB/general/ice_flux.F90 index 15324491c..5476c1c10 100644 --- a/cicecore/cicedynB/general/ice_flux.F90 +++ b/cicecore/cicedynB/general/ice_flux.F90 @@ -58,8 +58,9 @@ module ice_flux ! diagnostic real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), public :: & - sig1 , & ! principal stress component - sig2 , & ! principal stress component + sig1 , & ! normalized principal stress component + sig2 , & ! normalized principal stress component + sigP , & ! internal ice pressure (N/m) taubx , & ! basal stress (x) (N/m^2) tauby , & ! basal stress (y) (N/m^2) strairx , & ! stress on ice by air, x-direction @@ -108,7 +109,6 @@ module ice_flux ! internal real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), public :: & - prs_sig , & ! replacement pressure, for stress calc fm , & ! Coriolis param. * mass in U-cell (kg/s) Cbu ! coefficient for basal stress (landfast ice) @@ -721,7 +721,6 @@ subroutine init_history_dyn if (tr_iage) & dagedtd (:,:,:) = trcr(:,:,nt_iage,:) ! temporary initial age fm (:,:,:) = c0 - prs_sig (:,:,:) = c0 ardgn (:,:,:,:) = c0 vrdgn (:,:,:,:) = c0 krdgn (:,:,:,:) = c1 diff --git a/cicecore/cicedynB/infrastructure/io/io_binary/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_binary/ice_history_write.F90 index 863d7314c..fcb0549f4 100644 --- a/cicecore/cicedynB/infrastructure/io/io_binary/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_binary/ice_history_write.F90 @@ -147,7 +147,7 @@ subroutine ice_write_hist(ns) if (histfreq(ns) == '1' .or. .not. hist_avg & .or. n==n_divu(ns) .or. n==n_shear(ns) & ! snapshots .or. n==n_sig1(ns) .or. n==n_sig2(ns) & - .or. n==n_trsig(ns) & + .or. n==n_sigP(ns) .or. n==n_trsig(ns) & .or. n==n_mlt_onset(ns) .or. n==n_frz_onset(ns) & .or. n==n_hisnap(ns) .or. 
n==n_aisnap(ns)) then write (nu_hdr, 996) nrec,trim(avail_hist_fields(n)%vname), & diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 index 899302b6f..103d6a6fa 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 @@ -505,7 +505,8 @@ subroutine ice_write_hist (ns) !----------------------------------------------------------------- if (hist_avg) then if (TRIM(avail_hist_fields(n)%vname)/='sig1' & - .or.TRIM(avail_hist_fields(n)%vname)/='sig2') then + .or.TRIM(avail_hist_fields(n)%vname)/='sig2' & + .or.TRIM(avail_hist_fields(n)%vname)/='sigP') then status = nf90_put_att(ncid,varid,'cell_methods','time: mean') if (status /= nf90_noerr) call abort_ice( & 'Error defining cell methods for '//avail_hist_fields(n)%vname) @@ -515,7 +516,7 @@ subroutine ice_write_hist (ns) if (histfreq(ns) == '1' .or. .not. hist_avg & .or. n==n_divu(ns) .or. n==n_shear(ns) & ! snapshots .or. n==n_sig1(ns) .or. n==n_sig2(ns) & - .or. n==n_trsig(ns) & + .or. n==n_sigP(ns) .or. n==n_trsig(ns) & .or. n==n_mlt_onset(ns) .or. n==n_frz_onset(ns) & .or. n==n_hisnap(ns) .or. n==n_aisnap(ns)) then status = nf90_put_att(ncid,varid,'time_rep','instantaneous') diff --git a/cicecore/cicedynB/infrastructure/io/io_pio/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_pio/ice_history_write.F90 index 5c2e8b7d0..4d43e9e13 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio/ice_history_write.F90 @@ -425,7 +425,8 @@ subroutine ice_write_hist (ns) ! Add cell_methods attribute to variables if averaged if (hist_avg .and. histfreq(ns) /= '1') then if (TRIM(avail_hist_fields(n)%vname)/='sig1' & - .or.TRIM(avail_hist_fields(n)%vname)/='sig2') then + .or.TRIM(avail_hist_fields(n)%vname)/='sig2' & + .or.TRIM(avail_hist_fields(n)%vname)/='sigP') then status = pio_put_att(File,varid,'cell_methods','time: mean') endif endif @@ -433,7 +434,7 @@ subroutine ice_write_hist (ns) if (histfreq(ns) == '1' .or. .not. hist_avg & .or. n==n_divu(ns) .or. n==n_shear(ns) & ! snapshots .or. n==n_sig1(ns) .or. n==n_sig2(ns) & - .or. n==n_trsig(ns) & + .or. n==n_sigP(ns) .or. n==n_trsig(ns) & .or. n==n_mlt_onset(ns) .or. n==n_frz_onset(ns) & .or. n==n_hisnap(ns) .or. 
n==n_aisnap(ns)) then status = pio_put_att(File,varid,'time_rep','instantaneous') diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index f543b2e57..44e57a0f0 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -420,6 +420,7 @@ f_shear = 'm' f_sig1 = 'm' f_sig2 = 'm' + f_sigP = 'm' f_dvidtt = 'm' f_dvidtd = 'm' f_daidtt = 'm' diff --git a/configuration/scripts/machines/env.fram_intel b/configuration/scripts/machines/env.fram_intel index d28ac57a3..c7499e737 100755 --- a/configuration/scripts/machines/env.fram_intel +++ b/configuration/scripts/machines/env.fram_intel @@ -4,10 +4,11 @@ #source NEMO_compiler.ksh setenv ICE_MACHINE_ENVNAME fram +setenv ICE_MACHINE_COMPILER intel setenv ICE_MACHINE_MAKE make -setenv ICE_MACHINE_WKDIR /users/dor/armn/jfl/local1/CICE6/tests/CICE_RUNS -setenv ICE_MACHINE_INPUTDATA /fs/cetus/fs1/cmd/e/afsg/fdu/models/fdm/CICE/code/cice_6.0/CICE/configuration/data/gx3Ncar -setenv ICE_MACHINE_BASELINE /users/dor/armn/jfl/local1/CICE6/tests/CICE_BASELINE +setenv ICE_MACHINE_WKDIR /users/dor/armn/jfl/local1/CICE6dev/CICE/tests/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /users/dor/armn/jfl/local1/CICE6/CICE/configuration/data/gx3Ncar +setenv ICE_MACHINE_BASELINE /users/dor/armn/jfl/local1/CICE6dev/CICE/tests/CICE_BASELINE setenv ICE_MACHINE_SUBMIT "qsub" setenv ICE_MACHINE_TPNODE 36 setenv ICE_MACHINE_ACCT P0000000 diff --git a/doc/.DS_Store b/doc/.DS_Store index edbe77ac4..635694845 100644 Binary files a/doc/.DS_Store and b/doc/.DS_Store differ diff --git a/doc/CICE-Consortium.pdf b/doc/CICE-Consortium.pdf deleted file mode 100644 index 1a80a3b23..000000000 Binary files a/doc/CICE-Consortium.pdf and /dev/null differ diff --git a/doc/about-sphinx-documentation.txt b/doc/about-sphinx-documentation.txt deleted file mode 100644 index 8e3bf471f..000000000 --- a/doc/about-sphinx-documentation.txt +++ /dev/null @@ -1,85 +0,0 @@ -Basic information about creating documentation with Sphinx -Alice DuVivier -July 14, 2017 ----------------------------------------------------------- - -INSTALLING SPHINX -You first need to install sphinx on your local machine. See here for more info on how to do this: -https://github.com/NCAR/CICE/wiki/Working-with-CICE-documentation-on-github-and-sphinx -I just had the NCAR ISG folks do it. Note that you will need the sphinxcontrib.bibtex library. - -CONVERTING LATEX TO RST CODE -If you start from a LaTeX (*.tex) document you will need to convert this to the restructured -text (*.rst) format that sphinx requires. A handy tool to do this is pandoc, which you can -install quickly and run from the command line. See here: http://pandoc.org/getting-started.html -The basic syntax is that at the command line you enter: -> panda NAMEIN.tex -f latex -t rst -s -ou NAMEOUT.rst -From here you can just start editing the NAMEOUT.rst files directly for sphinx. Note that this -did a beautiful job of converting the text, equations, and many tables. However, equation -numbering, section linking, references, figures, and some tables required more hands on care -to be sure they render correctly. -- Note: pandoc requires that the .tex files be in utf-8 encoding. To easily do this open the *.tex -document in Emacs then do ctrl-x ctrl-m f and you will be prompted to enter encoding type. Just -type in utf-8 and hit enter. Then save with ctrl-x ctrl-s . You are done and the document can be -converted with panda. 
- -CREATING AN ORPHAN BRANCH -To create documentation for a repository on GitHub we want to create an orphan branch for that -repository. An orphan branch is a branch that has no history in common with the master repo off -of which it was forked. In this case, the purpose for this is that the html pages created by -sphinx will exist in the orphan branch while the source *.rst code used to create the html exists -in the master repository. This way changes to the source code go into the evolving repository, but -changes to the html are isolated to the orphan branch and when you try to merge it avoids conflicts -related to the html that aren’t always caught by GitHub and could cause the documentation to break. - -To create an orphan branch, you will need owner status for a repository. Steps to create orphan branch: -On GitHub website create personal fork of the repo you’re interested in -On GitHub website from your fork, get link for the repository -On local machine: -> git clone URL -> git branch --l -> git checkout --orphan gh-pages -(creates the orphan branch gh-pages from master. Should have all documents in master to start) -> git rm -rf . -(removes everything in this repository. i.e. we are making it so their histories diverge) -> git status . -(check if all the files are gone) -> git echo “Documentation link goes here” > README.md -(it’s still nice to have a readme file) -> git add README.md -(this is all that will be in the branch) -> git commit -m “Initial commit to create gh-pages branch of repo” -> git remote --v -(get list of remote repositories and links. Should show your personal fork as “origin”) -> git push origin gh-pages -(adds the orphan branch to your local fork but not the original repo from which you forked) -> git remote add upstream URL -(add the URL for the original repository as your “upstream” repository. Get this from Github website) -> git remote --v -(verify that you now have an upstream remote URL too) -> git branch --l -(verify you are on the gh-pages branch only so that these changes will be pushed) -**** BE CAREFUL **** -> git push upstream gh-pages -(this pushes the gh-pages branch to the upstream repo. You may also want to try doing a pull request instead so that -others have a chance to look at what you’ve done. I did it both ways, I’d prefer the pull request method for the future). - -Now you have set up the orphan branch, but you need to build the sphinx stuff for it: -> git checkout master -(switch to master branch in order to set up doc/source directory it will use and other sphinx stuff) -> cd doc -> mkdir source -> sphinx-quickstart -(Your local machine will prompt you with interactive options to choose. I had Alice B.’s guidance to do this but could look at an old conf.py file to see what options were chosen. You only do this once for a repo but conf.py can be changed later on). -make/copy .gitignore from CICE repo to the main Icepack repository. Want to ignore html code when pushing to master branch. Also want to ignore emacs backup files (*~). -> git status -(should show the files conf.py, .gitignore, others? to initialize the sphinx stuff) -> git add . -> git commit -m “Initial setup for sphinx documentation” -> git push origin master -(push pages to your local master. Then either push upstream if you are an owner or *preferably* do a pull request to merge with the original repository) - -Make local gh-pages version for just the html. 
This is convenient so that you can have the master repo where you make code and another repo for the gh-pages branch from which you push the html. -> git clone -b gh-pages — single-branch https://github.com/duvivier/Icepack.git Icepack.gh-pages -(makes local copy of just gh-pages branch) - diff --git a/doc/cicedoc.pdf b/doc/cicedoc.pdf deleted file mode 100644 index 682f5f380..000000000 Binary files a/doc/cicedoc.pdf and /dev/null differ diff --git a/doc/requirements.txt b/doc/requirements.txt new file mode 100644 index 000000000..8788d6ac3 --- /dev/null +++ b/doc/requirements.txt @@ -0,0 +1,5 @@ +# +# +sphinxcontrib-bibtex +# +# diff --git a/doc/source/.DS_Store b/doc/source/.DS_Store new file mode 100644 index 000000000..434378d52 Binary files /dev/null and b/doc/source/.DS_Store differ diff --git a/doc/source/cice_1_introduction.rst b/doc/source/cice_1_introduction.rst deleted file mode 100644 index 7edf1bb9d..000000000 --- a/doc/source/cice_1_introduction.rst +++ /dev/null @@ -1,287 +0,0 @@ -:tocdepth: 3 - -Introduction - CICE5 -============================================ - -The Los Alamos sea ice model (CICE) is the result of an effort to -develop a computationally efficient sea ice component for a fully -coupled atmosphere--land global climate model. It was -designed to be compatible with the Parallel Ocean Program -(POP), an ocean circulation model developed at -Los Alamos National Laboratory for use on massively parallel computers -:cite:`SDM92,DSM93,DSM94`. The current version of the -model has been enhanced greatly through collaborations with members of -the community. - -CICE has several interacting components: a thermodynamic model that -computes local growth rates of snow and ice due to vertical conductive, -radiative and turbulent fluxes, along with snowfall; a model of ice -dynamics, which predicts the velocity field of the ice pack based on -a model of the material strength of the ice; a transport model that -describes advection of the areal concentration, ice volumes and other -state variables; and a ridging parameterization that transfers ice among -thickness categories based on energetic balances and -rates of strain.External routines would prepare and execute data exchanges with an -external "flux coupler," which then passes the data to other climate -model components such as POP. - -This model release is CICE version 5.1, available from http://oceans11.lanl.gov/trac/CICE/wiki. -It updates CICE5.0, which was released in September 2013. With so many new parameterizations, -we must admit that all combinations have not been tested. Also, different parameterizations for -various sub-processes (e.g., snow infiltration by melt or sea water) have been introduced as part -of the new code options, which need to be unified or pulled out as separate options. - -This document uses the following text conventions: -Variable names used in the code are ``typewritten``. -Subroutine names are given in *italic*. -File and directory names are in **boldface**. -A comprehensive :ref:`index`, including glossary of symbols with many of their values, appears -at the end of this guide. - -================= -Quick Start guide -================= - -~~~~~~~~~~~~~ -Get the model -~~~~~~~~~~~~~ - -Checkout the model from the CICE-Consortium repository, - - github.com/CICE-Consortium - -For more details about how to work in github with CICE, a document can be -found `here `_. 
- -~~~~~~~~~~~~~~~~~ -Running the model -~~~~~~~~~~~~~~~~~ - -> cd consortium - -> ./create.case -c ~/mycase1 -g gx3 -m thunder -s diag1,thread -p 8x1 - -> cd ~/mycase1 - -> ./cice.build - -> ./cice.submit/Users/duvivier/Documents/Research/github/CICE-Consortium/CICE/doc/source/all_orig/cice_2_quick_start.rst - -~~~~~~~~~~~~ -More Details -~~~~~~~~~~~~ - -create.case generates a case, use "create.case -h" for help with the tool. - -c is the case name and location (required) - - -m is the machine name (required). Currently, there are working ports for NCAR cheyenne, AFRL thunder, NavyDSRC gordon and conrad, and LANL’s wolf machines. - - -g is the resolution (default is gx3) - - -p is the task x thread/task values (default is 4x1) - - -s are comma separated optional env or namelist settings (default is "null") - - -t is the test name and location (cannot be used with -c). - - -bd is used to specify the location of the baseline datasets (only used with -t) - - -bg is used to specify the cice version name for generating baseline datasets (only used with -t) - - -bc is used to specify the cice versoin name for comparison. I.e., the version name for the baseline dataset (only used with -t) - - -testid is used to specify a test ID (used only with -t or -ts) - - -ts is used to generate all test cases for a given test suite. - - -Several files are placed in the case directory - - - env.${machine} defines the environment - - - cice.settings defines many variables associated with building and running the model - - - makdep.c is a tool that will automatically generate the make dependencies - - - Macros.${machine} defines the Makefile Macros - - - Makefile is the makefile used to build the model - - - cice.build is a script that build the model - - - ice_in is the namelist file - - - cice.run is a batch run script - - - cice.submit is a simple script that submits the cice.run script - -Once the case is created, all scripts and namelist are fully resolved. Users can edit any -of the files in the case directory manually to change the model configuration. The file -dependency is indicated in the above list. For instance, if any of the files before -cice.build in the list are edited, cice.build should be rerun. - -The casescripts directory holds scripts used to create the case and can largely be ignored. - -In general, when cice.build is executed, the model will build from scratch due to the large -dependence on cpps. To change this behavior, edit the env variable ICE_CLEANBUILD in -cice.settings. - -The cice.submit script just submits the cice.run script. You can use cice.submit or just -submit the cice.run script on the command line. - -The model will run in the directory defined by the env variable CICE_RUNDIR in cice.settings. -Build and run logs will be copied into the case logs directory when complete. - -To port, an env.machine and Macros.machine file have to be added to scripts/machines and the cice.run.setup.csh file needs to be modified. - - cd to consortium/scripts/machines - - Copy an existing env and Macros file to new names for your new machine - - Edit the env and Macros file - - cd to consortium/scripts - - Edit the cice.run.setup.csh script to add a section for your machine for the batch settings and for the job launch settings - - Download and untar the 1997 dataset to the location defined by ICE_MACHINE_INPUTDATA in the env file - - Create a file in your home directory called .cice_proj and add your preferred account name to the first line. - - You can now create a case and test. 
If there are problems, you can manually edit the env, Macros, and cice.run files in the case directory until things are working properly. Then you can copy the env and Macros files back to consortium/scripts/machines. You will have to manually modify the cice.run.setup.csh script if there any changes needed there. - -~~~~~~~~~~~~ -Forcing data -~~~~~~~~~~~~ - -The code is currently configured to run in standalone mode on a 3 degree grid using -atmospheric data from 1997, available as detailed on the `wiki `_. -These data files are designed only for testing the code, not for use in production -runs or as observational data. Please do not publish results based on these data -sets. Module cicecore/dynamics/cicedynB/ice_forcing.F90 can be modified to change the -forcing data. - -As currently configured, the model runs on 4 processors. MPI is used for message passing -between processors, and OpenMP threading is available. The grid provided here is too -small for the code to scale well beyond about 8 processors. A 1 degree grid is provided also, -and details about this grid can be found on the `wiki `_. - -~~~~~~~~~~~~~~~~ -Online resources -~~~~~~~~~~~~~~~~ - -**DO WE WANT TO KEEP THESE?** - -primary wiki page: - - -FAQ: - - -instructions for code developers: - - -ongoing or planned development projects: - - -list of users and publications: - - -Please send references to your publications using the CICE model to ... - - -Please report any bugs to -Elizabeth Hunke (eclare@lanl.gov) - -Good luck! - - -============= -Major updates -============= - -~~~~~~~~~ -CICE V5.1 -~~~~~~~~~ - -- include ice velocity in atm-ice coupling updates (e.g. stress) for high-frequency coupling -- allow a variable coefficient for the ice-ocean heat flux -- several new namelist options improve flexibility, especially for coupled model configurations: - - ice-ocean heat flux - - 'virtual' or 'real' topo melt pond water - - ocean freezing temperature - - high-frequency coupling - - coupling and computational grids may be different - - and more -- additional software enhancements improve flexibility and compatibility with CESM, Hadley Centre, and U.S. Navy coupled models -- new diagnostics and updated documentation -- various bug fixes - -~~~~~~~~~ -CICE V5.0 -~~~~~~~~~ - -- A method for prognosing sea ice salinity, including improved snow-ice formation -- Two new explicit melt pond parameterizations (topo and level-ice) -- Sea ice biogeochemistry -- Elastic-Anisotropic-Plastic rheology -- Improved parameterization of form drag -- The "revised EVP" under-damping approach -- Gracefully handles the case when an internal layer melts completely -- Gregorian calendar with leap years -- Reduced memory and floating-point operations for tracer calculations -- Ice and snow enthalpy defined as tracers -- New history variables for melt ponds, ridging diagnostics, biogeochemistry and more -- Read/write variables on the extended grid, including ghost (halo) cells -- Parallelism option via OpenMP threads -- Improved parallel efficiency through halo masks and new block distributions -- Parallel I/O option via the PIO library -- Restarts in binary or netCDF formats -- CPP options for categories, layers and tracers -- Corrected bugs, particularly for nonstandard configurations. 
- -====================== -Acknowledgements -====================== -This work has been supported under the Department of Energy’s Climate, -Ocean and Sea Ice Modeling project through the Computer Hardware Applied -Mathematics and Model Physics (CHAMMP) program, Climate Change -Prediction Program (CCPP), Improving the Characterization of Clouds, -Aerosols and the Cryosphere in Climate Models (Cloud-Cryo) program and -Scientific Discovery through Advanced Computing (SCIDAC) program, with -additional support from the T-3 Fluid Dynamics and Solid Mechanics Group -at Los Alamos National Laboratory. Special thanks are due to the -following people: - -- members of the CESM Polar Climate Working Group, including David - Bailey, Alice DuVivier, Cecilia Bitz, Bruce Briegleb, Tony Craig, - Marika Holland, John Dennis, Julie Schramm, Bonnie Light and Phil Jones. - -- Andrew Roberts of the Naval Postgraduate School, - -- David Hebert and Olivier Lecomte for their melt pond work, - -- Jonathan Gregory of the University of Reading and the U.K. MetOffice - for supplying tripole T-fold code and documentation, - -- Alison McLaren, Ann Keen and others working with the Hadley Centre - GCM for testing non-standard model configurations and providing their - code to us, - -- Daniel Feltham and his research group for several new - parameterizations and documentation, - -- Sylvain Bouillon for the revised EVP approach, - -- the many researchers who tested beta versions of CICE 5 and waited - patiently for the official release. - -====================== -Copyright -====================== -© Copyright 2013, LANS LLC. All rights reserved. Unless otherwise -indicated, this information has been authored by an employee or -employees of the Los Alamos National Security, LLC (LANS), operator of -the Los Alamos National Laboratory under Contract No. DE-AC52-06NA25396 -with the U.S. Department of Energy. The U.S. Government has rights to -use, reproduce, and distribute this information. The public may copy and -use this information without charge, provided that this Notice and any -statement of authorship are reproduced on all copies. Neither the -Government nor LANS makes any warranty, express or implied, or assumes -any liability or responsibility for the use of this information. -Beginning with version 4.0, the CICE code carries Los Alamos Software -Release number LA-CC-06-012. - - diff --git a/doc/source/cice_4_index.rst b/doc/source/cice_index.rst similarity index 99% rename from doc/source/cice_4_index.rst rename to doc/source/cice_index.rst index fc1abd123..9a620aab8 100644 --- a/doc/source/cice_4_index.rst +++ b/doc/source/cice_index.rst @@ -12,10 +12,6 @@ values. For other namelist options, see Section :ref:`tab-namelist`. All quantities in the code are expressed in MKS units (temperatures may take either Celsius or Kelvin units). -================================ -Comprehensive Alphabetical Index -================================ - .. csv-table:: Alphabetical Index :header: " ", " ", " " :widths: 15, 30, 15, 1 diff --git a/doc/source/conf.py b/doc/source/conf.py index 674b348f7..9d277bed4 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -62,7 +62,7 @@ # built documents. # # The short X.Y version. -version = u'6.0.0' +version = u'6.0.0.alpha' # The full version, including alpha/beta/rc tags. 
release = u'6.0.0.alpha' diff --git a/doc/source/developer_guide/.DS_Store b/doc/source/developer_guide/.DS_Store new file mode 100644 index 000000000..5008ddfcf Binary files /dev/null and b/doc/source/developer_guide/.DS_Store differ diff --git a/doc/source/developer_guide/dg_about.rst b/doc/source/developer_guide/dg_about.rst new file mode 100755 index 000000000..a88c94175 --- /dev/null +++ b/doc/source/developer_guide/dg_about.rst @@ -0,0 +1,31 @@ +:tocdepth: 3 + +.. _about_dev: + +About Development +================== + +The Icepack model consists of three different parts, the column physics +code, the icepack driver, and the scripts. Development of each of these +pieces will be described below separately. + +Subroutine calls and other linkages into Icepack from the host model should only +need to access the **icepack\_intfc\*.F90** interface modules within the +``columnphysics/`` directory. +The Icepack driver in the ``configuration/driver/`` directory is based on the CICE +model and provides an example of the sea ice host model capabilities needed for inclusion +of Icepack. In particular, host models will need to include code equivalent to that +in the modules **icedrv\_\*_column.F90**. Calls into the Icepack interface routines +are primarily from **icedrv\_step\_mod.F90** but there are others (search the driver code +for ``intfc``). + +Guiding principles for the creation of Icepack include the following: + - The column physics modules shall be independent of all sea ice model infrastructural + elements that may vary from model to model. Examples include input/output, timers, + references to CPUs or computational tasks, initialization other than that necessary for + strictly physical reasons, and anything related to a horizontal grid. + - The column physics modules shall not call or reference any routines or code that + reside outside of the **columnphysics/** directory. + - Any capabilities required by a host sea ice model (e.g. calendar variables, tracer + flags, diagnostics) shall be implemented in the driver and passed into or out of the + column physics modules via array arguments. diff --git a/doc/source/developer_guide/dg_documentation.rst b/doc/source/developer_guide/dg_documentation.rst new file mode 100755 index 000000000..a3b600284 --- /dev/null +++ b/doc/source/developer_guide/dg_documentation.rst @@ -0,0 +1,264 @@ +:tocdepth: 3 + +.. _doc: + +Documentation System +==================== + +With Icepack development, corresponding updates or modification to the Icepack +documentation are required. Whenever you modify the model you should update +documentation. Icepack uses Sphinx to create online HTML documentation. + +FAQs +---- + +1) What are Sphinx and reStructuredText (RST)? + + The CICE and Icepack documentation is written using reStructuredText (RST) markup language. + ReStructuredText is a markup language, like HTML, markdown or LaTeX. Sphinx is a python tool + for publishing RST documents in other formats such as HTML and PDF. Additional information + about using RST and Sphinx are found in the sections below. + +2) What is expected of *me* when changing the documentation? + + We expect that if you need to add or modify documentation that you will be able to modify the + RST source files and generate HTML using Sphinx in order to review the HTML documentation. We + will review the RST and HTML during a Pull Request to verify it is working properly and is consistent + with the rest of the CICE-Consortium documentation format. 
+ + We do not expect you to generate a PDF of your documentation changes and include this as part of a + Pull Request. Updated PDF documentation will be generated for each new release. The online HTML + documentation, however, will be updated regularly as part of the code development workflow. + +3) Where are the documentation files kept? + + A PDF of the latest release documentation is available in the master branch at /doc/Icepack-v*.pdf. + This document is generated by the CICE-Consortium team and will be updated with releases. The HTML + on the website is kept up to date with the developing code, and is therefore updated more often than the PDF. + + The RST source files for generating html are stored in the master branch of the repository under /doc/source/. + + On your local fork, when you modify RST and use it to generate HTML, the HTML files are created in the + /doc/build/html/ directory and can be opened locally for review as you modify the documentation. + + For the CICE Consortium, the tested and vetted HTML pages brought in through successful Pull Requests are + stored in the gh-pages branch of the repository and accessible from the URL to the linked, searchable + User's Guide in the README.md file at the top level of the Icepack repository or on the main wiki page + for Icepack (https://github.com/CICE-Consortium/Icepack/wiki). Note that gh-pages is an orphan + branch (see below for details) of the repository and has *only* the Sphinx generated HTML documentation + and nothing else. + + +Steps for Modifying Documentation +--------------------------------- + +Installing Sphinx +~~~~~~~~~~~~~~~~~~ + +This must be done once on each platform. See `Sphinx `_ or +`Installing Sphinx `_ for details. Below are the +commands for installing Sphinx on a mac laptop at the command line. +Other platforms may require other steps. :: + + $ sudo pip install --ignore-installed sphinx + $ sudo pip install --ignore-installed sphinxcontrib-bibtex + +The CICE Consortium uses the following software to get successful Sphinx HTML builds, including linked +references: + +* python 2.7.11 + +* Sphinx (1.6.3) + +* sphinx-rtd-theme (0.1.9) + +* sphinxcontrib-bibtex (0.3.5) + +* sphinxcontrib-websupport (1.0.1) + +You will need to use the CICE Consortium's conf.py file, which is found under /doc/source/conf.py in the repository +in order to build the Sphinx documentation. + +To use linked references within the HTML you will need to have the sphinxcontrib-bibtex package as well as the zreferences.rst and master_list.bib files located in /doc/source/ in the master repository. The list of references in master_list.bib is currently ordered sequentially from oldest to newest and alphabetically within a given year. To add references for your documentation, edit the master_list.bib file using the Articles and Books as examples for your addition(s). Please follow the format for ordering the date/alphabetization as well as including a URL with the document's DOI. + +Model sandbox and documentation +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Follow the general `CICE-Consortium Git Workflow and Developer's guide `_ +to clone the repository and create your personal fork for model modifications. Whenever you modify the model +you should update documentation. You can update the documentation on the same branch of your fork on which +you test code, or you can create a separate branch called documentation to test only the RST and HTML documentation.
+ +Editing RST files +~~~~~~~~~~~~~~~~~~ + +Open the RST file using a text editor and make the changes necessary. Note that from the User's Guide documentation (see link above) there is a hyperlink called "Show Source" on the left hand column that will show you the RST source code for the HTML you are viewing. This is a good way to see the syntax for tables, equations, linking references, labeling tables or figures, and correctly identifying documentation sections or subsections. + +Here are some resources for using RST files: + +* `RST Primer1 `_ + +* `RST Primer2 `_ + +* `RST Syntax `_ + +* `RST tables `_ - Note that tables can be tricky in Sphinx and we prefer using `comma separated tables `_ whenever possible. + + +Building HTML +~~~~~~~~~~~~~~ + +Move into the /doc/ directory of your sandbox. Then execute the following command:: + + $ make clean + +to get rid of old HTML files. Then execute:: + + $ make html + +to build HTML into /build/html/ directory. It will also give you errors if there is a problem with the build that will help you figure out how you need to modify your RST files for a successful HTML build. Finally :: + + $ open /build/html/FILE.html + +Open the HTML on your browser for testing. + + +Push changes back to the repository +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +When you're happy with the HTML you've generated, follow the standard CICE-Consortium +`Git Workflow and Developer's guide `_ +to do a Pull Request and make sure to note in the Pull Request Template that documentation has also +been updated. We will take care of testing and adding changed HTML to the gh-pages orphan branch. + +Other Tips and Tricks +--------------------- + +Converting LaTeX to RST +~~~~~~~~~~~~~~~~~~~~~~~ + +If you start from a LaTeX (``*.tex``) document you will need to convert this to the RST format that Sphinx +requires. A handy tool to do this is `Pandoc `_, which you +can install quickly and run from the command line. + +Once Pandoc is installed, the basic command line syntax to convert a file is :: + + $ pandoc NAMEIN.tex -f latex -t rst -s -ou NAMEOUT.rst + +The NAMEOUT.rst file can be directly edited for Sphinx. Pandoc does a beautiful job of converting the text, +equations, and many tables. However, equation numbering, section linking, references, figures, and some +tables required more hands on care to be sure they render correctly. + +Pandoc requires that the ``*.tex`` files be in utf-8 encoding. To easily do this open the ``*.tex`` +document in Emacs then do ``ctrl-x ctrl-m f`` and you will be prompted to enter encoding type. Just +type in ``utf-8`` and hit enter. Then save with ``ctrl-x ctrl-s`` . You are done and the document can be +converted with Pandoc. + +Converting RST to PDF +~~~~~~~~~~~~~~~~~~~~~ + +Generating a PDF is more complex and currently requires a two-step process. The generation will require +recent versions of both LaTeX and Sphinx. From the /doc/ directory do the following:: + + $ make latex + $ cd build/latex + $ make + +Then search for the ``*.pdf`` document created. + +Creating an orphan branch +~~~~~~~~~~~~~~~~~~~~~~~~~ + +**THIS STEP SHOULD NOT BE REQUIRED FOR ANY ADDITIONS TO DOCUMENTATION BUT A DESCRIPTION IS PROVIDED +IN THE INTEREST OF TRANSPARENCY AND REPRODUCIBILITY FOR THE BENEFIT OF USERS AND THE COMMUNITY** + +An orphan branch is a branch that has no history in common with the master repository off of which it was +forked. 
For the CICE-Consortium, the purpose for this is that the HTML pages created by Sphinx will exist +in the orphan branch while the source RST code used to create the HTML exists in the master repository. +This way changes to the RST go into the evolving repository, but changes to the HTML are isolated to the +orphan branch. This method avoids conflicts related to the HTML that aren’t always caught by GitHub merges +and could cause the documentation to break. + +Steps to create orphan branch: + +1. On GitHub website create personal fork of the repo you’re interested in + +2. On GitHub website from your fork, get link for the repository + +3. On your local machine execute the following commands: + + Clone the original repository. :: + + $ git clone URL + + List all the branches of the repository. :: + + $ git branch --l + + Create the orphan branch gh-pages from the master branch. The branch should initially have all + documents from the master. :: + + $ git checkout --orphan gh-pages + + Add the URL for the original repository as your “upstream” repository. Get this from Github website. :: + + $ git remote add upstream URL + + Remove everything in this repository. i.e. we are making it so their histories diverge. :: + + $ git rm -rf . + + Check if all the files are gone. :: + + $ git status . + + Commit the change to make gh-pages branch. :: + + $ git commit -m “Initial commit to create gh-pages branch of repo” + + Provide a list of remote repositories and links. Should show your personal fork as “origin” and + the original repository as "upstream". :: + + $ git remote --v + + Add the orphan branch, named gh-pages, to your local fork but not the original repo from which you forked. :: + + $ git push origin gh-pages + +4. Once you've created the gh-pages branch on your personal fork do a pull request to incorporate the new branch + into the master repository. This way someone else will review what you've done and make sure you don't delete + an entire repository by accident. It is possible to push the gh-pages branch upstream if you have owner status; + **be very careful if you do this.** + +5. For a new orphan branch, set up sphinx stuff by executing the following commands: + + Go to doc directory. :: + + $ cd doc + + Make a source directory. :: + + $ mkdir source + + Initiate sphinx interactive session to create a conf.py file. Note that the conf.py file can be changed later on. :: + + $ sphinx-quickstart + + Answer the prompts + + Set GitHub to ignore HTML and Emacs back up files during commits, pushes, and pull requests. :: + + $ cp .gitignore from CICE-Consortium CICE or Icepack master repository + + Add the two new files (conf.py, .gitignore). :: + + $ git add . + + Commit the changes. :: + + $ git commit -m “Initial setup for Sphinx documentation” + +6. Do a pull request to add these to the master repository. + + + diff --git a/doc/source/developer_guide/dg_driver.rst b/doc/source/developer_guide/dg_driver.rst new file mode 100755 index 000000000..b8389b90f --- /dev/null +++ b/doc/source/developer_guide/dg_driver.rst @@ -0,0 +1,46 @@ +:tocdepth: 3 + +.. _dev_driver: + +Driver Implementation +======================== + +The icepack driver is Fortran source code and exists to test the column physics +in a stand-alone mode for some simple column configurations. 
+ +File List +------------------- + +The icepack driver consists of the following files + +| **configuration/driver/** driver for testing Icepack in stand-alone mode +| **icedrv_MAIN.F90** main program +| **icedrv_InitMod.F90** routines for initializing a run +| **icedrv_RunMod.F90** main driver routines for time stepping +| **icedrv_arrays_column.F90** essential arrays to describe the state of the ice +| **icedrv_calendar.F90** keeps track of what time it is +| **icedrv_constants.F90** physical and numerical constants and parameters +| **icedrv_diagnostics.F90** miscellaneous diagnostic and debugging routines +| **icedrv_diagnostics_bgc.F90** diagnostic routines for biogeochemistry +| **icedrv_domain_size.F90** domain sizes +| **icedrv_flux.F90** fluxes needed/produced by the model +| **icedrv_forcing.F90** routines to read and interpolate forcing data for stand-alone model runs +| **icedrv_forcing_bgc.F90** routines to read and interpolate forcing data for bgc stand-alone model runs +| **icedrv_init.F90** general initialization routines +| **icedrv_init_column.F90** initialization routines specific to the column physics +| **icedrv_restart.F90** driver for reading/writing restart files +| **icedrv_restart_bgc.F90** restart routines specific to the column physics +| **icedrv_restart_shared.F90** code shared by all restart options +| **icedrv_state.F90** essential arrays to describe the state of the ice +| **icedrv_step.F90** routines for time stepping the major code components +| **icedrv_system.F90** overall system management calls + +Overview +------------ + +The icepack driver exists to test the column physics. At the present time, it is hardwired +to run 4 different gridcells on one processor with the same forcing used for all gridcells. +There is no MPI and no threading built into the icepack driver. There is limited IO capabilities, +no history files, and no netcdf restart files. The model generally runs very quickly. + +Forcing data and details on these data are available in :ref:`force`. diff --git a/doc/source/developer_guide/dg_scripts.rst b/doc/source/developer_guide/dg_scripts.rst new file mode 100755 index 000000000..cc320b459 --- /dev/null +++ b/doc/source/developer_guide/dg_scripts.rst @@ -0,0 +1,109 @@ +:tocdepth: 3 + +.. _dev_scripts: + +Scripts Implementation +======================== + +The scripts are the third part of the icepack package. They support setting up +cases, building, and running the icepack stand-alone model. + +File List +-------------- + +The directory structure under configure/scripts is as follows. 
+ +| **configuration/scripts/** +| **Makefile** primary makefile +| **icepack.batch.csh** creates batch scripts for particular machines +| **icepack.build** compiles the code +| **icepack.launch.csh** creates script logic that runs the executable +| **icepack.run.setup.csh** sets up the run scripts +| **icepack.run.suite.csh** sets up the test suite +| **icepack.settings** defines environment, model configuration and run settings +| **icepack.test.setup.csh** creates configurations for testing the model +| **icepack_decomp.csh** defines the grid size +| **icepack_in** namelist input data +| **machines/** machine specific files to set env and Macros +| **makdep.c** determines module dependencies +| **options/** other namelist configurations available from the icepack.setup command line +| **parse_namelist.sh** replaces namelist with command-line configuration +| **parse_namelist_from_settings.sh** replaces namelist with values from icepack.settings +| **parse_settings.sh** replaces settings with command-line configuration +| **tests/** scripts for configuring and running basic tests + +.. _dev_strategy: + +Strategy +----------- + +The icepack scripts are implemented such that everything is resolved after +**icepack.setup** is called. This is done by both copying specific files +into the case directory and running scripts as part of the **icepack.setup** +command line to set up various files. + +**icepack.setup** drives the case setup. It is written in csh. All supporting +scripts are relatively simple csh or sh scripts. + +The file **icepack.settings** specifies a set of env defaults for the case. The file +**icepack_in** defines the namelist input for the icepack driver. + +.. _dev_options: + +Preset Case Options +--------------------- + + +The ``icepack.setup -s`` option allows the user to choose some predetermined icepack +settings and namelist. Those options are defined in **configuration/scripts/options/** +and the files are prefixed by either set_env, set_nml, or test_nml. When **icepack.setup** +is executed, the appropriate files are read from **configuration/scripts/options/** +and the **icepack.settings** and/or **icepack_in** files are updated in the case directory +based on the values in those files. + +The filename suffix determines the name of the -s option. So, for instance, + + ``icepack.setup -s diag1,debug,bgcISPOL`` + +will search for option files with suffixes of diag1, debug, and bgcISPOL and then +apply those settings. + +**parse_namelist.sh**, **parse_settings.sh**, and **parse_namelist_from_settings.sh** +are the three scripts that modify **icepack_in** and **icepack.settings**. + +To add new options, just add new files to the **configuration/scripts/options/** directory +with appropriate names and syntax. The set_nml file syntax is the same as namelist +syntax and the set_env files are consistent with csh setenv syntax. See other files for +examples of the syntax. + +.. _dev_machines: + +Machines +----------- + +Machine specific information is contained in **configuration/scripts/machines**. That +directory contains a Macros file and an env file for each supported machine. +One other file will need to be +changed to support a port, that is **configuration/scripts/icepack.batch.csh**. +To port to a new machine, see :ref:`porting`. + +.. _dev_testing: + +Test scripts +------------- + +Under **configuration/scripts/tests** are several files including the scripts to +set up the smoke and restart tests (**test_smoke.script**, **test_restart.script**).
+A baseline test script (**baseline.script**) is also there to setup the regression +and comparison testing. That directory also contains the preset test suites +(ie. **base_suite.ts**) and a file that supports post-processing on the model +output (**timeseries.csh**). + +There is a subdirectory, **configuration/scripts/tests/CTest**, that supports the +CTest scripts. These scripts allow test reporting to CDash. + +To add a new test, a file associated with that test will need to be added to the +**configuration/scripts/tests** directory similar to **test_smoke.script** +and **test_restart.script**. In addition, some new options files in +**configuration/scripts/options** may need to be added similar to **test_nml.restart1**, +**test_nml.restart2**, and **set_nml.restart**. diff --git a/doc/source/developer_guide/index.rst b/doc/source/developer_guide/index.rst new file mode 100755 index 000000000..3ea52748d --- /dev/null +++ b/doc/source/developer_guide/index.rst @@ -0,0 +1,17 @@ +.. CICE-Consortium documentation master file, created by + sphinx-quickstart on Thu Jun 29 13:47:09 2017. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +.. _developer_guide: + +Developer Guide +----------------- + +.. toctree:: + :maxdepth: 3 + + dg_about.rst + dg_driver.rst + dg_scripts.rst + dg_documentation.rst diff --git a/doc/source/index.rst b/doc/source/index.rst index c6f42e413..7f4629bc3 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -11,13 +11,14 @@ Table of Contents: ------------------ .. toctree:: - :maxdepth: 5 + :maxdepth: 2 :numbered: - cice_1_introduction.rst - cice_2_science_guide.rst - cice_3_user_guide.rst - cice_4_index.rst + intro/index.rst + science_guide/index.rst + user_guide/index.rst + developer_guide/index.rst + cice_index.rst zreferences.rst Useful tools diff --git a/doc/source/intro/.DS_Store b/doc/source/intro/.DS_Store new file mode 100644 index 000000000..5008ddfcf Binary files /dev/null and b/doc/source/intro/.DS_Store differ diff --git a/doc/source/intro/about.rst b/doc/source/intro/about.rst new file mode 100755 index 000000000..756256d8c --- /dev/null +++ b/doc/source/intro/about.rst @@ -0,0 +1,38 @@ +:tocdepth: 3 + +.. _about: + +About CICE +============= + +The Los Alamos sea ice model (CICE) is the result of an effort to +develop a computationally efficient sea ice component for a fully +coupled atmosphere--land global climate model. It was +designed to be compatible with the Parallel Ocean Program +(POP), an ocean circulation model developed at +Los Alamos National Laboratory for use on massively parallel computers +:cite:`SDM92,DSM93,DSM94`. The current version of the +model has been enhanced greatly through collaborations with members of +the community. + +CICE has several interacting components: a thermodynamic model that +computes local growth rates of snow and ice due to vertical conductive, +radiative and turbulent fluxes, along with snowfall; a model of ice +dynamics, which predicts the velocity field of the ice pack based on +a model of the material strength of the ice; a transport model that +describes advection of the areal concentration, ice volumes and other +state variables; and a ridging parameterization that transfers ice among +thickness categories based on energetic balances and +rates of strain.External routines would prepare and execute data exchanges with an +external "flux coupler," which then passes the data to other climate +model components such as POP. 
+ +This model release is CICE version 6.0.0.alpha, available from https://github.com/CICE-Consortium/CICE. +It updates CICE5.1, and there are considerable changes to the code. + +This document uses the following text conventions: +Variable names used in the code are ``typewritten``. +Subroutine names are given in *italic*. +File and directory names are in **boldface**. +A comprehensive :ref:`index`, including glossary of symbols with many of their values, appears +at the end of this guide. diff --git a/doc/source/intro/acknowledgements.rst b/doc/source/intro/acknowledgements.rst new file mode 100755 index 000000000..f2f38d8ea --- /dev/null +++ b/doc/source/intro/acknowledgements.rst @@ -0,0 +1,36 @@ +:tocdepth: 3 + +.. _acknowledgements: + +Acknowledgements +============================= + +This work has been completed through the CICE Consortium and its members with funding +through the +Department of Energy, +Department of Defense (Navy), +Department of Commerce (NOAA), +National Science Foundation +and Environment and Climate Change Canada. +Special thanks are due to the following people: + +- Elizabeth Hunke, Nicole Jeffery, Adrian Turner and Chris Newman at Los Alamos National Laboratory + +- David Bailey, Alice DuVivier and Marika Holland at the National Center for Atmospheric Research + +- Rick Allard, Matt Turner and David Hebert at the Naval Research Laboratory, Stennis Space Center, + +- Andrew Roberts of the Naval Postgraduate School, + +- Michael Winton and Anders Damsgaard of the Geophysical Fluid Dynamics Laboratory, + +- Jean-Francois Lemieux and Frederic Dupont of Environment and Climate Change Canada, + +- Tony Craig and his supporters at the National Center for Atmospheric Research, the Naval Postgraduate School, and NOAA National Weather Service, + +- Jessie Carman and Robert Grumbine of the National Oceanographic and Atmospheric Administration + +- Cecilia Bitz of the University of Washington, for her column forcing data, + +- and many others who contributed to previous versions of CICE. + diff --git a/doc/source/intro/copyright.rst b/doc/source/intro/copyright.rst new file mode 100755 index 000000000..dce37f8b6 --- /dev/null +++ b/doc/source/intro/copyright.rst @@ -0,0 +1,41 @@ +:tocdepth: 3 + +.. _copyright: + +Copyright +============================= + +© Copyright 2018, Los Alamos National Security LLC. All rights reserved. +This software was produced under U.S. Government contract +DE-AC52-06NA25396 for Los Alamos National Laboratory (LANL), which is +operated by Los Alamos National Security, LLC for the U.S. Department +of Energy. The U.S. Government has rights to use, reproduce, and distribute +this software. NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC +MAKES ANY WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE +OF THIS SOFTWARE. If software is modified to produce derivative works, such +modified software should be clearly marked, so as not to confuse it with the +version available from LANL. + +Additionally, redistribution and use in source and binary forms, with or +without modification, are permitted provided that the following conditions +are met: + +- Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 
+ +- Neither the name of Los Alamos National Security, LLC, Los Alamos National Laboratory, LANL, the U.S. Government, nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT +NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL +SECURITY, LLC OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + diff --git a/doc/source/intro/index.rst b/doc/source/intro/index.rst new file mode 100755 index 000000000..448ca304d --- /dev/null +++ b/doc/source/intro/index.rst @@ -0,0 +1,19 @@ +.. CICE-Consortium documentation master file, created by + sphinx-quickstart on Thu Jun 29 13:47:09 2017. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +.. _introduction: + +Introduction - Icepack +----------------------- + +.. toctree:: + :maxdepth: 3 + + about.rst + quickstart.rst + major_updates.rst + acknowledgements.rst + copyright.rst + diff --git a/doc/source/intro/major_updates.rst b/doc/source/intro/major_updates.rst new file mode 100755 index 000000000..82fa29ee9 --- /dev/null +++ b/doc/source/intro/major_updates.rst @@ -0,0 +1,62 @@ +:tocdepth: 3 + +.. _updates: + + +Major CICE updates +============================================ + +This model release is CICE version 6.0.0.alpha. + +~~~~~~~~~~~~~~~~~ +CICE V6.0.0.alpha +~~~~~~~~~~~~~~~~~ +Major changes: + +- A new fast-ice parameterization +- Full vertical biogeochemistry +- Independent column physics package Icepack implemented as a git submodule +- A flexible, extensible, robust interface between the column physics modules and the driver +- A warning package that captures diagnostic and error information from within the column physics, for printing by the driver +- Restructured code and forcing data directories +- An entirely new scripting system +- A comprehensive test suite of various configuration options, with quality control and compliance tests +- Automated testing using Travis CI +- Automated test reporting organized by hash, version, machine and branch, for both the primary Consortium repository and user forks +- Online documentation +- See also updates in Icepack releases and recent changes + +Enhancements: + +- Change use of ULAT to TLAT to determine what latitudes initial ice is present in set_state_var [r970] +- Add 4d fields to history (categories, vertical ice) r1076 +- Update PIO; Universal large file support [r1094] +- Remove calendar_type from namelist options and initialize it based on the namelist flag use_leap_years. 
[r1098] +- Add fbot to history output [r1107] +- Add shortwave diagnostics [r1108] +- Modifications to enable ocean and ice biogeochemical coupling [r1111, r1200] +- Remove the computational overhead of coupling BGC when it is not being used [r1123] +- Change reference to char_len in stop_label [r1143] +- Add grounding scheme and tensile strength #52 +- Add new namelist options for dynamics parameters #52 +- Update Icepack version in CICE (Icepack v1.0.0 #81) +- Modifications to stress diagnostics, including principal stress normalization and internal pressure #99 + +Bug fixes: + +- Properly read and rotate ocean currents from 3D gx1 netcdf data r959 +- Correct diagnostic output 'avg salinity' [r1022] +- Bug fix for padded domains. r1031 +- Use VGRD instead of VGRDi for 3D [r1037] +- change shortwave calculation to depend on the net shortwave sum instead of cosine of the zenith angle (not BFB: in addition to the different shortwave calculation, albedo output in history is different). r1076 +- Correct available history fields. [r1082] +- Fix coupled restart bug; initialize coszen; adjust calendar_type implementation [r1094] +- Port miscellaneous changes from the various column package branches back to the trunk. BFB in the standard configuration, but the initializations and conditional changes for coszen could change the answers in other configurations. Also the flux calculation change in ice_therm_itd.F90 could change the answers in coupled simulations. 1102 +- Ensure fractions of snow, ponds and bare ice add to one r1120 +- Zero out thin-pond fraction for radiation in cesm, topo pond schemes (not BFB), and set albedo=1 where/when there is no incoming shortwave (changes the average-albedo diagnostic), and fix thin (cesm) ponds overlapping snow. [r1126, r1132] +- Fix padding when using the extended-grid functionality, to prevent arrays out of bounds. [r1128] +- Change dynamics halo update mask from icetmask to iceumask (fixes occasional exact restart problem and error in halo update) [r1133] +- Add surface flooding and surface runoff terms which increase with open water area in surface condition for update_hbrine, z_salinity, z_biogeochemistry [r1161] +- Set all tracer values to c0 over land after initialization #16 +- Remove OpenMP directives for loops that do not appear to be thread safe #25 +- Remove iblk from timer starts #98 diff --git a/doc/source/intro/quickstart.rst b/doc/source/intro/quickstart.rst new file mode 100755 index 000000000..85cd01072 --- /dev/null +++ b/doc/source/intro/quickstart.rst @@ -0,0 +1,117 @@ +:tocdepth: 3 + + +.. _quickstart: + +Quick Start +=========== + +Download the model from the CICE-Consortium repository, + https://github.com/CICE-Consortium/CICE + +Instructions for working in github with CICE (and Icepack) can be +found in the `CICE Git and Workflow Guide `_. + +From your main CICE directory, execute:: + + ./cice.setup -c ~/mycase1 -g gx3 -m testmachine -s diag1,thread -p 8x1 + cd ~/mycase1 + ./cice.build + ./cice.submit + + +``testmachine`` is a generic machine name included with the cice scripts. +The local machine name will have to be substituted for ``testmachine`` and +there are working ports for several different machines. However, it may be necessary +to port the model to a new machine. See :ref:`porting` for +more information about how to port and :ref:`scripts` for more information about +how to use the cice.setup script. 
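+
+For example, assuming your local machine has already been ported under the
+hypothetical name ``mymachine``, the same sequence would be::
+
+    # "mymachine" is a placeholder; substitute the name of your ported machine
+    ./cice.setup -c ~/mycase1 -g gx3 -m mymachine -s diag1,thread -p 8x1
+    cd ~/mycase1
+    ./cice.build
+    ./cice.submit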
+
+~~~~~~~~~~~~
+More Details
+~~~~~~~~~~~~
+
+``cice.setup -h`` will provide the latest information about how to use the tool.
+``cice.setup --help`` will provide an extended version of the help.
+There are three usage modes:
+
+* ``--case`` or ``-c`` creates individual stand-alone cases.
+* ``--test`` creates individual tests. Tests are just cases that have some extra automation in order to carry out particular tests such as exact restart.
+* ``--suite`` creates a test suite. Test suites are predefined sets of tests and ``--suite`` provides the ability to quickly set up, build, and run a full suite of tests.
+
+All modes require ``--mach`` or ``-m`` to specify the machine, and the case and test modes
+can use ``--set`` or ``-s`` to define specific options. ``--test`` and ``--suite`` require ``--testid`` to be set,
+and both of the test modes can use ``--bdir``, ``--bgen``, ``--bcmp``, and ``--diff`` to generate (save) results and compare them with prior results.
+Testing is described in greater detail in the :ref:`testing` section.
+
+Again, ``cice.setup --help`` will show the latest usage information including
+the available ``--set`` options, the current ported machines, and the test choices.
+
+To create a case, run **cice.setup**::
+
+    cice.setup -c mycase -m machine
+    cd mycase
+
+Once a case/test is created, several files are placed in the case directory:
+
+ - env.${machine} defines the environment
+
+ - cice.settings defines many variables associated with building and running the model
+
+ - makdep.c is a tool that will automatically generate the make dependencies
+
+ - Macros.${machine} defines the Makefile Macros
+
+ - Makefile is the makefile used to build the model
+
+ - cice.build is a script that builds the model
+
+ - ice_in is the namelist file
+
+ - cice.run is a batch run script
+
+ - cice.submit is a simple script that submits the cice.run script
+
+Once the case is created, all scripts and namelists are fully resolved. Users can edit any
+of the files in the case directory manually to change the model configuration. The file
+dependency is indicated in the above list. For instance, if any of the files before
+cice.build in the list are edited, cice.build should be rerun.
+
+The casescripts directory holds scripts used to create the case and can largely be ignored.
+
+In general, when cice.build is executed, the model will build from scratch due to the large
+dependence on cpps. To change this behavior, edit the env variable ICE_CLEANBUILD in
+cice.settings.
+
+The cice.submit script just submits the cice.run script. You can use cice.submit or just
+submit the cice.run script on the command line.
+
+The model will run in the directory defined by the env variable CICE_RUNDIR in cice.settings.
+Build and run logs will be copied into the case logs directory when complete.
+
+To port, an env.machine and Macros.machine file have to be added to scripts/machines, and the cice.run.setup.csh file needs to be modified.
+
+ - cd to consortium/scripts/machines
+ - Copy an existing env and Macros file to new names for your new machine
+ - Edit the env and Macros file
+ - cd to consortium/scripts
+ - Edit the cice.run.setup.csh script to add a section for your machine for the batch settings and for the job launch settings
+ - Download and untar the 1997 dataset to the location defined by ICE_MACHINE_INPUTDATA in the env file
+ - Create a file in your home directory called .cice_proj and add your preferred account name to the first line.
+ - You can now create a case and test. If there are problems, you can manually edit the env, Macros, and cice.run files in the case directory until things are working properly. Then you can copy the env and Macros files back to consortium/scripts/machines. You will have to manually modify the cice.run.setup.csh script if there any changes needed there. + +~~~~~~~~~~~~ +Forcing data +~~~~~~~~~~~~ + +The code is currently configured to run in standalone mode on a 3 degree grid using +atmospheric data from 1997, available as detailed on the `wiki `_. +These data files are designed only for testing the code, not for use in production +runs or as observational data. Please do not publish results based on these data +sets. Module cicecore/dynamics/cicedynB/ice_forcing.F90 can be modified to change the +forcing data. + +As currently configured, the model runs on 4 processors. MPI is used for message passing +between processors, and OpenMP threading is available. The grid provided here is too +small for the code to scale well beyond about 8 processors. A 1 degree grid is provided also, +and details about this grid can be found on the `wiki `_. + diff --git a/doc/source/science_guide/.DS_Store b/doc/source/science_guide/.DS_Store new file mode 100644 index 000000000..57d2be059 Binary files /dev/null and b/doc/source/science_guide/.DS_Store differ diff --git a/doc/source/figures/EAP.png b/doc/source/science_guide/figures/EAP.png similarity index 100% rename from doc/source/figures/EAP.png rename to doc/source/science_guide/figures/EAP.png diff --git a/doc/source/figures/albedo.png b/doc/source/science_guide/figures/albedo.png similarity index 100% rename from doc/source/figures/albedo.png rename to doc/source/science_guide/figures/albedo.png diff --git a/doc/source/figures/deparr.png b/doc/source/science_guide/figures/deparr.png similarity index 100% rename from doc/source/figures/deparr.png rename to doc/source/science_guide/figures/deparr.png diff --git a/doc/source/figures/gplot.png b/doc/source/science_guide/figures/gplot.png similarity index 100% rename from doc/source/figures/gplot.png rename to doc/source/science_guide/figures/gplot.png diff --git a/doc/source/figures/topo.png b/doc/source/science_guide/figures/topo.png similarity index 100% rename from doc/source/figures/topo.png rename to doc/source/science_guide/figures/topo.png diff --git a/doc/source/figures/tracergraphic.png b/doc/source/science_guide/figures/tracergraphic.png similarity index 100% rename from doc/source/figures/tracergraphic.png rename to doc/source/science_guide/figures/tracergraphic.png diff --git a/doc/source/figures/triangles.png b/doc/source/science_guide/figures/triangles.png similarity index 100% rename from doc/source/figures/triangles.png rename to doc/source/science_guide/figures/triangles.png diff --git a/doc/source/science_guide/index.rst b/doc/source/science_guide/index.rst new file mode 100755 index 000000000..d576e56de --- /dev/null +++ b/doc/source/science_guide/index.rst @@ -0,0 +1,16 @@ +.. CICE-Consortium documentation master file, created by + sphinx-quickstart on Thu Jun 29 13:47:09 2017. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +.. _science_guide: + +Science Guide +----------------- + +.. 
toctree:: + :maxdepth: 3 + + sg_coupling.rst + sg_modelcomps.rst + diff --git a/doc/source/science_guide/sg_coupling.rst b/doc/source/science_guide/sg_coupling.rst new file mode 100644 index 000000000..11108ed5c --- /dev/null +++ b/doc/source/science_guide/sg_coupling.rst @@ -0,0 +1,564 @@ +:tocdepth: 3 + +.. _coupl: + +Coupling with other climate model components +============================================ + +The sea ice model exchanges information with the other model components +via a flux coupler. CICE has been coupled into numerous climate models +with a variety of coupling techniques. This document is oriented +primarily toward the CESM Flux Coupler :cite:`KL02` +from NCAR, the first major climate model to incorporate CICE. The flux +coupler was originally intended to gather state variables from the +component models, compute fluxes at the model interfaces, and return +these fluxes to the component models for use in the next integration +period, maintaining conservation of momentum, heat, and fresh water. +However, several of these fluxes are now computed in the ice model +itself and provided to the flux coupler for distribution to the other +components, for two reasons. First, some of the fluxes depend strongly +on the state of the ice, and vice versa, implying that an implicit, +simultaneous determination of the ice state and the surface fluxes is +necessary for consistency and stability. Second, given the various ice +types in a single grid cell, it is more efficient for the ice model to +determine the net ice characteristics of the grid cell and provide the +resulting fluxes, rather than passing several values of the state +variables for each cell. These considerations are explained in more +detail below. + +The fluxes and state variables passed between the sea ice model and the +CESM flux coupler are listed in :ref:`tab-flux-cpl`. By convention, +directional fluxes are positive downward. In CESM, the sea ice model may +exchange coupling fluxes using a different grid than the computational +grid. This functionality is activated using the namelist variable +``gridcpl_file``. Another namelist variable ``highfreq``, allows the +high-frequency coupling procedure implemented in the Regional Arctic +System Model (RASM). In particular, the relative atmosphere-ice velocity +(:math:`\vec{U}_a-\vec{u}`) is used instead of the full atmospheric +velocity for computing turbulent fluxes in the atmospheric boundary +layer. + +:ref:`tab-flux-cpl`: *Data exchanged between the CESM flux coupler and the sea ice model* + +.. _tab-flux-cpl: + +.. 
table:: Table 1 + + =========================== ====================================== ======================================================================================= + Variable Description Interaction with flux coupler + =========================== ====================================== ======================================================================================= + :math:`z_o` Atmosphere level height From *atmosphere model* via flux coupler **to** *sea ice model* + + :math:`\vec{U}_a` Wind velocity From *atmosphere model* via flux coupler **to** *sea ice model* + + :math:`Q_a` Specific humidity From *atmosphere model* via flux coupler **to** *sea ice model* + + :math:`\rho_a` Air density From *atmosphere model* via flux coupler **to** *sea ice model* + + :math:`\Theta_a` Air potential temperature From *atmosphere model* via flux coupler **to** *sea ice model* + + :math:`T_a` Air temperature From *atmosphere model* via flux coupler **to** *sea ice model* + + :math:`F_{sw\downarrow}` Incoming shortwave radiation From *atmosphere model* via flux coupler **to** *sea ice model* + (4 bands) + + :math:`F_{L\downarrow}` Incoming longwave radiation From *atmosphere model* via flux coupler **to** *sea ice model* + + :math:`F_{rain}` Rainfall rate From *atmosphere model* via flux coupler **to** *sea ice model* + + :math:`F_{snow}` Snowfall rate From *atmosphere model* via flux coupler **to** *sea ice model* + + :math:`F_{frzmlt}` Freezing/melting potential From *ocean model* via flux coupler **to** *sea ice model* + + :math:`T_w` Sea surface temperature From *ocean model* via flux coupler **to** *sea ice model* + + :math:`S` Sea surface salinity From *ocean model* via flux coupler **to** *sea ice model* + + :math:`\nabla H_o` Sea surface slope From *ocean model* via flux coupler **to** *sea ice model* + + :math:`\vec{U}_w` Surface ocean currents From *ocean model* via flux coupler **to** *sea ice model* + + :math:`\vec{\tau}_a` Wind stress From *sea ice model* via flux coupler **to** *atmosphere model* + + :math:`F_s` Sensible heat flux From *sea ice model* via flux coupler **to** *atmosphere model* + + :math:`F_l` Latent heat flux From *sea ice model* via flux coupler **to** *atmosphere model* + + :math:`F_{L\uparrow}` Outgoing longwave radiation From *sea ice model* via flux coupler **to** *atmosphere model* + + :math:`F_{evap}` Evaporated water From *sea ice model* via flux coupler **to** *atmosphere model* + + :math:`\alpha` Surface albedo (4 bands) From *sea ice model* via flux coupler **to** *atmosphere model* + + :math:`T_{sfc}` Surface temperature From *sea ice model* via flux coupler **to** *atmosphere model* + + :math:`F_{sw\Downarrow}` Penetrating shortwave radiation From *sea ice model* via flux coupler **to** *ocean model* + + :math:`F_{water}` Fresh water flux From *sea ice model* via flux coupler **to** *ocean model* + + :math:`F_{hocn}` Net heat flux to ocean From *sea ice model* via flux coupler **to** *ocean model* + + :math:`F_{salt}` Salt flux From *sea ice model* via flux coupler **to** *ocean model* + + :math:`\vec{\tau}_w` Ice-ocean stress From *sea ice model* via flux coupler **to** *ocean model* + + :math:`F_{bio}` Biogeochemical fluxes From *sea ice model* via flux coupler **to** *ocean model* + + :math:`a_{i}` Ice fraction From *sea ice model* via flux coupler **to** both *ocean and atmosphere models* + + :math:`T^{ref}_{a}` 2m reference temperature (diagnostic) From *sea ice model* via flux coupler **to** both *ocean and atmosphere models* + + 
:math:`Q^{ref}_{a}` 2m reference humidity (diagnostic) From *sea ice model* via flux coupler **to** both *ocean and atmosphere models* + + :math:`F_{swabs}` Absorbed shortwave (diagnostic) From *sea ice model* via flux coupler **to** both *ocean and atmosphere models* + =========================== ====================================== ======================================================================================= + +The ice fraction :math:`a_i` (aice) is the total fractional ice +coverage of a grid cell. That is, in each cell, + +.. math:: + \begin{array}{cl} + a_{i}=0 & \mbox{if there is no ice} \\ + a_{i}=1 & \mbox{if there is no open water} \\ + 0 0 + :label: swflux + +where :math:`\cos Z` is the cosine of the solar zenith angle. + +.. _ocean: + +~~~~~ +Ocean +~~~~~ + +New sea ice forms when the ocean temperature drops below its freezing +temperature. In the Bitz and Lipscomb thermodynamics, +:cite:`BL99` :math:`T_f=-\mu S`, where :math:`S` is the +seawater salinity and :math:`\mu=0.054 \ ^\circ`/ppt is the ratio of the +freezing temperature of brine to its salinity (linear liquidus +approximation). For the mushy thermodynamics, :math:`T_f` is given by a +piecewise linear liquidus relation. The ocean model calculates the new +ice formation; if the freezing/melting potential +:math:`F_{frzmlt}` is positive, its value represents a certain +amount of frazil ice that has formed in one or more layers of the ocean +and floated to the surface. (The ocean model assumes that the amount of +new ice implied by the freezing potential actually forms.) + +If :math:`F_{frzmlt}` is negative, it is used to heat already +existing ice from below. In particular, the sea surface temperature and +salinity are used to compute an oceanic heat flux :math:`F_w` +(:math:`\left|F_w\right| \leq \left|F_{frzmlt}\right|`) which +is applied at the bottom of the ice. The portion of the melting +potential actually used to melt ice is returned to the coupler in +:math:`F_{hocn}`. The ocean model adjusts its own heat budget +with this quantity, assuming that the rest of the flux remained in the +ocean. + +In addition to runoff from rain and melted snow, the fresh water flux +:math:`F_{water}` includes ice melt water from the top surface +and water frozen (a negative flux) or melted at the bottom surface of +the ice. This flux is computed as the net change of fresh water in the +ice and snow volume over the coupling time step, excluding frazil ice +formation and newly accumulated snow. Setting the namelist option +update\_ocn\_f to true causes frazil ice to be included in the fresh +water and salt fluxes. + +There is a flux of salt into the ocean under melting conditions, and a +(negative) flux when sea water is freezing. However, melting sea ice +ultimately freshens the top ocean layer, since the ocean is much more +saline than the ice. The ice model passes the net flux of salt +:math:`F_{salt}` to the flux coupler, based on the net change +in salt for ice in all categories. In the present configuration, +ice\_ref\_salinity is used for computing the salt flux, although the ice +salinity used in the thermodynamic calculation has differing values in +the ice layers. + +A fraction of the incoming shortwave :math:`F_{sw\Downarrow}` +penetrates the snow and ice layers and passes into the ocean, as +described in Section :ref:`sfc-forcing`. + +Many ice models compute the sea surface slope :math:`\nabla H_\circ` +from geostrophic ocean currents provided by an ocean model or other data +source. 
In our case, the sea surface height :math:`H_\circ` is a +prognostic variable in POP—the flux coupler can provide the surface +slope directly, rather than inferring it from the currents. (The option +of computing it from the currents is provided in subroutine +*evp\_prep*.) The sea ice model uses the surface layer currents +:math:`\vec{U}_w` to determine the stress between the ocean and the ice, +and subsequently the ice velocity :math:`\vec{u}`. This stress, relative +to the ice, + +.. math:: + \begin{aligned} + \vec{\tau}_w&=&c_w\rho_w\left|{\vec{U}_w-\vec{u}}\right|\left[\left(\vec{U}_w-\vec{u}\right)\cos\theta + +\hat{k}\times\left(\vec{U}_w-\vec{u}\right)\sin\theta\right] \end{aligned} + :label: tauw + +is then passed to the flux coupler (relative to the ocean) for use by +the ocean model. Here, :math:`\theta` is the turning angle between +geostrophic and surface currents, :math:`c_w` is the ocean drag +coefficient, :math:`\rho_w` is the density of seawater, and +:math:`\hat{k}` is the vertical unit vector. The turning angle is +necessary if the top ocean model layers are not able to resolve the +Ekman spiral in the boundary layer. If the top layer is sufficiently +thin compared to the typical depth of the Ekman spiral, then +:math:`\theta=0` is a good approximation. Here we assume that the top +layer is thin enough. + +For CICE run in stand-alone mode (i.e., uncoupled), a thermodynamic slab +ocean mixed-layer parameterization is available in **ice\_ocean.F90**. +The turbulent fluxes are computed above the water surface using the same +parameterizations as for sea ice, but with parameters appropriate for +the ocean. The surface flux balance takes into account the turbulent +fluxes, oceanic heat fluxes from below the mixed layer, and shortwave +and longwave radiation, including that passing through the sea ice into +the ocean. If the resulting sea surface temperature falls below the +salinity-dependent freezing point, then new ice (frazil) forms. +Otherwise, heat is made available for melting the ice. + +.. _formdrag: + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Variable exchange coefficients +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +In the default CICE setup, atmospheric and oceanic neutral drag +coefficients (:math:`c_u` and :math:`c_w`) are assumed constant in time +and space. These constants are chosen to reflect friction associated +with an effective sea ice surface roughness at the ice–atmosphere and +ice–ocean interfaces. Sea ice (in both Arctic and Antarctic) contains +pressure ridges as well as floe and melt pond edges that act as discrete +obstructions to the flow of air or water past the ice, and are a source +of form drag. Following :cite:`TFSFFKLB14` and based on +recent theoretical developments :cite:`LGHA12,LLCL11`, the +neutral drag coefficients can now be estimated from properties of the +ice cover such as ice concentration, vertical extent and area of the +ridges, freeboard and floe draft, and size of floes and melt ponds. The +new parameterization allows the drag coefficients to be coupled to the +sea ice state and therefore to evolve spatially and temporally. This +parameterization is contained in the subroutine *neutral\_drag\_coeffs* +and is accessed by setting `formdrag` = true in the namelist. + +Following :cite:`TFSFFKLB14`, consider the general case of +fluid flow obstructed by N randomly oriented obstacles of height +:math:`H` and transverse length :math:`L_y`, distributed on a domain +surface area :math:`S_T`. 
Under the assumption of a logarithmic fluid +velocity profile, the general formulation of the form drag coefficient +can be expressed as + +.. math:: + C_d=\frac{N c S_c^2 \gamma L_y H}{2 S_T}\left[\frac{\ln(H/z_0)}{\ln(z_{ref}/z_0)}\right]^2, + :label: formdrag + +where :math:`z_0` is a roughness length parameter at the top or bottom +surface of the ice, :math:`\gamma` is a geometric factor, :math:`c` is +the resistance coefficient of a single obstacle, and :math:`S_c` is a +sheltering function that takes into account the shielding effect of the +obstacle, + +.. math:: + S_{c}=\left(1-\exp(-s_l D/H)\right)^{1/2}, + :label: shelter + +with :math:`D` the distance between two obstacles and :math:`s_l` an +attenuation parameter. + +As in the original drag formulation in CICE (sections :ref:`atmo` and +:ref:`ocean`), :math:`c_u` and :math:`c_w` along with the transfer +coefficients for sensible heat, :math:`c_{\theta}`, and latent heat, +:math:`c_{q}`, are initialized to a situation corresponding to neutral +atmosphere–ice and ocean–ice boundary layers. The corresponding neutral +exchange coefficients are then replaced by coefficients that explicitly +account for form drag, expressed in terms of various contributions as + +.. math:: + \tt{Cdn\_atm} = \tt{Cdn\_atm\_rdg} + \tt{Cdn\_atm\_floe} + \tt{Cdn\_atm\_skin} + \tt{Cdn\_atm\_pond} , + :label: Cda + +.. math:: + \tt{Cdn\_ocn} = \tt{Cdn\_ocn\_rdg} + \tt{Cdn\_ocn\_floe} + \tt{Cdn\_ocn\_skin}. + :label: Cdw + +The contributions to form drag from ridges (and keels underneath the +ice), floe edges and melt pond edges can be expressed using the general +formulation of equation :eq:`formdrag` (see :cite:`TFSFFKLB14` for +details). Individual terms in equation :eq:`Cdw` are fully described in +:cite:`TFSFFKLB14`. Following :cite:`Arya75` +the skin drag coefficient is parametrized as + +.. math:: + { \tt{Cdn\_(atm/ocn)\_skin}}=a_{i} \left(1-m_{(s/k)} \frac{H_{(s/k)}}{D_{(s/k)}}\right)c_{s(s/k)}, \mbox{ if $\displaystyle\frac{H_{(s/k)}}{D_{(s/k)}}\ge\frac{1}{m_{(s/k)}}$,} + :label: skindrag + +where :math:`m_s` (:math:`m_k`) is a sheltering parameter that depends +on the average sail (keel) height, :math:`H_s` (:math:`H_k`), but is +often assumed constant, :math:`D_s` (:math:`D_k`) is the average +distance between sails (keels), and :math:`c_{ss}` (:math:`c_{sk}`) is +the unobstructed atmospheric (oceanic) skin drag that would be attained +in the absence of sails (keels) and with complete ice coverage, +:math:`a_{ice}=1`. + +Calculation of equations :eq:`formdrag` – :eq:`skindrag` requires that small-scale geometrical +properties of the ice cover be related to average grid cell quantities +already computed in the sea ice model. These intermediate quantities are +briefly presented here and described in more detail in +:cite:`TFSFFKLB14`. The sail height is given by + +.. math:: + H_{s} = \displaystyle 2\frac{v_{rdg}}{a_{rdg}}\left(\frac{\alpha\tan \alpha_{k} R_d+\beta \tan \alpha_{s} R_h}{\phi_r\tan \alpha_{k} R_d+\phi_k \tan \alpha_{s} R_h^2}\right), + :label: Hs + +and the distance between sails\ + +.. 
math:: + D_{s} = \displaystyle 2 H_s\frac{a_{i}}{a_{rdg}} \left(\frac{\alpha}{\tan \alpha_s}+\frac{\beta}{\tan \alpha_k}\frac{R_h}{R_d}\right), + :label: Ds + +where :math:`0<\alpha<1` and :math:`0<\beta<1` are weight functions, +:math:`\alpha_{s}` and :math:`\alpha_{k}` are the sail and keel slope, +:math:`\phi_s` and :math:`\phi_k` are constant porosities for the sails +and keels, and we assume constant ratios for the average keel depth and +sail height (:math:`H_k/H_s=R_h`) and for the average distances between +keels and between sails (:math:`D_k/D_s=R_d`). With the assumption of +hydrostatic equilibrium, the effective ice plus snow freeboard is +:math:`H_{f}=\bar{h_i}(1-\rho_i/\rho_w)+\bar{h_s}(1-\rho_s/\rho_w)`, +where :math:`\rho_i`, :math:`\rho_w` and :math:`\rho_s` are +respectively the densities of sea ice, water and snow, :math:`\bar{h_i}` +is the mean ice thickness and :math:`\bar{h_s}` is the mean snow +thickness (means taken over the ice covered regions). For the melt pond +edge elevation we assume that the melt pond surface is at the same level +as the ocean surface surrounding the floes +:cite:`FF07,FFT10,FSFH12` and use the simplification +:math:`H_p = H_f`. Finally to estimate the typical floe size +:math:`L_A`, distance between floes, :math:`D_F`, and melt pond size, +:math:`L_P` we use the parameterizations of :cite:`LGHA12` +to relate these quantities to the ice and pond concentrations. All of +these intermediate quantities are available as history output, along +with `Cdn\_atm`, `Cdn\_ocn` and the ratio `Cdn\_atm\_ratio\_n` between the +total atmospheric drag and the atmospheric neutral drag coefficient. + +We assume that the total neutral drag coefficients are thickness +category independent, but through their dependance on the diagnostic +variables described above, they vary both spatially and temporally. The +total drag coefficients and heat transfer coefficients will also depend +on the type of stratification of the atmosphere and the ocean, and we +use the parameterization described in section :ref:`atmo` that accounts +for both stable and unstable atmosphere–ice boundary layers. In contrast +to the neutral drag coefficients the stability effect of the atmospheric +boundary layer is calculated separately for each ice thickness category. + +The transfer coefficient for oceanic heat flux to the bottom of the ice +may be varied based on form drag considerations by setting the namelist +variable `fbot\_xfer\_type` to `Cdn\_ocn`; this is recommended when using +the form drag parameterization. Its default value of the transfer +coefficient is 0.006 (`fbot\_xfer\_type = ’constant’`). \ No newline at end of file diff --git a/doc/source/cice_2_science_guide.rst b/doc/source/science_guide/sg_modelcomps.rst similarity index 86% rename from doc/source/cice_2_science_guide.rst rename to doc/source/science_guide/sg_modelcomps.rst index 86efea57d..badcb04cc 100644 --- a/doc/source/cice_2_science_guide.rst +++ b/doc/source/science_guide/sg_modelcomps.rst @@ -1,576 +1,9 @@ :tocdepth: 3 -Science Guide -================ - -.. _coupl: - --------------------------------------------- -Coupling with other climate model components --------------------------------------------- - -The sea ice model exchanges information with the other model components -via a flux coupler. CICE has been coupled into numerous climate models -with a variety of coupling techniques. 
This document is oriented -primarily toward the CESM Flux Coupler :cite:`KL02` -from NCAR, the first major climate model to incorporate CICE. The flux -coupler was originally intended to gather state variables from the -component models, compute fluxes at the model interfaces, and return -these fluxes to the component models for use in the next integration -period, maintaining conservation of momentum, heat, and fresh water. -However, several of these fluxes are now computed in the ice model -itself and provided to the flux coupler for distribution to the other -components, for two reasons. First, some of the fluxes depend strongly -on the state of the ice, and vice versa, implying that an implicit, -simultaneous determination of the ice state and the surface fluxes is -necessary for consistency and stability. Second, given the various ice -types in a single grid cell, it is more efficient for the ice model to -determine the net ice characteristics of the grid cell and provide the -resulting fluxes, rather than passing several values of the state -variables for each cell. These considerations are explained in more -detail below. - -The fluxes and state variables passed between the sea ice model and the -CESM flux coupler are listed in :ref:`tab-flux-cpl`. By convention, -directional fluxes are positive downward. In CESM, the sea ice model may -exchange coupling fluxes using a different grid than the computational -grid. This functionality is activated using the namelist variable -``gridcpl_file``. Another namelist variable ``highfreq``, allows the -high-frequency coupling procedure implemented in the Regional Arctic -System Model (RASM). In particular, the relative atmosphere-ice velocity -(:math:`\vec{U}_a-\vec{u}`) is used instead of the full atmospheric -velocity for computing turbulent fluxes in the atmospheric boundary -layer. - -:ref:`tab-flux-cpl`: *Data exchanged between the CESM flux coupler and the sea ice model* - -.. _tab-flux-cpl: - -.. 
table:: Table 1 - - =========================== ====================================== ======================================================================================= - Variable Description Interaction with flux coupler - =========================== ====================================== ======================================================================================= - :math:`z_o` Atmosphere level height From *atmosphere model* via flux coupler **to** *sea ice model* - - :math:`\vec{U}_a` Wind velocity From *atmosphere model* via flux coupler **to** *sea ice model* - - :math:`Q_a` Specific humidity From *atmosphere model* via flux coupler **to** *sea ice model* - - :math:`\rho_a` Air density From *atmosphere model* via flux coupler **to** *sea ice model* - - :math:`\Theta_a` Air potential temperature From *atmosphere model* via flux coupler **to** *sea ice model* - - :math:`T_a` Air temperature From *atmosphere model* via flux coupler **to** *sea ice model* - - :math:`F_{sw\downarrow}` Incoming shortwave radiation From *atmosphere model* via flux coupler **to** *sea ice model* - (4 bands) - - :math:`F_{L\downarrow}` Incoming longwave radiation From *atmosphere model* via flux coupler **to** *sea ice model* - - :math:`F_{rain}` Rainfall rate From *atmosphere model* via flux coupler **to** *sea ice model* - - :math:`F_{snow}` Snowfall rate From *atmosphere model* via flux coupler **to** *sea ice model* - - :math:`F_{frzmlt}` Freezing/melting potential From *ocean model* via flux coupler **to** *sea ice model* - - :math:`T_w` Sea surface temperature From *ocean model* via flux coupler **to** *sea ice model* - - :math:`S` Sea surface salinity From *ocean model* via flux coupler **to** *sea ice model* - - :math:`\nabla H_o` Sea surface slope From *ocean model* via flux coupler **to** *sea ice model* - - :math:`\vec{U}_w` Surface ocean currents From *ocean model* via flux coupler **to** *sea ice model* - - :math:`\vec{\tau}_a` Wind stress From *sea ice model* via flux coupler **to** *atmosphere model* - - :math:`F_s` Sensible heat flux From *sea ice model* via flux coupler **to** *atmosphere model* - - :math:`F_l` Latent heat flux From *sea ice model* via flux coupler **to** *atmosphere model* - - :math:`F_{L\uparrow}` Outgoing longwave radiation From *sea ice model* via flux coupler **to** *atmosphere model* - - :math:`F_{evap}` Evaporated water From *sea ice model* via flux coupler **to** *atmosphere model* - - :math:`\alpha` Surface albedo (4 bands) From *sea ice model* via flux coupler **to** *atmosphere model* - - :math:`T_{sfc}` Surface temperature From *sea ice model* via flux coupler **to** *atmosphere model* - - :math:`F_{sw\Downarrow}` Penetrating shortwave radiation From *sea ice model* via flux coupler **to** *ocean model* - - :math:`F_{water}` Fresh water flux From *sea ice model* via flux coupler **to** *ocean model* - - :math:`F_{hocn}` Net heat flux to ocean From *sea ice model* via flux coupler **to** *ocean model* - - :math:`F_{salt}` Salt flux From *sea ice model* via flux coupler **to** *ocean model* - - :math:`\vec{\tau}_w` Ice-ocean stress From *sea ice model* via flux coupler **to** *ocean model* - - :math:`F_{bio}` Biogeochemical fluxes From *sea ice model* via flux coupler **to** *ocean model* - - :math:`a_{i}` Ice fraction From *sea ice model* via flux coupler **to** both *ocean and atmosphere models* - - :math:`T^{ref}_{a}` 2m reference temperature (diagnostic) From *sea ice model* via flux coupler **to** both *ocean and atmosphere models* - - 
:math:`Q^{ref}_{a}` 2m reference humidity (diagnostic) From *sea ice model* via flux coupler **to** both *ocean and atmosphere models* - - :math:`F_{swabs}` Absorbed shortwave (diagnostic) From *sea ice model* via flux coupler **to** both *ocean and atmosphere models* - =========================== ====================================== ======================================================================================= - -The ice fraction :math:`a_i` (aice) is the total fractional ice -coverage of a grid cell. That is, in each cell, - -.. math:: - \begin{array}{cl} - a_{i}=0 & \mbox{if there is no ice} \\ - a_{i}=1 & \mbox{if there is no open water} \\ - 0 0 - :label: swflux - -where :math:`\cos Z` is the cosine of the solar zenith angle. - -.. _ocean: - -~~~~~ -Ocean -~~~~~ - -New sea ice forms when the ocean temperature drops below its freezing -temperature. In the Bitz and Lipscomb thermodynamics, -:cite:`BL99` :math:`T_f=-\mu S`, where :math:`S` is the -seawater salinity and :math:`\mu=0.054 \ ^\circ`/ppt is the ratio of the -freezing temperature of brine to its salinity (linear liquidus -approximation). For the mushy thermodynamics, :math:`T_f` is given by a -piecewise linear liquidus relation. The ocean model calculates the new -ice formation; if the freezing/melting potential -:math:`F_{frzmlt}` is positive, its value represents a certain -amount of frazil ice that has formed in one or more layers of the ocean -and floated to the surface. (The ocean model assumes that the amount of -new ice implied by the freezing potential actually forms.) - -If :math:`F_{frzmlt}` is negative, it is used to heat already -existing ice from below. In particular, the sea surface temperature and -salinity are used to compute an oceanic heat flux :math:`F_w` -(:math:`\left|F_w\right| \leq \left|F_{frzmlt}\right|`) which -is applied at the bottom of the ice. The portion of the melting -potential actually used to melt ice is returned to the coupler in -:math:`F_{hocn}`. The ocean model adjusts its own heat budget -with this quantity, assuming that the rest of the flux remained in the -ocean. - -In addition to runoff from rain and melted snow, the fresh water flux -:math:`F_{water}` includes ice melt water from the top surface -and water frozen (a negative flux) or melted at the bottom surface of -the ice. This flux is computed as the net change of fresh water in the -ice and snow volume over the coupling time step, excluding frazil ice -formation and newly accumulated snow. Setting the namelist option -update\_ocn\_f to true causes frazil ice to be included in the fresh -water and salt fluxes. - -There is a flux of salt into the ocean under melting conditions, and a -(negative) flux when sea water is freezing. However, melting sea ice -ultimately freshens the top ocean layer, since the ocean is much more -saline than the ice. The ice model passes the net flux of salt -:math:`F_{salt}` to the flux coupler, based on the net change -in salt for ice in all categories. In the present configuration, -ice\_ref\_salinity is used for computing the salt flux, although the ice -salinity used in the thermodynamic calculation has differing values in -the ice layers. - -A fraction of the incoming shortwave :math:`F_{sw\Downarrow}` -penetrates the snow and ice layers and passes into the ocean, as -described in Section :ref:`sfc-forcing`. - -Many ice models compute the sea surface slope :math:`\nabla H_\circ` -from geostrophic ocean currents provided by an ocean model or other data -source. 
In our case, the sea surface height :math:`H_\circ` is a -prognostic variable in POP—the flux coupler can provide the surface -slope directly, rather than inferring it from the currents. (The option -of computing it from the currents is provided in subroutine -*evp\_prep*.) The sea ice model uses the surface layer currents -:math:`\vec{U}_w` to determine the stress between the ocean and the ice, -and subsequently the ice velocity :math:`\vec{u}`. This stress, relative -to the ice, - -.. math:: - \begin{aligned} - \vec{\tau}_w&=&c_w\rho_w\left|{\vec{U}_w-\vec{u}}\right|\left[\left(\vec{U}_w-\vec{u}\right)\cos\theta - +\hat{k}\times\left(\vec{U}_w-\vec{u}\right)\sin\theta\right] \end{aligned} - :label: tauw - -is then passed to the flux coupler (relative to the ocean) for use by -the ocean model. Here, :math:`\theta` is the turning angle between -geostrophic and surface currents, :math:`c_w` is the ocean drag -coefficient, :math:`\rho_w` is the density of seawater, and -:math:`\hat{k}` is the vertical unit vector. The turning angle is -necessary if the top ocean model layers are not able to resolve the -Ekman spiral in the boundary layer. If the top layer is sufficiently -thin compared to the typical depth of the Ekman spiral, then -:math:`\theta=0` is a good approximation. Here we assume that the top -layer is thin enough. - -For CICE run in stand-alone mode (i.e., uncoupled), a thermodynamic slab -ocean mixed-layer parameterization is available in **ice\_ocean.F90**. -The turbulent fluxes are computed above the water surface using the same -parameterizations as for sea ice, but with parameters appropriate for -the ocean. The surface flux balance takes into account the turbulent -fluxes, oceanic heat fluxes from below the mixed layer, and shortwave -and longwave radiation, including that passing through the sea ice into -the ocean. If the resulting sea surface temperature falls below the -salinity-dependent freezing point, then new ice (frazil) forms. -Otherwise, heat is made available for melting the ice. - -.. _formdrag: - -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Variable exchange coefficients -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -In the default CICE setup, atmospheric and oceanic neutral drag -coefficients (:math:`c_u` and :math:`c_w`) are assumed constant in time -and space. These constants are chosen to reflect friction associated -with an effective sea ice surface roughness at the ice–atmosphere and -ice–ocean interfaces. Sea ice (in both Arctic and Antarctic) contains -pressure ridges as well as floe and melt pond edges that act as discrete -obstructions to the flow of air or water past the ice, and are a source -of form drag. Following :cite:`TFSFFKLB14` and based on -recent theoretical developments :cite:`LGHA12,LLCL11`, the -neutral drag coefficients can now be estimated from properties of the -ice cover such as ice concentration, vertical extent and area of the -ridges, freeboard and floe draft, and size of floes and melt ponds. The -new parameterization allows the drag coefficients to be coupled to the -sea ice state and therefore to evolve spatially and temporally. This -parameterization is contained in the subroutine *neutral\_drag\_coeffs* -and is accessed by setting `formdrag` = true in the namelist. - -Following :cite:`TFSFFKLB14`, consider the general case of -fluid flow obstructed by N randomly oriented obstacles of height -:math:`H` and transverse length :math:`L_y`, distributed on a domain -surface area :math:`S_T`. 
Under the assumption of a logarithmic fluid -velocity profile, the general formulation of the form drag coefficient -can be expressed as - -.. math:: - C_d=\frac{N c S_c^2 \gamma L_y H}{2 S_T}\left[\frac{\ln(H/z_0)}{\ln(z_{ref}/z_0)}\right]^2, - :label: formdrag - -where :math:`z_0` is a roughness length parameter at the top or bottom -surface of the ice, :math:`\gamma` is a geometric factor, :math:`c` is -the resistance coefficient of a single obstacle, and :math:`S_c` is a -sheltering function that takes into account the shielding effect of the -obstacle, - -.. math:: - S_{c}=\left(1-\exp(-s_l D/H)\right)^{1/2}, - :label: shelter - -with :math:`D` the distance between two obstacles and :math:`s_l` an -attenuation parameter. - -As in the original drag formulation in CICE (sections :ref:`atmo` and -:ref:`ocean`), :math:`c_u` and :math:`c_w` along with the transfer -coefficients for sensible heat, :math:`c_{\theta}`, and latent heat, -:math:`c_{q}`, are initialized to a situation corresponding to neutral -atmosphere–ice and ocean–ice boundary layers. The corresponding neutral -exchange coefficients are then replaced by coefficients that explicitly -account for form drag, expressed in terms of various contributions as - -.. math:: - \tt{Cdn\_atm} = \tt{Cdn\_atm\_rdg} + \tt{Cdn\_atm\_floe} + \tt{Cdn\_atm\_skin} + \tt{Cdn\_atm\_pond} , - :label: Cda - -.. math:: - \tt{Cdn\_ocn} = \tt{Cdn\_ocn\_rdg} + \tt{Cdn\_ocn\_floe} + \tt{Cdn\_ocn\_skin}. - :label: Cdw - -The contributions to form drag from ridges (and keels underneath the -ice), floe edges and melt pond edges can be expressed using the general -formulation of equation :eq:`formdrag` (see :cite:`TFSFFKLB14` for -details). Individual terms in equation :eq:`Cdw` are fully described in -:cite:`TFSFFKLB14`. Following :cite:`Arya75` -the skin drag coefficient is parametrized as - -.. math:: - { \tt{Cdn\_(atm/ocn)\_skin}}=a_{i} \left(1-m_{(s/k)} \frac{H_{(s/k)}}{D_{(s/k)}}\right)c_{s(s/k)}, \mbox{ if $\displaystyle\frac{H_{(s/k)}}{D_{(s/k)}}\ge\frac{1}{m_{(s/k)}}$,} - :label: skindrag - -where :math:`m_s` (:math:`m_k`) is a sheltering parameter that depends -on the average sail (keel) height, :math:`H_s` (:math:`H_k`), but is -often assumed constant, :math:`D_s` (:math:`D_k`) is the average -distance between sails (keels), and :math:`c_{ss}` (:math:`c_{sk}`) is -the unobstructed atmospheric (oceanic) skin drag that would be attained -in the absence of sails (keels) and with complete ice coverage, -:math:`a_{ice}=1`. - -Calculation of equations :eq:`formdrag` – :eq:`skindrag` requires that small-scale geometrical -properties of the ice cover be related to average grid cell quantities -already computed in the sea ice model. These intermediate quantities are -briefly presented here and described in more detail in -:cite:`TFSFFKLB14`. The sail height is given by - -.. math:: - H_{s} = \displaystyle 2\frac{v_{rdg}}{a_{rdg}}\left(\frac{\alpha\tan \alpha_{k} R_d+\beta \tan \alpha_{s} R_h}{\phi_r\tan \alpha_{k} R_d+\phi_k \tan \alpha_{s} R_h^2}\right), - :label: Hs - -and the distance between sails\ - -.. 
math:: - D_{s} = \displaystyle 2 H_s\frac{a_{i}}{a_{rdg}} \left(\frac{\alpha}{\tan \alpha_s}+\frac{\beta}{\tan \alpha_k}\frac{R_h}{R_d}\right), - :label: Ds - -where :math:`0<\alpha<1` and :math:`0<\beta<1` are weight functions, -:math:`\alpha_{s}` and :math:`\alpha_{k}` are the sail and keel slope, -:math:`\phi_s` and :math:`\phi_k` are constant porosities for the sails -and keels, and we assume constant ratios for the average keel depth and -sail height (:math:`H_k/H_s=R_h`) and for the average distances between -keels and between sails (:math:`D_k/D_s=R_d`). With the assumption of -hydrostatic equilibrium, the effective ice plus snow freeboard is -:math:`H_{f}=\bar{h_i}(1-\rho_i/\rho_w)+\bar{h_s}(1-\rho_s/\rho_w)`, -where :math:`\rho_i`, :math:`\rho_w` and :math:`\rho_s` are -respectively the densities of sea ice, water and snow, :math:`\bar{h_i}` -is the mean ice thickness and :math:`\bar{h_s}` is the mean snow -thickness (means taken over the ice covered regions). For the melt pond -edge elevation we assume that the melt pond surface is at the same level -as the ocean surface surrounding the floes -:cite:`FF07,FFT10,FSFH12` and use the simplification -:math:`H_p = H_f`. Finally to estimate the typical floe size -:math:`L_A`, distance between floes, :math:`D_F`, and melt pond size, -:math:`L_P` we use the parameterizations of :cite:`LGHA12` -to relate these quantities to the ice and pond concentrations. All of -these intermediate quantities are available as history output, along -with `Cdn\_atm`, `Cdn\_ocn` and the ratio `Cdn\_atm\_ratio\_n` between the -total atmospheric drag and the atmospheric neutral drag coefficient. - -We assume that the total neutral drag coefficients are thickness -category independent, but through their dependance on the diagnostic -variables described above, they vary both spatially and temporally. The -total drag coefficients and heat transfer coefficients will also depend -on the type of stratification of the atmosphere and the ocean, and we -use the parameterization described in section :ref:`atmo` that accounts -for both stable and unstable atmosphere–ice boundary layers. In contrast -to the neutral drag coefficients the stability effect of the atmospheric -boundary layer is calculated separately for each ice thickness category. - -The transfer coefficient for oceanic heat flux to the bottom of the ice -may be varied based on form drag considerations by setting the namelist -variable `fbot\_xfer\_type` to `Cdn\_ocn`; this is recommended when using -the form drag parameterization. Its default value of the transfer -coefficient is 0.006 (`fbot\_xfer\_type = ’constant’`). - - ----------------- Model components ----------------- +================ The Arctic and Antarctic sea ice packs are mixtures of open water, thin first-year ice, thicker multiyear ice, and thick pressure ridges. 
The diff --git a/doc/source/user_guide/.DS_Store b/doc/source/user_guide/.DS_Store new file mode 100644 index 000000000..5008ddfcf Binary files /dev/null and b/doc/source/user_guide/.DS_Store differ diff --git a/doc/source/figures/.DS_Store b/doc/source/user_guide/figures/.DS_Store similarity index 100% rename from doc/source/figures/.DS_Store rename to doc/source/user_guide/figures/.DS_Store diff --git a/doc/source/figures/distrb.png b/doc/source/user_guide/figures/distrb.png similarity index 100% rename from doc/source/figures/distrb.png rename to doc/source/user_guide/figures/distrb.png diff --git a/doc/source/figures/extra/distrb_cart_X1_20x24_16.png b/doc/source/user_guide/figures/extra/distrb_cart_X1_20x24_16.png similarity index 100% rename from doc/source/figures/extra/distrb_cart_X1_20x24_16.png rename to doc/source/user_guide/figures/extra/distrb_cart_X1_20x24_16.png diff --git a/doc/source/figures/extra/distrb_cart_X2_20x24_16.png b/doc/source/user_guide/figures/extra/distrb_cart_X2_20x24_16.png similarity index 100% rename from doc/source/figures/extra/distrb_cart_X2_20x24_16.png rename to doc/source/user_guide/figures/extra/distrb_cart_X2_20x24_16.png diff --git a/doc/source/figures/extra/distrb_cart_sqr_20x24_16.png b/doc/source/user_guide/figures/extra/distrb_cart_sqr_20x24_16.png similarity index 100% rename from doc/source/figures/extra/distrb_cart_sqr_20x24_16.png rename to doc/source/user_guide/figures/extra/distrb_cart_sqr_20x24_16.png diff --git a/doc/source/figures/extra/distrb_rake_block_20x24_16.png b/doc/source/user_guide/figures/extra/distrb_rake_block_20x24_16.png similarity index 100% rename from doc/source/figures/extra/distrb_rake_block_20x24_16.png rename to doc/source/user_guide/figures/extra/distrb_rake_block_20x24_16.png diff --git a/doc/source/figures/extra/distrb_rake_lat_20x24_16.png b/doc/source/user_guide/figures/extra/distrb_rake_lat_20x24_16.png similarity index 100% rename from doc/source/figures/extra/distrb_rake_lat_20x24_16.png rename to doc/source/user_guide/figures/extra/distrb_rake_lat_20x24_16.png diff --git a/doc/source/figures/extra/distrb_sfc_lat_20x24_16.png b/doc/source/user_guide/figures/extra/distrb_sfc_lat_20x24_16.png similarity index 100% rename from doc/source/figures/extra/distrb_sfc_lat_20x24_16.png rename to doc/source/user_guide/figures/extra/distrb_sfc_lat_20x24_16.png diff --git a/doc/source/figures/extra/topo2.png b/doc/source/user_guide/figures/extra/topo2.png similarity index 100% rename from doc/source/figures/extra/topo2.png rename to doc/source/user_guide/figures/extra/topo2.png diff --git a/doc/source/figures/extra/topo3.png b/doc/source/user_guide/figures/extra/topo3.png similarity index 100% rename from doc/source/figures/extra/topo3.png rename to doc/source/user_guide/figures/extra/topo3.png diff --git a/doc/source/figures/grid.png b/doc/source/user_guide/figures/grid.png similarity index 100% rename from doc/source/figures/grid.png rename to doc/source/user_guide/figures/grid.png diff --git a/doc/source/figures/histograms.png b/doc/source/user_guide/figures/histograms.png similarity index 100% rename from doc/source/figures/histograms.png rename to doc/source/user_guide/figures/histograms.png diff --git a/doc/source/figures/pdf/EAP.pdf b/doc/source/user_guide/figures/pdf/EAP.pdf similarity index 100% rename from doc/source/figures/pdf/EAP.pdf rename to doc/source/user_guide/figures/pdf/EAP.pdf diff --git a/doc/source/figures/pdf/albedo.pdf b/doc/source/user_guide/figures/pdf/albedo.pdf similarity index 100% rename 
from doc/source/figures/pdf/albedo.pdf rename to doc/source/user_guide/figures/pdf/albedo.pdf diff --git a/doc/source/figures/pdf/craig_ciceperf_ehunke.pdf b/doc/source/user_guide/figures/pdf/craig_ciceperf_ehunke.pdf similarity index 100% rename from doc/source/figures/pdf/craig_ciceperf_ehunke.pdf rename to doc/source/user_guide/figures/pdf/craig_ciceperf_ehunke.pdf diff --git a/doc/source/figures/pdf/deparr.pdf b/doc/source/user_guide/figures/pdf/deparr.pdf similarity index 100% rename from doc/source/figures/pdf/deparr.pdf rename to doc/source/user_guide/figures/pdf/deparr.pdf diff --git a/doc/source/figures/pdf/distrb_cart_X1_20x24_16.pdf b/doc/source/user_guide/figures/pdf/distrb_cart_X1_20x24_16.pdf similarity index 100% rename from doc/source/figures/pdf/distrb_cart_X1_20x24_16.pdf rename to doc/source/user_guide/figures/pdf/distrb_cart_X1_20x24_16.pdf diff --git a/doc/source/figures/pdf/distrb_cart_X2_20x24_16.pdf b/doc/source/user_guide/figures/pdf/distrb_cart_X2_20x24_16.pdf similarity index 100% rename from doc/source/figures/pdf/distrb_cart_X2_20x24_16.pdf rename to doc/source/user_guide/figures/pdf/distrb_cart_X2_20x24_16.pdf diff --git a/doc/source/figures/pdf/distrb_cart_sqr_20x24_16.pdf b/doc/source/user_guide/figures/pdf/distrb_cart_sqr_20x24_16.pdf similarity index 100% rename from doc/source/figures/pdf/distrb_cart_sqr_20x24_16.pdf rename to doc/source/user_guide/figures/pdf/distrb_cart_sqr_20x24_16.pdf diff --git a/doc/source/figures/pdf/distrb_rake_block_20x24_16.pdf b/doc/source/user_guide/figures/pdf/distrb_rake_block_20x24_16.pdf similarity index 100% rename from doc/source/figures/pdf/distrb_rake_block_20x24_16.pdf rename to doc/source/user_guide/figures/pdf/distrb_rake_block_20x24_16.pdf diff --git a/doc/source/figures/pdf/distrb_rake_lat_20x24_16.pdf b/doc/source/user_guide/figures/pdf/distrb_rake_lat_20x24_16.pdf similarity index 100% rename from doc/source/figures/pdf/distrb_rake_lat_20x24_16.pdf rename to doc/source/user_guide/figures/pdf/distrb_rake_lat_20x24_16.pdf diff --git a/doc/source/figures/pdf/distrb_sfc_lat_20x24_16.pdf b/doc/source/user_guide/figures/pdf/distrb_sfc_lat_20x24_16.pdf similarity index 100% rename from doc/source/figures/pdf/distrb_sfc_lat_20x24_16.pdf rename to doc/source/user_guide/figures/pdf/distrb_sfc_lat_20x24_16.pdf diff --git a/doc/source/figures/pdf/gplot.pdf b/doc/source/user_guide/figures/pdf/gplot.pdf similarity index 100% rename from doc/source/figures/pdf/gplot.pdf rename to doc/source/user_guide/figures/pdf/gplot.pdf diff --git a/doc/source/figures/pdf/grid.pdf b/doc/source/user_guide/figures/pdf/grid.pdf similarity index 100% rename from doc/source/figures/pdf/grid.pdf rename to doc/source/user_guide/figures/pdf/grid.pdf diff --git a/doc/source/figures/pdf/histograms.pdf b/doc/source/user_guide/figures/pdf/histograms.pdf similarity index 100% rename from doc/source/figures/pdf/histograms.pdf rename to doc/source/user_guide/figures/pdf/histograms.pdf diff --git a/doc/source/figures/pdf/timings.pdf b/doc/source/user_guide/figures/pdf/timings.pdf similarity index 100% rename from doc/source/figures/pdf/timings.pdf rename to doc/source/user_guide/figures/pdf/timings.pdf diff --git a/doc/source/figures/pdf/topo2.pdf b/doc/source/user_guide/figures/pdf/topo2.pdf similarity index 100% rename from doc/source/figures/pdf/topo2.pdf rename to doc/source/user_guide/figures/pdf/topo2.pdf diff --git a/doc/source/figures/pdf/topo3.pdf b/doc/source/user_guide/figures/pdf/topo3.pdf similarity index 100% rename from 
doc/source/figures/pdf/topo3.pdf rename to doc/source/user_guide/figures/pdf/topo3.pdf diff --git a/doc/source/figures/pdf/tracergraphic.pdf b/doc/source/user_guide/figures/pdf/tracergraphic.pdf similarity index 100% rename from doc/source/figures/pdf/tracergraphic.pdf rename to doc/source/user_guide/figures/pdf/tracergraphic.pdf diff --git a/doc/source/figures/pdf/triangles.pdf b/doc/source/user_guide/figures/pdf/triangles.pdf similarity index 100% rename from doc/source/figures/pdf/triangles.pdf rename to doc/source/user_guide/figures/pdf/triangles.pdf diff --git a/doc/source/figures/scorecard.png b/doc/source/user_guide/figures/scorecard.png similarity index 100% rename from doc/source/figures/scorecard.png rename to doc/source/user_guide/figures/scorecard.png diff --git a/doc/source/user_guide/index.rst b/doc/source/user_guide/index.rst new file mode 100644 index 000000000..5f240be8b --- /dev/null +++ b/doc/source/user_guide/index.rst @@ -0,0 +1,19 @@ +.. CICE-Consortium documentation master file, created by + sphinx-quickstart on Thu Jun 29 13:47:09 2017. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +.. _user_guide: + +User Guide +----------------- + +.. toctree:: + :maxdepth: 3 + + ug_implementation.rst + ug_running.rst + ug_performance.rst + ug_adding.rst + ug_troubleshooting.rst + ug_testing.rst diff --git a/doc/source/user_guide/ug_adding.rst b/doc/source/user_guide/ug_adding.rst new file mode 100644 index 000000000..86bc22304 --- /dev/null +++ b/doc/source/user_guide/ug_adding.rst @@ -0,0 +1,151 @@ +:tocdepth: 3 + +.. _adding: + +Adding things +============= + +.. _addtimer: + +~~~~~~ +Timers +~~~~~~ + +Timing any section of code, or multiple sections, consists of defining +the timer and then wrapping the code with start and stop commands for +that timer. Printing of the timer output is done simultaneously for all +timers. To add a timer, first declare it (`timer\_[tmr]`) at the top of +**ice\_timers.F90** (we recommend doing this in both the **mpi/** and +**serial/** directories), then add a call to *get\_ice\_timer* in the +subroutine *init\_ice\_timers*. In the module containing the code to be +timed, `call ice\_timer\_start`(`timer\_[tmr]`) at the beginning of the +section to be timed, and a similar call to `ice\_timer\_stop` at the end. +A use `ice\_timers` statement may need to be added to the subroutine being +modified. Be careful not to have one command outside of a loop and the +other command inside. Timers can be run for individual blocks, if +desired, by including the block ID in the timer calls. + +.. _addhist: + +~~~~~~~~~~~~~~ +History fields +~~~~~~~~~~~~~~ + +To add a variable to be printed in the history output, search for +‘example’ in **ice\_history\_shared.F90**: + +#. add a frequency flag for the new field + +#. add the flag to the namelist (here and also in **ice\_in**) + +#. add an index number + +and in **ice\_history.F90**: + +#. broadcast the flag + +#. add a call to `define\_hist\_field` + +#. add a call to `accum\_hist\_field` + +The example is for a standard, two-dimensional (horizontal) field; for +other array sizes, choose another history variable with a similar shape +as an example. Some history variables, especially tracers, are grouped +in other files according to their purpose (bgc, melt ponds, etc.). + +To add an output frequency for an existing variable, see +section :ref:`history`. + +.. 
_addtrcr: + +~~~~~~~ +Tracers +~~~~~~~ + +Each optional tracer has its own module, **ice\_[tracer].F90**, which +also contains as much of the additional tracer code as possible, and for +backward compatibility of binary restart files, each new tracer has its +own binary restart file. We recommend that the logical namelist variable +`tr\_[tracer]` be used for all calls involving the new tracer outside of +**ice\_[tracer].F90**, in case other users do not want to use that +tracer. + +A number of optional tracers are available in the code, including ice +age, first-year ice area, melt pond area and volume, brine height, +aerosols, and level ice area and volume (from which ridged ice +quantities are derived). Salinity, enthalpies, age, aerosols, level-ice +volume, brine height and most melt pond quantities are volume-weighted +tracers, while first-year area, pond area, level-ice area and all of the +biogeochemistry tracers in this release are area-weighted tracers. In +the absence of sources and sinks, the total mass of a volume-weighted +tracer such as aerosol (kg) is conserved under transport in horizontal +and thickness space (the mass in a given grid cell will change), whereas +the aerosol concentration (kg/m) is unchanged following the motion, and +in particular, the concentration is unchanged when there is surface or +basal melting. The proper units for a volume-weighted mass tracer in the +tracer array are kg/m. + +In several places in the code, tracer computations must be performed on +the conserved “tracer volume" rather than the tracer itself; for +example, the conserved quantity is :math:`h_{pnd}a_{pnd}a_{lvl}a_{i}`, +not :math:`h_{pnd}`. Conserved quantities are thus computed according to +the tracer dependencies, and code must be included to account for new +dependencies (e.g., :math:`a_{lvl}` and :math:`a_{pnd}` in +**ice\_itd.F90** and **ice\_mechred.F90**). + +To add a tracer, follow these steps using one of the existing tracers as +a pattern. + +#. **ice\_domain\_size.F90**: increase `max\_ntrcr` (can also add option + to **comp\_ice** and **bld/Macros.\***) + +#. **ice\_state.F90**: declare `nt\_[tracer]` and `tr\_[tracer]` + +#. **ice\_[tracer].F90**: create initialization, physics, restart + routines + +#. **ice\_fileunits.F90**: add new dump and restart file units + +#. **ice\_init.F90**: (some of this may be done in **ice\_[tracer].F90** + instead) + + - add new module and `tr\_[tracer]` to list of used modules and + variables + + - add logical namelist variable `tr\_[tracer]` + + - initialize namelist variable + + - broadcast namelist variable + + - print namelist variable to diagnostic output file + + - increment number of tracers in use based on namelist input (`ntrcr`) + + - define tracer types (`trcr\_depend` = 0 for ice area tracers, 1 for + ice volume, 2 for snow volume, 2+nt\_[tracer] for dependence on + other tracers) + +#. **ice\_itd.F90**, **ice\_mechred.F90**: Account for new dependencies + if needed. + +#. **CICE\_InitMod.F90**: initialize tracer (includes reading restart + file) + +#. **CICE\_RunMod.F90**, **ice\_step\_mod.F90**: + + - call routine to write tracer restart data + + - call physics routines in **ice\_[tracer].F90** (often called from + **ice\_step\_mod.F90**) + +#. **ice\_restart.F90**: define restart variables (for binary,  and PIO) + +#. **ice\_history\_[tracer].F90**: add history variables + (Section :ref:`addhist`) + +#. **ice\_in**: add namelist variables to *tracer\_nml* and + *icefields\_nml* + +#. 
If strict conservation is necessary, add diagnostics as noted for + topo ponds in Section :ref:`ponds`. diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst new file mode 100644 index 000000000..5aac6242d --- /dev/null +++ b/doc/source/user_guide/ug_implementation.rst @@ -0,0 +1,1060 @@ +:tocdepth: 3 + + +Numerical implementation +======================== + +CICE is written in FORTRAN90 and runs on platforms using UNIX, LINUX, +and other operating systems. The code is parallelized via grid +decomposition with MPI or OpenMP threads and includes some optimizations +for vector architectures. + +A second, “external” layer of parallelization involves message passing +between CICE and the flux coupler, which may be running on different +processors in a distributed system. The parallelization scheme for CICE +was designed so that MPI could be used for the coupling along with MPI, +OpenMP or no parallelization internally. The internal parallelization +method is set at compile time with the `NTASK` and `THRD` definitions in the +compile script. Message passing between the ice model and the CESM flux +coupler is accomplished with MPI, regardless of the type of internal +parallelization used for CICE, although the ice model may be coupled to +another system without using MPI. + +.. _dirstructure: + +~~~~~~~~~~~~~~~~~~~ +Directory structure +~~~~~~~~~~~~~~~~~~~ + +The present code distribution includes make files, several scripts and +some input files. The main directory is **cice/**, and a run directory +(**rundir/**) is created upon initial execution of the script +**comp\_ice**. One year of atmospheric forcing data is also available +from the code distribution web site (see the **README** file for +details). 
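+For orientation, the typical standalone build-and-run sequence with
+these scripts is sketched below; the location of the run directory is
+set inside **comp\_ice** and is machine dependent, so the paths shown
+are placeholders rather than definitive instructions.
+
+::
+
+    cd cice
+    ./comp_ice          # compile and populate the run directory
+    cd <rundir>         # run directory location is set in comp_ice
+    ./run_ice           # or submit it as a batch job on your machine
+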
+ +basic information + +**bld/** makefiles + +**Macros.**\ :math:`\langle`\ OS\ :math:`\rangle`.\ :math:`\langle`\ SITE\ :math:`\rangle`.\ :math:`\langle`\ machine\ :math:`\rangle` + macro definitions for the given operating system, used by + **Makefile**.\ :math:`\langle` \ OS\ :math:`\rangle` + +**Makefile.**\ :math:`\langle`\ OS\ :math:`\rangle` + primary makefile for the given operating system + (**:math:`\langle`\ std\ :math:`\rangle`** works for most systems) + +**makedep.c** + perl script that determines module dependencies + +script that sets up the run directory and compiles the code + +modules based on “shared" code in CESM + +**shr\_orb\_mod.F90** + orbital parameterizations + +documentation + +**cicedoc.pdf** + this document + +**PDF/** + PDF documents of numerous publications related to CICE + +institution-specific modules + +**cice/** + official driver for CICE v5 (LANL) + + **CICE.F90** + main program + + **CICE\_FinalMod.F90** + routines for finishing and exiting a run + + **CICE\_InitMod.F90** + routines for initializing a run + + **CICE\_RunMod.F90** + main driver routines for time stepping + + **CICE\_RunMod.F90\_debug** + debugging version of **CICE\_RunMod.F90** + + **ice\_constants.F90** + physical and numerical constants and parameters + +sample diagnostic output files + +input files that may be modified for other CICE configurations + +**col/** + column configuration files + + **ice\_in** + namelist input data (data paths depend on particular system) + +**gx1/** + :math:`\left<1^\circ\right>` displaced pole grid files + + **global\_gx1.grid** + :math:`\left<1^\circ\right>` displaced pole grid (binary) + + **global\_gx1.kmt** + :math:`\left<1^\circ\right>` land mask (binary) + + **ice.restart\_file** + pointer for restart file name + + **ice\_in** + namelist input data (data paths depend on particular system) + + **ice\_in\_v4.1** + namelist input data for default CICE v4.1 configuration + + **iced\_gx1\_v5.nc** +  restart file used for initial condition + +**gx3/** + :math:`\left<3^\circ\right>` displaced pole grid files + + **global\_gx3.grid** + :math:`\left<3^\circ\right>` displaced pole grid (binary) + + **global\_gx3.kmt** + :math:`\left<3^\circ\right>` land mask (binary) + + **global\_gx3.grid.nc** + :math:`\left<3^\circ\right>` displaced pole grid () + + **global\_gx3.kmt.nc** + :math:`\left<3^\circ\right>` land mask () + + **ice.restart\_file** + pointer for restart file name + + **ice\_in** + namelist input data (data paths depend on particular system) + + **iced\_gx3\_v5.nc** +  restart file used for initial condition + +convert\_restarts.f90 + Fortran code to convert restart files from v4.1 to v5 (4 ice layers) + +**run\_ice.**\ :math:`\langle`\ OS\ :math:`\rangle`.\ :math:`\langle`\ SITE\ :math:`\rangle`.\ :math:`\langle`\ machine\ :math:`\rangle` + sample script for running on the given operating system + +binary history and restart modules + +**ice\_history\_write.F90** + subroutines with binary output + +**ice\_restart.F90** + read/write binary restart files + + history and restart modules + +**ice\_history\_write.F90** + subroutines with  output + +**ice\_restart.F90** + read/write   restart files + +parallel I/O history and restart modules + +**ice\_history\_write.F90** + subroutines with   output using PIO + +**ice\_pio.F90** + subroutines specific to PIO + +**ice\_restart.F90** + read/write  restart files using PIO + +modules that require MPI calls + +**ice\_boundary.F90** + boundary conditions + +**ice\_broadcast.F90** + routines for broadcasting 
data across processors + +**ice\_communicate.F90** + routines for communicating between processors + +**ice\_exit.F90** + aborts or exits the run + +**ice\_gather\_scatter.F90** + gathers/scatters data to/from one processor from/to all processors + +**ice\_global\_reductions.F90** + global sums, minvals, maxvals, etc., across processors + +**ice\_timers.F90** + timing routines + +same modules as in **mpi/** but without MPI calls + +general CICE source code + +handles most work associated with the aerosol tracers + +handles most work associated with the age tracer + +skeletal layer biogeochemistry + +stability-based parameterization for calculation of turbulent +ice–atmosphere fluxes + +for decomposing global domain into blocks + +evolves the brine height tracer + +keeps track of what time it is + +miscellaneous diagnostic and debugging routines + +for distributing blocks across processors + +decompositions, distributions and related parallel processing info + +domain and block sizes + +elastic-anisotropic-plastic dynamics component + +elastic-viscous-plastic dynamics component + +code shared by EVP and EAP dynamics + +unit numbers for I/O + +handles most work associated with the first-year ice area tracer + +fluxes needed/produced by the model + +routines to read and interpolate forcing data for stand-alone ice model +runs + +grid and land masks + +initialization and accumulation of history output variables + +history output of biogeochemistry variables + +history output of form drag variables + +history output of ridging variables + +history output of melt pond variables + +code shared by all history modules + +namelist and initializations + +utilities for managing ice thickness distribution + +basic definitions of reals, integers, etc. + +handles most work associated with the level ice area and volume tracers + +mechanical redistribution component (ridging) + +CESM melt pond parameterization + +level-ice melt pond parameterization + +topo melt pond parameterization + +mixed layer ocean model + +orbital parameters for Delta-Eddington shortwave parameterization + +utilities for reading and writing files + +driver for reading/writing restart files + +code shared by all restart options + +basic restoring for open boundary conditions + +shortwave and albedo parameterizations + +space-filling-curves distribution method + +essential arrays to describe the state of the ice + +routines for time stepping the major code components + +zero-layer thermodynamics of :cite:`Semtner76` + +multilayer thermodynamics of :cite:`BL99` + +thermodynamic changes mostly related to ice thickness distribution + +mushy-theory thermodynamics of:cite:`THB13` + +code shared by all thermodynamics parameterizations + +vertical growth rates and fluxes + +driver for horizontal advection + +horizontal advection via incremental remapping + +driver for ice biogeochemistry and brine tracer motion + +parameters and shared code for biogeochemistry and brine height + +execution or “run” directory created when the code is compiled using the +**comp\_ice** script (gx3) + +**cice** + code executable + +**compile/** + directory containing object files, etc. 
+ +**grid** + horizontal grid file from **cice/input\_templates/gx3/** + +**ice.log.[ID]** + diagnostic output file + +**ice\_in** + namelist input data from **cice/input\_templates/gx3/** + +**history/iceh.[timeID].nc** + output history file + +**kmt** + land mask file from **cice/input\_templates/gx3/** + +**restart/** + restart directory + + **iced\_gx3\_v5.nc** + initial condition from **cice/input\_templates/gx3/** + + **ice.restart\_file** + restart pointer from **cice/input\_templates/gx3/** + +**run\_ice** + batch run script file from **cice/input\_templates/** + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Grid, boundary conditions and masks +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The spatial discretization is specialized for a generalized orthogonal +B-grid as in :cite:`Murray96` or +:cite:`SKM95`. The ice and snow area, volume and energy are +given at the center of the cell, velocity is defined at the corners, and +the internal ice stress tensor takes four different values within a grid +cell; bilinear approximations are used for the stress tensor and the ice +velocity across the cell, as described in :cite:`HD02`. +This tends to avoid the grid decoupling problems associated with the +B-grid. EVP is available on the C-grid through the MITgcm code +distribution, http://mitgcm.org/viewvc/MITgcm/MITgcm/pkg/seaice/. + +Since ice thickness and thermodynamic variables such as temperature are given +in the center of each cell, the grid cells are referred to as “T cells.” +We also occasionally refer to “U cells,” which are centered on the +northeast corner of the corresponding T cells and have velocity in the +center of each. The velocity components are aligned along grid lines. + +The user has several choices of grid routines: *popgrid* reads grid +lengths and other parameters for a nonuniform grid (including tripole +and regional grids), and *rectgrid* creates a regular rectangular grid, +including that used for the column configuration. The input files +**global\_gx3.grid** and **global\_gx3.kmt** contain the +:math:`\left<3^\circ\right>` POP grid and land mask; +**global\_gx1.grid** and **global\_gx1.kmt** contain the +:math:`\left<1^\circ\right>` grid and land mask. These are binary +unformatted, direct access files produced on an SGI (Big Endian). If you +are using an incompatible (Little Endian) architecture, choose +`rectangular` instead of `displaced\_pole` in **ice\_in**, or follow +procedures as for conejo +(:math:`\langle`\ **OS**\ :math:`\rangle.\langle`\ **SITE**\ :math:`\rangle.\langle`\ **machine**\ :math:`\rangle` += Linux.LANL.conejo). There are versions of the gx3 grid files +available. + +In CESM, the sea ice model may exchange coupling fluxes using a +different grid than the computational grid. This functionality is +activated using the namelist variable `gridcpl\_file`. + +*********************** +Grid domains and blocks +*********************** + +In general, the global gridded domain is +`nx\_global` :math:`\times`\ `ny\_global`, while the subdomains used in the +block distribution are `nx\_block` :math:`\times`\ `ny\_block`. The +physical portion of a subdomain is indexed as [`ilo:ihi`, `jlo:jhi`], with +nghost “ghost” or “halo" cells outside the domain used for boundary +conditions. These parameters are illustrated in :ref:`fig-grid` in one +dimension. The routines *global\_scatter* and *global\_gather* +distribute information from the global domain to the local domains and +back, respectively. 
If MPI is not being used for grid decomposition in +the ice model, these routines simply adjust the indexing on the global +domain to the single, local domain index coordinates. Although we +recommend that the user choose the local domains so that the global +domain is evenly divided, if this is not possible then the furthest east +and/or north blocks will contain nonphysical points (“padding”). These +points are excluded from the computation domain and have little effect +on model performance. + +.. _fig-grid: + +.. figure:: ./figures/grid.png + :align: center + :scale: 20% + + Figure 8 + +:ref:`fig-grid` : Grid parameters for a sample one-dimensional, 20-cell +global domain decomposed into four local subdomains. Each local +domain has one ghost (halo) cell on each side, and the physical +portion of the local domains are labeled `ilo:ihi`. The parameter +`nx\_block` is the total number of cells in the local domain, including +ghost cells, and the same numbering system is applied to each of the +four subdomains. + +The user chooses a block size `BLCKX` :math:`\times`\ `BLCKY` and the +number of processors `NTASK` in **comp\_ice**. Parameters in the +*domain\_nml* namelist in **ice\_in** determine how the blocks are +distributed across the processors, and how the processors are +distributed across the grid domain. Recommended combinations of these +parameters for best performance are given in Section :ref:`performance`. +The script **comp\_ice** computes the maximum number of blocks on each +processor for typical Cartesian distributions, but for non-Cartesian +cases `MXBLCKS` may need to be set in the script. The code will print this +information to the log file before aborting, and the user will need to +adjust `MXBLCKS` in **comp\_ice** and recompile. The code will also print +a warning if the maximum number of blocks is too large. Although this is +not fatal, it does require excess memory. + +A loop at the end of routine *create\_blocks* in module +**ice\_blocks.F90** will print the locations for all of the blocks on +the global grid if dbug is set to be true. Likewise, a similar loop at +the end of routine *create\_local\_block\_ids* in module +**ice\_distribution.F90** will print the processor and local block +number for each block. With this information, the grid decomposition +into processors and blocks can be ascertained. The dbug flag must be +manually set in the code in each case (independently of the dbug flag in +**ice\_in**), as there may be hundreds or thousands of blocks to print +and this information should be needed only rarely. This information is +much easier to look at using a debugger such as Totalview. + +Alternatively, a new variable is provided in the history files, `blkmask`, +which labels the blocks in the grid decomposition according to `blkmask` = +`my\_task` + `iblk/100`. + +************* +Tripole grids +************* + +The tripole grid is a device for constructing a global grid with a +normal south pole and southern boundary condition, which avoids placing +a physical boundary or grid singularity in the Arctic Ocean. Instead of +a single north pole, it has two “poles” in the north, both located on +land, with a line of grid points between them. This line of points is +called the “fold,” and it is the “top row” of the physical grid. One +pole is at the left-hand end of the top row, and the other is in the +middle of the row. The grid is constructed by “folding” the top row, so +that the left-hand half and the right-hand half of it coincide. 
Two +choices for constructing the tripole grid are available. The one first +introduced to CICE is called “U-fold”, which means that the poles and +the grid cells between them are U cells on the grid. Alternatively the +poles and the cells between them can be grid T cells, making a “T-fold.” +Both of these options are also supported by the OPA/NEMO ocean model, +which calls the U-fold an “f-fold” (because it uses the Arakawa C-grid +in which U cells are on T-rows). The choice of tripole grid is given by +the namelist variable `ns\_boundary\_type`, ‘tripole’ for the U-fold and +‘tripoleT’ for the T-fold grid. + +In the U-fold tripole grid, the poles have U-index +:math:`{\tt nx\_global}/2` and `nx\_global` on the top U-row of the +physical grid, and points with U-index i and :math:`{\tt nx\_global-i}` +are coincident. Let the fold have U-row index :math:`n` on the global +grid; this will also be the T-row index of the T-row to the south of the +fold. There are ghost (halo) T- and U-rows to the north, beyond the +fold, on the logical grid. The point with index i along the ghost T-row +of index :math:`n+1` physically coincides with point +:math:`{\tt nx\_global}-{\tt i}+1` on the T-row of index :math:`n`. The +ghost U-row of index :math:`n+1` physically coincides with the U-row of +index :math:`n-1`. + +In the T-fold tripole grid, the poles have T-index 1 and and +:math:`{\tt nx\_global}/2+1` on the top T-row of the physical grid, and +points with T-index i and :math:`{\tt nx\_global}-{\tt i}+2` are +coincident. Let the fold have T-row index :math:`n` on the global grid. +It is usual for the northernmost row of the physical domain to be a +U-row, but in the case of the T-fold, the U-row of index :math:`n` is +“beyond” the fold; although it is not a ghost row, it is not physically +independent, because it coincides with U-row :math:`n-1`, and it +therefore has to be treated like a ghost row. Points i on U-row +:math:`n` coincides with :math:`{\tt nx\_global}-{\tt i}+1` on U-row +:math:`n-1`. There are still ghost T- and U-rows :math:`n+1` to the +north of U-row :math:`n`. Ghost T-row :math:`n+1` coincides with T-row +:math:`n-1`, and ghost U-row :math:`n+1` coincides with U-row +:math:`n-2`. + +The tripole grid thus requires two special kinds of treatment for +certain rows, arranged by the halo-update routines. First, within rows +along the fold, coincident points must always have the same value. This +is achieved by averaging them in pairs. Second, values for ghost rows +and the “quasi-ghost” U-row on the T-fold grid are reflected copies of +the coincident physical rows. Both operations involve the tripole +buffer, which is used to assemble the data for the affected rows. +Special treatment is also required in the scattering routine, and when +computing global sums one of each pair of coincident points has to be +excluded. + +.. _bio-grid: + +******** +Bio-grid +******** + +The bio-grid is a vertical grid used for solving the brine height +variable :math:`h_b`. In the future, it will also be used for +discretizing the vertical transport equations of biogeochemical tracers. +The bio-grid is a non-dimensional vertical grid which takes the value +zero at :math:`h_b` and one at the ice–ocean interface. The number of +grid levels is specified during compilation in **comp\_ice** by setting +the variable `NBGCLYR` equal to an integer (:math:`n_b`) . 
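+For example, a bio-grid with :math:`n_b` = 7 levels would be selected at
+compile time with a line such as the following in **comp\_ice** (the
+value 7 is illustrative, and the C-shell syntax assumes the standard
+distribution script):
+
+::
+
+    setenv NBGCLYR 7    # number of vertical levels in the bio-grid (n_b)
+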
+ +Ice tracers and microstructural properties defined on the bio-grid are +referenced in two ways: as `bgrid` :math:`=n_b+2` points and as +igrid\ :math:`=n_b+1` points. For both bgrid and igrid, the first and +last points reference :math:`h_b` and the ice–ocean interface, +respectively, and so take the values :math:`0` and :math:`1`, +respectively. For bgrid, the interior points :math:`[2, n_b+1]` are +spaced at :math:`1/n_b` intervals beginning with `bgrid(2)` :math:` = +1/(2n_b)`. The `igrid` interior points :math:`[2, n_b]` are also +equidistant with the same spacing, but physically coincide with points +midway between those of `bgrid`. + +******************** +Column configuration +******************** + +A column modeling capability is available. Because of the boundary +conditions and other spatial assumptions in the model, this is not a +single column, but a small array of columns (minimum grid size is 5x5). +However, the code is set up so that only the single, central column is +used (all other columns are designated as land). The column is located +near Barrow (71.35N, 156.5W). Options for choosing the column +configuration are given in **comp\_ice** (choose `RES col`) and in the +namelist file, **input\_templates/col/ice\_in**. Here, `istep0` and the +initial conditions are set such that the run begins September 1 with no +ice. The grid type is rectangular, dynamics are turned off (`kdyn` = 0) and +one processor is used. + +History variables available for column output are ice and snow +temperature, `Tinz` and `Tsnz`. These variables also include thickness +category as a fourth dimension. + +******************* +Boundary conditions +******************* + +Much of the infrastructure used in CICE, including the boundary +routines, is adopted from POP. The boundary routines perform boundary +communications among processors when MPI is in use and among blocks +whenever there is more than one block per processor. + +Open/cyclic boundary conditions are the default in CICE; the physical +domain can still be closed using the land mask. In our bipolar, +displaced-pole grids, one row of grid cells along the north and south +boundaries is located on land, and along east/west domain boundaries not +masked by land, periodic conditions wrap the domain around the globe. +CICE can be run on regional grids with open boundary conditions; except +for variables describing grid lengths, non-land halo cells along the +grid edge must be filled by restoring them to specified values. The +namelist variable `restore\_ice` turns this functionality on and off; the +restoring timescale `trestore` may be used (it is also used for restoring +ocean sea surface temperature in stand-alone ice runs). This +implementation is only intended to provide the “hooks" for a more +sophisticated treatment; the rectangular grid option can be used to test +this configuration. The ‘displaced\_pole’ grid option should not be used +unless the regional grid contains land all along the north and south +boundaries. The current form of the boundary condition routines does not +allow Neumann boundary conditions, which must be set explicitly. This +has been done in an unreleased branch of the code; contact Elizabeth for +more information. + +For exact restarts using restoring, set `restart\_ext` = true in namelist +to use the extended-grid subroutines. + +On tripole grids, the order of operations used for calculating elements +of the stress tensor can differ on either side of the fold, leading to +round-off differences. 
Although restarts using the extended grid +routines are exact for a given run, the solution will differ from +another run in which restarts are written at different times. For this +reason, explicit halo updates of the stress tensor are implemented for +the tripole grid, both within the dynamics calculation and for restarts. +This has not been implemented yet for tripoleT grids, pending further +testing. + +***** +Masks +***** + +A land mask hm (:math:`M_h`) is specified in the cell centers, with 0 +representing land and 1 representing ocean cells. A corresponding mask +uvm (:math:`M_u`) for velocity and other corner quantities is given by + +.. math:: + M_u(i,j)=\min\{M_h(l),\,l=(i,j),\,(i+1,j),\,(i,j+1),\,(i+1,j+1)\}. + +The logical masks `tmask` and `umask` (which correspond to the real masks +`hm` and `uvm`, respectively) are useful in conditional statements. + +In addition to the land masks, two other masks are implemented in +*evp\_prep* in order to reduce the dynamics component’s work on a global +grid. At each time step the logical masks `ice\_tmask` and `ice\_umask` are +determined from the current ice extent, such that they have the value +“true” wherever ice exists. They also include a border of cells around +the ice pack for numerical purposes. These masks are used in the +dynamics component to prevent unnecessary calculations on grid points +where there is no ice. They are not used in the thermodynamics +component, so that ice may form in previously ice-free cells. Like the +land masks `hm` and `uvm`, the ice extent masks `ice\_tmask` and `ice\_umask` +are for T cells and U cells, respectively. + +Improved parallel performance may result from utilizing halo masks for +boundary updates of the full ice state, incremental remapping transport, +or for EVP or EAP dynamics. These options are accessed through the +logical namelist flags `maskhalo\_bound`, `maskhalo\_remap`, and +`maskhalo\_dyn`, respectively. Only the halo cells containing needed +information are communicated. + +Two additional masks are created for the user’s convenience: `lmask\_n` +and `lmask\_s` can be used to compute or write data only for the northern +or southern hemispheres, respectively. Special constants (`spval` and +`spval\_dbl`, each equal to :math:`10^{30}`) are used to indicate land +points in the history files and diagnostics. + +~~~~~~~~~~~~~~~~~~~ +Test configurations +~~~~~~~~~~~~~~~~~~~ + +.. _init: + +~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Initialization and coupling +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The ice model’s parameters and variables are initialized in several +steps. Many constants and physical parameters are set in +**ice\_constants.F90**. Namelist variables (:ref:`tabnamelist`), +whose values can be altered at run time, are handled in *input\_data* +and other initialization routines. These variables are given default +values in the code, which may then be changed when the input file +**ice\_in** is read. Other physical constants, numerical parameters, and +variables are first set in initialization routines for each ice model +component or module. Then, if the ice model is being restarted from a +previous run, core variables are read and reinitialized in +*restartfile*, while tracer variables needed for specific configurations +are read in separate restart routines associated with each tracer or +specialized parameterization. Finally, albedo and other quantities +dependent on the initial ice state are set. Some of these parameters +will be described in more detail in :ref:`tabnamelist`. 
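+As a concrete illustration of this default-and-override sequence, a
+fragment of the *setup\_nml* group in **ice\_in** might look like the
+sketch below; the values shown are illustrative, not recommended
+settings.
+
+::
+
+    &setup_nml
+      year_init = 1997
+      istep0    = 0
+      dt        = 3600.0
+      npt       = 8760
+      runtype   = 'initial'
+      ice_ic    = 'default'
+      restart   = .false.
+    /
+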
+ +The restart files supplied with the code release include the core +variables on the default configuration, that is, with seven vertical +layers and the ice thickness distribution defined by `kcatbound` = 0. +Restart information for some tracers is also included in the  restart +files. + +Three namelist variables control model initialization, `ice\_ic`, `runtype`, +and `restart`, as described in :ref:`tab-ic`. It is possible to do an +initial run from a file **filename** in two ways: (1) set runtype = +‘initial’, restart = true and ice\_ic = **filename**, or (2) runtype = +‘continue’ and pointer\_file = **./restart/ice.restart\_file** where +**./restart/ice.restart\_file** contains the line +“./restart/[filename]". The first option is convenient when repeatedly +starting from a given file when subsequent restart files have been +written. With this arrangement, the tracer restart flags can be set to +true or false, depending on whether the tracer restart data exist. With +the second option, tracer restart flags are set to ‘continue’ for all +active tracers. + +An additional namelist option, `restart\_ext` specifies whether halo cells +are included in the restart files. This option is useful for tripole and +regional grids, but can not be used with PIO. + +MPI is initialized in *init\_communicate* for both coupled and +stand-alone MPI runs. The ice component communicates with a flux coupler +or other climate components via external routiines that handle the +variables listed in :ref:`tab-flux-cpl`. For stand-alone runs, +routines in **ice\_forcing.F90** read and interpolate data from files, +and are intended merely to provide guidance for the user to write his or +her own routines. Whether the code is to be run in stand-alone or +coupled mode is determined at compile time, as described below. + +:ref:`tab-ic` : *Ice initial state resulting from combinations of* +`ice\_ic`, `runtype` and `restart`. :math:`^a`\ *If false, restart is reset to +true.* :math:`^b`\ *restart is reset to false.* :math:`^c`\ ice\_ic *is +reset to ‘none.’* + +.. _tab-ic: + +.. table:: Table 4 + + +----------------+--------------------------+--------------------------------------+----------------------------------------+ + | ice\_ic | | | | + +================+==========================+======================================+========================================+ + | | initial/false | initial/true | continue/true (or false\ :math:`^a`) | + +----------------+--------------------------+--------------------------------------+----------------------------------------+ + | none | no ice | no ice\ :math:`^b` | restart using **pointer\_file** | + +----------------+--------------------------+--------------------------------------+----------------------------------------+ + | default | SST/latitude dependent | SST/latitude dependent\ :math:`^b` | restart using **pointer\_file** | + +----------------+--------------------------+--------------------------------------+----------------------------------------+ + | **filename** | no ice\ :math:`^c` | start from **filename** | restart using **pointer\_file** | + +----------------+--------------------------+--------------------------------------+----------------------------------------+ + +.. _parameters: + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Choosing an appropriate time step +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The time step is chosen based on stability of the transport component +(both horizontal and in thickness space) and on resolution of the +physical forcing. 
CICE allows the dynamics, advection and ridging +portion of the code to be run with a shorter timestep, +:math:`\Delta t_{dyn}` (`dt\_dyn`), than the thermodynamics timestep +:math:`\Delta t` (`dt`). In this case, `dt` and the integer ndtd are +specified, and `dt\_dyn` = `dt/ndtd`. + +A conservative estimate of the horizontal transport time step bound, or +CFL condition, under remapping yields + +.. math:: + \Delta t_{dyn} < {\min\left(\Delta x, \Delta y\right)\over 2\max\left(u, v\right)}. + +Numerical estimates for this bound for several POP grids, assuming +:math:`\max(u, v)=0.5` m/s, are as follows: + +.. csv-table:: + :widths: 20,40,40,40,40 + + grid label,N pole singularity,dimensions,min :math:`\sqrt{\Delta x\cdot\Delta y}`,max :math:`\Delta t_{dyn}` + gx3,Greenland,:math:`100\times 116`,:math:`39\times 10^3` m,10.8hr + gx1,Greenland,:math:`320\times 384`,:math:`18\times 10^3` m,5.0hr + p4,Canada,:math:`900\times 600`,:math:`6.5\times 10^3` m,1.8hr + +As discussed in section :ref:`mech-red` and +:cite:`LHMJ07`, the maximum time step in practice is +usually determined by the time scale for large changes in the ice +strength (which depends in part on wind strength). Using the strength +parameterization of :cite:`Rothrock75`, as in +Equation :eq:`roth-strength0`, limits the time step to :math:`\sim`\ 30 +minutes for the old ridging scheme (`krdg\_partic` = 0), and to +:math:`\sim`\ 2 hours for the new scheme (`krdg\_partic` = 1), assuming +:math:`\Delta x` = 10 km. Practical limits may be somewhat less, +depending on the strength of the atmospheric winds. + +Transport in thickness space imposes a similar restraint on the time +step, given by the ice growth/melt rate and the smallest range of +thickness among the categories, +:math:`\Delta t<\min(\Delta H)/2\max(f)`, where :math:`\Delta H` is the +distance between category boundaries and :math:`f` is the thermodynamic +growth rate. For the 5-category ice thickness distribution used as the +default in this distribution, this is not a stringent limitation: +:math:`\Delta t < 19.4` hr, assuming :math:`\max(f) = 40` cm/day. + +In the classic EVP or EAP approach (`kdyn` = 1 or 2, `revised\_evp` = false), +the dynamics component is subcycled ndte (:math:`N`) times per dynamics +time step so that the elastic waves essentially disappear before the +next time step. The subcycling time step (:math:`\Delta +t_e`) is thus + +.. math:: + dte = dt\_dyn/ndte. + +A second parameter, :math:`E_\circ` (`eyc`), defines the elastic wave +damping timescale :math:`T`, described in Section :ref:`dynam`, as +`eyc`\ * `dt\_dyn`. The forcing terms are not updated during the subcycling. +Given the small step (`dte`) at which the EVP dynamics model is subcycled, +the elastic parameter :math:`E` is also limited by stability +constraints, as discussed in :cite:`HD97`. Linear stability +analysis for the dynamics component shows that the numerical method is +stable as long as the subcycling time step :math:`\Delta t_e` +sufficiently resolves the damping timescale :math:`T`. For the stability +analysis we had to make several simplifications of the problem; hence +the location of the boundary between stable and unstable regions is +merely an estimate. In practice, the ratio +:math:`\Delta t_e ~:~ T ~:~ \Delta t`  = 1 : 40 : 120 provides both +stability and acceptable efficiency for time steps (:math:`\Delta t`) on +the order of 1 hour. 
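+As a worked example, take :math:`\Delta t` = 3600 s with `ndtd` = 1 (so
+:math:`\Delta t_{dyn}` = 3600 s) and assume the commonly used values
+`ndte` = 120 and `eyc` = 0.36:
+
+.. math::
+   \Delta t_e = \frac{3600}{120} = 30 \ {\rm s}, \qquad
+   T = 0.36 \times 3600 = 1296 \ {\rm s},
+
+so that :math:`\Delta t_e:T:\Delta t \approx 1:43:120`, close to the
+recommended ratio above.
+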
+ +For the revised EVP approach (`kdyn` = 1, `revised\_evp` = true), the +relaxation parameter `arlx1i` effectively sets the damping timescale in +the problem, and `brlx` represents the effective subcycling +:cite:`BFLM13`. In practice the parameters :math:`S_e>0.5` +and :math:`\xi<1` are set, along with an estimate of the ice strength +per unit mass, and the damping and subcycling parameters are then +calculated. With the addition of the revised EVP approach to CICE, the +code now uses these parameters internally for both classic and revised +EVP configurations (see Section :ref:`revp`). + +Note that only :math:`T` and :math:`\Delta t_e` figure into the +stability of the dynamics component; :math:`\Delta t` does not. Although +the time step may not be tightly limited by stability considerations, +large time steps (*e.g.,* :math:`\Delta t=1` day, given daily forcing) +do not produce accurate results in the dynamics component. The reasons +for this error are discussed in :cite:`HD97`; see +:cite:`HZ99` for its practical effects. The thermodynamics +component is stable for any time step, as long as the surface +temperature :math:`T_{sfc}` is computed internally. The +numerical constraint on the thermodynamics time step is associated with +the transport scheme rather than the thermodynamic solver. + +~~~~~~~~~~~~ +Model output +~~~~~~~~~~~~ + +.. _history: + +************* +History files +************* + +Model output data is averaged over the period(s) given by `histfreq` and +`histfreq\_n`, and written to binary or  files prepended by `history\_file` +in **ice\_in**. That is, if `history\_file` = ‘iceh’ then the filenames +will have the form **iceh.[timeID].nc** or **iceh.[timeID].da**, +depending on the output file format chosen in **comp\_ice** (set +`IO\_TYPE`). The  history files are CF-compliant; header information for +data contained in the  files is displayed with the command `ncdump -h +filename.nc`. Parallel  output is available using the PIO library; the +attribute `io\_flavor` distinguishes output files written with PIO from +those written with standard netCDF. With binary files, a separate header +file is written with equivalent information. Standard fields are output +according to settings in the **icefields\_nml** namelist in **ice\_in**. +The user may add (or subtract) variables not already available in the +namelist by following the instructions in section :ref:`addhist`. + +With this release, the history module has been divided into several +modules based on the desired formatting and on the variables +themselves. Parameters, variables and routines needed by multiple +modules is in **ice\_history\_shared.F90**, while the primary routines +for initializing and accumulating all of the history variables are in +**ice\_history.F90**. These routines call format-specific code in the +**io\_binary**, **io\_netcdf** and **io\_pio** directories. History +variables specific to certain components or parameterizations are +collected in their own history modules (**ice\_history\_bgc.F90**, +**ice\_history\_drag.F90**, **ice\_history\_mechred.F90**, +**ice\_history\_pond.F90**). + +The history modules allow output at different frequencies. Five output +frequencies (1, `h`, `d`, `m`, `y`) are available simultaneously during a run. +The same variable can be output at different frequencies (say daily and +monthly) via its namelist flag, `f\_` :math:`\left<{var}\right>`, which +is now a character string corresponding to `histfreq` or ‘x’ for none. 
+(Grid variable flags are still logicals, since they are written to all +files, no matter what the frequency is.) If there are no namelist flags +with a given `histfreq` value, or if an element of `histfreq\_n` is 0, then +no file will be written at that frequency. The output period can be +discerned from the filenames. + +For example, in namelist: + +:: + + `histfreq` = ’1’, ’h’, ’d’, ’m’, ’y’ + `histfreq\_n` = 1, 6, 0, 1, 1 + `f\_hi` = ’1’ + `f\_hs` = ’h’ + `f\_Tsfc` = ’d’ + `f\_aice` = ’m’ + `f\_meltb` = ’mh’ + `f\_iage` = ’x’ + +Here, `hi` will be written to a file on every timestep, `hs` will be +written once every 6 hours, `aice` once a month, `meltb` once a month AND +once every 6 hours, and `Tsfc` and `iage` will not be written. + +From an efficiency standpoint, it is best to set unused frequencies in +`histfreq` to ‘x’. Having output at all 5 frequencies takes nearly 5 times +as long as for a single frequency. If you only want monthly output, the +most efficient setting is `histfreq` = ’m’,’x’,’x’,’x’,’x’. The code counts +the number of desired streams (`nstreams`) based on `histfreq`. + +The history variable names must be unique for netcdf, so in cases where +a variable is written at more than one frequency, the variable name is +appended with the frequency in files after the first one. In the example +above, `meltb` is called `meltb` in the monthly file (for backward +compatibility with the default configuration) and `meltb\_h` in the +6-hourly file. + +Using the same frequency twice in `histfreq` will have unexpected +consequences and currently will cause the code to abort. It is not +possible at the moment to output averages once a month and also once +every 3 months, for example. + +If `write\_ic` is set to true in **ice\_in**, a snapshot of the same set +of history fields at the start of the run will be written to the history +directory in **iceh\_ic.[timeID].nc(da)**. Several history variables are +hard-coded for instantaneous output regardless of the averaging flag, at +the frequency given by their namelist flag. + +The normalized principal components of internal ice stress are computed +in *principal\_stress* and written to the history file. This calculation +is not necessary for the simulation; principal stresses are merely +computed for diagnostic purposes and included here for the user’s +convenience. + +Several history variables are available in two forms, a value +representing an average over the sea ice fraction of the grid cell, and +another that is multiplied by :math:`a_i`, representing an average over +the grid cell area. Our naming convention attaches the suffix “\_ai" to +the grid-cell-mean variable names. + +**************** +Diagnostic files +**************** + +Like `histfreq`, the parameter `diagfreq` can be used to regulate how often +output is written to a log file. The log file unit to which diagnostic +output is written is set in **ice\_fileunits.F90**. If `diag\_type` = +‘stdout’, then it is written to standard out (or to **ice.log.[ID]** if +you redirect standard out as in **run\_ice**); otherwise it is written +to the file given by `diag\_file`. In addition to the standard diagnostic +output (maximum area-averaged thickness, velocity, average albedo, total +ice area, and total ice and snow volumes), the namelist options +`print\_points` and `print\_global` cause additional diagnostic information +to be computed and written. `print\_global` outputs global sums that are +useful for checking global conservation of mass and energy. 
+`print\_points` writes data for two specific grid points. Currently, one +point is near the North Pole and the other is in the Weddell Sea; these +may be changed in **ice\_in**. + +Timers are declared and initialized in **ice\_timers.F90**, and the code +to be timed is wrapped with calls to *ice\_timer\_start* and +*ice\_timer\_stop*. Finally, *ice\_timer\_print* writes the results to +the log file. The optional “stats" argument (true/false) prints +additional statistics. Calling *ice\_timer\_print\_all* prints all of +the timings at once, rather than having to call each individually. +Currently, the timers are set up as in :ref:`timers`. +Section :ref:`addtimer` contains instructions for adding timers. + +The timings provided by these timers are not mutually exclusive. For +example, the column timer (5) includes the timings from 6–10, and +subroutine *bound* (timer 15) is called from many different places in +the code, including the dynamics and advection routines. + +The timers use *MPI\_WTIME* for parallel runs and the F90 intrinsic +*system\_clock* for single-processor runs. + +:ref:`timers` : *CICE timers* + +.. _timers: + +.. table:: Table 5 + + +--------------+-------------+----------------------------------------------------+ + | **Timer** | | | + +--------------+-------------+----------------------------------------------------+ + | **Index** | **Label** | | + +--------------+-------------+----------------------------------------------------+ + | 1 | Total | the entire run | + +--------------+-------------+----------------------------------------------------+ + | 2 | Step | total minus initialization and exit | + +--------------+-------------+----------------------------------------------------+ + | 3 | Dynamics | EVP | + +--------------+-------------+----------------------------------------------------+ + | 4 | Advection | horizontal transport | + +--------------+-------------+----------------------------------------------------+ + | 5 | Column | all vertical (column) processes | + +--------------+-------------+----------------------------------------------------+ + | 6 | Thermo | vertical thermodynamics | + +--------------+-------------+----------------------------------------------------+ + | 7 | Shortwave | SW radiation and albedo | + +--------------+-------------+----------------------------------------------------+ + | 8 | Meltponds | melt ponds | + +--------------+-------------+----------------------------------------------------+ + | 9 | Ridging | mechanical redistribution | + +--------------+-------------+----------------------------------------------------+ + | 10 | Cat Conv | transport in thickness space | + +--------------+-------------+----------------------------------------------------+ + | 11 | Coupling | sending/receiving coupler messages | + +--------------+-------------+----------------------------------------------------+ + | 12 | ReadWrite | reading/writing files | + +--------------+-------------+----------------------------------------------------+ + | 13 | Diags | diagnostics (log file) | + +--------------+-------------+----------------------------------------------------+ + | 14 | History | history output | + +--------------+-------------+----------------------------------------------------+ + | 15 | Bound | boundary conditions and subdomain communications | + +--------------+-------------+----------------------------------------------------+ + | 16 | BGC | biogeochemistry | + 
+--------------+-------------+----------------------------------------------------+
+
+*************
+Restart files
+*************
+
+CICE now provides restart data in binary unformatted or netCDF formats, via
+the `IO\_TYPE` flag in **comp\_ice** and namelist variable
+`restart\_format`. Restart and history files must use the same format. As
+with the history output, there is also an option for writing parallel
+restart files using PIO.
+
+The restart files created by CICE contain all of the variables needed
+for a full, exact restart. The filename begins with the character string
+‘iced.’, and the restart dump frequency is given by the namelist
+variables `dumpfreq` and `dumpfreq\_n`. The pointer to the filename from
+which the restart data is to be read for a continuation run is set in
+`pointer\_file`. The code assumes that auxiliary binary tracer restart
+files will be identified using the same pointer and file name prefix,
+but with an additional character string in the file name that is
+associated with each tracer set. All variables are included in netCDF restart
+files.
+
+Additional namelist flags provide further control of restart behavior.
+`dump\_last` = true causes a set of restart files to be written at the end
+of a run when it is otherwise not scheduled to occur. The flag
+`use\_restart\_time` enables the user to choose to use the model date
+provided in the restart files. If `use\_restart\_time` = false then the
+initial model date stamp is determined from the namelist parameters.
+lcdf64 = true sets 64-bit netCDF output, allowing larger file sizes with
+netCDF version 3.
+
+Routines for gathering, scattering and (unformatted) reading and writing
+of the “extended" global grid, including the physical domain and ghost
+(halo) cells around the outer edges, allow exact restarts on regional
+grids with open boundary conditions, and they will also simplify
+restarts on the various tripole grids. They are accessed by setting
+`restart\_ext` = true in namelist. Extended grid restarts are not
+available when using PIO; in this case extra halo update calls fill
+ghost cells for tripole grids (do not use PIO for regional grids).
+
+Two restart files are included with the CICE v5 code distribution, for
+the gx3 and gx1 grids. They were created using the default model
+configuration (settings as in **comp\_ice** and **ice\_in**), but
+initialized with no ice. The gx3 case was run for 1 year using the 1997
+forcing data provided with the code. The gx1 case was run for 20 years,
+so that the date of restart in the file is 1978-01-01. Note that the
+restart dates provided in the restart files can be overridden using the
+namelist variables `use\_restart\_time`, `year\_init` and `istep0`. The
+forcing time can also be overridden using `fyear\_init`.
+
+Several changes in CICE v5 have made restarting from v4.1 restart files
+difficult. First, the ice and snow enthalpy state variables are now
+carried as tracers instead of separate arrays, and salinity has been
+added as a necessary restart field. Second, the default number of ice
+layers has been increased from 4 to 7. Third, netCDF format is now used
+for all I/O; it is no longer possible to have history output as netCDF and
+restart output in binary format. However, some facilities are included
+with CICE v5 for converting v4.1 restart files to the new file structure
+and format, provided that the same number of ice layers and basic
+physics packages will be used for the new runs. See Section
+:ref:`restarttrouble` for details.
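+
+For illustration only, a continuation run that writes yearly netCDF
+restart files and reads its starting point from the pointer file might
+combine the flags described above in the *setup\_nml* group of
+**ice\_in** roughly as follows (the values and paths shown here are
+placeholders, not the distributed defaults):
+
+::
+
+   &setup_nml
+     runtype          = 'continue'
+     restart          = .true.
+     use_restart_time = .true.
+     restart_format   = 'nc'
+     lcdf64           = .true.
+     restart_dir      = './restart/'
+     restart_file     = 'iced'
+     pointer_file     = './restart/ice.restart_file'
+     dumpfreq         = 'y'
+     dumpfreq_n       = 1
+     dump_last        = .true.
+   /
+
+Setting `dumpfreq` = ‘d’ and `dumpfreq\_n` = 5 would instead write a set
+of restart files every five days.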
\ No newline at end of file diff --git a/doc/source/cice_3_user_guide.rst b/doc/source/user_guide/ug_performance.rst similarity index 98% rename from doc/source/cice_3_user_guide.rst rename to doc/source/user_guide/ug_performance.rst index 70f843942..df76dbfee 100644 --- a/doc/source/cice_3_user_guide.rst +++ b/doc/source/user_guide/ug_performance.rst @@ -413,7 +413,6 @@ on model performance. .. _fig-grid: .. figure:: ./figures/grid.png - :align: center :scale: 20% Figure 8 @@ -1261,7 +1260,6 @@ communications. .. _fig-distrb: .. figure:: ./figures/distrb.png - :align: center :scale: 50% Figure 9 @@ -1323,7 +1321,6 @@ for the distribution types. .. _fig-distribscorecard: .. figure:: ./figures/scorecard.png - :align: center :scale: 20% Figure 10 @@ -1367,7 +1364,6 @@ calculate it for each thickness category. .. _fig-timings: .. figure:: ./figures/histograms.png - :align: center :scale: 20% Figure 11 @@ -1815,14 +1811,14 @@ Individual tests and test suites The CICE scripts support both setup of individual tests as well as test suites. Individual tests are run from the command line like - > create.case -t smoke -m wolf -g gx3 -p 8x2 -s diag1,run5day -testid myid + > cice.setup -t smoke -m wolf -g gx3 -p 8x2 -s diag1,run5day -testid myid where -m designates a specific machine. Test suites are multiple tests that are specified in an input file and are started on the command line like - > create.case -ts base_suite -m wolf -testid myid + > cice.setup -ts base_suite -m wolf -testid myid -create.case with -t or -ts require a testid to uniquely name test directories. The format +cice.setup with -t or -ts require a testid to uniquely name test directories. The format of the case directory name for a test will always be ${machine}_${test}_${grid}_${pes}_${soptions}.${testid} @@ -1835,7 +1831,7 @@ To build and run a test, the process is the same as a case, The test results will be generated in a local file called "test_output". -When running a test suite, the create.case command line automatically generates all the tests +When running a test suite, the cice.setup command line automatically generates all the tests under a directory names ${test_suite}.${testid}. It then automatically builds and submits all tests. When the tests are complete, run the results.csh script to see the results from all the tests. @@ -1850,7 +1846,7 @@ Tests are defined under configuration/scripts/tests. The tests currently suppor The test passes if both the 10 day and 5 day restart run complete and if the restart files at day 10 from both runs are bit-for-bit identical. -Please run './create.case -h' for additional details. +Please run './cice.setup -h' for additional details. .. _additional: @@ -1858,7 +1854,7 @@ Please run './create.case -h' for additional details. Additional testing options ~~~~~~~~~~~~~~~~~~~~~~~~~~ -There are several additional options on the create.case command line for testing that +There are several additional options on the cice.setup command line for testing that provide the ability to regression test and compare tests to each other. -bd defines a baseline directory where tests can be stored for regression testing @@ -1870,23 +1866,23 @@ provide the ability to regression test and compare tests to each other. 
-td provides a way to compare tests with each other To use -bg, - > create.case -ts base_suite -m wolf -testid v1 -bg version1 -bd $SCRATCH/CICE_BASELINES + > cice.setup -ts base_suite -m wolf -testid v1 -bg version1 -bd $SCRATCH/CICE_BASELINES will copy all the results from the test suite to $SCRATCH/CICE_BASELINES/version1. To use -bc, - > create.case -ts base_suite -m wolf -testid v2 -bc version1 -bd $SCRATCH/CICE_BASELINES + > cice.setup -ts base_suite -m wolf -testid v2 -bc version1 -bd $SCRATCH/CICE_BASELINES will compare all the results from this test suite to results saved before in $SCRATCH/CICE_BASELINES/version1. -bc and -bg can be combined, - >create.case -ts base_suite -m wolf -testid v2 -bg version2 -bc version1 -bd $SCRATCH/CICE_BASELINES + >cice.setup -ts base_suite -m wolf -testid v2 -bg version2 -bc version1 -bd $SCRATCH/CICE_BASELINES will save the current results to $SCRATCH/CICE_BASELINES/version2 and compare the current results to results save before in $SCRATCH/CICE_BASELINES/version1. -bg, -bc, and -bd are used for regression testing. There is a default -bd on each machine. -td allows a user to compare one test result to another. For instance, - > create.case -t smoke -m wolf -g gx3 -p 8x2 -s run5day -testid t01 - > create.case -t smoke -m wolf -g gx3 -p 4x2 -s run5day -testid t01 -td smoke_gx3_8x2_run5day + > cice.setup -t smoke -m wolf -g gx3 -p 8x2 -s run5day -testid t01 + > cice.setup -t smoke -m wolf -g gx3 -p 4x2 -s run5day -testid t01 -td smoke_gx3_8x2_run5day An additional check will be done for the second test (because of the -td argument), and it will compare the output from the first test "smoke_gx3_8x2_run5day" to the output from it's test "smoke_gx3_4x2_run5day" @@ -1922,7 +1918,7 @@ columns like, The first column is the test name, the second the grid, the third the pe count, the fourth column is the -s options and the fifth column is the -td argument. The fourth and fifth columns are optional. -The argument to -ts defines which filename to choose and that argument can contain a path. create.case +The argument to -ts defines which filename to choose and that argument can contain a path. cice.setup will also look for the filename in configuration/scripts/tests where some preset test suites are defined. 
~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1933,7 +1929,7 @@ Example Tests (Quickstart) To generate a baseline dataset for a test case ********************************************** -./create.case -t smoke -m wolf -bg cicev6.0.0 -testid t00 +./cice.setup -t smoke -m wolf -bg cicev6.0.0 -testid t00 cd wolf_smoke_gx3_4x1.t00 @@ -1949,7 +1945,7 @@ cat test_output To run a test case and compare to a baseline dataset **************************************************** -./create.case -t smoke -m wolf -bc cicev6.0.0 -testid t01 +./cice.setup -t smoke -m wolf -bc cicev6.0.0 -testid t01 cd wolf_smoke_gx3_4x1.t01 @@ -1965,7 +1961,7 @@ cat test_output To run a test suite to generate baseline data ********************************************* -./create.case -m wolf -ts base_suite -testid t02 -bg cicev6.0.0bs +./cice.setup -m wolf -ts base_suite -testid t02 -bg cicev6.0.0bs cd base_suite.t02 @@ -1983,7 +1979,7 @@ ls \*.png To run a test suite to compare to baseline data *********************************************** -./create.case -m wolf -ts base_suite -testid t03 -bc cicev6.0.0bs +./cice.setup -m wolf -ts base_suite -testid t03 -bc cicev6.0.0bs cd base_suite.t03 @@ -2002,7 +1998,7 @@ To compare to another test ************************** `First:` -./create.case -m wolf -t smoke -testid t01 -p 8x2 +./cice.setup -m wolf -t smoke -testid t01 -p 8x2 cd wolf_smoke_gx3_8x2.t01 @@ -2016,7 +2012,7 @@ cat test_output `Then, do the comparison:` -./create.case -m wolf -t smoke -testid t01 -td smoke_gx3_8x2 -s thread -p 4x1 +./cice.setup -m wolf -t smoke -testid t01 -td smoke_gx3_8x2 -s thread -p 4x1 cd wolf_smoke_gx3_4x1_thread.t01 @@ -2044,7 +2040,7 @@ Additional Details use '-bg '. The scripts will then place the baseline dataset in $ICE_MACHINE_BASELINE// - The '-testid' flag allows users to specify a testing id that will be added to the - end of the case directory. For example, "./create.case -m wolf -t smoke -testid t12 -p 4x1" + end of the case directory. For example, "./cice.setup -m wolf -t smoke -testid t12 -p 4x1" creates the directory wolf_smoke_gx3_4x1.t12. This flag is REQUIRED if using -t or -ts. .. _compliance: @@ -2167,7 +2163,6 @@ autocorrelation :math:`r_1`. .. csv-table:: Table 1 :widths: 10, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5 - :align: right :math:`r_1`,-0.05,0.0,0.2,0.4,0.5,0.6,0.7,0.8,0.9,0.95,0.97,0.99 :math:`t_{crit}`,1.32,1.32,1.54,2.02,2.29,2.46,3.17,3.99,5.59,8.44,10.85,20.44 @@ -2276,7 +2271,7 @@ In order to run the script, the following requirements must be met: * basemap Python package (optional) In order to generate the files necessary for the compliance test, test cases should be -created with the ``qc`` option (i.e., ``-s qc``) when running create.case. This +created with the ``qc`` option (i.e., ``-s qc``) when running cice.setup. This option results in daily, non-averaged history files being written for a 5 year simulation. To install the necessary Python packages, the ``pip`` Python utility can be used. @@ -2335,9 +2330,9 @@ The run.suite script does the following: - Creates a fresh clone of the CICE-Consortium repository - ``cd`` to cloned repo -- run ``create.case`` to generate the base_suite directories. The output +- run ``cice.setup`` to generate the base_suite directories. The output is piped to ``log.suite`` -- Running ``create.case`` submits each individual job to the queue. +- Running ``cice.setup`` submits each individual job to the queue. - ``run.suite`` monitors the queue manager to determine when all jobs have finished (pings the queue manager once every 5 minutes). 
- Once all jobs complete, cd to base_suite directory and run ``./results.csh`` @@ -2350,10 +2345,10 @@ Manual Method To manually run the CICE tests and post the results to the CICE CDash dashboard, users essentially just need to perform all steps available in run.suite, detailed below: -- Pass the ``-report`` flag to create.case when running the ``base_suite`` test suite. +- Pass the ``-report`` flag to cice.setup when running the ``base_suite`` test suite. The ``-report`` flag copies the required CTest / CDash scripts to the suite directory. -- ``create.case`` compiles the CICE code, and submits all of the jobs to the +- ``cice.setup`` compiles the CICE code, and submits all of the jobs to the queue manager. - After every job has been submitted and completed, ``cd`` to the suite directory. - Parse the results, by running ``./results.csh``. @@ -2376,12 +2371,12 @@ in non-bit-for-bit results: .. code-block:: bash # Create a baseline dataset (only necessary if no baseline exists on the system) - ./create.case -m onyx -ts base_suite -testid base0 -bg cicev6.0.0 -a + ./cice.setup -m onyx -ts base_suite -testid base0 -bg cicev6.0.0 -a # Check out the updated code, or clone from a pull request # Run the test with the new code - ./create.case -m onyx -ts base_suite -testid test0 -bc cicev6.0.0 -a + ./cice.setup -m onyx -ts base_suite -testid test0 -bc cicev6.0.0 -a # Check the results cd base_suite.test0 @@ -2389,7 +2384,7 @@ in non-bit-for-bit results: #### If the BFB tests fail, perform the compliance testing #### # Create a QC baseline - ./create.case -m onyx -t smoke -g gx1 -p 44x1 -testid qc_base -s qc,medium -a + ./cice.setup -m onyx -t smoke -g gx1 -p 44x1 -testid qc_base -s qc,medium -a cd onyx_smoke_gx1_44x1_medium_qc.qc_base ./cice.build ./cice.submit @@ -2397,7 +2392,7 @@ in non-bit-for-bit results: # Check out the updated code or clone from a pull request # Create the t-test testing data - ./create.case -m onyx -t smoke -g gx1 -p 44x1 -testid qc_test -s qc,medium -a + ./cice.setup -m onyx -t smoke -g gx1 -p 44x1 -testid qc_test -s qc,medium -a cd onyx_smoke_gx1_44x1_medium_qc.qc_test ./cice.build ./cice.submit diff --git a/doc/source/user_guide/ug_running.rst b/doc/source/user_guide/ug_running.rst new file mode 100644 index 000000000..08dc141b2 --- /dev/null +++ b/doc/source/user_guide/ug_running.rst @@ -0,0 +1,167 @@ +:tocdepth: 3 + +.. _running: + +Execution procedures +==================== + +To compile and execute the code: in the source directory, + +#. Download the forcing data used for testing from the CICE-Consortium github page, + https://github.com/CICE-Consortium . + +#. Create **Macros.\*** and **run\_ice.\*** files for your particular + platform, if they do not already exist (type ‘uname -s’ at the prompt + to get :math:`\langle`\ OS\ :math:`\rangle`). + +#. Alter directories in the script **comp\_ice**. + +#. Run **comp\_ice** to set up the run directory and make the executable + ‘**cice**’. + +#. | To clean the compile directory and start fresh, simply execute + ‘/bin/rm -rf compile’ from the run directory. + +In the run directory, + +#. Alter `atm\_data\_dir` and `ocn\_data\_dir` in the namelist file + **ice\_in**. + +#. Alter the script **run\_ice** for your system. + +#. Execute **run\_ice**. + +If this fails, see Section :ref:`setup`. + +This procedure creates the output log file **ice.log.[ID]**, and if +`npt` is long enough compared with `dumpfreq` and `histfreq`, dump files +**iced.[timeID]** and   (or binary) history output files +**iceh\_[timeID].nc (.da)**. 
Using the :math:`\left<3^\circ\right>` +grid, the log file should be similar to +**ice.log.\ :math:`\langle`\ OS\ :math:`\rangle`**, provided for the +user’s convenience. These log files were created using MPI on 4 +processors on the :math:`\left<3^\circ\right>` grid. + +Several options are available in **comp\_ice** for configuring the run, +shown in :ref:`comp-ice`. If `NTASK` = 1, then the **serial/** +code is used, otherwise the code in **mpi/** is used. Loops over blocks +have been threaded throughout the code, so that their work will be +divided among `OMP\_NUM\_THREADS` if `THRD` is ‘yes.’ Note that the value of +`NTASK` in **comp\_ice** must equal the value of `nprocs` in **ice\_in**. +Generally the value of `MXBLCKS` computed by **comp\_ice** is sufficient, +but sometimes it will need to be set explicitly, as discussed in +Section :ref:`performance`. To conserve memory, match the tracer requests +in **comp\_ice** with those in **ice\_in**. CESM uses 3 aerosol tracers; +the number given in **comp\_ice** must be less than or equal to the +maximum allowed in **ice\_domain\_size.F90**. + +The scripts define a number of environment variables, mostly as +directories that you will need to edit for your own environment. +`$SYSTEM\_USERDIR`, which on machines at Oak Ridge National Laboratory +points automatically to scratch space, is intended to be a disk where +the run directory resides. `SHRDIR` is a path to the CESM shared code. + +:ref:`comp-ice` : Configuration options available in **comp_ice**. + +.. _comp-ice: + +.. table:: Table 6 + + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + | variable | options | description | + +=====================+======================================+====================================================================================+ + |RES | col, gx3, gx1 | grid resolution | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |NTASK | (integer) | total number of processors | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |BLCKX | (integer) | number of grid cells on each block in the x-direction :math:`^\dagger` | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |BLCKY | (integer) | number of grid cells on each block in the y-direction :math:`^\dagger` | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |MXBLCKS | (integer) | maximum number of blocks per processor | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |NICELYR | (integer) | number of vertical layers in the ice | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |NSNWLYR | (integer) | number of vertical layers in the snow | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |NICECAT | (integer) | number of ice thickness categories | + 
+---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |TRAGE | 0 or 1 | set to 1 for ice age tracer | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |TRFY | 0 or 1 | set to 1 for first-year ice age tracer | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |TRLVL | 0 or 1 | set to 1 for level and deformed ice tracers | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |TRPND | 0 or 1 | set to 1 for melt pond tracers | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |NTRAERO | 0 or 1 | number of aerosol tracers | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |TRBRINE | set to 1 for brine height tracer | | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |NBGCLYR | (integer) | number of vertical layers for biogeochemical transport | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |IO_TYPE | none/netcdf/pio | use ‘none’ if  library is unavailable,‘pio’ for PIO | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |DITTO | yes/no | for reproducible diagnostics | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |BARRIERS | yes/no | flushes MPI buffers during global scatters and gathers | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |THRD | yes/no | set to yes for OpenMP threaded parallelism | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |OMP_NUM_THREADS | (integer) | the number of OpenMP threads requested | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |NUMIN | (integer) | smallest unit number assigned to CICE files | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + |NUMAX | (integer) | largest unit number assigned to CICE files | + +---------------------+--------------------------------------+------------------------------------------------------------------------------------+ + +The ‘reproducible’ option (`DITTO`) makes diagnostics bit-for-bit when +varying the number of processors. (The simulation results are +bit-for-bit regardless, because they do not require global sums or +max/mins as do the diagnostics.) 
This was done mainly by increasing the
+precision for the global reduction calculations, except for regular
+double-precision (r8) calculations involving MPI; MPI can not handle
+MPI\_REAL16 on some architectures. Instead, these cases perform sums or
+max/min calculations across the global block structure, so that the
+results are bit-for-bit as long as the block distribution is the same
+(the number of processors can be different).
+
+A more flexible option is available for double-precision MPI
+calculations, using the namelist variable `bfbflag`. When true, this flag
+produces bit-for-bit identical diagnostics with different tasks,
+threads, blocks and grid decompositions.
+
+CICE namelist variables available for changes after compile time appear
+in **ice.log.\*** with values read from the file **ice\_in**; their
+definitions are given in Section :ref:`index`. For example, to run for a
+different length of time, say three days, set `npt` = 72 in **ice\_in**.
+At present, the user supplies the time step `dt`, the number of
+dynamics/advection/ridging subcycles `ndtd`, and for classic EVP, the
+number of EVP subcycles `ndte`; `dte` is then calculated in subroutine
+*init\_evp*. The primary reason for doing it this way is to ensure that
+`ndte` is an integer. (This is done differently for `revised\_evp` = true;
+see Section :ref:`dynam`).
+
+To restart from a previous run, set restart = true in **ice\_in**. There
+are two ways of restarting from a given file. The restart pointer file
+**ice.restart\_file** (created by the previous run) contains the name of
+the last written data file (**iced.[timeID]**). Alternatively, a
+filename can be assigned to ice\_ic in **ice\_in**. Consult
+Section :ref:`init` for more details. Restarts are exact for MPI or
+single processor runs.
+
+~~~~~~~
+Scripts
+~~~~~~~
+
+~~~~~~~~~~~
+Directories
+~~~~~~~~~~~
+
+~~~~~~~~~~~~~~~~~~~
+Local modifications
+~~~~~~~~~~~~~~~~~~~
+
+~~~~~~~~~~~~
+Forcing data
+~~~~~~~~~~~~
diff --git a/doc/source/user_guide/ug_testing.rst b/doc/source/user_guide/ug_testing.rst
new file mode 100644
index 000000000..83fecf391
--- /dev/null
+++ b/doc/source/user_guide/ug_testing.rst
@@ -0,0 +1,899 @@
+:tocdepth: 3
+
+Testing CICE
+============
+
+Version 6, August 2017
+This document describes how to use the testing features developed for the
+CICE Consortium CICE sea ice model.
+
+.. _basic:
+
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Individual tests and test suites
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The CICE scripts support setup of both individual tests and test suites. Individual
+tests are run from the command line like
+
+  > cice.setup -t smoke -m wolf -g gx3 -p 8x2 -s diag1,run5day -testid myid
+
+where -m designates a specific machine. Test suites are multiple tests that are specified in
+an input file and are started on the command line like
+
+  > cice.setup -ts base_suite -m wolf -testid myid
+
+cice.setup with -t or -ts requires a testid to uniquely name test directories. The format
+of the case directory name for a test will always be
+${machine}_${test}_${grid}_${pes}_${soptions}.${testid}
+
+To build and run a test, the process is the same as a case,
+  cd into the test directory,
+
+  run cice.build
+
+  run cice.submit
+
+The test results will be generated in a local file called "test_output".
+
+When running a test suite, the cice.setup command line automatically generates all the tests
+under a directory named ${test_suite}.${testid}. It then automatically builds and submits all
+tests.
When the tests are complete, run the results.csh script to see the results from all the
+tests.
+
+Tests are defined under configuration/scripts/tests. The tests currently supported are:
+  smoke   - Runs the model for default length. The length and options can
+            be set with the -s command line option. The test passes if the
+            model completes successfully.
+  restart - Runs the model for 10 days, writing a restart file at day 5 and
+            again at day 10. Runs the model a second time starting from the
+            day 5 restart and writing a restart at day 10 of the model run.
+            The test passes if both the 10 day and 5 day restart run complete and
+            if the restart files at day 10 from both runs are bit-for-bit identical.
+
+Please run './cice.setup -h' for additional details.
+
+.. _additional:
+
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+Additional testing options
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+There are several additional options on the cice.setup command line for testing that
+provide the ability to regression test and compare tests to each other.
+
+  -bd defines a baseline directory where tests can be stored for regression testing
+
+  -bg defines a version name where the current tests can be saved for regression testing
+
+  -bc defines a version name that the current tests should be compared to for regression testing
+
+  -td provides a way to compare tests with each other
+
+To use -bg,
+  > cice.setup -ts base_suite -m wolf -testid v1 -bg version1 -bd $SCRATCH/CICE_BASELINES
+  will copy all the results from the test suite to $SCRATCH/CICE_BASELINES/version1.
+
+To use -bc,
+  > cice.setup -ts base_suite -m wolf -testid v2 -bc version1 -bd $SCRATCH/CICE_BASELINES
+  will compare all the results from this test suite to results saved before in $SCRATCH/CICE_BASELINES/version1.
+
+-bc and -bg can be combined,
+  > cice.setup -ts base_suite -m wolf -testid v2 -bg version2 -bc version1 -bd $SCRATCH/CICE_BASELINES
+  will save the current results to $SCRATCH/CICE_BASELINES/version2 and compare the current results to
+  results saved before in $SCRATCH/CICE_BASELINES/version1.
+
+-bg, -bc, and -bd are used for regression testing. There is a default -bd on each machine.
+
+-td allows a user to compare one test result to another. For instance,
+  > cice.setup -t smoke -m wolf -g gx3 -p 8x2 -s run5day -testid t01
+  > cice.setup -t smoke -m wolf -g gx3 -p 4x2 -s run5day -testid t01 -td smoke_gx3_8x2_run5day
+
+An additional check will be done for the second test (because of the -td argument), and it will compare
+the output from the first test "smoke_gx3_8x2_run5day" to the output from its test "smoke_gx3_4x2_run5day"
+and generate a result for that. It's important that the first test complete before the second test is
+done. Also, the -td option works only if the testid and the machine are the same for the baseline
+run and the current run.
+
+.. _format:
+
+~~~~~~~~~~~~~~~~~
+Test suite format
+~~~~~~~~~~~~~~~~~
+
+The format for the test suite file is relatively simple. It is a text file with white space delimited
+columns like,
+
+.. _tab-test:
+
+..
csv-table:: Table 7 + :header: "#Test", "Grid", "PEs", "Sets", "BFB-compare" + :widths: 7, 7, 7, 15, 15 + + "smoke", "gx3", "8x2", "diag1,run5day", "" + "smoke", "gx3", "8x2", "diag24,run1year,medium", "" + "smoke", "gx3", "4x1", "debug,diag1,run5day", "" + "smoke", "gx3", "8x2", "debug,diag1,run5day", "" + "smoke", "gx3", "4x2", "diag1,run5day", "smoke_gx3_8x2_diag1_run5day" + "smoke", "gx3", "4x1", "diag1,run5day,thread", "smoke_gx3_8x2_diag1_run5day" + "smoke", "gx3", "4x1", "diag1,run5day", "smoke_gx3_4x1_diag1_run5day_thread" + "restart", "gx3", "8x1", "", "" + "restart", "gx3", "4x2", "debug", "" + + +The first column is the test name, the second the grid, the third the pe count, the fourth column is +the -s options and the fifth column is the -td argument. The fourth and fifth columns are optional. +The argument to -ts defines which filename to choose and that argument can contain a path. cice.setup +will also look for the filename in configuration/scripts/tests where some preset test suites are defined. + +~~~~~~~~~~~~~~~~~~~~~~~~~~ +Example Tests (Quickstart) +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +********************************************** +To generate a baseline dataset for a test case +********************************************** + +./cice.setup -t smoke -m wolf -bg cicev6.0.0 -testid t00 + +cd wolf_smoke_gx3_4x1.t00 + +./cice.build + +./cice.submit + +# After job finishes, check output + +cat test_output + +**************************************************** +To run a test case and compare to a baseline dataset +**************************************************** + +./cice.setup -t smoke -m wolf -bc cicev6.0.0 -testid t01 + +cd wolf_smoke_gx3_4x1.t01 + +./cice.build + +./cice.submit + +# After job finishes, check output + +cat test_output + +********************************************* +To run a test suite to generate baseline data +********************************************* + +./cice.setup -m wolf -ts base_suite -testid t02 -bg cicev6.0.0bs + +cd base_suite.t02 + +# Once all jobs finish, concatenate all output + +./results.csh # All tests results will be stored in results.log + +# To plot a timeseries of "total ice extent", "total ice area", and "total ice volume" + +./timeseries.csh + +ls \*.png + +*********************************************** +To run a test suite to compare to baseline data +*********************************************** + +./cice.setup -m wolf -ts base_suite -testid t03 -bc cicev6.0.0bs + +cd base_suite.t03 + +# Once all jobs finish, concatenate all output + +./results.csh # All tests results will be stored in results.log + +# To plot a timeseries of "total ice extent", "total ice area", and "total ice volume" + +./timeseries.csh + +ls \*.png + +************************** +To compare to another test +************************** +`First:` + +./cice.setup -m wolf -t smoke -testid t01 -p 8x2 + +cd wolf_smoke_gx3_8x2.t01 + +./cice.build + +./cice.submit + +# After job finishes, check output + +cat test_output + +`Then, do the comparison:` + +./cice.setup -m wolf -t smoke -testid t01 -td smoke_gx3_8x2 -s thread -p 4x1 + +cd wolf_smoke_gx3_4x1_thread.t01 + +./cice.build + +./cice.submit + +# After job finishes, check output + +cat test_output + +****************** +Additional Details +****************** + +- In general, the baseline generation, baseline compare, and test diff are independent. +- Use the '-bd' flag to specify the location where you want the baseline dataset + to be written. 
Without specifying '-bd', the baseline dataset will be written
+  to the default baseline directory found in the env. file (ICE_MACHINE_BASELINE).
+- If '-bd' is not passed, the scripts will look for baseline datasets in the default
+  baseline directory found in the env. file (ICE_MACHINE_BASELINE).
+  If the '-bd' option is passed, the scripts will look for baseline datasets in the
+  location passed to the -bd argument.
+- To generate a baseline dataset for a specific version (for regression testing),
+  use '-bg <version_name>'. The scripts will then place the baseline dataset
+  in $ICE_MACHINE_BASELINE/<version_name>/
+- The '-testid' flag allows users to specify a testing id that will be added to the
+  end of the case directory. For example, "./cice.setup -m wolf -t smoke -testid t12 -p 4x1"
+  creates the directory wolf_smoke_gx3_4x1.t12. This flag is REQUIRED if using -t or -ts.
+
+.. _compliance:
+
+~~~~~~~~~~~~~~~~~~~~
+Code Compliance Test
+~~~~~~~~~~~~~~~~~~~~
+
+A core tenet of CICE dycore and Icepack innovations is that they must not change
+the physics and biogeochemistry of existing model configurations, notwithstanding
+obsolete model components. Therefore, alterations to existing CICE Consortium code
+must only fix demonstrable numerical or scientific inaccuracies or bugs, or be
+necessary to introduce new science into the code. New physics and biogeochemistry
+introduced into the model must not change model answers when switched off, and in
+that case CICEcore and Icepack must reproduce answers bit-for-bit as compared to
+previous simulations with the same namelist configurations. This bit-for-bit
+requirement is common in Earth System Modeling projects, but often cannot be achieved
+in practice because model additions may require changes to existing code. In this
+circumstance, bit-for-bit reproducibility using one compiler may not be achievable
+on a different computing platform with a different compiler. Therefore, tools for
+scientific testing of CICE code changes have been developed to accompany bit-for-bit
+testing. These tools exploit the statistical properties of simulated sea ice thickness
+to confirm or deny the null hypothesis, which is that new additions to the CICE dycore
+and Icepack have not significantly altered simulated ice volume using previous model
+configurations. Here we describe the CICE testing tools, which are applied to output
+from five-year gx-1 simulations that use the standard CICE atmospheric forcing.
+A scientific justification of the testing is provided in
+:cite:`Hunke2018`.
+
+.. _paired:
+
+*******************************
+Two-Stage Paired Thickness Test
+*******************************
+
+The first quality check aims to confirm the null hypothesis
+:math:`H_0\!:\!\mu_d{=}0` at every model grid point, given the mean
+thickness difference :math:`\mu_d` between paired CICE simulations
+‘:math:`a`’ and ‘:math:`b`’ that should be identical. :math:`\mu_d` is
+approximated as
+:math:`\bar{h}_{d}=\tfrac{1}{n}\sum_{i=1}^n (h_{ai}{-}h_{bi})` for
+:math:`n` paired samples of ice thickness :math:`h_{ai}` and
+:math:`h_{bi}` in each grid cell of the gx-1 mesh. Following
+:cite:`Wilks2006`, the associated :math:`t`-statistic
+expects a zero mean, and is therefore
+
+.. math::
+   t=\frac{\bar{h}_{d}}{\sigma_d/\sqrt{n_{eff}}}
+   :label: t-distribution
+
+given variance
+:math:`\sigma_d^{\;2}=\frac{1}{n-1}\sum_{i=1}^{n}(h_{di}-\bar{h}_d)^2`
+of :math:`h_{di}{=}(h_{ai}{-}h_{bi})` and effective sample size
+
+..
math:: + n_{eff}{=}n\frac{({1-r_1})}{({1+r_1})} + :label: neff + +for lag-1 autocorrelation: + +.. math:: + r_1=\frac{\sum\limits_{i=1}^{n-1}\big[(h_{di}-\bar{h}_{d1:n-1})(h_{di+1}-\bar{h}_{d2:n})\big]}{\sqrt{\sum\limits_{i=1}^{n-1} (h_{di}-\bar{h}_{d1:n-1})^2 \sum\limits_{i=2}^{n} (h_{di}-\bar{h}_{d2:n})^2 }}. + :label: r1 + +Here, :math:`\bar{h}_{d1:n-1}` is the mean of all samples except the +last, and :math:`\bar{h}_{d2:n}` is the mean of samples except the +first, and both differ from the overall mean :math:`\bar{h}_d` in +equations (:eq:`t-distribution`). That is: + +.. math:: + \bar{h}_{d1:n-1}=\frac{1}{n{-}1} \sum \limits_{i=1}^{n-1} h_{di},\quad + \bar{h}_{d2:n}=\frac{1}{n{-}1} \sum \limits_{i=2}^{n} h_{di},\quad + \bar{h}_d=\frac{1}{n} \sum \limits_{i=1}^{n} {h}_{di} + :label: short-means + +Following :cite:`Zwiers1995`, the effective sample size is +limited to :math:`n_{eff}\in[2,n]`. This definition of :math:`n_{eff}` +assumes ice thickness evolves as an AR(1) process +:cite:`VonStorch1999`, which can be justified by analyzing +the spectral density of daily samples of ice thickness from 5-year +records in CICE Consortium member models :cite:`Hunke2018`. +The AR(1) approximation is inadmissible for paired velocity samples, +because ice drift possesses periodicity from inertia and tides +:cite:`Hibler2006,Lepparanta2012,Roberts2015`. Conversely, +tests of paired ice concentration samples may be less sensitive to ice +drift than ice thickness. In short, ice thickness is the best variable +for CICE Consortium quality control (QC), and for the test of the mean +in particular. + +Care is required in analyzing mean sea ice thickness changes using +(:eq:`t-distribution`) with +:math:`N{=}n_{eff}{-}1` degrees of freedom. +:cite:`Zwiers1995` demonstrate that the :math:`t`-test in +(:eq:`t-distribution`) becomes conservative when +:math:`n_{eff} < 30`, meaning that :math:`H_0` may be erroneously +confirmed for highly auto-correlated series. Strong autocorrelation +frequently occurs in modeled sea ice thickness, and :math:`r_1>0.99` is +possible in parts of the gx-1 domain for the five-year QC simulations. +In the event that :math:`H_0` is confirmed but :math:`2\leq n_{eff}<30`, +the :math:`t`-test progresses to the ‘Table Lookup Test’ of +:cite:`Zwiers1995`, to check that the first-stage test +using (:eq:`t-distribution`) was not +conservative. The Table Lookup Test chooses critical :math:`t` values +:math:`|t|`_. +There are 2 options for posting CICE results to CDash: 1) The automated +script, 2) The manual method. + +***************** +Automatic Script +***************** + +To automatically run the CICE tests, and post the results to the CICE Cdash dashboard, +users need to copy and run the ``run.suite`` script: + +.. code-block:: bash + + cp configuration/scripts/run.suite . + ./run.suite -m -testid -bc -bg + +The run.suite script does the following: + +- Creates a fresh clone of the CICE-Consortium repository +- ``cd`` to cloned repo +- run ``cice.setup`` to generate the base_suite directories. The output + is piped to ``log.suite`` +- Running ``cice.setup`` submits each individual job to the queue. +- ``run.suite`` monitors the queue manager to determine when all jobs have + finished (pings the queue manager once every 5 minutes). 
+- Once all jobs complete, cd to base_suite directory and run ``./results.csh`` +- Run ``./run_ctest.csh`` in order to post the test results to the CDash dashboard + +***************** +Manual Method +***************** + +To manually run the CICE tests and post the results to the CICE CDash dashboard, +users essentially just need to perform all steps available in run.suite, detailed below: + +- Pass the ``-report`` flag to cice.setup when running the ``base_suite`` test suite. + The ``-report`` flag copies the required CTest / CDash scripts to the suite + directory. +- ``cice.setup`` compiles the CICE code, and submits all of the jobs to the + queue manager. +- After every job has been submitted and completed, ``cd`` to the suite directory. +- Parse the results, by running ``./results.csh``. +- Run the CTest / CDash script ``./run_ctest.csh``. + +If the ``run_ctest.csh`` script is unable to post the testing results to the CDash +server, a message will be printed to the screen detailing instructions on how to attempt +to post the results from another server. If ``run_ctest.csh`` fails to submit the results, +it will generate a tarball ``cice_ctest.tgz`` that contains the necessary files for +submission. Copy this file to another server (CMake version 2.8+ required), extract the +archive, and run ``./run_ctest.csh -submit``. + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +End-To-End Testing Procedure +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Below is an example of a step-by-step procedure for testing a code change that results +in non-bit-for-bit results: + +.. code-block:: bash + + # Create a baseline dataset (only necessary if no baseline exists on the system) + ./cice.setup -m onyx -ts base_suite -testid base0 -bg cicev6.0.0 -a + + # Check out the updated code, or clone from a pull request + + # Run the test with the new code + ./cice.setup -m onyx -ts base_suite -testid test0 -bc cicev6.0.0 -a + + # Check the results + cd base_suite.test0 + ./results.csh + + #### If the BFB tests fail, perform the compliance testing #### + # Create a QC baseline + ./cice.setup -m onyx -t smoke -g gx1 -p 44x1 -testid qc_base -s qc,medium -a + cd onyx_smoke_gx1_44x1_medium_qc.qc_base + ./cice.build + ./cice.submit + + # Check out the updated code or clone from a pull request + + # Create the t-test testing data + ./cice.setup -m onyx -t smoke -g gx1 -p 44x1 -testid qc_test -s qc,medium -a + cd onyx_smoke_gx1_44x1_medium_qc.qc_test + ./cice.build + ./cice.submit + + # Wait for runs to finish + + # Perform the QC test + cp configuration/scripts/tests/QC/cice.t-test.py + ./cice.t-test.py /p/work/turner/CICE_RUNS/onyx_smoke_gx1_44x1_medium_qc.qc_base \ + /p/work/turner/CICE_RUNS/onyx_smoke_gx1_44x1_medium_qc.qc_test + + # Example output: + INFO:__main__:Number of files: 1825 + INFO:__main__:Two-Stage Test Passed + INFO:__main__:Quadratic Skill Test Passed for Northern Hemisphere + INFO:__main__:Quadratic Skill Test Passed for Southern Hemisphere + INFO:__main__: + INFO:__main__:Quality Control Test PASSED + +.. _tabnamelist: + +------------------------- +Table of namelist options +------------------------- + +.. _tab-namelist: + +.. 
csv-table:: Table 8 + :header: "variable", "options/format", "description", "recommended value" + :widths: 15, 15, 30, 15 + + "*setup_nml*", " ", " ", " " + "", "", "*Time, Diagnostics*", "" + "``days_per_year``", "``360`` or ``365``", "number of days in a model year", "365" + "``use_leap_years``", "true/false", "if true, include leap days", "" + "``year_init``", "yyyy", "the initial year, if not using restart", "" + "``istep0``", "integer", "initial time step number", "0" + "``dt``", "seconds", "thermodynamics time step length", "3600." + "``npt``", "integer", "total number of time steps to take", "" + "``ndtd``", "integer", "number of dynamics/advection/ridging/steps per thermo timestep", "1" + "", "", "*Initialization/Restarting*", "" + "``runtype``", "``initial``", "start from ``ice_ic``", "" + "", "``continue``", "restart using ``pointer_file``", "" + "``ice_ic``", "``default``", "latitude and sst dependent", "default" + "", "``none``", "no ice", "" + "", "path/file", "restart file name", "" + "``restart``", "true/false", "initialize using restart file", "``.true.``" + "``use_restart_time``", "true/false", "set initial date using restart file", "``.true.``" + "``restart_format``", "nc", "read/write  restart files (use with PIO)", "" + "", "bin", "read/write binary restart files", "" + "``lcdf64``", "true/false", "if true, use 64-bit  format", "" + "``restart_dir``", "path/", "path to restart directory", "" + "``restart_ext``", "true/false", "read/write halo cells in restart files", "" + "``restart_file``", "filename prefix", "output file for restart dump", "‘iced’" + "``pointer_file``", "pointer filename", "contains restart filename", "" + "``dumpfreq``", "``y``", "write restart every ``dumpfreq_n`` years", "y" + "", "``m``", "write restart every ``dumpfreq_n`` months", "" + "", "``d``", "write restart every ``dumpfreq_n`` days", "" + "``dumpfreq_n``", "integer", "frequency restart data is written", "1" + "``dump_last``", "true/false", "if true, write restart on last time step of simulation", "" + "", "", "*Model Output*", "" + "``bfbflag``", "true/false", "for bit-for-bit diagnostic output", "" + "``diagfreq``", "integer", "frequency of diagnostic output in ``dt``", "24" + "", "*e.g.*, 10", "once every 10 time steps", "" + "``diag_type``", "``stdout``", "write diagnostic output to stdout", "" + "", "``file``", "write diagnostic output to file", "" + "``diag_file``", "filename", "diagnostic output file (script may reset)", "" + "``print_global``", "true/false", "print diagnostic data, global sums", "``.false.``" + "``print_points``", "true/false", "print diagnostic data for two grid points", "``.false.``" + "``latpnt``", "real", "latitude of (2) diagnostic points", "" + "``lonpnt``", "real", "longitude of (2) diagnostic points", "" + "``dbug``", "true/false", "if true, write extra diagnostics", "``.false.``" + "``histfreq``", "string array", "defines output frequencies", "" + "", "``y``", "write history every ``histfreq_n`` years", "" + "", "``m``", "write history every ``histfreq_n`` months", "" + "", "``d``", "write history every ``histfreq_n`` days", "" + "", "``h``", "write history every ``histfreq_n`` hours", "" + "", "``1``", "write history every time step", "" + "", "``x``", "unused frequency stream (not written)", "" + "``histfreq_n``", "integer array", "frequency history output is written", "" + "", "0", "do not write to history", "" + "``hist_avg``", "true", "write time-averaged data", "``.true.``" + "", "false", "write snapshots of data", "" + "``history_dir``", "path/", 
"path to history output directory", "" + "``history_file``", "filename prefix", "output file for history", "‘iceh’" + "``write_ic``", "true/false", "write initial condition", "" + "``incond_dir``", "path/", "path to initial condition directory", "" + "``incond_file``", "filename prefix", "output file for initial condition", "‘iceh’" + "``runid``", "string", "label for run (currently CESM only)", "" + "", "", "", "" + "*grid_nml*", "", "", "" + "", "", "*Grid*", "" + "``grid_format``", "``nc``", "read  grid and kmt files", "‘bin’" + "", "``bin``", "read direct access, binary file", "" + "``grid_type``", "``rectangular``", "defined in *rectgrid*", "" + "", "``displaced_pole``", "read from file in *popgrid*", "" + "", "``tripole``", "read from file in *popgrid*", "" + "", "``regional``", "read from file in *popgrid*", "" + "``grid_file``", "filename", "name of grid file to be read", "‘grid’" + "``kmt_file``", "filename", "name of land mask file to be read", "‘kmt’" + "``gridcpl_file``", "filename", "input file for coupling grid info", "" + "``kcatbound``", "``0``", "original category boundary formula", "0" + "", "``1``", "new formula with round numbers", "" + "", "``2``", "WMO standard categories", "" + "", "``-1``", "one category", "" + "", "", "", "" + "*domain_nml*", "", "", "" + "", "", "*Domain*", "" + "``nprocs``", "integer", "number of processors to use", "" + "``processor_shape``", "``slenderX1``", "1 processor in the y direction (tall, thin)", "" + "", "``slenderX2``", "2 processors in the y direction (thin)", "" + "", "``square-ice``", "more processors in x than y, :math:`\sim` square", "" + "", "``square-pop``", "more processors in y than x, :math:`\sim` square", "" + "``distribution_type``", "``cartesian``", "distribute blocks in 2D Cartesian array", "" + "", "``roundrobin``", "1 block per proc until blocks are used", "" + "", "``sectcart``", "blocks distributed to domain quadrants", "" + "", "``sectrobin``", "several blocks per proc until used", "" + "", "``rake``", "redistribute blocks among neighbors", "" + "", "``spacecurve``", "distribute blocks via space-filling curves", "" + "``distribution_weight``", "``block``", "full block size sets ``work_per_block``", "" + "", "``latitude``", "latitude/ocean sets ``work_per_block``", "" + "``ew_boundary_type``", "``cyclic``", "periodic boundary conditions in x-direction", "" + "", "``open``", "Dirichlet boundary conditions in x", "" + "``ns_boundary_type``", "``cyclic``", "periodic boundary conditions in y-direction", "" + "", "``open``", "Dirichlet boundary conditions in y", "" + "", "``tripole``", "U-fold tripole boundary conditions in y", "" + "", "``tripoleT``", "T-fold tripole boundary conditions in y", "" + "``maskhalo_dyn``", "true/false", "mask unused halo cells for dynamics", "" + "``maskhalo_remap``", "true/false", "mask unused halo cells for transport", "" + "``maskhalo_bound``", "true/false", "mask unused halo cells for boundary updates", "" + "", "", "", "" + "*tracer_nml*", "", "", "" + "", "", "*Tracers*", "" + "``tr_iage``", "true/false", "ice age", "" + "``restart_age``", "true/false", "restart tracer values from file", "" + "``tr_FY``", "true/false", "first-year ice area", "" + "``restart_FY``", "true/false", "restart tracer values from file", "" + "``tr_lvl``", "true/false", "level ice area and volume", "" + "``restart_lvl``", "true/false", "restart tracer values from file", "" + "``tr_pond_cesm``", "true/false", "CESM melt ponds", "" + "``restart_pond_cesm``", "true/false", "restart tracer values from file", "" + 
"``tr_pond_topo``", "true/false", "topo melt ponds", "" + "``restart_pond_topo``", "true/false", "restart tracer values from file", "" + "``tr_pond_lvl``", "true/false", "level-ice melt ponds", "" + "``restart_pond_lvl``", "true/false", "restart tracer values from file", "" + "``tr_aero``", "true/false", "aerosols", "" + "``restart_aero``", "true/false", "restart tracer values from file", "" + "*thermo_nml*", "", "", "" + "", "", "*Thermodynamics*", "" + "``kitd``", "``0``", "delta function ITD approximation", "1" + "", "``1``", "linear remapping ITD approximation", "" + "``ktherm``", "``0``", "zero-layer thermodynamic model", "" + "", "``1``", "Bitz and Lipscomb thermodynamic model", "" + "", "``2``", "mushy-layer thermodynamic model", "" + "``conduct``", "``MU71``", "conductivity :cite:`MU71`", "" + "", "``bubbly``", "conductivity :cite:`PETB07`", "" + "``a_rapid_mode``", "real", "brine channel diameter", "0.5x10 :math:`^{-3}` m" + "``Rac_rapid_mode``", "real", "critical Rayleigh number", "10" + "``aspect_rapid_mode``", "real", "brine convection aspect ratio", "1" + "``dSdt_slow_mode``", "real", "drainage strength parameter", "-1.5x10 :math:`^{-7}` m/s/K" + "``phi_c_slow_mode``", ":math:`0<\phi_c < 1`", "critical liquid fraction", "0.05" + "``phi_i_mushy``", ":math:`0<\phi_i < 1`", "solid fraction at lower boundary", "0.85" + "", "", "", "" + "*dynamics_nml*", "", "", "" + "", "", "*Dynamics*", "" + "``kdyn``", "``0``", "dynamics OFF", "1" + "", "``1``", "EVP dynamics", "" + "", "``2``", "EAP dynamics", "" + "``revised_evp``", "true/false", "use revised EVP formulation", "" + "``ndte``", "integer", "number of EVP subcycles", "120" + "``advection``", "``remap``", "linear remapping advection", "‘remap’" + "", "``upwind``", "donor cell advection", "" + "``kstrength``", "``0``", "ice strength formulation :cite:`Hibler79`", "1" + "", "``1``", "ice strength formulation :cite:`Rothrock75`", "" + "``krdg_partic``", "``0``", "old ridging participation function", "1" + "", "``1``", "new ridging participation function", "" + "``krdg_redist``", "``0``", "old ridging redistribution function", "1" + "", "``1``", "new ridging redistribution function", "" + "``mu_rdg``", "real", "e-folding scale of ridged ice", "" + "``Cf``", "real", "ratio of ridging work to PE change in ridging", "17." + "", "", "", "" + "*shortwave_nml*", "", "", "" + "", "", "*Shortwave*", "" + "``shortwave``", "``default``", "NCAR CCSM3 distribution method", "" + "", "``dEdd``", "Delta-Eddington method", "" + "``albedo_type``", "``default``", "NCAR CCSM3 albedos", "‘default’" + "", "``constant``", "four constant albedos", "" + "``albicev``", ":math:`0<\alpha <1`", "visible ice albedo for thicker ice", "" + "``albicei``", ":math:`0<\alpha <1`", "near infrared ice albedo for thicker ice", "" + "``albsnowv``", ":math:`0<\alpha <1`", "visible, cold snow albedo", "" + "``albsnowi``", ":math:`0<\alpha <1`", "near infrared, cold snow albedo", "" + "``ahmax``", "real", "albedo is constant above this thickness", "0.3 m" + "``R_ice``", "real", "tuning parameter for sea ice albedo from Delta-Eddington shortwave", "" + "``R_pnd``", "real", "... for ponded sea ice albedo …", "" + "``R_snw``", "real", "... 
for snow (broadband albedo) …", ""
+ "``dT_mlt``", "real", ":math:`\Delta` temperature per :math:`\Delta` snow grain radius", ""
+ "``rsnw_mlt``", "real", "maximum melting snow grain radius", ""
+ "``kalg``", "real", "absorption coefficient for algae", ""
+ "", "", "", ""
+ "*ponds_nml*", "", "", ""
+ "", "", "*Melt Ponds*", ""
+ "``hp1``", "real", "critical ice lid thickness for topo ponds", "0.01 m"
+ "``hs0``", "real", "snow depth of transition to bare sea ice", "0.03 m"
+ "``hs1``", "real", "snow depth of transition to pond ice", "0.03 m"
+ "``dpscale``", "real", "time scale for flushing in permeable ice", ":math:`1\times 10^{-3}`"
+ "``frzpnd``", "``hlid``", "Stefan refreezing with pond ice thickness", "‘hlid’"
+ "", "``cesm``", "CESM refreezing empirical formula", ""
+ "``rfracmin``", ":math:`0 \le r_{min} \le 1`", "minimum melt water added to ponds", "0.15"
+ "``rfracmax``", ":math:`0 \le r_{max} \le 1`", "maximum melt water added to ponds", "1.0"
+ "``pndaspect``", "real", "aspect ratio of pond changes (depth:area)", "0.8"
+ "", "", "", ""
+ "*zbgc_nml*", "", "", ""
+ "", "", "*Biogeochemistry*", ""
+ "``tr_brine``", "true/false", "brine height tracer", ""
+ "``tr_zaero``", "true/false", "vertical aerosol tracers", ""
+ "``modal_aero``", "true/false", "modal aerosols", ""
+ "``restore_bgc``", "true/false", "restore bgc to data", ""
+ "``solve_zsal``", "true/false", "update salinity tracer profile", ""
+ "``bgc_data_dir``", "path/", "data directory for bgc", ""
+ "``skl_bgc``", "true/false", "biogeochemistry", ""
+ "``sil_data_type``", "``default``", "default forcing value for silicate", ""
+ "", "``clim``", "silicate forcing from ocean climatology :cite:`GLBA06`", ""
+ "``nit_data_type``", "``default``", "default forcing value for nitrate", ""
+ "", "``clim``", "nitrate forcing from ocean climatology :cite:`GLBA06`", ""
+ "", "``sss``", "nitrate forcing equals salinity", ""
+ "``fe_data_type``", "``default``", "default forcing value for iron", ""
+ "", "``clim``", "iron forcing from ocean climatology", ""
+ "``bgc_flux_type``", "``Jin2006``", "ice–ocean flux velocity of :cite:`JDWSTWLG06`", ""
+ "", "``constant``", "constant ice–ocean flux velocity", ""
+ "``restart_bgc``", "true/false", "restart tracer values from file", ""
+ "``tr_bgc_C_sk``", "true/false", "algal carbon tracer", ""
+ "``tr_bgc_chl_sk``", "true/false", "algal chlorophyll tracer", ""
+ "``tr_bgc_Am_sk``", "true/false", "ammonium tracer", ""
+ "``tr_bgc_Sil_sk``", "true/false", "silicate tracer", ""
+ "``tr_bgc_DMSPp_sk``", "true/false", "particulate DMSP tracer", ""
+ "``tr_bgc_DMSPd_sk``", "true/false", "dissolved DMSP tracer", ""
+ "``tr_bgc_DMS_sk``", "true/false", "DMS tracer", ""
+ "``phi_snow``", "real", "snow porosity for brine height tracer", ""
+ "", "", "", ""
+ "*forcing_nml*", "", "", ""
+ "", "", "*Forcing*", ""
+ "``formdrag``", "true/false", "calculate form drag", ""
+ "``atmbndy``", "``default``", "stability-based boundary layer", "‘default’"
+ "", "``constant``", "bulk transfer coefficients", ""
+ "``fyear_init``", "yyyy", "first year of atmospheric forcing data", ""
+ "``ycycle``", "integer", "number of years in forcing data cycle", ""
+ "``atm_data_format``", "``nc``", "read netCDF atmo forcing files", ""
+ "", "``bin``", "read direct access, binary files", ""
+ "``atm_data_type``", "``default``", "constant values defined in the code", ""
+ "", "``LYq``", "AOMIP/Large-Yeager forcing data", ""
+ "", "``monthly``", "monthly forcing data", ""
+ "", "``ncar``", "NCAR bulk forcing data", ""
+ "", "``oned``", "column forcing data", ""
+ "``atm_data_dir``", "path/", "path to atmospheric forcing data directory", ""
+ "``calc_strair``", "true", "calculate wind stress and speed", ""
+ "", "false", "read wind stress and speed from files", ""
+ "``highfreq``", "true/false", "high-frequency atmo coupling", ""
+ "``natmiter``", "integer", "number of atmo boundary layer iterations", ""
+ "``calc_Tsfc``", "true/false", "calculate surface temperature", "``.true.``"
+ "``precip_units``", "``mks``", "liquid precipitation data units", ""
+ "", "``mm_per_month``", "", ""
+ "", "``mm_per_sec``", "(same as MKS units)", ""
+ "``tfrz_option``", "``minus1p8``", "constant ocean freezing temperature (:math:`-1.8^{\circ} C`)", ""
+ "", "``linear_salt``", "linear function of salinity (ktherm=1)", ""
+ "", "``mushy_layer``", "matches mushy-layer thermo (ktherm=2)", ""
+ "``ustar_min``", "real", "minimum value of ocean friction velocity", "0.0005 m/s"
+ "``fbot_xfer_type``", "``constant``", "constant ocean heat transfer coefficient", ""
+ "", "``Cdn_ocn``", "variable ocean heat transfer coefficient", ""
+ "``update_ocn_f``", "true", "include frazil water/salt fluxes in ocn fluxes", ""
+ "", "false", "do not include (when coupling with POP)", ""
+ "``l_mpond_fresh``", "true", "retain (topo) pond water until ponds drain", ""
+ "", "false", "release (topo) pond water immediately to ocean", ""
+ "``oceanmixed_ice``", "true/false", "active ocean mixed layer calculation", "``.true.`` (if uncoupled)"
+ "``ocn_data_format``", "``nc``", "read netCDF ocean forcing files", ""
+ "", "``bin``", "read direct access, binary files", ""
+ "``sss_data_type``", "``default``", "constant values defined in the code", ""
+ "", "``clim``", "climatological data", ""
+ "", "``ncar``", "POP ocean forcing data", ""
+ "``sst_data_type``", "``default``", "constant values defined in the code", ""
+ "", "``clim``", "climatological data", ""
+ "", "``ncar``", "POP ocean forcing data", ""
+ "``ocn_data_dir``", "path/", "path to oceanic forcing data directory", ""
+ "``oceanmixed_file``", "filename", "data file containing ocean forcing data", ""
+ "``restore_sst``", "true/false", "restore sst to data", ""
+ "``trestore``", "integer", "sst restoring time scale (days)", ""
+ "``restore_ice``", "true/false", "restore ice state along lateral boundaries", ""
+ "", "", "", ""
+ "*icefields_tracer_nml*", "", "", ""
+ "", "", "*History Fields*", ""
+ "``f_<var>``", "string", "frequency units for writing ``<var>`` to history", ""
+ "", "``y``", "write history every ``histfreq_n`` years", ""
+ "", "``m``", "write history every ``histfreq_n`` months", ""
+ "", "``d``", "write history every ``histfreq_n`` days", ""
+ "", "``h``", "write history every ``histfreq_n`` hours", ""
+ "", "``1``", "write history every time step", ""
+ "", "``x``", "do not write ``<var>`` to history", ""
+ "", "``md``", "*e.g.,* write both monthly and daily files", ""
+ "``f_<var>_ai``", "", "grid cell average of ``<var>`` (:math:`\times a_i`)", ""
diff --git a/doc/source/user_guide/ug_troubleshooting.rst b/doc/source/user_guide/ug_troubleshooting.rst new file mode 100644 index 000000000..c71d64c55 --- /dev/null +++ b/doc/source/user_guide/ug_troubleshooting.rst @@ -0,0 +1,268 @@ +:tocdepth: 3
+
+Troubleshooting +===============
+
+Check the FAQ: http://oceans11.lanl.gov/drupal/CICE/FAQ.
+
+.. _setup:
+
+~~~~~~~~~~~~~ +Initial setup +~~~~~~~~~~~~~
+
+The script **comp\_ice** is configured so that the files **grid**, +**kmt**, **ice\_in**, **run\_ice**, **iced\_gx3\_v5.0** and +**ice.restart\_file** are NOT overwritten after the first setup. If you +wish to make changes to the original files in **input\_templates/** +rather than those in the run directory, either remove the files from the +run directory before executing **comp\_ice** or edit the script.
+
+The code may abort during the setup phase for any number of reasons, and +often the buffer containing the diagnostic output fails to print before +the executable exits. The quickest way to get the diagnostic information +is to run the code in an interactive shell with just the command `cice` +for serial runs or “`mpirun -np N cice`” for MPI runs, where N is the +appropriate number of processors (or a command appropriate for your +computer’s software).
+
+If the code fails to compile or run, or if the model configuration is +changed, try the following:
+
+- create **Macros.\***, **Makefile.\*** and **run\_ice.\*** files for + your particular platform, if they do not already exist (type ‘uname + -s’ at the prompt and compare the result with the file suffixes; we + rename `UNICOS/mp` as `UNICOS` for simplicity).
+
+- modify the `INCLUDE` directory path and other settings for your system + in the scripts, **Macros.\*** and **Makefile.\*** files.
+
+- alter directory paths, file names and the execution command as needed + in **run\_ice** and **ice\_in**.
+
+- ensure that `nprocs` in **ice\_in** is equal to `NTASK` in **comp\_ice**.
+
+- ensure that the block size `NXBLOCK`, `NYBLOCK` in **comp\_ice** is + compatible with the processor\_shape and other domain options in + **ice\_in**
+
+- if using the rake or space-filling curve algorithms for block + distribution (`distribution\_type` in **ice\_in**) the code will abort + if `MXBLCKS` is not large enough. The correct value is provided in the + diagnostic output.
+
+- if starting from a restart file, ensure that kcatbound is the same as + that used to create the file (`kcatbound` = 0 for the files included in + this code distribution). Other configuration parameters, such as + `NICELYR`, must also be consistent between runs.
+
+- for stand-alone runs, check that `-Dcoupled` is *not* set in the + **Macros.\*** file.
+
+- for coupled runs, check that `-Dcoupled` and other + coupled-model-specific (e.g., CESM, popcice or hadgem) preprocessing + options are set in the **Macros.\*** file.
+
+- edit the grid size and other parameters in **comp\_ice**.
+
+- remove the **compile/** directory completely and recompile.
+
+.. _restarttrouble:
+
+~~~~~~~~ +Restarts +~~~~~~~~
+
+CICE version 5 introduces a new model configuration that makes +restarting from older simulations difficult. In particular, the number +of ice categories, the category boundaries, and the number of vertical +layers within each category must be the same in the restart file and in +the run restarting from that file. Moreover, significant differences in +the physics, such as the salinity profile, may cause the code to fail +upon restart. Therefore, new model configurations may need to be started +using `runtype` = ‘initial’. Binary restart files that were provided with +CICE v4.1 were made using the BL99 thermodynamics with 4 layers and 5 +thickness categories (`kcatbound` = 0) and therefore cannot be used for +the default CICE v5 configuration (7 layers). In addition, CICE’s +default restart file format is now netCDF instead of binary.
+
+Restarting a run using `runtype` = ‘continue’ requires restart data for +all tracers used in the new run. If tracer restart data is not +available, use `runtype` = ‘initial’, setting `ice\_ic` to the name of the +core restart file and setting to true the namelist restart flags for +each tracer that is available. The unavailable tracers will be +initialized to their default settings.
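A minimal sketch of the relevant **ice\_in** entries for this case follows (the group and flag names shown are illustrative and should be checked against your **ice\_in**; set a tracer restart flag to true only when restart data exist for that tracer)::

   &setup_nml
      runtype          = 'initial'
      ice_ic           = './restart/[your core restart file]'
      restart          = .true.
      use_restart_time = .true.
   /
   &tracer_nml
      restart_age       = .true.    ! age restart data are available in this example
      restart_pond_topo = .false.   ! no pond restart data; tracer starts from defaults
   /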
On tripole grids, use `restart\_ext` = true when using either binary or +regular (non-PIO) netcdf.
+
+Provided that the same number of ice layers (default: 4) will be used +for the new runs, it is possible to convert v4.1 restart files to the +new file structure and then to netCDF format. If the same physical +parameterizations are used, the code should be able to execute from +these files. However if different physics is used (for instance, mushy +thermo instead of BL99), the code may still fail. To convert a v4.1 +restart file:
+
+#. Edit the code **input\_templates/convert\_restarts.f90** for your + model configuration and path names. Compile and run this code to + create a binary restart file that can be read using v5. Copy the + resulting file to the **restart/** subdirectory in your working + directory.
+
+#. In your working directory, turn off all tracer restart flags in + **ice\_in** and set the following:
+
+   - runtype = ‘initial’
+
+   - ice\_ic = ‘./restart/[your binary file name]’
+
+   - restart = .true.
+
+   - use\_restart\_time = .true.
+
+#. In **CICE\_InitMod.F90**, comment out the call to + restartfile(ice\_ic) and uncomment the call to + restartfile\_v4(ice\_ic) immediately below it. This will read the + v4.1 binary file and write a v5 netCDF file containing the same + information.
+
+If restart files are taking a long time to be written serially (i.e., +not using PIO), see the next section.
+
+~~~~~~~~~~~~~~ +Slow execution +~~~~~~~~~~~~~~
+
+On some architectures, underflows (:math:`10^{-300}` for example) are +not flushed to zero automatically. Usually a compiler flag is available +to do this, but if not, try uncommenting the block of code at the end of +subroutine *stress* in **ice\_dyn\_evp.F90** or **ice\_dyn\_eap.F90**. +You will take a hit for the extra computations, but it will not be as +bad as running with the underflows.
+
+In some configurations, multiple calls to scatter or gather global +variables may overfill MPI’s buffers, causing the code to slow down +(particularly when writing large output files such as restarts). To +remedy this problem, set `BARRIERS yes` in **comp\_ice**. This +synchronizes MPI messages, keeping the buffers in check.
+
+~~~~~~~~~~~~~~~ +Debugging hints +~~~~~~~~~~~~~~~
+
+Several utilities are available that can be helpful when debugging the +code. Not all of these will work everywhere in the code, due to possible +conflicts in module dependencies.
+
+*debug\_ice* (**CICE.F90**) + A wrapper for *print\_state* that is easily called from numerous + points during the timestepping loop (see + **CICE\_RunMod.F90\_debug**, which can be substituted for + **CICE\_RunMod.F90**).
+
+*print\_state* (**ice\_diagnostics.F90**) + Print the ice state and forcing fields for a given grid cell.
+
+`dbug` = true (**ice\_in**) + Print numerous diagnostic quantities.
+
+`print\_global` (**ice\_in**) + If true, compute and print numerous global sums for energy and mass + balance analysis. This option can significantly degrade code + efficiency.
+
+`print\_points` (**ice\_in**) + If true, print numerous diagnostic quantities for two grid cells, + one near the north pole and one in the Weddell Sea. This utility + also provides the local grid indices and block and processor numbers + (`ip`, `jp`, `iblkp`, `mtask`) for these points, which can be used in + conjunction with `check\_step` to call *print\_state*, as sketched + after this list. These flags are set in **ice\_diagnostics.F90**. + This option can be fairly slow, due to gathering data from processors.
+
+*global\_minval, global\_maxval, global\_sum* (**ice\_global\_reductions.F90**) + Compute and print the minimum and maximum values for an individual + real array, or its global sum.
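For example, a one-off state dump during the run might be coded as follows (a sketch only: `istep1` is the model step counter, and the availability of these variables in the calling routine and the *print\_state* argument list should be verified against **ice\_diagnostics.F90**)::

   ! Dump the full ice state for the print_points grid cell once the model
   ! reaches check_step; ip, jp, iblkp, mtask are reported in the log output.
   if (istep1 >= check_step .and. my_task == mtask) then
      call print_state ('after dynamics', ip, jp, iblkp)
   endif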
~~~~~~~~~~ +Known bugs +~~~~~~~~~~
+
+#. Fluxes sent to the CESM coupler may have incorrect values in grid + cells that change from an ice-free state to having ice during the + given time step, or vice versa, due to scaling by the ice area. The + authors of the CESM flux coupler insist on the area scaling so that + the ice and land models are treated consistently in the coupler (but + note that the land area does not suddenly become zero in a grid cell, + as does the ice area).
+
+#. With the old CCSM radiative scheme (`shortwave` = ‘default’ or + ‘ccsm3’), a sizable fraction (more than 10%) of the total shortwave + radiation is absorbed at the surface but should be penetrating into + the ice interior instead. This is due to use of the aggregated, + effective albedo rather than the bare ice albedo when + `snowpatch` :math:`< 1`.
+
+#. The date-of-onset diagnostic variables, `melt\_onset` and `frz\_onset`, + are not included in the core restart file, and therefore may be + incorrect for the current year if the run is restarted after Jan 1. + Also, these variables were implemented with the Arctic in mind and + may be incorrect for the Antarctic.
+
+#. The single-processor *system\_clock* time may give erratic results on + some architectures.
+
+#. History files that contain time averaged data (`hist\_avg` = true in + **ice\_in**) will be incorrect if restarting from midway through an + averaging period.
+
+#. In stand-alone runs, restarts from the end of `ycycle` will not be + exact.
+
+#. Using the same frequency twice in `histfreq` will have unexpected + consequences and will cause the code to abort.
+
+#. Latitude and longitude fields in the history output may be wrong when + using padding.
+
+~~~~~~~~~~~~~~~~~~~~~~~~~ +Interpretation of albedos +~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The snow-and-ice albedo, `albsni`, and diagnostic albedos `albice`, `albsno`, +and `albpnd` are merged over categories but not scaled (divided) by the +total ice area. (This is a change from CICE v4.1 for `albsni`.) The latter +three history variables represent completely bare or completely snow- or +melt-pond-covered ice; that is, they do not take into account the snow +or melt pond fraction (`albsni` does, as does the code itself during +thermodynamic computations). This is to facilitate comparison with +typical values in measurements or other albedo parameterizations. The +melt pond albedo `albpnd` is only computed for the Delta-Eddington +shortwave case.
+
+With the Delta-Eddington parameterization, the albedo depends on the +cosine of the zenith angle (:math:`\cos\varphi`, `coszen`) and is zero if +the sun is below the horizon (:math:`\cos\varphi < 0`). Therefore +time-averaged albedo fields would be low if a diurnal solar cycle is +used, because zero values would be included in the average for half of +each 24-hour period.
To rectify this, a separate counter is used for the +averaging that is incremented only when :math:`\cos\varphi > 0`. The +albedos will still be zero in the dark, polar winter hemisphere. + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Proliferating subprocess parameterizations +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +With the addition of several alternative parameterizations for sea ice +processes, a number of subprocesses now appear in multiple parts of the +code with differing descriptions. For instance, sea ice porosity and +permeability, along with associated flushing and flooding, are +calculated separately for mushy thermodynamics, topo and level-ice melt +ponds, and for the brine height tracer, each employing its own +equations. Likewise, the BL99 and mushy thermodynamics compute freeboard +and snow–ice formation differently, and the topo and level-ice melt pond +schemes both allow fresh ice to grow atop melt ponds, using slightly +different formulations for Stefan freezing. These various process +parameterizations will be compared and their subprocess descriptions +possibly unified in the future. \ No newline at end of file diff --git a/doc/sphinx-documentation-workflow.txt b/doc/sphinx-documentation-workflow.txt deleted file mode 100644 index ffaa4be75..000000000 --- a/doc/sphinx-documentation-workflow.txt +++ /dev/null @@ -1,72 +0,0 @@ -Basic workflow for github/sphinx documentation -Alice DuVivier -July 14, 2017 ----------------------------------------------------------- - -Most of this is adapted from https://github.com/ESMCI/cime/wiki/CIME-Git-Workflow, which has Figure 1 that is **VERY** useful -- Assumes you have sphinx installed on your own, personal machine. This includes the sphinxcontrib.bibtex library. Need to do this first. See the about-sphinx-documentation.txt file for more details. - -1. Editing *.rst files, testing html code, etc. in master branch -* Things to do once: -- Remotely: Create fork on personal GitHub area. Just do this once (usually). Use button on website of original repo to create this personal fork. -- Locally: Switch to your local machine -> cd ~/Documents/Research/github/CICE-Consortium/ —> go to local directory where you want to keep GitHub code, make changes, etc. -> git clone https://github.com/duvivier/CICE.git —> Clone the fork to your local machine from GitHub. Get the URL from which to clone from personal GitHub repository page, green “clone or download” button. -- This has now created a “local” copy of your fork called CICE. From here on you do changes from “local” fork on your local machine and push to your remote repo called “origin”. You can pull from “origin” or the original “upstream” remote repo. Any changes you want to eventually merge need to be pushed to the “origin” before issuing a pull request to “upstream” -> git status —> check that you’re on the master branch and have checked this out. -> git remote —v —> check your remote branches. Default will have just “origin” and it will push to your local fork. -> git remote add upstream https://github.com/CICE-Consortium/CICE —> add the consortium as the ultimate upstream source. Will need this for daily updates (see below). -> git remote —v —> check that the “upstream” branch has been added - -* Things you do daily: -> cd ~/Documents/Research/github/CICE-Consortium/CICE/ -> git status —> tells you what branch you are on and any commits that need to be made -> git branch —> tells you what branches are available locally -> git remote —v —> lists the remote sources. 
want “origin” to point to personal remote repo and “upstream” to point to the original code you forked from. -> git pull upstream master —> will fetch+merge any changes to the local master branch since you last stopped working on it. Need to specify “master” branch. If there are not changes it will tell you that you are already up-to-date can also do > git pull —rebase upstream master and the —rebase tells git to move your commits to the tip of the master branch after synchronizing with changes from central repository. Better to rebase than do a merge commit. Rebasing is unlikely to cause problems unless you’re working on the same code or feature as someone else. To stop this process just execute git rebase —abort. -- Make your local edits to *.rst files, code, etc. Then issue sphinx commands to test these. -> cd ../ (be in the /doc/ directory, not the source directory. Must be one directory up). -> make clean —> gets rid of old html -> make html —> makes new html from sphinx *.rst files. -> cd build/html -> open index.html (or other html code) —> opens html locally to check it quickly -- iterate on the steps above till you’re happy with the html code -- note that sometimes the math doesn't render properly the first time you try this. If this is the case, you should do another >make html and check it. If -that still doesn't work try just touching the *.rst file that isn't rendering properly (open it, save it, close it) and try >make html again. This has worked -in the past to get the math to render properly. At this time we are unsure why this is necessary. -> git status —> gives you list of files that are changed but not yet staged in red -> git add *.rst —> add *rst files or whatever else needs to be staged for documentation stuff. -> git status —> now should show list of changes that have been staged in green -> git commit -m “message” —> commit the changes to your local fork. This makes it ready to push to external fork. -> git push origin —> will push the local code changes to your remote “origin” fork. In this case the master fork with the *.rst files. Note that we set this up so that the push will ignore *.html files. -* note you may want to add the path for the documentation from gh-pages to the README.md file (or another file). The path is: https://duvivier.github.io/CICE/ (or use CICE-Consortium instead of duvivier for the consortium repository once the pull request is complete) - -2. Pushing *.html code in gh-pages branch -* Things to do once: -> cd ~/Documents/Research/github/CICE-Consortium/ -> git clone https://github.com/duvivier/CICE.git CICE.gh-pages -- note that this checks out the master branch. So we need to switch to the gh-pages branch. -> git checkout gh-pages -- This switches to the gh-pages branch, which *only* is used for html pages. - -* Things to do daily: -> cd ~/Documents/Research/github/CICE-Consortium/CICE.gh-pages/ -> git status —> check that you are on gh-pages branch with this tag. -> rm -rf . —> remove old html files in here -> cd ~/Documents/Research/github/CICE-Consortium/CICE/doc/ —> change to master branch -> make clean —> clean up old html code -> make html —> make the correct html code for *rst files you just committed to master branch -> cd build/html -> cp -r . ~/Documents/Research/github/CICE-Consortium/CICE.gh-pages/ -> cd ~/Documents/Research/github/CICE-Consortium/CICE.gh-pages/ -> git add . 
—> add the files to those needing a commit to local branch -> git commit -m “updates….” —> commit *.html files to local fork -> git push origin —> will push local changes to remote “origin” fork, which in this case is the gh-pages branch on my personal fork from the consortium. -- Check this online at personal pages to make sure it looks right, is pointing to right path, etc. etc. - -3. Merging with original repository -- Once you’ve checked and tested the documentation on your local fork, it’s time for a pull request to the original repository -- On personal GitHub webpage there is a button on left called “New Pull Request”. Click that. -- It then takes you to original repository (CICE-Consoritum/CICE/) from which you forked. It shows the number of changed files and the differences in green (additions) or red (subtractions) in these files with the files that exist on that branch. If you add a new file then everything is green. -- Once you’ve checked your code, then click the big, green “Create pull request” button and this will send the changes to the administrators of the CICE-Consoritum repository. (Elizabeth, Tony, Alice, others). -- Always issue a pull request to merge with the original repository rather than just merging it yourself. This is the main, well tested branch that we release from so we want multiple eyes to look everything over. This is less crucial for documentation than actual code, but still important.