diff --git a/.github/workflows/model-build-test-ci.yml b/.github/workflows/model-build-test-ci.yml new file mode 100644 index 00000000..eda22173 --- /dev/null +++ b/.github/workflows/model-build-test-ci.yml @@ -0,0 +1,15 @@ +name: Test model build + +on: + workflow_dispatch: + pull_request: + push: + branches: + - "master" + +jobs: + build: + name: Build ${{ github.repository }} via spack + uses: access-nri/build-ci/.github/workflows/model-1-build.yml@main + permissions: + packages: read diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index f27a7b92..00000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "ParallelIO"] - path = ParallelIO - url = https://github.com/NCAR/ParallelIO.git diff --git a/LICENSE.pdf b/LICENSE.pdf new file mode 100644 index 00000000..0fef8f73 Binary files /dev/null and b/LICENSE.pdf differ diff --git a/ParallelIO b/ParallelIO deleted file mode 160000 index 7e242f78..00000000 --- a/ParallelIO +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 7e242f78bd1b4766518aff44fda17ff50eed6188 diff --git a/README.md b/README.md index 0d59b8c5..a8c82c35 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,14 @@ ## Overview -This repository contains the trunk from the subversion (svn) repository of the Los Alamos Sea Ice Model, CICE, including release tags through version 5.1.2. +This repository contains the access/auscom fork of cice5 used in the ACCESS-ESM1.6 and ACCESS-OM2. It was forked from https://github.com/CICE-Consortium/CICE-svn-trunk/ which in turn captured the trunk from the subversion (svn) repository of the Los Alamos Sea Ice Model, CICE, including release tags through version 5.1.2. 
+ +ACCESS-ESM1.6 related code came from https://code.metoffice.gov.uk/trac/cice/browser/main/branches/pkg/Config/vn5.1.2_GSI8.1_package_branch/cice?order=name&rev=334#source (MOSRS account required) More recent versions are found in the [CICE](https://github.com/CICE-Consortium/CICE) and [Icepack](https://github.com/CICE-Consortium/Icepack) repositories, which are maintained by the CICE Consortium. If you expect to make any changes to the code, we recommend that you work in the CICE and Icepack repositories. Changes made to code in this repository will not be accepted, other than critical bug fixes. +There is [PDF documentation](https://github.com/ACCESS-NRI/cice5/blob/master/doc/cicedoc.pdf) available for CICE 5.1.2, however some changes were made to this fork to support coupling with ACCESS-OM2 and ACCESS-ESM1.6, Parallel IO, ERA5 Forcing, BGC modelling and for other updates. Some of these changes are described in the [ACCESS-OM2 Technical Report](https://github.com/COSIMA/ACCESS-OM2-1-025-010deg-report). + ## Useful links * **Wiki**: https://github.com/CICE-Consortium/CICE-svn-trunk/wiki @@ -16,4 +20,4 @@ If you expect to make any changes to the code, we recommend that you work in the * **Resource Index**: https://github.com/CICE-Consortium/About-Us/wiki/Resource-Index - List of resources for information about the Consortium and its repositories as well as model documentation, testing, and development. + List of resources for information about the Consortium and its repositories as well as model documentation, testing, and development. 
diff --git a/bld/Macros.Linux.raijin-185 b/bld/Macros.Linux.raijin-185 index 3e488077..8e52c6e0 100644 --- a/bld/Macros.Linux.raijin-185 +++ b/bld/Macros.Linux.raijin-185 @@ -26,7 +26,7 @@ FC := mpifort ifeq ($(DEBUG), yes) FFLAGS := -r8 -i4 -O0 -g -align all -w -ftz -convert big_endian -assume byterecl -no-vec -mcmodel=medium -xHost -fp-model precise else - FFLAGS := -r8 -i4 -O2 -align all -w -ftz -convert big_endian -assume byterecl -no-vec -mcmodel=medium -xHost -fp-model precise + FFLAGS := -r8 -i4 -O2 -align all -w -ftz -convert big_endian -assume byterecl -no-vec -mcmodel=medium -xHost -fp-model precise -assume buffered_io -check noarg_temp_created endif MOD_SUFFIX := mod LD := $(FC) diff --git a/bld/Makefile b/bld/Makefile index c2d232ff..3edf5900 100644 --- a/bld/Makefile +++ b/bld/Makefile @@ -113,11 +113,11 @@ $(EXEC): $(OBJS) cc $(CFLAGS) $(CPPDEFS) $(INCS) $(INCLDIR) $< .F.o: - $(CPP) $(CPPFLAGS) $(CPPDEFS) $(INCS) $(INCLDIR) $< > $*.f + $(FC) -P $(CPPFLAGS) $(CPPDEFS) $(INCS) $(INCLDIR) $< -o $*.f $(FC) -c $(FFLAGS) $(FIXEDFLAGS) $(INCS) $(INCLDIR) $*.f .F90.o: - $(CPP) $(CPPFLAGS) $(CPPDEFS) $(INCS) $(INCLDIR) $< > $*.f90 + $(FC) -P $(CPPFLAGS) $(CPPDEFS) $(INCS) $(INCLDIR) $< -o $*.f90 $(FC) -c $(FFLAGS) $(FREEFLAGS) $(INCS) $(INCLDIR) $*.f90 mostlyclean: diff --git a/bld/Makefile.std b/bld/Makefile.std index c2d232ff..3edf5900 100644 --- a/bld/Makefile.std +++ b/bld/Makefile.std @@ -113,11 +113,11 @@ $(EXEC): $(OBJS) cc $(CFLAGS) $(CPPDEFS) $(INCS) $(INCLDIR) $< .F.o: - $(CPP) $(CPPFLAGS) $(CPPDEFS) $(INCS) $(INCLDIR) $< > $*.f + $(FC) -P $(CPPFLAGS) $(CPPDEFS) $(INCS) $(INCLDIR) $< -o $*.f $(FC) -c $(FFLAGS) $(FIXEDFLAGS) $(INCS) $(INCLDIR) $*.f .F90.o: - $(CPP) $(CPPFLAGS) $(CPPDEFS) $(INCS) $(INCLDIR) $< > $*.f90 + $(FC) -P $(CPPFLAGS) $(CPPDEFS) $(INCS) $(INCLDIR) $< -o $*.f90 $(FC) -c $(FFLAGS) $(FREEFLAGS) $(INCS) $(INCLDIR) $*.f90 mostlyclean: diff --git a/bld/build.sh b/bld/build.sh index d6807064..d0ba8776 100755 --- a/bld/build.sh +++ 
b/bld/build.sh @@ -78,17 +78,10 @@ if ( $AusCOM == 'yes' ) then setenv CPL_INCS '-I$(CPLINCDIR)/include -I$(OASISDIR)/psmile.MPI1 -I$(OASISDIR)/mct -I$(SRCDIR)/ParallelIO/build/include/' endif -### Setup the version string, this is the git hash of the commit used to build -### the code. The version of an executable can be found with the following -### command: strings | grep 'CICE_COMMIT_HASH=' -setenv GIT_CONFIG_NOGLOBAL 'yes' - -set old_hash=`grep 'public :: CICE_COMMIT_HASH =' $SRCDIR/drivers/$driver/version.F90 | cut -d '"' -f 2 | cut -d '=' -f 2` -set new_hash=`git rev-parse HEAD` - -if ( $old_hash != $new_hash ) then - sed -e "s/{CICE_COMMIT_HASH}/$new_hash/g" $SRCDIR/drivers/$driver/version.F90.template > $SRCDIR/drivers/$driver/version.F90 -endif +### The version of an executable can be found with the following +### command: strings | grep 'CICE_VERSION=' +set version='202301' +sed -e "s/{CICE_VERSION}/$version/g" $SRCDIR/drivers/$driver/version.F90.template > $SRCDIR/drivers/$driver/version_mod.F90 ### Location and name of the generated exectuable setenv EXE cice_${driver}_${resolution}_${NTASK}p.exe diff --git a/bld/makdep.c b/bld/makdep.c index ca9a9e78..f4786770 100755 --- a/bld/makdep.c +++ b/bld/makdep.c @@ -51,7 +51,7 @@ static struct node *suffix_list; /* List of Fortran suffixes to look for */ static void check (char *, struct node *, char *, int); static int already_found (char *, struct node *); -main (int argc, char **argv) +int main (int argc, char **argv) { int lastdot; /* points to the last . in fname */ int c; /* return from getopt */ diff --git a/compile/comp_access-cm1440-185_ac330 b/compile/comp_access-cm1440-185_ac330 new file mode 100755 index 00000000..a32f5c50 --- /dev/null +++ b/compile/comp_access-cm1440-185_ac330 @@ -0,0 +1,238 @@ +#! 
/bin/csh -f + +set echo on +#setenv DEBUG yes # set to yes for debug + +if ( $1 == '') then + echo '*** Please issue the command like ***' + echo ' > ./comp_auscom_cice.RJ.nP #nproc ' + echo 'here #proc is the number of cpu to be used for CICE5 (e.g. 1, 2, 4, 6...)' + set nproc = 192 + echo *** $nproc processors will be used to run CICE5... *** + sleep 3 + #exit +else + set nproc = $1 + echo *** $nproc processors will be used to run CICE5... *** +endif + +### Change these to your own site and user directory! +### You will need to create a Makefile Macro in bld +### Platform and its architecture ($HOST = xe) +setenv ARCH raijin-185 + +# Set AusCOM home: +setenv AusCOMHOME $cwd:h:h:h + +#---------------------------------------------------------------------- + +### Specialty code +setenv CAM_ICE no # set to yes for CAM runs (single column) +setenv SHRDIR csm_share # location of CCSM shared code +setenv IO_TYPE netcdf # set to none if netcdf library is unavailable + # set to pio for parallel netcdf +setenv DITTO no # reproducible diagnostics +setenv THRD no # set to yes for OpenMP threading +if ( $THRD == 'yes') setenv OMP_NUM_THREADS 2 # positive integer + +setenv ACCESS yes # set to yes for ACCESS +setenv AusCOM yes # set to yes for AusCOM +setenv OASIS3_MCT yes # oasis3-mct version +setenv CHAN MPI1 # MPI1 or MPI2 (always MPI1!) 
+setenv NICELYR 4 # number of vertical layers in the ice +setenv NSNWLYR 1 # number of vertical layers in the snow +setenv NICECAT 5 # number of ice thickness categories + +### Location of ACCESS system +setenv SYSTEMDIR $AusCOMHOME +echo SYSTEMDIR: $SYSTEMDIR + +### Location of this model (source) +setenv SRCDIR $cwd:h #$SYSTEMDIR/submodels/cice5.0.4 +echo SRCDIR: $SRCDIR + +source ${SRCDIR}/compile/environs.$ARCH # environment variables and loadable modules + +### Location and names of coupling libraries and inclusions +### Location and names of coupling libraries +#setenv CPLLIBDIR ~access/access-cm2/prebuild/oasis3-mct/Linux-182/lib +setenv CPLLIBDIR /projects/access/apps/oasis3-mct/ompi185/lib +setenv CPLLIBS '-L$(CPLLIBDIR) -lpsmile.${CHAN} -lmct -lmpeu -lscrip' +#echo CPLLIBS: ${CPLLIBS} + +### Location of coupling inclusions +#setenv CPLINCDIR ~access/access-cm2/prebuild/oasis3-mct/Linux-182/build/lib +setenv CPLINCDIR /projects/access/apps/oasis3-mct/ompi185/include +setenv CPL_INCS '-I$(CPLINCDIR)/psmile.$(CHAN) -I$(CPLINCDIR)/pio -I$(CPLINCDIR)/mct' +#echo CPL_INCS: $CPL_INCS + +### For multi-Layer ice (standard) configuration +setenv N_ILYR 1 # 4 for standard multi-layer ice. 
for ktherm=0, zero-layer thermodynamics + +### Location and name of the generated executable +setenv DATESTR `date +%Y%m%d` +setenv BINDIR $SYSTEMDIR/bin +setenv EXE cice_GC3GA7-cm1440-185.${DATESTR}_${nproc}p_${NICELYR}lyr + +### Where this model is compiled +setenv OBJDIR $SRCDIR/compile/build_${CHAN}_{$nproc}p-mct-185 +if !(-d $OBJDIR) mkdir -p $OBJDIR +#/bin/rm $OBJDIR/* +# + +### Grid resolution +#setenv GRID gx3 ; setenv RES 100x116 +#setenv GRID gx1 ; setenv RES 320x384 +#setenv GRID tx1 ; setenv RES 360x240 +#setenv GRID tp1 ; setenv RES 360x300 +setenv GRID tp1 ; setenv RES 1440x1080 + +set NXGLOB = `echo $RES | sed s/x.\*//` +set NYGLOB = `echo $RES | sed s/.\*x//` +echo NXGLOB: $NXGLOB +echo NYGLOB: $NYGLOB + +# Recommendations: +# NTASK equals nprocs in ice_in +# use processor_shape = slenderX1 or slenderX2 in ice_in +# one per processor with distribution_type='cartesian' or +# squarish blocks with distribution_type='rake' +# If BLCKX (BLCKY) does not divide NXGLOB (NYGLOB) evenly, padding +# will be used on the right (top) of the grid. 
+setenv NTASK $nproc +#setenv BLCKX 45 # x-dimension of blocks ( not including ) +#setenv BLCKY 38 # y-dimension of blocks ( ghost cells ) +setenv BLCKX `expr $NXGLOB / $nproc` +setenv BLCKY `expr $NYGLOB` +echo BLCKX: $BLCKX +echo BLCKY: $BLCKY + +echo +#24 : 12x2 +setenv BLCKX 120 +setenv BLCKY 540 +#144 : 16x9 +setenv BLCKX 90 +setenv BLCKY 120 +#16 : 8x2 +setenv BLCKX 180 +setenv BLCKY 540 +#16 : 4x4 +setenv BLCKX 360 +setenv BLCKY 270 +##216 : 24x9 +#setenv BLCKX 60 +#setenv BLCKY 120 +#192 : 16x12 -->square-ice +setenv BLCKX 90 +setenv BLCKY 90 +#192 : 96x2 +setenv BLCKX 15 +setenv BLCKY 540 + +# may need to increase MXBLCKS with rake distribution or padding +@ a = $NXGLOB * $NYGLOB ; @ b = $BLCKX * $BLCKY * $NTASK +@ m = $a / $b ; setenv MXBLCKS $m ; if ($MXBLCKS == 0) setenv MXBLCKS 1 +echo Automatically generated: MXBLCKS = $m +##setenv MXBLCKS 8 # if necessary (code will print proper value) +#20110830: increase it to 12 as required by code: +# (but no clue why it never happened before!) 
+#setenv MXBLCKS 12 # if necessary (code will print proper value) + +########################################### +# ars599: 24032014 +# copy from /short/p66/ars599/CICE.v5.0/accice.v504_csiro +# solo_ice_comp +########################################### +### Tracers # match ice_in tracer_nml to conserve memory +setenv TRAGE 1 # set to 1 for ice age tracer +setenv TRFY 0 # set to 1 for first-year ice area tracer +setenv TRLVL 0 # set to 1 for level and deformed ice tracers +setenv TRPND 1 # set to 1 for melt pond tracers +setenv NTRAERO 0 # number of aerosol tracers + # (up to max_aero in ice_domain_size.F90) + # CESM uses 3 aerosol tracers +setenv TRBRI 0 # set to 1 for brine height tracer +setenv NBGCLYR 0 # number of zbgc layers +setenv TRBGCS 0 # number of skeletal layer bgc tracers + # TRBGCS=0 or 2<=TRBGCS<=9) + +### File unit numbers +setenv NUMIN 11 # minimum file unit number +setenv NUMAX 99 # maximum file unit number + +if ($IO_TYPE == 'netcdf') then + setenv IODIR io_netcdf +else if ($IO_TYPE == 'pio') then + setenv IODIR io_pio +else + setenv IODIR io_binary +endif + +########################################### + +setenv CBLD $SRCDIR/bld + +if ( $ARCH == 'UNICOS/mp') setenv ARCH UNICOS +if ( $ARCH == 'UNICOS') then + cp -f $CBLD/Makefile.$ARCH $CBLD/Makefile +else + cp -f $CBLD/Makefile.std $CBLD/Makefile +endif + +if ($NTASK == 1) then + setenv COMMDIR serial +else + setenv COMMDIR mpi +endif +echo COMMDIR: $COMMDIR + +if ($ACCESS == 'yes') then + setenv DRVDIR access +else + setenv DRVDIR cice +endif +echo DRVDIR: $DRVDIR + +cd $OBJDIR + +### List of source code directories (in order of importance). +cat >! 
Filepath << EOF +$SRCDIR/drivers/$DRVDIR +$SRCDIR/source +$SRCDIR/$COMMDIR +$SRCDIR/$IODIR +$SRCDIR/$SHRDIR +EOF + +if ( $ARCH == 'UNICOS.ORNL.phoenix' ) then + ### use -h command for phoenix + cc -o makdep -h command $CBLD/makdep.c || exit 2 +else if ( $ARCH == 'Linux.ORNL.jaguar' ) then + gcc -g -o makdep $CBLD/makdep.c || exit 2 +else + cc -o makdep $CBLD/makdep.c || exit 2 +endif + +setenv MACFILE $CBLD/Macros.Linux.${ARCH} + +gmake VPFILE=Filepath EXEC=$BINDIR/$EXE \ + NXGLOB=$NXGLOB NYGLOB=$NYGLOB \ + BLCKX=$BLCKX BLCKY=$BLCKY MXBLCKS=$MXBLCKS \ + -f $CBLD/Makefile MACFILE=$MACFILE || exit 2 + +cd .. +pwd +echo NTASK = $NTASK +echo "global N, block_size" +echo "x $NXGLOB, $BLCKX" +echo "y $NYGLOB, $BLCKY" +echo max_blocks = $MXBLCKS +echo $TRAGE = TRAGE, iage tracer +echo $TRFY = TRFY, first-year ice tracer +echo $TRLVL = TRLVL, level-ice tracers +echo $TRPND = TRPND, melt pond tracers +echo $NTRAERO = NTRAERO, number of aerosol tracers +echo $TRBRI = TRBRI, brine height tracer +echo $NBGCLYR = NBGCLYR, number of bio grid layers +echo $TRBGCS = TRBGCS, number of BGC tracers diff --git a/compile/comp_access-cm1440-185_r47 b/compile/comp_access-cm1440-185_r47 new file mode 100755 index 00000000..f3e09f0d --- /dev/null +++ b/compile/comp_access-cm1440-185_r47 @@ -0,0 +1,238 @@ +#! /bin/csh -f + +set echo on +#setenv DEBUG yes # set to yes for debug + +if ( $1 == '') then + echo '*** Please issue the command like ***' + echo ' > ./comp_auscom_cice.RJ.nP #nproc ' + echo 'here #proc is the number of cpu to be used for CICE5 (e.g. 1, 2, 4, 6...)' + set nproc = 192 + echo *** $nproc processors will be used to run CICE5... *** + sleep 3 + #exit +else + set nproc = $1 + echo *** $nproc processors will be used to run CICE5... *** +endif + +### Change these to your own site and user directory! 
+### You will need to create a Makefile Macro in bld +### Platform and its architecture ($HOST = xe) +setenv ARCH raijin-185 + +# Set AusCOM home: +setenv AusCOMHOME $cwd:h:h:h + +#---------------------------------------------------------------------- + +### Specialty code +setenv CAM_ICE no # set to yes for CAM runs (single column) +setenv SHRDIR csm_share # location of CCSM shared code +setenv IO_TYPE netcdf # set to none if netcdf library is unavailable + # set to pio for parallel netcdf +setenv DITTO no # reproducible diagnostics +setenv THRD no # set to yes for OpenMP threading +if ( $THRD == 'yes') setenv OMP_NUM_THREADS 2 # positive integer + +setenv ACCESS yes # set to yes for ACCESS +setenv AusCOM yes # set to yes for AusCOM +setenv OASIS3_MCT yes # oasis3-mct version +setenv CHAN MPI1 # MPI1 or MPI2 (always MPI1!) +setenv NICELYR 4 # number of vertical layers in the ice +setenv NSNWLYR 1 # number of vertical layers in the snow +setenv NICECAT 5 # number of ice thickness categories + +### Location of ACCESS system +setenv SYSTEMDIR $AusCOMHOME +echo SYSTEMDIR: $SYSTEMDIR + +### Location of this model (source) +setenv SRCDIR $cwd:h #$SYSTEMDIR/submodels/cice5.0.4 +echo SRCDIR: $SRCDIR + +source ${SRCDIR}/compile/environs.$ARCH # environment variables and loadable modules + +### Location and names of coupling libraries and inclusions +### Location and names of coupling libraries +#setenv CPLLIBDIR ~access/access-cm2/prebuild/oasis3-mct/Linux-182/lib +setenv CPLLIBDIR /short/p66/hxy599/ACCESS/submodels/oasis3-mct_local/Linux-185_r47/lib +setenv CPLLIBS '-L$(CPLLIBDIR) -lpsmile.${CHAN} -lmct -lmpeu -lscrip' +#echo CPLLIBS: ${CPLLIBS} + +### Location of coupling inclusions +#setenv CPLINCDIR ~access/access-cm2/prebuild/oasis3-mct/Linux-182/build/lib +setenv CPLINCDIR /short/p66/hxy599/ACCESS/submodels/oasis3-mct_local/Linux-185_r47/build/lib +setenv CPL_INCS '-I$(CPLINCDIR)/psmile.$(CHAN) -I$(CPLINCDIR)/pio -I$(CPLINCDIR)/mct' +#echo CPL_INCS: $CPL_INCS + +### 
For multi-Layer ice (standard) configuration +setenv N_ILYR 1 # 4 for standard multi-layer ice. for ktherm=0, zero-layer thermodynamics + +### Location and name of the generated executable +setenv DATESTR `date +%Y%m%d` +setenv BINDIR $SYSTEMDIR/bin +setenv EXE cice_GC3GA7-cm1440-185.${DATESTR}_${nproc}p_${NICELYR}lyr + +### Where this model is compiled +setenv OBJDIR $SRCDIR/compile/build_${CHAN}_{$nproc}p-mct-185 +if !(-d $OBJDIR) mkdir -p $OBJDIR +#/bin/rm $OBJDIR/* +# + +### Grid resolution +#setenv GRID gx3 ; setenv RES 100x116 +#setenv GRID gx1 ; setenv RES 320x384 +#setenv GRID tx1 ; setenv RES 360x240 +#setenv GRID tp1 ; setenv RES 360x300 +setenv GRID tp1 ; setenv RES 1440x1080 + +set NXGLOB = `echo $RES | sed s/x.\*//` +set NYGLOB = `echo $RES | sed s/.\*x//` +echo NXGLOB: $NXGLOB +echo NYGLOB: $NYGLOB + +# Recommendations: +# NTASK equals nprocs in ice_in +# use processor_shape = slenderX1 or slenderX2 in ice_in +# one per processor with distribution_type='cartesian' or +# squarish blocks with distribution_type='rake' +# If BLCKX (BLCKY) does not divide NXGLOB (NYGLOB) evenly, padding +# will be used on the right (top) of the grid. 
+setenv NTASK $nproc +#setenv BLCKX 45 # x-dimension of blocks ( not including ) +#setenv BLCKY 38 # y-dimension of blocks ( ghost cells ) +setenv BLCKX `expr $NXGLOB / $nproc` +setenv BLCKY `expr $NYGLOB` +echo BLCKX: $BLCKX +echo BLCKY: $BLCKY + +echo +#24 : 12x2 +setenv BLCKX 120 +setenv BLCKY 540 +#144 : 16x9 +setenv BLCKX 90 +setenv BLCKY 120 +#16 : 8x2 +setenv BLCKX 180 +setenv BLCKY 540 +#16 : 4x4 +setenv BLCKX 360 +setenv BLCKY 270 +##216 : 24x9 +#setenv BLCKX 60 +#setenv BLCKY 120 +#192 : 16x12 -->square-ice +setenv BLCKX 90 +setenv BLCKY 90 +#192 : 96x2 +setenv BLCKX 15 +setenv BLCKY 540 + +# may need to increase MXBLCKS with rake distribution or padding +@ a = $NXGLOB * $NYGLOB ; @ b = $BLCKX * $BLCKY * $NTASK +@ m = $a / $b ; setenv MXBLCKS $m ; if ($MXBLCKS == 0) setenv MXBLCKS 1 +echo Automatically generated: MXBLCKS = $m +##setenv MXBLCKS 8 # if necessary (code will print proper value) +#20110830: increase it to 12 as required by code: +# (but no clue why it never happened before!) 
+#setenv MXBLCKS 12 # if necessary (code will print proper value) + +########################################### +# ars599: 24032014 +# copy from /short/p66/ars599/CICE.v5.0/accice.v504_csiro +# solo_ice_comp +########################################### +### Tracers # match ice_in tracer_nml to conserve memory +setenv TRAGE 1 # set to 1 for ice age tracer +setenv TRFY 0 # set to 1 for first-year ice area tracer +setenv TRLVL 0 # set to 1 for level and deformed ice tracers +setenv TRPND 1 # set to 1 for melt pond tracers +setenv NTRAERO 0 # number of aerosol tracers + # (up to max_aero in ice_domain_size.F90) + # CESM uses 3 aerosol tracers +setenv TRBRI 0 # set to 1 for brine height tracer +setenv NBGCLYR 0 # number of zbgc layers +setenv TRBGCS 0 # number of skeletal layer bgc tracers + # TRBGCS=0 or 2<=TRBGCS<=9) + +### File unit numbers +setenv NUMIN 11 # minimum file unit number +setenv NUMAX 99 # maximum file unit number + +if ($IO_TYPE == 'netcdf') then + setenv IODIR io_netcdf +else if ($IO_TYPE == 'pio') then + setenv IODIR io_pio +else + setenv IODIR io_binary +endif + +########################################### + +setenv CBLD $SRCDIR/bld + +if ( $ARCH == 'UNICOS/mp') setenv ARCH UNICOS +if ( $ARCH == 'UNICOS') then + cp -f $CBLD/Makefile.$ARCH $CBLD/Makefile +else + cp -f $CBLD/Makefile.std $CBLD/Makefile +endif + +if ($NTASK == 1) then + setenv COMMDIR serial +else + setenv COMMDIR mpi +endif +echo COMMDIR: $COMMDIR + +if ($ACCESS == 'yes') then + setenv DRVDIR access +else + setenv DRVDIR cice +endif +echo DRVDIR: $DRVDIR + +cd $OBJDIR + +### List of source code directories (in order of importance). +cat >! 
Filepath << EOF +$SRCDIR/drivers/$DRVDIR +$SRCDIR/source +$SRCDIR/$COMMDIR +$SRCDIR/$IODIR +$SRCDIR/$SHRDIR +EOF + +if ( $ARCH == 'UNICOS.ORNL.phoenix' ) then + ### use -h command for phoenix + cc -o makdep -h command $CBLD/makdep.c || exit 2 +else if ( $ARCH == 'Linux.ORNL.jaguar' ) then + gcc -g -o makdep $CBLD/makdep.c || exit 2 +else + cc -o makdep $CBLD/makdep.c || exit 2 +endif + +setenv MACFILE $CBLD/Macros.Linux.${ARCH} + +gmake VPFILE=Filepath EXEC=$BINDIR/$EXE \ + NXGLOB=$NXGLOB NYGLOB=$NYGLOB \ + BLCKX=$BLCKX BLCKY=$BLCKY MXBLCKS=$MXBLCKS \ + -f $CBLD/Makefile MACFILE=$MACFILE || exit 2 + +cd .. +pwd +echo NTASK = $NTASK +echo "global N, block_size" +echo "x $NXGLOB, $BLCKX" +echo "y $NYGLOB, $BLCKY" +echo max_blocks = $MXBLCKS +echo $TRAGE = TRAGE, iage tracer +echo $TRFY = TRFY, first-year ice tracer +echo $TRLVL = TRLVL, level-ice tracers +echo $TRPND = TRPND, melt pond tracers +echo $NTRAERO = NTRAERO, number of aerosol tracers +echo $TRBRI = TRBRI, brine height tracer +echo $NBGCLYR = NBGCLYR, number of bio grid layers +echo $TRBGCS = TRBGCS, number of BGC tracers diff --git a/drivers/access/CICE_InitMod.F90 b/drivers/access/CICE_InitMod.F90 index 6f8db6b7..7eccbff0 100644 --- a/drivers/access/CICE_InitMod.F90 +++ b/drivers/access/CICE_InitMod.F90 @@ -20,8 +20,7 @@ module CICE_InitMod use cpl_parameters use cpl_forcing_handler use cpl_interface -!ars599: 27032014: defind my_task - use ice_communicate, only: my_task + use ice_communicate, only: my_task, master_task #endif implicit none @@ -68,9 +67,7 @@ subroutine cice_init use ice_algae, only: get_forcing_bgc use ice_calendar, only: dt, dt_dyn, time, istep, istep1, write_ic, & init_calendar, calendar, idate, month -!ars599: 27032014 - use ice_communicate, only: MPI_COMM_ICE - use ice_communicate, only: init_communicate + use ice_communicate, only: MPI_COMM_ICE, init_communicate use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_dyn_eap, only: init_eap @@ -89,6 
+86,7 @@ subroutine cice_init use ice_restoring, only: ice_HaloRestore_init use ice_shortwave, only: init_shortwave use ice_state, only: tr_aero + use ice_therm_shared, only: calc_Tsfc, heat_capacity use ice_therm_vertical, only: init_thermo_vertical use ice_timers, only: timer_total, init_ice_timers, ice_timer_start use ice_transport_driver, only: init_transport @@ -97,9 +95,13 @@ subroutine cice_init #ifdef popcice use drv_forcing, only: sst_sss #endif +#ifdef ACCESS + use ice_coupling, only: top_layer_Tandk_init +#endif #ifdef AusCOM integer(kind=int_kind) :: idate_save + character (char_len_long) :: filename #endif call init_communicate ! initial setup for message passing @@ -108,13 +110,13 @@ subroutine cice_init MPI_COMM_ICE = il_commlocal ! call init_cpl ! initialize message passing call get_cpl_timecontrol - write(il_out,*)' CICE (cice_init) 1 jobnum = ',jobnum - write(il_out,*)' CICE (cice_init) 1 inidate = ',inidate - write(il_out,*)' CICE (cice_init) 1 init_date = ',init_date - write(il_out,*)' CICE (cice_init) 1 runtime0 = ',runtime0 - write(il_out,*)' CICE (cice_init) 1 runtime = ',runtime - write(il_out,*)' CICE (cice_init) 1 idate = ',my_task, idate - !write(il_out,*)' CICE (cice_init) 1 runtype = ',runtype + if (my_task == master_task) then + write(il_out,*)' CICE (cice_init) 1 jobnum = ',jobnum + write(il_out,*)' CICE (cice_init) 1 init_date = ',init_date + write(il_out,*)' CICE (cice_init) 1 runtime = ',runtime + write(il_out,*)' CICE (cice_init) 1 idate = ',my_task, idate + !write(il_out,*)' CICE (cice_init) 1 runtype = ',runtype + end if #endif call init_fileunits ! unit numbers @@ -145,7 +147,7 @@ subroutine cice_init call sst_sss ! POP data for CICE initialization #endif call init_thermo_vertical ! initialize vertical thermodynamics - call init_itd ! initialize ice thickness distribution + call init_itd(calc_Tsfc, heat_capacity)! initialize ice thickness distribution call calendar(time) ! 
determine the initial date !ars599: 11042014: remove most of the lines based on cice4.1_fm @@ -175,14 +177,23 @@ subroutine cice_init call init_restart ! initialize restart variables #ifdef AusCOM - write(il_out,*) 'CICE (cice_init) 2 time = ', my_task, time - write(il_out,*) 'CICE (cice_init) 2 runtime0 = ', my_task, runtime0 - write(il_out,*) 'CICE (cice_init) 2 idate = ', my_task, idate + if (my_task == master_task) then + write(il_out,*) 'CICE (cice_init) 2 time = ', my_task, time + write(il_out,*) 'CICE (cice_init) 2 runtime0 = ', my_task, runtime0 + !write(il_out,*) 'CICE (cice_init) 2 idate = ', my_task, idate + end if if (jobnum == 1 ) then time = 0.0 !NOTE, the first job must be set back to 0 and idate = idate_save !idate back to the 'initial' value, in any case +<<<<<<< HEAD endif +======= + runtime0 = 0.0 + else !BX: 20160720 + runtime0 = time ! Record initial time read from init_restart + endif +>>>>>>> origin/access-esm1.6 #endif call init_diags ! initialize diagnostic output points @@ -203,10 +214,22 @@ subroutine cice_init #else !ars599: 26032014 original code ! call calendar(time) ! 
at the end of the first timestep +<<<<<<< HEAD call calendar(time-runtime0) write(il_out,*) 'CICE (cice_init) 3 time = ', my_task, time write(il_out,*) 'CICE (cice_init) 3 runtime0 = ', my_task, runtime0 write(il_out,*) 'CICE (cice_init) 3 idate = ', my_task, idate +======= + call calendar(time-runtime0) + if (my_task == master_task) then + write(il_out,*) 'CICE (cice_init) 3 time = ', my_task, time + write(il_out,*) 'CICE (cice_init) 3 runtime0 = ', my_task, runtime0 + write(il_out,*) 'CICE (cice_init) 3 iniyear = ', my_task, iniyear + write(il_out,*) 'CICE (cice_init) 3 inimon = ', my_task, inimon + write(il_out,*) 'CICE (cice_init) 3 iniday = ', my_task, iniday + write(il_out,*) 'CICE (cice_init) 3 idate = ', my_task, idate + end if +>>>>>>> origin/access-esm1.6 #endif !-------------------------------------------------------------------- @@ -234,6 +257,13 @@ subroutine cice_init call init_flux_ocn ! initialize ocean fluxes sent to coupler !#endif +<<<<<<< HEAD +======= + if (.not. calc_Tsfc .and. heat_capacity) & + call top_layer_Tandk_init ! initialise top layer temperature and + ! effective conductivity + +>>>>>>> origin/access-esm1.6 !ars599: 11042014: ice_write_hist is no longer there now change to accum_hist ! so wrapup this line n use the new code !dhb599 20111128: this call is moved here from 'downstair', because it *re-initilaise* @@ -249,6 +279,7 @@ subroutine cice_init ! for continue runs, need restart o2i forcing fields and time-averaged ice ! variables ('mice')saved at the end of last run from ice models; ! for initial run, pre-processed o2i (and maybe mice) fields are required. +<<<<<<< HEAD ! call get_restart_o2i('o2i.nc') call get_restart_o2i(trim(restartdir)//'/o2i.nc') @@ -288,6 +319,69 @@ subroutine cice_init ! so wrapup this line n markout !dhb599: 20111128: the following call is moved 'upstair' ! if (write_ic) call accum_hist(dt) ! 
write initial conditions +======= + if ( trim(runtype) == 'continue' ) then + write(il_out,*)' calling get_restart_o2i at time_sec = ',0 + filename = trim(restartdir)//'/o2i.nc' + if ( file_exist(filename) ) then + call get_restart_o2i(filename) + else + call abort_ice('file NOT found: '//filename //& + " This is allowed for runtype='initial' ONLY") + endif + !if no lag for ice to atm coupling, then cice has to read restart file i2a.nc and + !put the data to atm. the call is not needed if there is lag for ice2atm coupling + !must call after get_restart_o2i(), by which the ocn_sst ect are read in and re-used by put_restart_i2a() + ! call put_restart_i2a('i2a.nc', 0) + filename = trim(restartdir)//'/mice.nc' + if ( file_exist(filename) ) then + !for continue runs, mice data MUST be available. + call get_restart_mice(filename) + else + call abort_ice("file NOT found: "//filename//& + " This is allowed for runtype='initial' ONLY") + endif + endif + if (use_core_runoff) then + call get_core_runoff(trim(inputdir)//'/core_runoff_regrid.nc',& + 'runoff',1) + endif + + if (my_task == master_task) then + write(il_out,*)' calling ave_ocn_fields_4_i2a time_sec = ',0 !time_sec + endif + call time_average_ocn_fields_4_i2a + !accumulate/average ocn fields needed for IA coupling + +#endif + +!20171024: read in mask for land ice discharge into ocean off Antarctica and Greenland. +#ifdef ACCESS + !!! options for land ice discharged as iceberg melting around AA and Gnld + ! 0: "even" distribution as for u-ar676; + !================== for ESM1.5/1.6, "0" option is NOT used ===============! + ! 1: use AC2 data but GC3.1 iceberg climatological pattern, each month takes + ! the total discharge as that diagnosed in u-ar676 (yrs2-101); + ! 2: use GC3 iceberg climatological pattern, each month enhanced by ac2/gc3 + ! annual ratio of land ice discharge to make sure the annual total + ! discharge is same as case 1; + ! 3: as case 1 but use annual mean + ! 
4: as case 2 but use annual mean + !!! Note 3 and 4 are similar but NOT the same; 1-4 cases should have identical annual + !!! discharge of land ice (as iceberg) into ocean. + + filename = trim(inputdir)//'/lice_discharge_iceberg.nc' + if ( file_exist(filename) ) then + call get_lice_discharge(filename) + else + if (my_task == master_task) then + write(6,*)'* CICE stopped -- iceberg datafile missing.*' + endif + call abort_ice ('ice: land ice discharge iceberg datafile missing: '//& + filename //' *') + endif +#endif +>>>>>>> origin/access-esm1.6 end subroutine cice_init @@ -329,7 +423,16 @@ subroutine init_restart call restartfile() ! given by pointer in ice_in !ars599: 11042014: markout call calendar ! according to dhb599 initmod at cice4.1_fm +#ifdef ACCESS + ! TODO: 'time' argument in the calendar call below affects frz_onset output. + ! Hardcoding time=0.0 gives the same results as the older CM2 based calendar + ! initialisation. + ! See https://github.com/ACCESS-NRI/cice5/issues/49 + ! and https://github.com/ACCESS-NRI/cice5/pull/48 for details. + call calendar(0.0) ! update time parameters +#else call calendar(time) ! update time parameters +#endif if (kdyn == 2) call read_restart_eap ! EAP else if (restart) then ! ice_ic = core restart file call restartfile (ice_ic) ! or 'default' or 'none' diff --git a/drivers/access/CICE_RunMod.F90 b/drivers/access/CICE_RunMod.F90 index b2472636..adf71938 100644 --- a/drivers/access/CICE_RunMod.F90 +++ b/drivers/access/CICE_RunMod.F90 @@ -17,12 +17,7 @@ module CICE_RunMod use ice_kinds_mod -#ifdef AusCOM - !For stuff in this AusCOM's own driver the "#ifdef AusCOM" is NOT needed! - !but for consistency with the code in other places, we keep it anyway ... - !...to "indentify" the modification to the original code, easier - !...to locate future code update Aug. 
2008 - +#ifdef ACCESS use cpl_parameters use cpl_arrays_setup use cpl_interface @@ -52,12 +47,10 @@ subroutine CICE_Run use ice_algae, only: get_forcing_bgc use ice_calendar, only: istep, istep1, time, dt, stop_now, calendar use ice_forcing, only: get_forcing_atmo, get_forcing_ocn -#ifdef AusCOM -!ars599: 27032014 add in - use ice_calendar, only: month, mday, istep, istep1, & - time, dt, stop_now, calendar - use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & - get_forcing_atmo_ready +#ifdef ACCESS + use ice_calendar, only: month, mday, istep, istep1, time, dt, stop_now, calendar, & + write_restart, dump_last + use ice_restart_driver, only: dumpfile !temporary debug #endif use ice_flux, only: init_flux_atm, init_flux_ocn use ice_state, only: tr_aero @@ -65,22 +58,18 @@ subroutine CICE_Run timer_couple, timer_step use ice_zbgc_shared, only: skl_bgc -#ifdef AusCOM -!ars599: 27032014 add in - use ice_timers, only: timer_from_ocn, timer_into_ocn, & - timer_from_atm, timer_into_atm, ice_timer_start, & - ice_timer_stop, timer_couple, timer_step +#ifdef ACCESS + use ice_timers, only: ice_timer_start, & + ice_timer_stop, timer_couple, timer_step, & + timer_from_atm, timer_into_atm, timer_from_ocn, timer_into_ocn use ice_grid, only: t2ugrid_vector, u2tgrid_vector - - - integer (kind=int_kind) :: time_sec, itap, icpl_ai, icpl_io, tmp_time + integer (kind=int_kind) :: time_sec, itap, icpl_ai, tmp_time integer (kind=int_kind) :: rtimestamp_ai, stimestamp_ai integer (kind=int_kind) :: rtimestamp_io, stimestamp_io !receive and send timestamps (seconds) integer (kind=int_kind) :: imon -!ars: 08052014 according to dhb599 fm changed, and mark out the one from OM -! logical :: first_step = .true. !1st time step of experiment or not - logical :: need_i2o = .true. + + logical :: write_tmp_dump = .true. #endif !-------------------------------------------------------------------- @@ -93,208 +82,132 @@ subroutine CICE_Run ! 
timestep loop !-------------------------------------------------------------------- -#ifdef AusCOM - write(il_out,*)'A <==> I coupling num_cpl_ai= ',num_cpl_ai - write(il_out,*)' to/from ocean num_cpl_io= ',num_cpl_io - write(il_out,*)' ice steps num_ice_io = ', num_ice_io - write(il_out,*)'runtime, runtime0=', runtime, runtime0 +#ifdef ACCESS + write(il_out,*)'A <==> I coupling num_cpl_ai = ',num_cpl_ai + write(il_out,*)' ice steps per ai interval num_ice_ai = ',num_ice_ai + write(il_out,*)' runtime, runtime0 = ',runtime, runtime0 time_sec = 0 - ! get from atm once at time 0 -! rtimestamp_ai = time_sec -! call ice_timer_start(timer_from_atm) ! atm/ocn coupling -! call from_atm(rtimestamp_ai) -! call ice_timer_stop(timer_from_atm) ! atm/ocn coupling -! -! !set time averaged ice and ocn variables back to 0 -! call initialize_mice_fields_4_i2a -! call initialize_mocn_fields_4_i2a DO icpl_ai = 1, num_cpl_ai !begin A <==> I coupling iterations - Do icpl_io = 1, num_cpl_io !begin I <==> O coupling iterations - - if(icpl_ai <= num_cpl_ai .and. mod(time_sec, dt_cpl_ai ) == 0) then ! atm ice coupling time except last step - rtimestamp_ai = time_sec - call ice_timer_start(timer_from_atm) ! atm/ice coupling - call from_atm(rtimestamp_ai) - call ice_timer_stop(timer_from_atm) ! atm/ice coupling - -!! !set time averaged ice and ocn variables back to 0 - write(il_out,*)' calling init_mice_fields_4_i2a at time_sec = ',time_sec - call initialize_mice_fields_4_i2a -! call initialize_mocn_fields_4_i2a - end if - - - stimestamp_io = time_sec - - !at the beginning of the run, cice (CICE_init) reads in the required ice fields - !(time averaged over the last coupling interval of previous run), which include - !strocnx/yT, aice, fresh_gbm, fsalt_gbm, fhocn_gbm, fswthru_gbm, sicemass etc. - !(named as mstrocnx/yT, maice, mfresh, mfsalt, mfhocn, mfswthru, msicemass ...) 
- - !together with the a2i fields (sent from um at the end of previous run) received - !above, the time0 i2o fields can be obtained here - - !if (runtime0 == 0 .and. need_i2o) then - ! write(6,*)'*** CICE: initial run calls get_restart_i2o *** ' - ! write(6,*)'*** CICE: time_sec = ', time_sec - ! write(il_out,*)' calling get_restart_i2o at time_sec = ',time_sec - ! call get_restart_i2o('i2o.nc') - ! need_i2o = .false. - !else - ! write(6,*)'*** CICE: calling get_i2o_fields... ' - ! write(6,*)'*** CICE: time_sec = ', time_sec - ! write(6,*)'*** CICE: calling get_i2o_fields... ' - ! write(il_out,*)' calling get_i2o_fields at time_sec = ',time_sec - call get_i2o_fields - !endif - - !shift stresses from T onto U grid before sending into ocn - write(il_out,*)' calling t2ugrid_vector - u/v at time_sec = ', time_sec - call t2ugrid_vector(io_strsu) - call t2ugrid_vector(io_strsv) - - write(il_out,*)' calling into_ocn at time_sec = ', time_sec - call ice_timer_start(timer_into_ocn) ! atm/ocn coupling - call into_ocn(stimestamp_io) - call ice_timer_stop(timer_into_ocn) ! atm/ocn coupling - - !at the beginning of the run, cice (CICE_init) reads in the required o2i fields - !(saved from the last timestep of ocean). - - !together with the a2i fields (sent from um at the end of previous run) received - !above, the time0 boundary condition for ice 'internal time loop' is set here - - !-------------------------------------------------------------------------------- - !* This needs be investigated: calling set_sbc_ice outside the itap loop causes - ! cice to crash ('departure error') due probably to "aice" "mismatch?" for each - ! time step in the set_sbc_ice calculation.... (?) - ! We therefore still call "get_sbc_ice" inside the ice time loop (below) - ! 
- !write(il_out,*)' calling set_sbc_ice at time_sec = ',time_sec - !call set_sbc_ice - !-------------------------------------------------------------------------------- - - !set time averaged ice variables back to 0 - write(il_out,*)' calling init_mice_fields_4_i2o at time_sec = ',time_sec - call initialize_mice_fields_4_i2o - - do itap = 1, num_ice_io ! cice time loop within each i<=>o cpl interval - - !------------------------------------------------------------------------------ - !* see comments above - call get_sbc_ice - !set boundary condition (forcing) for ice time step - !------------------------------------------------------------------------------ - - call ice_step - write(il_out,*)' calling ave_ice_fields_4_i2a at time_sec = ',time_sec - -!======================================= - tmp_time = time_sec + dt - if ( mod(tmp_time, dt_cpl_ai) == 0 ) then !put to atm i step before coupling - write(il_out,*)' calling get_i2a_fields at time_sec = ',time_sec - call ice_timer_start(timer_into_atm) ! atm/ocn coupling - call get_i2a_fields ! i2a fields ready to be sent for next IA cpl int in atm. - -! if(tmp_time < runtime ) then - ! * because of using lag=+dt_ice, we must take one step off the time_sec - ! * to make the sending happen at right time: - stimestamp_ai = time_sec ! - dt - write(il_out,*)' calling into_atm at time_sec = ',time_sec - call into_atm(stimestamp_ai) - -! !set time averaged ice and ocn variables back to 0 - write(il_out,*)' calling init_mocn_fields_4_i2a at time_sec = ',time_sec - !call initialize_mice_fields_4_i2a - call initialize_mocn_fields_4_i2a -! end if - call ice_timer_stop(timer_into_atm) ! atm/ocn coupling - end if -!====================================== - - ! note ice_step makes call to time_average_fields_4_i2o - ! and time_average_fields_4_i2a - ! to get time-averaged ice variables required for setting up i2o and i2a cpl fields - - istep = istep + 1 ! update time step counters - istep1 = istep1 + 1 - time = time + dt ! 
determine the time and date + !receive a2i fields + rtimestamp_ai = time_sec + !call ice_timer_start(timer_from_atm) ! atm/ice coupling + write(il_out,*)' calling from_atm at icpl_ai, time_sec = ', icpl_ai, time_sec + !=========================== + call from_atm(rtimestamp_ai) + !=========================== + !call ice_timer_stop(timer_from_atm) ! atm/ice coupling + + !"TTI" approach ice fluxes converted to GBM units + !call atm_icefluxes_back2GBM (CM2 requires) + + + do itap = 1, num_ice_ai ! cice time loop + ! Note I <==> O coupling happens at each time step - time_sec = time_sec + dt - - call calendar(time-runtime0) - - !initialize fluxes sent to coupler (WHY should still need do this? CH: NOT needed!) - call init_flux_atm - call init_flux_ocn - - !CH: should be doing things here - !get_i2o_fields - !get_i2a_fields - - end do !itap - - !!write(il_out,*)' calling get_i2o_fields at time_sec = ',time_sec - !!call get_i2o_fields !i2o fields ready to be sent for next IO cpl int in ocn. - rtimestamp_io = time_sec - if (rtimestamp_io < runtime) then !get coupling from ocean except the last time step - write(il_out,*)' calling from_ocn at time_sec = ',time_sec - call ice_timer_start(timer_from_ocn) ! atm/ocn coupling - call from_ocn(rtimestamp_io) !get o2i fields for next IO cpl int - call ice_timer_stop(timer_from_ocn) ! atm/ocn coupling - - write(il_out,*)' calling ave_ocn_fields_4_i2a at time_sec = ',time_sec - call time_average_ocn_fields_4_i2a !accumulate/average ocn fields needed for IA coupling - end if - - !CH: maybe-- - ! call get_i2a_fields -#ifdef WRONG_INTO_ATM - tmp_time = time_sec + dt - if ( mod(tmp_time, dt_cpl_ai) == 0 ) then !put to atm i step before coupling - write(il_out,*)' calling get_i2a_fields at time_sec = ',time_sec - call ice_timer_start(timer_into_atm) ! atm/ocn coupling - call get_i2a_fields ! i2a fields ready to be sent for next IA cpl int in atm. - -! if(tmp_time < runtime ) then - ! 
* because of using lag=+dt_ice, we must take one step off the time_sec - ! * to make the sending happen at right time: - stimestamp_ai = time_sec ! - dt - write(il_out,*)' calling into_atm at time_sec = ',time_sec - call into_atm(stimestamp_ai) - -! !set time averaged ice and ocn variables back to 0 - write(il_out,*)' calling init_mocn_fields_4_i2a at time_sec = ',time_sec - !call initialize_mice_fields_4_i2a - call initialize_mocn_fields_4_i2a -! end if - call ice_timer_stop(timer_into_atm) ! atm/ocn coupling - end if -#endif - End Do !icpl_io + stimestamp_io = time_sec -! write(il_out,*)' calling get_i2a_fields at time_sec = ',time_sec -! call ice_timer_start(timer_into_atm) ! atm/ocn coupling -! call get_i2a_fields ! i2a fields ready to be sent for next IA cpl int in atm. -! -! ! * because of using lag=+dt_ice, we must take one step off the time_sec -! ! * to make the sending happen at right time: -! stimestamp_ai = time_sec - dt -! write(il_out,*)' calling into_atm at time_sec = ',time_sec -! call into_atm(stimestamp_ai) -! call ice_timer_stop(timer_into_atm) ! atm/ocn coupling + !"combine" a2i fields and ice fields to get i2o fields + call get_i2o_fields + + !shift stresses from T onto U grid before sending into ocn + call t2ugrid_vector(io_strsu) + call t2ugrid_vector(io_strsv) + + write(il_out,'(a,3i10)') & + ' calling into_ocn at icpl_ai, itap, time_sec = ', icpl_ai, itap, time_sec + !call ice_timer_start(timer_into_ocn) ! atm/ocn coupling + !=========================== + !call check_iceberg_fields('chk_iceberg_i2o.nc') + call into_ocn(stimestamp_io) + !=========================== + !call ice_timer_stop(timer_into_ocn) ! atm/ocn coupling + + !set boundary condition (forcing) + call get_sbc_ice + + !Debug: 20170825 -- check sbc_ice variables from "get_sbc_ice" + !call check_ice_sbc_fields('chk_ice_sbc.nc') + + !Debug: 20170927 -- check the restart fields at the beginning of day 3 + !if (icpl_ai == 17 .and. itap == 1) then + ! write(il_out,'(a,4i10)') & + ! 
' calling dumpfile at icpl_ai, itap, time_sec, idate = ', icpl_ai, itap, time_sec, idate + ! call dumpfile + !endif + + ! Write restart on final timestep + if (dump_last .and. (itap == num_ice_ai) .and. (icpl_ai == num_cpl_ai)) then + write_restart = 1 + endif + + !*** ice "update" ***! + call ice_step + + !Debug: 20170827 -- check updated ice varables after ice_step + !call check_ice_fields('chk_ice_fields.nc') + + !time-average ice variables required for setting up i2o and i2a cpl fields + call time_average_fields_4_i2o !actually "instant" ice vairables + call time_average_fields_4_i2a !time averaging over ia cpl interval + + tmp_time = time_sec + dt + if ( mod(tmp_time, dt_cpl_ai) == 0 ) then !this happens at itap = num_ice_ai + !call ice_timer_start(timer_into_atm) + !i2a fields ready to be sent for next IA cpl int in atm. + call get_i2a_fields + + stimestamp_ai = time_sec + + write(il_out,'(a,3i10)') & + ' calling into_atm at icpl_ai, itap, time_sec = ',icpl_ai, itap, time_sec + !=========================== + call into_atm(stimestamp_ai) + !=========================== + + !set time averaged ice and ocn variables back to 0 + call initialize_mocn_fields_4_i2a + call initialize_mice_fields_4_i2a + !call ice_timer_stop(timer_into_atm) ! atm/ocn coupling + endif + + istep = istep + 1 ! update time step counters + istep1 = istep1 + 1 + time = time + dt ! determine the time and date + + time_sec = time_sec + dt + call calendar(time-runtime0) + + !initialize fluxes sent to coupler + !WHY should still need this? CH: NOT needed! ==> but model crashes if NOT! 
+ call init_flux_atm + call init_flux_ocn + + rtimestamp_io = time_sec + if (rtimestamp_io < runtime) then + !get o2i fields for next time step ice update + write(il_out,'(a,3i10)') & + ' calling from_ocn at icpl_ai, itap, time_sec = ',icpl_ai, itap, time_sec + !call ice_timer_start(timer_from_ocn) + !=========================== + call from_ocn(rtimestamp_io) + !=========================== + !call ice_timer_stop(timer_from_ocn) + !accumulate/average ocn fields needed for IA coupling + call time_average_ocn_fields_4_i2a + end if + + end do !itap + + newstep_ai = .true. END DO !icpl_ai ! final update of the stimestamp_io, ie., put back the last dt_cice: stimestamp_io = stimestamp_io + dt - ! *** need save o2i fields here instead of in mom4 *** - !call save_restart_o2i('o2i.nc', stimestamp_io) !it is done in mom4 - ! *** need save the last IO cpl int (time-averaged) ice variables used to get i2o fields *** ! *** at the beginning of next run *** call save_restart_mice('mice.nc',stimestamp_io) @@ -466,16 +379,6 @@ subroutine ice_step call ice_timer_stop(timer_thermo) ! thermodynamics call ice_timer_stop(timer_column) ! column physics -!ars599: 04092014: add in -! not sure should add inside the loop or not? -!ars599: 09052014: move from after line 458 "enddo ! iblk" to here -#ifdef AusCOM - !need some time-mean ice fields - !(so as to get i2o and i2a fields for next coupling interval) - call time_average_fields_4_i2o - call time_average_fields_4_i2a -#endif - !----------------------------------------------------------------- ! 
write data !----------------------------------------------------------------- @@ -523,6 +426,7 @@ subroutine coupling_prep (iblk) use ice_blocks, only: block, nx_block, ny_block use ice_calendar, only: dt, nstreams use ice_constants, only: c0, c1, puny, rhofresh + use ice_coupling, only: top_layer_Tandk_run, sfcflux_to_ocn use ice_domain_size, only: ncat use ice_flux, only: alvdf, alidf, alvdr, alidr, albice, albsno, & albpnd, albcnt, apeff_ai, coszen, fpond, fresh, & @@ -531,13 +435,14 @@ subroutine coupling_prep (iblk) fswthru_ai, fhocn, fswthru, scale_factor, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyt, & fsens, flat, fswabs, flwout, evap, Tref, Qref, faero_ocn, & - fsurfn_f, flatn_f, scale_fluxes, frzmlt_init, frzmlt + fsurfn_f, flatn_f, scale_fluxes, frzmlt_init, frzmlt, & + snowfrac, snowfracn, evap_ice, evap_snow use ice_grid, only: tmask use ice_ocean, only: oceanmixed_ice, ocean_mixed_layer use ice_shortwave, only: alvdfn, alidfn, alvdrn, alidrn, & albicen, albsnon, albpndn, apeffn use ice_state, only: aicen, aice, aice_init, nbtrcr - use ice_therm_shared, only: calc_Tsfc + use ice_therm_shared, only: calc_Tsfc, heat_capacity use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop use ice_zbgc_shared, only: flux_bio, flux_bio_ai @@ -589,6 +494,7 @@ subroutine coupling_prep (iblk) albsno(i,j,iblk) = c0 albpnd(i,j,iblk) = c0 apeff_ai(i,j,iblk) = c0 + snowfrac(i,j,iblk) = c0 ! for history averaging cszn = c0 @@ -621,6 +527,8 @@ subroutine coupling_prep (iblk) apeff_ai(i,j,iblk) = apeff_ai(i,j,iblk) & ! for history + apeffn(i,j,n,iblk)*aicen(i,j,n,iblk) + snowfrac(i,j,iblk) = snowfrac(i,j,iblk) & ! 
for history + + snowfracn(i,j,n,iblk)*aicen(i,j,n,iblk) enddo enddo enddo @@ -684,6 +592,7 @@ subroutine coupling_prep (iblk) fsens (:,:,iblk), flat (:,:,iblk), & fswabs (:,:,iblk), flwout (:,:,iblk), & evap (:,:,iblk), & + evap_ice (:,:,iblk), evap_snow(:,:,iblk),& Tref (:,:,iblk), Qref (:,:,iblk), & fresh (:,:,iblk), fsalt (:,:,iblk), & fhocn (:,:,iblk), fswthru (:,:,iblk), & @@ -713,76 +622,18 @@ subroutine coupling_prep (iblk) call ice_timer_stop(timer_couple) ! atm/ocn coupling - end subroutine coupling_prep - -!======================================================================= -! -! If surface heat fluxes are provided to CICE instead of CICE calculating -! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can -! be provided at points which do not have ice. (This is could be due to -! the heat fluxes being calculated on a lower resolution grid or the -! heat fluxes not recalculated at every CICE timestep.) At ice free points, -! conserve energy and water by passing these fluxes to the ocean. -! -! author: A. McLaren, Met Office - - subroutine sfcflux_to_ocn(nx_block, ny_block, & - tmask, aice, & - fsurfn_f, flatn_f, & - fresh, fhocn) - - use ice_domain_size, only: ncat - - integer (kind=int_kind), intent(in) :: & - nx_block, ny_block ! block dimensions - - logical (kind=log_kind), dimension (nx_block,ny_block), & - intent(in) :: & - tmask ! land/boundary mask, thickness (T-cell) - - real (kind=dbl_kind), dimension(nx_block,ny_block), & - intent(in):: & - aice ! initial ice concentration - - real (kind=dbl_kind), dimension(nx_block,ny_block,ncat), & - intent(in) :: & - fsurfn_f, & ! net surface heat flux (provided as forcing) - flatn_f ! latent heat flux (provided as forcing) - - real (kind=dbl_kind), dimension(nx_block,ny_block), & - intent(inout):: & - fresh , & ! fresh water flux to ocean (kg/m2/s) - fhocn ! 
actual ocn/ice heat flx (W/m**2) - -!ars599: 08052014 not sure but add auscom to try, copy from dhb599 fm -!#ifdef CICE_IN_NEMO -#ifdef AusCOM - - ! local variables - integer (kind=int_kind) :: & - i, j, n ! horizontal indices - - real (kind=dbl_kind) :: & - rLsub ! 1/Lsub - - rLsub = c1 / Lsub - - do n = 1, ncat - do j = 1, ny_block - do i = 1, nx_block - if (tmask(i,j) .and. aice(i,j) <= puny) then - fhocn(i,j) = fhocn(i,j) & - + fsurfn_f(i,j,n) + flatn_f(i,j,n) - fresh(i,j) = fresh(i,j) & - + flatn_f(i,j,n) * rLsub - endif - enddo ! i - enddo ! j - enddo ! n +! AEW: Calculate new top layer temp and effective cond after each +! timestep + if (.not. calc_Tsfc .and. heat_capacity) then + !---------------------------------------- + ! Get top layer temperature and effective conductivity + ! for passing to atmos + + call top_layer_Tandk_run (iblk) + endif -#endif + end subroutine coupling_prep - end subroutine sfcflux_to_ocn !======================================================================= diff --git a/drivers/access/cpl_arrays_setup.F90 b/drivers/access/cpl_arrays_setup.F90 index 9b7320ba..b7727e9f 100644 --- a/drivers/access/cpl_arrays_setup.F90 +++ b/drivers/access/cpl_arrays_setup.F90 @@ -25,8 +25,8 @@ module cpl_arrays_setup ! (22) long wave radiation (net down) um_lwflx ! (23) sensible heat flux um_shflx ! (24) surface pressure um_press -! (25) co2 um_co2 -! (26) wind speed um_wnd +! (25) co2 um_co2 +! (26) wind speed um_wnd ! ! B> ocn (MOM4) ==> ice (CICE) [* at T or U cell center *] ! @@ -37,8 +37,8 @@ module cpl_arrays_setup ! (5) sea surface gradient (zonal) (m/m) ocn_sslx ! (6) sea surface gradient (meridional)(m/m) ocn_ssly ! (7) potential ice frm/mlt heatflux (W/m^2) ocn_pfmice -! (8) co2 () ocn_co2 -! (9) co2 flux () ocn_co2fx +! (8) co2 () ocn_co2 +! (9) co2 flux () ocn_co2fx ! ! C> ice (CICE) ==> atm (UM) [* all from T to T, U or V cell center *] ! @@ -48,8 +48,8 @@ module cpl_arrays_setup ! (12 - 16) ice thickness (m ?) ia_thikn(,,1:5) ! 
(17) ice/ocn velocity 'zonal' ia_uvel ! (18) ice/ocn velocity 'meridional' ia_vvel -! (19) co2 ia_co2 -! (20) co2 flux ia_co2fx +! (19) co2 ia_co2 +! (20) co2 flux ia_co2fx ! ! D> ice (CICE) ==> ocn (MOM4) [* at T or U cell center *] ! @@ -73,19 +73,25 @@ module cpl_arrays_setup !(12) pressure io_press !(13) ice concentration (fraction) io_aice ! -! Seperate ice melting/forcation associated water fluxes from the rainfall field: +! Seperate ice melting/formation associated water fluxes from the rainfall field: ! !(14) ice melt waterflux io_melt !(15) ice form waterflux io_form !(16) co2 io_co2 !(17) wind speed io_wnd ! +! 2 more added for "iceberg melt" (induced from land ice change): +! +!(18) iceberg melt waterflux io_licefw +!(19) iceberg melt heatflux io_liceht +! ! Therefore, currently we have ! -! 31 in, 33 out => thus we set jpfldout=33, jpfldin=31 in cpl_parameters. -! -!---------------------------------------------------------------------------- -! This module will be largely modified/'simplifed after ACCESS works ! +! *for ACCESS-ESM1.x, (26 + 9) in, (20 + 19) out => thus jpfldout=39, jpfldin=35 in cpl_parameters. +! for ACCESS-CM2, 47 in, 63 out => thus jpfldout=63, jpfldin=47 in cpl_parameters. +! now (20171024) 47 in, 65 out 65 47 +!---------------------------------------------------------------------------------- +! This module will be largely modified/'simplifed' after ACCESS works ! !============================================================================ !cice stuff @@ -99,10 +105,11 @@ module cpl_arrays_setup um_thflx, um_pswflx, um_runoff, um_wme, um_snow, um_rain, & um_evap, um_lhflx, um_taux, um_tauy, & um_swflx, um_lwflx, um_shflx, um_press,um_co2, um_wnd + real(kind=dbl_kind), dimension(:,:,:,:), allocatable :: & um_tmlt, um_bmlt -! CORE runoff remapped onto the AusCOM grid (prepared by S.Marsland) +! 
CORE runoff remapped onto the AusCOM grid (optional) real(kind=dbl_kind), dimension(:,:,:), allocatable :: & core_runoff @@ -118,14 +125,15 @@ module cpl_arrays_setup ! Fields out: !============ real(kind=dbl_kind),dimension(:,:,:), allocatable :: & !to atm (timeaveraged) - ia_sst, ia_uvel, ia_vvel, ia_co2, ia_co2fx + ia_sst, ia_uvel, ia_vvel, ia_co2, ia_co2fx !!!, ia_sstfz real(kind=dbl_kind), dimension(:,:,:,:), allocatable :: & ia_aicen, ia_snown, ia_thikn real(kind=dbl_kind),dimension(:,:,:), allocatable :: & !to ocn (time averaged) io_strsu, io_strsv, io_rain, io_snow, io_stflx, io_htflx, io_swflx, & io_qflux, io_shflx, io_lwflx, io_runof, io_press, io_aice, & - io_melt, io_form, io_co2, io_wnd + io_melt, io_form, io_co2, io_wnd, & + io_licefw, io_liceht !2 more added 20171024 ! Temporary arrays !================== @@ -135,10 +143,15 @@ module cpl_arrays_setup maiu, muvel, mvvel real(kind=dbl_kind), dimension(:,:,:,:), allocatable :: & maicen, msnown, mthikn +real(kind=dbl_kind), dimension(:,:,:,:), allocatable :: & !BX: just in case...... + maicen_saved ! 2. ice fields averaged over IO cpl interval: real(kind=dbl_kind),dimension(:,:,:), allocatable :: & maice, mstrocnxT, mstrocnyT, mfresh, mfsalt, mfhocn, mfswthru, msicemass +!BX---extra one: (maybe better save the IA interval one......anyway:) +!real(kind=dbl_kind),dimension(:,:,:), allocatable :: & +! maice_saved ! 3. 
ocn fields averaged over IA cpl interval: real(kind=dbl_kind),dimension(:,:,:), allocatable :: & @@ -150,6 +163,15 @@ module cpl_arrays_setup real(kind=dbl_kind),dimension(:,:,:), allocatable :: & sicemass !ice mass +real(kind=dbl_kind),dimension(:,:,:), allocatable :: & + gicebergfw !monthly iceberg flux on global domain +real(kind=dbl_kind),dimension(:,:), allocatable :: & + gtarea, & !tarea on global domain + grunoff !runoff on global domain + +real(kind=dbl_kind),dimension(:), allocatable :: & + ticeberg_s, ticeberg_n !monthly land ice off Anrarctica and Greenland (NH) + !=========================================================================== end module cpl_arrays_setup diff --git a/drivers/access/cpl_forcing_handler.F90 b/drivers/access/cpl_forcing_handler.F90 index 07e0590d..b45788cf 100644 --- a/drivers/access/cpl_forcing_handler.F90 +++ b/drivers/access/cpl_forcing_handler.F90 @@ -1,33 +1,34 @@ MODULE cpl_forcing_handler ! -! It contains subroutines handling coupling fields. They are -! -! nullify_i2o_fluxes: -! tavg_i2o_fluxes: -! ............... -! ............... +! It contains subroutines handling coupling fields. ! use ice_blocks use ice_forcing use ice_read_write use ice_domain_size use ice_domain, only : distrb_info, nblocks -use ice_flux !forcing data definition (Tair, Qa, uocn, etc.) -use ice_state, only : aice, aicen, trcr !ice concentration and tracers +use ice_flux !forcing data definition (Tair, Qa, uocn, etc.) 
+ !Tn_top, keffn_top ...(for multilayer configuration) +!use ice_state, only : aice, aicen, trcr, trcrn, nt_hpnd !ice concentration and tracers +use ice_state, only : aice, aicen, trcr !!!, trcrn, nt_hpnd, nt_Tsfc !ice concentration and tracers use ice_state, only: uvel, vvel, vsnon, vicen use ice_gather_scatter -!ars599: 11042014: use all ice_constants -!use ice_constants, only : gravit, Lvap, Lsub +use ice_broadcast use ice_constants use ice_grid, only : tmask, to_ugrid use ice_communicate, only : my_task, master_task !use ice_ocean, only : cprho use ice_exit, only : abort_ice +use ice_shortwave, only : apeffn +use ice_grid, only: tarea +use ice_calendar, only: month use cpl_parameters use cpl_netcdf_setup use cpl_arrays_setup +use ice_calendar, only: month + implicit none real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & @@ -35,7 +36,7 @@ MODULE cpl_forcing_handler contains -!=============================================================================== +!================================================= subroutine get_core_runoff(fname, vname, nrec) ! read in the remapped core runoff data (S.Marsland) which will be used to replace ! the ncep2 runoff sent from matm via coupler @@ -63,7 +64,7 @@ subroutine get_core_runoff(fname, vname, nrec) return end subroutine get_core_runoff -!=============================================================================== +!================================================= subroutine get_time0_sstsss(fname, nmonth) ! This routine is to be used only once at the beginning at an exp. @@ -103,7 +104,7 @@ subroutine get_time0_sstsss(fname, nmonth) return end subroutine get_time0_sstsss -!=============================================================================== +!================================================= ! temporary use ... 
subroutine read_access_a2i_data(fname,nrec,istep) @@ -161,7 +162,7 @@ subroutine read_access_a2i_data(fname,nrec,istep) end subroutine read_access_a2i_data -!============================================================================= +!================================================= subroutine read_restart_i2a(fname, sec) !'i2a.nc', 0) ! read ice to atm coupling fields from restart file, and send to atm module @@ -195,8 +196,11 @@ subroutine read_restart_i2a(fname, sec) !'i2a.nc', 0) call ice_read_nc(ncid, 1, 'icethk04', ia_thikn(:,:,4,:), dbug) call ice_read_nc(ncid, 1, 'icethk05', ia_thikn(:,:,5,:), dbug) call ice_read_nc(ncid, 1, 'isst_ia', ia_sst, dbug) - call ice_read_nc(ncid, 1, 'uvel_ia', ia_uvel, dbug) - call ice_read_nc(ncid, 1, 'vvel_ia', ia_vvel, dbug) + call ice_read_nc(ncid, 1, 'uvel_ia', ia_uvel, dbug) + call ice_read_nc(ncid, 1, 'vvel_ia', ia_vvel, dbug) + call ice_read_nc(ncid, 1, 'co2_i2', ia_co2, dbug) + call ice_read_nc(ncid, 1, 'co2fx_i2', ia_co2fx, dbug) + if (my_task == master_task) then call ice_close_nc(ncid) write(il_out,*) '(read_restart_i2a) has read in 18 i2a fields.' @@ -211,8 +215,7 @@ subroutine read_restart_i2a(fname, sec) !'i2a.nc', 0) endif end subroutine read_restart_i2a - -!============================================================================= +!================================================= subroutine read_restart_i2asum(fname, sec) !'i2a.nc', 0) ! 
read ice to atm coupling fields from restart file, and send to atm module @@ -245,12 +248,15 @@ subroutine read_restart_i2asum(fname, sec) !'i2a.nc', 0) call ice_read_nc(ncid, 1, 'mthikn3', mthikn(:,:,3,:), dbug) call ice_read_nc(ncid, 1, 'mthikn4', mthikn(:,:,4,:), dbug) call ice_read_nc(ncid, 1, 'mthikn5', mthikn(:,:,5,:), dbug) - call ice_read_nc(ncid, 1, 'msst', msst, dbug) - call ice_read_nc(ncid, 1, 'mssu', mssu, dbug) - call ice_read_nc(ncid, 1, 'mssv', mssv, dbug) - call ice_read_nc(ncid, 1, 'muvel', muvel, dbug) - call ice_read_nc(ncid, 1, 'mvvel', mvvel, dbug) - call ice_read_nc(ncid, 1, 'maiu', maiu, dbug) + call ice_read_nc(ncid, 1, 'msst', msst, dbug) + call ice_read_nc(ncid, 1, 'mssu', mssu, dbug) + call ice_read_nc(ncid, 1, 'mssv', mssv, dbug) + call ice_read_nc(ncid, 1, 'muvel', muvel, dbug) + call ice_read_nc(ncid, 1, 'mvvel', mvvel, dbug) + call ice_read_nc(ncid, 1, 'maiu', maiu, dbug) + ! + !call ice_read_nc(ncid, 1, 'maice_ia', maice_ia, dbug) + if (my_task == master_task) then call ice_close_nc(ncid) write(il_out,*) '(read_restart_i2asum) has read in 21 i2a fields.' @@ -265,7 +271,7 @@ subroutine read_restart_i2asum(fname, sec) !'i2a.nc', 0) endif end subroutine read_restart_i2asum -!============================================================================== +!================================================= subroutine put_restart_i2a(fname, sec) ! call this subroutine after called get_restart_oi2 ! it uses ocn_sst etc to calculate average ocn fields which will be used to send @@ -293,7 +299,7 @@ subroutine put_restart_i2a(fname, sec) end subroutine put_restart_i2a -!=============================================================================== +!================================================= subroutine get_restart_o2i(fname) ! 
To be called at beginning of each run trunk to read in restart o2i fields @@ -335,7 +341,7 @@ subroutine get_restart_o2i(fname) return end subroutine get_restart_o2i -!=============================================================================== +!================================================= subroutine get_restart_mice(fname) ! Called at beginning of the run to get 'last' IO cpl int T-M ice variables @@ -352,9 +358,15 @@ subroutine get_restart_mice(fname) dbug = .true. if ( file_exist(fname) ) then if (my_task==0) then - write(il_out,*) '(get_restart_mice) reading in mice variables......' + write(il_out,*) '(get_restart_mice) opening file: ', fname endif + call ice_open_nc(fname, ncid_o2i) + call ice_read_nc(ncid_o2i, 1, 'maicen1', maicen_saved(:,:,1,:), dbug) + call ice_read_nc(ncid_o2i, 1, 'maicen2', maicen_saved(:,:,2,:), dbug) + call ice_read_nc(ncid_o2i, 1, 'maicen3', maicen_saved(:,:,3,:), dbug) + call ice_read_nc(ncid_o2i, 1, 'maicen4', maicen_saved(:,:,4,:), dbug) + call ice_read_nc(ncid_o2i, 1, 'maicen5', maicen_saved(:,:,5,:), dbug) call ice_read_nc(ncid_o2i, 1, 'maice', maice, dbug) call ice_read_nc(ncid_o2i, 1, 'mstrocnxT', mstrocnxT, dbug) call ice_read_nc(ncid_o2i, 1, 'mstrocnyT', mstrocnyT, dbug) @@ -363,6 +375,7 @@ subroutine get_restart_mice(fname) call ice_read_nc(ncid_o2i, 1, 'mfhocn', mfhocn, dbug) call ice_read_nc(ncid_o2i, 1, 'mfswthru', mfswthru, dbug) call ice_read_nc(ncid_o2i, 1, 'msicemass', msicemass, dbug) + write(il_out,*) '(get_restart_mice) ALL variables read in! ' if (my_task == master_task) then call ice_close_nc(ncid_o2i) @@ -379,7 +392,83 @@ subroutine get_restart_mice(fname) return end subroutine get_restart_mice -!=============================================================================== + +!================================================= +subroutine get_lice_discharge(fname) + +! Called at beginning of each run trunk to read in land ice discharge mask or iceberg +! (off Antarctica and Greenland). 
+ +implicit none + +character(len=*), intent(in) :: fname +character*80 :: myvar = 'ficeberg' +integer(kind=int_kind) :: ncid_i2o, im, k, i, j +logical :: dbug = .true. + +call ice_open_nc(trim(fname), ncid_i2o) + +write(il_out,*) '(get_lice_discharge) opened datafile: ', trim(fname) +write(il_out,*) '(get_lice_discharge) ncid_i2o= ', ncid_i2o + +if (iceberg .lt. 1 .or. iceberg .gt. 4) then + write(il_out,*) '(get_lice_discharge) in ESM only supports iceberg = 1,2,3,4) ' + call abort_ice('CICE stopped: ESM only supports iceberg = 1,2,3,4. Please set it to 1,2,3,4') +else + call gather_global(gtarea, tarea, master_task, distrb_info) + select case (iceberg) + case (1); myvar = 'FICEBERG_AC2' + case (2); myvar = 'FICEBERG_GC3' + case (3); myvar = 'FICEBERG_AC2_AVE' + case (4); myvar = 'FICEBERG_GC3_AVE' + end select + write(il_out,*)'(get_lice_discharge), iceberg = ', iceberg + write(il_out,'(a,a)') '(get_lice_discharge) reading in iceberg data, myvar= ',trim(myvar) + do im = 1, 12 + write(il_out,*) '(get_lice_discharge) reading in data, month= ',im + call ice_read_nc(ncid_i2o, im, trim(myvar), vwork, dbug) + + ! 
Restrict iceberg fluxes to ocean points + where (tmask) + vwork = vwork + else where + vwork = c0 + end where + + call gather_global(gwork, vwork, master_task, distrb_info) + + if ( my_task == master_task ) then + gicebergfw(:,:,im) = gwork(:,:) + + ticeberg_s(im) = 0.0 + do j = 1, iceberg_je_s !1, ny_global/2 (iceberg_je_s smaller than ny_global/2 thus saves time) + do i = 1, nx_global + ticeberg_s(im) = ticeberg_s(im) + gtarea(i,j) * gwork(i,j) + enddo + enddo + ticeberg_n(im) = 0.0 + do j = iceberg_js_n, ny_global !ny_global/2 + 1, ny_global !(iceberg_js_n bigger than ny_global/2 +1) + do i = 1, nx_global + ticeberg_n(im) = ticeberg_n(im) + gtarea(i,j) * gwork(i,j) + enddo + enddo + + write(il_out, *) '(get_lice_discharge) check: im, ticeberg_s, ticeberg_n = ',im, ticeberg_s(im), ticeberg_n(im) + endif + + enddo + +endif +if (my_task == master_task) then + call ice_close_nc(ncid_i2o) +endif + +return + +end subroutine get_lice_discharge + + +!================================================= subroutine get_restart_i2o(fname) ! To be called at beginning of each run trunk to read in restart i2o fields @@ -417,11 +506,14 @@ subroutine get_restart_i2o(fname) case ('form_io'); io_form = vwork case ('co2_i1'); io_co2 = vwork case ('wnd_i1'); io_wnd = vwork +!2 more added 20171024: + case ('lice_fw'); io_licefw = vwork + case ('lice_ht'); io_liceht = vwork end select enddo if (my_task == master_task) then call ice_close_nc(ncid_i2o) - write(il_out,*) '(get_time0_i2o_fields) has read in 11 i2o fields.' + write(il_out,*) '(get_time0_i2o_fields) has read in 19 i2o fields.' endif else if (my_task==0) then @@ -434,8 +526,11 @@ subroutine get_restart_i2o(fname) return end subroutine get_restart_i2o -!=============================================================================== +!================================================= subroutine set_sbc_ice +!------------------------- +!This routine is NOT used! +!------------------------- ! ! 
Set coupling fields (in units of GMB, from UM and MOM4) needed for CICE ! @@ -447,6 +542,9 @@ subroutine set_sbc_ice implicit none +real :: r1_S0 +real, dimension(nx_block,ny_block,nblocks) :: zzs + integer :: i,j,k,cat !*** Fields from UM (all on T cell center): @@ -470,6 +568,8 @@ subroutine set_sbc_ice else do cat = 1, ncat flatn_f(i,j,cat,k) = um_lhflx(i,j,k) * maicen(i,j,cat,k)/maice(i,j,k) + !???: flatn_f(i,j,cat,k) = um_iceevp(i,j,cat,k) * Lsub + !flatn_f(i,j,cat,k) = - um_iceevp(i,j,cat,k) * Lsub enddo endif enddo @@ -489,6 +589,10 @@ subroutine set_sbc_ice !(15) rainfall frain = max(maice * um_rain, 0.0) +!BX-20160718: "save" the ice concentration "maice" used here for scaling-up frain etc in +!ice_step for "consistency"-- +!maice_saved = maice + !*** Fields from MOM4 (SSU/V and sslx/y are on U points): !(1) freezing/melting potential @@ -519,8 +623,17 @@ subroutine set_sbc_ice !(7) surface slope ssly ss_tlty = ocn_ssly -!(as per S.O.) make sure Tf if properly initialized +!(as per S.O.) make sure Tf is properly initialized Tf (:,:,:) = -depressT*sss(:,:,:) ! freezing temp (C) +! +!B: May use different formula for Tf such as TEOS-10 formulation: +! +!r1_S0 = 0.875/35.16504 +!zzs(:,:,:) = sqrt(abs(sss(:,:,:)) * r1_S0) +!Tf(:,:,:) = ((((1.46873e-03 * zzs(:,:,:) - 9.64972e-03) * zzs(:,:,:) + & +! 2.28348e-02) * zzs(:,:,:) - 3.12775e-02) * zzs(:,:,:) + & +! 2.07679e-02) * zzs(:,:,:) - 5.87701e-02 +!Tf(:,:,:) = Tf(:,:,:) * sss(:,:,:) ! - 7.53e-4 * 5.0 !!!5.0 is depth in meters end subroutine set_sbc_ice @@ -533,28 +646,36 @@ subroutine get_sbc_ice ! for the "nsbc = 5" case. ! ! It should be called after calling "from_atm" and "from_ocn". -! -! *** This routine is used/called within ice time loop (itap) -! *** in case "set_sbc_ice" call (outside the itap loop) fails -! *** which is unfortunately the case the moment (Jan2010) ! 
!------------------------------------------------------------------------------- implicit none +real :: r1_S0 +real, dimension(nx_block,ny_block,nblocks) :: zzs + integer :: i,j,k,cat ! Fields from UM (all on T cell center): !(1) windstress taux: -strax = um_taux * aice !*tmask ? +strax = um_taux * aice !(2) windstress tauy: -stray = um_tauy * aice !*tmask ? +stray = um_tauy * aice !(3) surface downward latent heat flux (==> multi_category) do j = 1, ny_block do i = 1, nx_block do k = 1, nblocks + ! Notes from ACCESS-CM2: + !BX 20160826: as in NEMO sbccpl.F90, there is no "open water field" um_lhflx involved: + ! qla_ice(:,:,1:jpl) = - frcv(jpr_ievp)%z3(:,:,1:jpl) * lsub + !------------------------------------------------------------------------------------- + ! CM2 Uses this, as um_lhflx is not available: + ! do cat = 1, ncat + ! flatn_f(i,j,cat,k) = - um_iceevp(i,j,cat,k) * Lsub + ! enddo + if (aice(i,j,k)==0.0) then do cat = 1, ncat flatn_f(i,j,cat,k) = 0.0 @@ -577,19 +698,8 @@ subroutine get_sbc_ice fsurfn_f (:,:,cat,:) = um_tmlt(:,:,cat,:) + um_bmlt(:,:,cat,:) enddo -!!! 20130419: Martin Dix's investigation suggests that frain and fsnow should NOT be scaled by -!!! aice here. This scaling would caused double-scaling with "fresh" calculation.. -!(14) snowfall -!!!fsnow = max(aice * um_snow,0.0) -!fsnow = max(um_snow,0.0) !no more scaling as per M.D.! -!(15) rainfall -!!!frain = max(aice * um_rain,0.0) -!frain = max(um_rain,0.0) !no more scaling as per M.D.! -!!! 20130420: I dug deeper and checked all the associated steps of "fresh" calculation, found -!!! the original weighting is CORRECT! so back to *aice: fsnow = max(aice * um_snow,0.0) frain = max(aice * um_rain,0.0) -!!!------------------------------------------------------------------------------------------ ! Fields from MOM4 (SSU/V and sslx/y are on U points): @@ -621,12 +731,21 @@ subroutine get_sbc_ice ss_tlty = ocn_ssly ! * (as per S. 
O'Farrel) make sure Tf if properly initialized -sss = ocn_sss +!----- should use eos formula to calculate Tf for "consistency" with GCx ----! Tf (:,:,:) = -depressT*sss(:,:,:) ! freezing temp (C) - +! +!B: May use different formula for Tf such as TEOS-10 formulation: +! +!r1_S0 = 0.875/35.16504 +!zzs(:,:,:) = sqrt(abs(sss(:,:,:)) * r1_S0) +!Tf(:,:,:) = ((((1.46873e-03 * zzs(:,:,:) - 9.64972e-03) * zzs(:,:,:) + & +! 2.28348e-02) * zzs(:,:,:) - 3.12775e-02) * zzs(:,:,:) + & +! 2.07679e-02) * zzs(:,:,:) - 5.87701e-02 +!Tf(:,:,:) = Tf(:,:,:) * sss(:,:,:) ! - 7.53e-4 * 5.0 !!!5.0 is depth in meters +! end subroutine get_sbc_ice -!=============================================================================== +!================================================= subroutine save_restart_o2i(fname, nstep) ! output the last o2i forcing data received in cice by the end of the run, @@ -670,7 +789,7 @@ subroutine save_restart_o2i(fname, nstep) return end subroutine save_restart_o2i -!============================================================================== +!================================================= subroutine save_restart_i2asum(fname, nstep) ! output the last i2a forcing data in cice at the end of the run, ! 
to be read in at the beginning of next run by cice and sent to atm @@ -682,7 +801,7 @@ subroutine save_restart_i2asum(fname, nstep) integer(kind=int_kind) :: ncid integer(kind=int_kind) :: jf, jfs, ll, ilout -integer(kind=int_kind), parameter :: sumfldin = 21 +integer(kind=int_kind), parameter :: sumfldin = 46 !21 character(len=8), dimension(sumfldin) :: sumfld sumfld(1)='msst' @@ -720,20 +839,20 @@ subroutine save_restart_i2asum(fname, nstep) case('mvvel'); vwork = mvvel case('maiu'); vwork = maiu case('maicen1'); vwork = maicen(:,:,1,:) - case('maicen2'); vwork =maicen(:,:,2,:) - case('maicen3'); vwork =maicen(:,:,3,:) - case('maicen4'); vwork =maicen(:,:,4,:) - case('maicen5'); vwork =maicen(:,:,5,:) - case('mthikn1'); vwork =mthikn(:,:,1,:) - case('mthikn2'); vwork =mthikn(:,:,2,:) - case('mthikn3'); vwork =mthikn(:,:,3,:) - case('mthikn4'); vwork =mthikn(:,:,4,:) - case('mthikn5'); vwork =mthikn(:,:,5,:) - case('msnown1'); vwork =msnown(:,:,1,:) - case('msnown2'); vwork =msnown(:,:,2,:) - case('msnown3'); vwork =msnown(:,:,3,:) - case('msnown4'); vwork =msnown(:,:,4,:) - case('msnown5'); vwork =msnown(:,:,5,:) + case('maicen2'); vwork = maicen(:,:,2,:) + case('maicen3'); vwork = maicen(:,:,3,:) + case('maicen4'); vwork = maicen(:,:,4,:) + case('maicen5'); vwork = maicen(:,:,5,:) + case('mthikn1'); vwork = mthikn(:,:,1,:) + case('mthikn2'); vwork = mthikn(:,:,2,:) + case('mthikn3'); vwork = mthikn(:,:,3,:) + case('mthikn4'); vwork = mthikn(:,:,4,:) + case('mthikn5'); vwork = mthikn(:,:,5,:) + case('msnown1'); vwork = msnown(:,:,1,:) + case('msnown2'); vwork = msnown(:,:,2,:) + case('msnown3'); vwork = msnown(:,:,3,:) + case('msnown4'); vwork = msnown(:,:,4,:) + case('msnown5'); vwork = msnown(:,:,5,:) end select call gather_global(gwork, vwork, master_task, distrb_info) if (my_task == 0) then @@ -746,7 +865,7 @@ subroutine save_restart_i2asum(fname, nstep) end subroutine save_restart_i2asum 
-!=============================================================================== +!================================================= subroutine save_restart_mice(fname, nstep) ! output ice variable averaged over the last IO cpl int of this run, @@ -765,6 +884,28 @@ subroutine save_restart_mice(fname, nstep) call write_nc_1Dtime(real(nstep), 1, 'time', ncid) endif +! maicen_saved appears to be the same as maicen in CICE5-UM7.3 +!B: 20170825 ==> add maicen_saved for atm_icefluxes_back2GBM calculation! +! note maicen_saved is the last ia interval mean. +vwork(:,:,:) = maicen_saved(:,:,1,:) +call gather_global(gwork, vwork, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'maicen1', gwork, 2, il_im, il_jm, 1, ilout=il_out) +vwork(:,:,:) = maicen_saved(:,:,2,:) +call gather_global(gwork, vwork, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'maicen2', gwork, 2, il_im, il_jm, 1, ilout=il_out) +vwork(:,:,:) = maicen_saved(:,:,3,:) +call gather_global(gwork, vwork, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'maicen3', gwork, 2, il_im, il_jm, 1, ilout=il_out) +vwork(:,:,:) = maicen_saved(:,:,4,:) +call gather_global(gwork, vwork, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'maicen4', gwork, 2, il_im, il_jm, 1, ilout=il_out) +vwork(:,:,:) = maicen_saved(:,:,5,:) +call gather_global(gwork, vwork, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'maicen5', gwork, 2, il_im, il_jm, 1, ilout=il_out) +!b. 
+ +!The following fields are actually the ice state of last timestep +!(no time-averaging is required in each timestep io coupling, see time_average_fields_4_i2o) vwork = maice call gather_global(gwork, vwork, master_task, distrb_info) if (my_task == 0) call write_nc2D(ncid, 'maice', gwork, 2, il_im, il_jm, 1, ilout=il_out) @@ -795,23 +936,22 @@ subroutine save_restart_mice(fname, nstep) return end subroutine save_restart_mice -!=============================================================================== +!================================================= subroutine get_i2a_fields -implicit none - ! all fields (except for vector) obtained here are all on T cell center !(1) ocean surface temperature ia_sst(:,:,:) = msst(:,:,:) !(2-3) ice/ocn combined surface velocity -!CH: should use "aiu", not aice! ia_uvel(:,:,:) = mssu(:,:,:) * (1. - maiu(:,:,:)) + muvel(:,:,:) * maiu(:,:,:) ia_vvel(:,:,:) = mssv(:,:,:) * (1. - maiu(:,:,:)) + mvvel(:,:,:) * maiu(:,:,:) !(4-8) ice concentration ia_aicen(:,:,:,:) = maicen(:,:,:,:) +!BX: save it for use in atm_icefluxes_back2GBM --- +maicen_saved = maicen !(9-13) ice thickness ia_thikn(:,:,:,:) = mthikn(:,:,:,:) @@ -819,21 +959,29 @@ subroutine get_i2a_fields !(14-18) snow thickness ia_snown(:,:,:,:) = msnown(:,:,:,:) +!(19-20) co2 flux stuff ia_co2 = mco2 ia_co2fx = mco2fx return end subroutine get_i2a_fields -!=============================================================================== +!================================================= subroutine get_i2o_fields ! All fluxes should be in GBM units before passing into coupler. ! e.g., io_htflx(:,:,:) = fhocn_gbm(:,:,:) implicit none - real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks) :: pice +integer (kind=int_kind) :: i,j,k + +real (kind=dbl_kind) :: & + trunoff_s = c0, & + trunoff_n = c0, & + r_s = c1, & + r_n = c1, & + r_runoff= c1 !=(1-min(r_max_iceberg. r_s(or r_n)) ! Fields obtained here are all at T cell center. before being sent to MOM4, vector ! 
(Taux, Tauy) should be shifted on to U point as required @@ -844,17 +992,16 @@ subroutine get_i2o_fields ! have already been 'weighted' using aice (when calculated in "evp_finish". ! But, this weight has been removed in strocnx/yT (see "evp_finish"), therfore ! we need put it on again here. -io_strsu = um_taux * (1. - maice) - mstrocnxT * maice -io_strsv = um_tauy * (1. - maice) - mstrocnyT * maice +io_strsu = um_taux * (c1 - maice) - mstrocnxT * maice +io_strsv = um_tauy * (c1 - maice) - mstrocnyT * maice !(3) freshwater flux to ocean: rainfall (+ ice melting water flux ?) -io_rain = um_rain * (1. - maice) -!!CH: confirmed: -!!if (ice_fwflux) io_rain = io_rain + mfresh !always .t. -!!NOTE mfresh is now splitted into melt (14) and form (15) and passed into ocn seperately. +io_rain = um_rain * (c1 - maice) +!202412: fixing watermass loss from ocean by adding a small, constant fwflux into rain-- +io_rain = io_rain + add_lprec !(4) freshwater flux to ocean: snowfall -io_snow = um_snow * (1. - maice) +io_snow = um_snow * (c1 - maice) !(5) salt flux to ocean io_stflx = mfsalt @@ -865,66 +1012,139 @@ subroutine get_i2o_fields !(7) short wave radiation !(CH: the (1-aice) weight should not be here 'cos all fluxes passed in from ! UM have already been aice-weighted when they are calculated there!!!) -!io_swflx = um_swflx * (1. - maice) + mfswthru +!io_swflx = um_swflx * (c1 - maice) + mfswthru io_swflx = um_swflx + mfswthru -!!!20100616: test for more swflx -!!!io_swflx = 1.064 * um_swflx + mfswthru !(8) latent heat flux (positive out of ocean as required by MOM4) io_qflux = um_evap * Lvap !Note it's already weighted in UM for open sea. -!20101210: NOT sure about the ice weghting in UM, 'cos the ice area does see -! non-zero (positive) evap. -if (imsk_evap) then - io_qflux = um_evap * Lvap * (1. - maice) -endif !(9) sensible heat flux (positive out of ocean as required by MOM4) -!io_shflx = um_shflx * (1. 
- maice) io_shflx = um_shflx !(10) net long wave radiation positive down -!io_lwflx = um_lwflx * (1. - maice) io_lwflx = um_lwflx -!(11) runoff (!check the incoming field! pattern? remapping ok? conserved? ...) -io_runof = um_runoff -! CHECK with SM about the annual cycle of core-runoff! (we only have annual mean) +!(11) runoff +!*** mask off "extra/useless" runoff on dry points *** +where (tmask) + io_runof = um_runoff +else where + um_runoff = c0 +end where +call gather_global(grunoff, io_runof, master_task, distrb_info) + +if (my_task == master_task) then + + trunoff_s = c0 + do j = 1, runoff_je_s + do i = 1, nx_global + trunoff_s = trunoff_s + gtarea(i,j) * grunoff(i,j) + grunoff(i,j) = grunoff(i,j) * (c1 - iceberg_rate_s) !do deduction + enddo + enddo + trunoff_n = c0 + do j = runoff_js_n, runoff_je_n + do i = runoff_is_n, runoff_ie_n + trunoff_n = trunoff_n + gtarea(i,j) * grunoff(i,j) + grunoff(i,j) = grunoff(i,j) * (c1 - iceberg_rate_n) !do deduction + enddo + enddo + !Now global runoff has been "updated" (deduction done for iceberg). 
+endif + +!distributed the resultant runoff and iceberg fluxes: +call scatter_global(vwork, grunoff, master_task, distrb_info, & + field_loc_center, field_type_scalar) +io_runof(:,:,:) = vwork(:,:,:) + +!2 new flux items associated with the iceberg discharged into ocean +!(18) water flux due to land ice melt off Antarctica and Greenland (kg/m^2/s) +!(19) heat flux due to land ice melt off Antarctica and Greenland + +!XXXXXX +IF (my_task == master_task) THEN + + gwork(:,:) = c0 + do i = 1, nx_global + do j = 1, iceberg_je_s + gwork(i, j) = gicebergfw(i, j, month) * iceberg_rate_s * trunoff_s / ticeberg_s(month) + enddo + do j = iceberg_js_n, ny_global + gwork(i, j) = gicebergfw(i, j, month) * iceberg_rate_n * trunoff_n / ticeberg_n(month) + enddo + enddo + !Now global iceberg has been defined (using the deduction from runoff) + +ENDIF +!XXXXXX + +call scatter_global(vwork, gwork, master_task, distrb_info, & + field_loc_center, field_type_scalar) +io_licefw(:,:,:) = vwork(:,:,:) !i2o field No 18. + +!Also count in the latent heat carried with the runoff part, as done below, thus allowing +!for (rough) consistency of energy exchange no matter what iceberg_rate_s/n are used. +!Warning: the follow approach would lose all the runoff LH, if runoff_lh=.false., in no-iceberg case + +if (my_task == master_task) then + if ( runoff_lh ) then + + do i = 1, nx_global + do j = 1, runoff_je_s + gwork(i,j) = gwork(i,j) + grunoff(i,j) + enddo + enddo + do i = runoff_is_n, runoff_ie_n + do j = runoff_js_n, runoff_je_n + gwork(i,j) = gwork(i,j) + grunoff(i,j) + enddo + enddo + else + !If runoff with latent heat flux crashes the model in no-iceberg case, due probably to too big LH(?) + !all the LH carried by runoff is applied to iceberg areas. 
+ do i = 1, nx_global + do j = 1, iceberg_je_s + gwork(i,j) = gwork(i,j)/max(iceberg_rate_s, 0.0001_dbl_kind) !get the whole runoff LH onto iceberg + enddo + do j = iceberg_js_n, ny_global + gwork(i,j) = gwork(i,j)/max(iceberg_rate_n, 0.0001_dbl_kind) + enddo + enddo + endif +endif + +call scatter_global(vwork, gwork, master_task, distrb_info, & + field_loc_center, field_type_scalar) +io_liceht = - vwork * Lfresh * iceberg_lh !FW converted into LH flux (W/m^2). + !!i2o field No 19. !(12) pressure pice = gravit * msicemass !---------------------------------------------------------------------------- !sicemass = rho_ice x hi + rho_snow x hs (in m) -! -! Should we set limit to the ovelying ice pressure as suggested in MOM4 code? -!(see ocean_sbc.F90) if yes, we may use following -!pice(i,j) = min(pice(i,j), gravit*rhow*max_ice_thickness) -! (note rhow = 1026 kg/m^3 here, but mom4 instead uses rho0 = 1035 kg/m^3) -! No, let mom4 handle it (see ocean_sbc.F90) -! -!as GFDL SIS, we use patm 'anormaly' and then add in the ice/snow pressure! -!29/11/2007 -!---------------------------------------------------------------------------- if (ice_pressure_on) then io_press = pice * maice endif if (air_pressure_on) then !as GFDL SIS, we use patm anormaly, i.e., taking off 1.e5 Pa ! 
- io_press(:,:,:) = io_press(:,:,:) + um_press(:,:,:) - 1.0e5 + io_press(:,:,:) = io_press(:,:,:) + um_press(:,:,:) - 1.0e5_dbl_kind endif !(13) ice concentration io_aice = maice !(14) ice melt fwflux -io_melt = max(0.0,mfresh(:,:,:)) +io_melt = max(c0,mfresh(:,:,:)) !(15) ice form fwflux -io_form = min(0.0,mfresh(:,:,:)) +io_form = min(c0,mfresh(:,:,:)) +!(16) CO2 io_co2 = um_co2 +!(17) 10m winnspeed io_wnd = um_wnd return end subroutine get_i2o_fields -!=============================================================================== +!================================================= subroutine initialize_mice_fields_4_i2o implicit none @@ -941,7 +1161,7 @@ subroutine initialize_mice_fields_4_i2o return end subroutine initialize_mice_fields_4_i2o -!=============================================================================== +!================================================= subroutine initialize_mice_fields_4_i2a implicit none @@ -957,7 +1177,7 @@ subroutine initialize_mice_fields_4_i2a return end subroutine initialize_mice_fields_4_i2a -!=============================================================================== +!================================================= subroutine initialize_mocn_fields_4_i2a implicit none @@ -971,92 +1191,51 @@ subroutine initialize_mocn_fields_4_i2a return end subroutine initialize_mocn_fields_4_i2a -!=============================================================================== +!================================================= subroutine time_average_ocn_fields_4_i2a implicit none -msst(:,:,:) = msst(:,:,:) + ocn_sst(:,:,:) * coef_cpl -mssu(:,:,:) = mssu(:,:,:) + ocn_ssu(:,:,:) * coef_cpl -mssv(:,:,:) = mssv(:,:,:) + ocn_ssv(:,:,:) * coef_cpl -mco2(:,:,:) = mco2(:,:,:) + ocn_co2(:,:,:) * coef_cpl -mco2fx(:,:,:) = mco2fx(:,:,:) + ocn_co2fx(:,:,:) * coef_cpl +msst(:,:,:) = msst(:,:,:) + ocn_sst(:,:,:) * coef_ai +mssu(:,:,:) = mssu(:,:,:) + ocn_ssu(:,:,:) * coef_ai +mssv(:,:,:) = mssv(:,:,:) + ocn_ssv(:,:,:) * coef_ai 
+mco2(:,:,:) = mco2(:,:,:) + ocn_co2(:,:,:) * coef_ai +mco2fx(:,:,:) = mco2fx(:,:,:) + ocn_co2fx(:,:,:) * coef_ai return end subroutine time_average_ocn_fields_4_i2a -!=============================================================================== -!dhb599-20131002: resuming the old 'approach' (used before 20130420) which sets -!do_scale_fluxes = .t. and thus the "flatn_f/Lsub" terms is NOT used as part of -!'fresh' and passed into ocean...since the 'evaporation out of ice surface' is -!going into atmosphere, not supposed to change the ocean water volume! -!------------------------------------------------------------------------------- - +!================================================= subroutine time_average_fields_4_i2o - +!now for each timestep io coupling, so no time-averaging is required. implicit none -maice(:,:,:) = maice(:,:,:) + aice(:,:,:) * coef_io -mstrocnxT(:,:,:) = mstrocnxT(:,:,:) + strocnxT(:,:,:) * coef_io -mstrocnyT(:,:,:) = mstrocnyT(:,:,:) + strocnyT(:,:,:) * coef_io -!20130420: possible bug due to missing term "flatn_f/Lsub" in the last update for fresh -! use scale_fluxes=.f. to avoid flux scaling by /aice -! meaning fluxes are all grid-box-mean by the end of ice_step. 
-!mfresh(:,:,:) = mfresh(:,:,:) + fresh_gbm(:,:,:) * coef_io -!mfsalt(:,:,:) = mfsalt(:,:,:) + fsalt_gbm(:,:,:) * coef_io -!mfhocn(:,:,:) = mfhocn(:,:,:) + fhocn_gbm(:,:,:) * coef_io -!mfswthru(:,:,:) = mfswthru(:,:,:) + fswthru_gbm(:,:,:) * coef_io -mfresh(:,:,:) = mfresh(:,:,:) + fresh(:,:,:) * coef_io -mfsalt(:,:,:) = mfsalt(:,:,:) + fsalt(:,:,:) * coef_io -mfhocn(:,:,:) = mfhocn(:,:,:) + fhocn(:,:,:) * coef_io -mfswthru(:,:,:) = mfswthru(:,:,:) + fswthru(:,:,:) * coef_io -!--------------------------------------------------------------------------------------- -!--------------------------------------------------------------------------------------- -msicemass(:,:,:) = msicemass(:,:,:) + sicemass(:,:,:) * coef_io +maice(:,:,:) = aice(:,:,:) +mstrocnxT(:,:,:) = strocnxT(:,:,:) +mstrocnyT(:,:,:) = strocnyT(:,:,:) +mfresh(:,:,:) = fresh(:,:,:) +mfsalt(:,:,:) = fsalt(:,:,:) +mfhocn(:,:,:) = fhocn(:,:,:) +mfswthru(:,:,:) = fswthru(:,:,:) +msicemass(:,:,:) = sicemass(:,:,:) return end subroutine time_average_fields_4_i2o -!=============================================================================== -subroutine time_average_fields_4_i2o_20130420 - -implicit none - -maice(:,:,:) = maice(:,:,:) + aice(:,:,:) * coef_io -mstrocnxT(:,:,:) = mstrocnxT(:,:,:) + strocnxT(:,:,:) * coef_io -mstrocnyT(:,:,:) = mstrocnyT(:,:,:) + strocnyT(:,:,:) * coef_io -!20130420: possible bug due to missing term "flatn_f/Lsub" in the last update -!for fresh -! use scale_fluxes=.f. to avoid flux scaling by /aice -! meaning fluxes are all grid-box-mean by the end of ice_step. 
-!mfresh(:,:,:) = mfresh(:,:,:) + fresh_gbm(:,:,:) * coef_io -!mfsalt(:,:,:) = mfsalt(:,:,:) + fsalt_gbm(:,:,:) * coef_io -!mfhocn(:,:,:) = mfhocn(:,:,:) + fhocn_gbm(:,:,:) * coef_io -!mfswthru(:,:,:) = mfswthru(:,:,:) + fswthru_gbm(:,:,:) * coef_io -mfresh(:,:,:) = mfresh(:,:,:) + fresh(:,:,:) * coef_io -mfsalt(:,:,:) = mfsalt(:,:,:) + fsalt(:,:,:) * coef_io -mfhocn(:,:,:) = mfhocn(:,:,:) + fhocn(:,:,:) * coef_io -mfswthru(:,:,:) = mfswthru(:,:,:) + fswthru(:,:,:) * coef_io -!--------------------------------------------------------------------------------------- -msicemass(:,:,:) = msicemass(:,:,:) + sicemass(:,:,:) * coef_io - -return -end subroutine time_average_fields_4_i2o_20130420 - -!=============================================================================== +!================================================= subroutine time_average_fields_4_i2a implicit none ! ice fields: -muvel(:,:,:) = muvel(:,:,:) + uvel(:,:,:) * coef_ia -mvvel(:,:,:) = mvvel(:,:,:) + vvel(:,:,:) * coef_ia -maicen(:,:,:,:) = maicen(:,:,:,:) + aicen(:,:,:,:) * coef_ia !T cat. ice concentration -mthikn(:,:,:,:) = mthikn(:,:,:,:) + vicen(:,:,:,:) * coef_ia !T cat. ice thickness -msnown(:,:,:,:) = msnown(:,:,:,:) + vsnon(:,:,:,:) * coef_ia !T cat. snow thickness +muvel(:,:,:) = muvel(:,:,:) + uvel(:,:,:) * coef_ai +mvvel(:,:,:) = mvvel(:,:,:) + vvel(:,:,:) * coef_ai +maicen(:,:,:,:) = maicen(:,:,:,:) + aicen(:,:,:,:) * coef_ai !T cat. ice concentration +mthikn(:,:,:,:) = mthikn(:,:,:,:) + vicen(:,:,:,:) * coef_ai !T cat. ice thickness +msnown(:,:,:,:) = msnown(:,:,:,:) + vsnon(:,:,:,:) * coef_ai !T cat. 
snow thickness call to_ugrid(aice, aiiu) -maiu(:,:,:) = maiu(:,:,:) + aiiu(:,:,:) * coef_ia !U cell ice concentraction +maiu(:,:,:) = maiu(:,:,:) + aiiu(:,:,:) * coef_ai !U cell ice concentraction !ocn fields: !must be done after calling from_ocn so as to get the most recently updated ocn fields, @@ -1065,7 +1244,7 @@ subroutine time_average_fields_4_i2a return end subroutine time_average_fields_4_i2a -!=============================================================================== +!================================================= subroutine check_i2a_fields(nstep) implicit none @@ -1125,7 +1304,7 @@ subroutine check_i2a_fields(nstep) return end subroutine check_i2a_fields -!============================================================================ +!================================================= subroutine check_a2i_fields(nstep) implicit none @@ -1192,7 +1371,7 @@ subroutine check_a2i_fields(nstep) return end subroutine check_a2i_fields -!============================================================================ +!================================================= subroutine check_i2o_fields(nstep, scale) implicit none @@ -1252,6 +1431,11 @@ subroutine check_i2o_fields(nstep, scale) vwork = scale * io_co2 case('wnd_i1') vwork = scale * io_wnd + !202407: 2 more fields added: + case('lice_fw') + vwork = scale * io_licefw + case('lice_ht') + vwork = scale * io_liceht end select call gather_global(gwork, vwork, master_task, distrb_info) @@ -1267,7 +1451,7 @@ subroutine check_i2o_fields(nstep, scale) return end subroutine check_i2o_fields -!============================================================================ +!================================================= subroutine check_o2i_fields(nstep) implicit none @@ -1351,7 +1535,204 @@ subroutine check_frzmlt_sst(ncfilenm) return end subroutine check_frzmlt_sst -!============================================================================ +!================================================= +subroutine 
check_i2o_uvfluxes(ncfilenm) + +!this is temporarily used to check i2o fields (uflux, vflux and maice) +!for debug purpose + +implicit none + +character*(*), intent(in) :: ncfilenm +integer(kind=int_kind) :: ncid,currstep, ilout, ll +data currstep/0/ +save currstep + +currstep=currstep+1 + +if (my_task == 0 .and. .not. file_exist(ncfilenm) ) then + call create_ncfile(ncfilenm,ncid,il_im,il_jm,ll=1,ilout=il_out) +endif + +if (my_task == 0) then + write(il_out,*) 'opening ncfile at nstep ', ncfilenm, currstep + call ncheck( nf_open(ncfilenm, nf_write,ncid) ) + call write_nc_1Dtime(real(currstep),currstep,'time',ncid) +end if + +call gather_global(gwork, maice, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'maice', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, um_taux, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'um_taux', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, um_tauy, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'um_tauy', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, mstrocnxT, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'mstrocnxT', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, mstrocnyT, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'mstrocnyT', gwork, 1, il_im,il_jm,currstep,ilout=il_out) + +if (my_task == 0) call ncheck(nf_close(ncid)) + +return +end subroutine check_i2o_uvfluxes + +!================================================= +subroutine check_ice_fields(ncfilenm) +!this is temporarily used to check ice fields immediately after ice_step +!for debug purpose + +implicit none + +character*(*), intent(in) :: ncfilenm +integer(kind=int_kind) :: ncid,currstep, ilout, ll +data currstep/0/ +save currstep + +currstep=currstep+1 + +if (my_task == 0 .and. .not. 
file_exist(ncfilenm) ) then + call create_ncfile(ncfilenm,ncid,il_im,il_jm,ll=1,ilout=il_out) +endif + +if (my_task == 0) then + write(il_out,*) 'opening ncfile at nstep ', ncfilenm, currstep + call ncheck( nf_open(ncfilenm, nf_write,ncid) ) + call write_nc_1Dtime(real(currstep),currstep,'time',ncid) +end if + +call gather_global(gwork, aice, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'aice', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, aicen(:,:,1,:), master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'aicen1', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, aicen(:,:,2,:), master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'aicen2', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, aicen(:,:,3,:), master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'aicen3', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, aicen(:,:,4,:), master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'aicen4', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, aicen(:,:,5,:), master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'aicen5', gwork, 1, il_im,il_jm,currstep,ilout=il_out) + +if (my_task == 0) call ncheck(nf_close(ncid)) + +return +end subroutine check_ice_fields + +!================================================= +subroutine check_ice_sbc_fields(ncfilenm) + +!this is temporarily used to check ice_sbc fields got from get_ice_sbc +!for debug purpose + +implicit none + +real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks) :: v3d + +character*(*), intent(in) :: ncfilenm +integer(kind=int_kind) :: ncid,currstep, ilout, ll +data currstep/0/ +save currstep + +v3d = 0.0 + +currstep=currstep+1 + +if (my_task == 0 .and. .not. 
file_exist(ncfilenm) ) then + call create_ncfile(ncfilenm,ncid,il_im,il_jm,ll=1,ilout=il_out) +endif + +if (my_task == 0) then + write(il_out,*) 'opening ncfile at nstep ', ncfilenm, currstep + call ncheck( nf_open(ncfilenm, nf_write,ncid) ) + call write_nc_1Dtime(real(currstep),currstep,'time',ncid) +end if + +call gather_global(gwork, aice, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'aice', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, strax, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'strax', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, stray, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'stray', gwork, 1, il_im,il_jm,currstep,ilout=il_out) + +v3d(:,:,:) = flatn_f(:,:,1,:) +call gather_global(gwork, v3d, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'flatn_f1', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +v3d(:,:,:) = flatn_f(:,:,2,:) +call gather_global(gwork, v3d, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'flatn_f2', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +v3d(:,:,:) = flatn_f(:,:,3,:) +call gather_global(gwork, v3d, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'flatn_f3', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +v3d(:,:,:) = flatn_f(:,:,4,:) +call gather_global(gwork, v3d, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'flatn_f4', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +v3d(:,:,:) = flatn_f(:,:,5,:) +call gather_global(gwork, v3d, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'flatn_f5', gwork, 1, il_im,il_jm,currstep,ilout=il_out) + +v3d(:,:,:) = fcondtopn_f(:,:,1,:) +call gather_global(gwork, v3d, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'fcondtopn_f1', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +v3d(:,:,:) = fcondtopn_f(:,:,2,:) +call gather_global(gwork, v3d, master_task, 
distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'fcondtopn_f2', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +v3d(:,:,:) = fcondtopn_f(:,:,3,:) +call gather_global(gwork, v3d, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'fcondtopn_f3', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +v3d(:,:,:) = fcondtopn_f(:,:,4,:) +call gather_global(gwork, v3d, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'fcondtopn_f4', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +v3d(:,:,:) = fcondtopn_f(:,:,5,:) +call gather_global(gwork, v3d, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'fcondtopn_f5', gwork, 1, il_im,il_jm,currstep,ilout=il_out) + +v3d(:,:,:) = fsurfn_f(:,:,1,:) +call gather_global(gwork, v3d, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'fsurfn_f1', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +v3d(:,:,:) = fsurfn_f(:,:,2,:) +call gather_global(gwork, v3d, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'fsurfn_f2', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +v3d(:,:,:) = fsurfn_f(:,:,3,:) +call gather_global(gwork, v3d, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'fsurfn_f3', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +v3d(:,:,:) = fsurfn_f(:,:,4,:) +call gather_global(gwork, v3d, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'fsurfn_f4', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +v3d(:,:,:) = fsurfn_f(:,:,5,:) +call gather_global(gwork, v3d, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'fsurfn_f5', gwork, 1, il_im,il_jm,currstep,ilout=il_out) + +call gather_global(gwork, fsnow, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'fsnow', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, frain, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'frain', gwork, 1, il_im,il_jm,currstep,ilout=il_out) + +!from ocen: +call 
gather_global(gwork, frzmlt, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'frzmlt', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, sst, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'sst', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, sss, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'sss', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, uocn, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'uocn', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, vocn, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'vocn', gwork, 1, il_im,il_jm,currstep,ilout=il_out) + +call gather_global(gwork, ss_tltx, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'ss_tltx', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, ss_tlty, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'ss_tlty', gwork, 1, il_im,il_jm,currstep,ilout=il_out) + +call gather_global(gwork, Tf, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'Tf', gwork, 1, il_im,il_jm,currstep,ilout=il_out) + +if (my_task == 0) call ncheck(nf_close(ncid)) + +return +end subroutine check_ice_sbc_fields + +!================================================= subroutine check_sstsss(ncfilenm) !this is used to check cice sst/sss : temporary use (20091019) @@ -1385,8 +1766,102 @@ subroutine check_sstsss(ncfilenm) return end subroutine check_sstsss +!================================================= +subroutine check_iceberg_fields(ncfilenm) -!============================================================================ +!this is used to check land ice fields + +implicit none + +character*(*), intent(in) :: ncfilenm +integer(kind=int_kind) :: ncid,currstep, ilout, ll +data currstep/0/ +save currstep + +currstep=currstep+1 + +if (my_task == 0 .and. .not. 
file_exist(ncfilenm) ) then + call create_ncfile(ncfilenm,ncid,il_im,il_jm,ll=1,ilout=il_out) +endif + +if (my_task == 0) then + write(il_out,*) 'opening ncfile at nstep ', ncfilenm, currstep + call ncheck( nf_open(ncfilenm, nf_write,ncid) ) + call write_nc_1Dtime(real(currstep),currstep,'time',ncid) +end if + +call gather_global(gwork, io_licefw, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'io_licefw', gwork, 1, il_im,il_jm,currstep,ilout=il_out) +call gather_global(gwork, io_liceht, master_task, distrb_info) +if (my_task == 0) call write_nc2D(ncid, 'io_liceht', gwork, 1, il_im,il_jm,currstep,ilout=il_out) + +if (my_task == 0) call ncheck(nf_close(ncid)) + +return + +end subroutine check_iceberg_fields + +!================================================= +subroutine check_landice_fields_1(ncfilenm) + +!this is used to check land ice fields + +implicit none + +character*(*), intent(in) :: ncfilenm +integer(kind=int_kind) :: ncid,currstep, ilout, ll +data currstep/0/ +save currstep + +currstep=currstep+1 + +if (my_task == 0 .and. .not. file_exist(ncfilenm) ) then + call create_ncfile(ncfilenm,ncid,il_im,il_jm,ll=1,ilout=il_out) +endif + +if (my_task == 0) then + write(il_out,*) 'opening ncfile at nstep ', ncfilenm, currstep + call ncheck( nf_open(ncfilenm, nf_write,ncid) ) + call write_nc_1Dtime(real(currstep),currstep,'time',ncid) +end if + +if (my_task == 0) call ncheck(nf_close(ncid)) + +return + +end subroutine check_landice_fields_1 + +!================================================= +subroutine check_landice_fields_2(ncfilenm) + +!this is used to check land ice fields + +implicit none + +character*(*), intent(in) :: ncfilenm +integer(kind=int_kind) :: ncid,currstep, ilout, ll +data currstep/0/ +save currstep + +currstep=currstep+1 + +if (my_task == 0 .and. .not. 
file_exist(ncfilenm) ) then + call create_ncfile(ncfilenm,ncid,il_im,il_jm,ll=1,ilout=il_out) +endif + +if (my_task == 0) then + write(il_out,*) 'opening ncfile at nstep ', ncfilenm, currstep + call ncheck( nf_open(ncfilenm, nf_write,ncid) ) + call write_nc_1Dtime(real(currstep),currstep,'time',ncid) +end if + +if (my_task == 0) call ncheck(nf_close(ncid)) + +return + +end subroutine check_landice_fields_2 + +!================================================= function file_exist (file_name) ! character(len=*), intent(in) :: file_name @@ -1400,6 +1875,6 @@ function file_exist (file_name) end function file_exist -!============================================================================ +!================================================= end module cpl_forcing_handler diff --git a/drivers/access/cpl_interface.F90 b/drivers/access/cpl_interface.F90 index a625f4b2..940763f9 100644 --- a/drivers/access/cpl_interface.F90 +++ b/drivers/access/cpl_interface.F90 @@ -62,6 +62,9 @@ module cpl_interface real(kind=dbl_kind), dimension(:), allocatable :: rla_bufsend real(kind=dbl_kind), dimension(:,:), allocatable :: vwork2d !local domain work array, 4 coupling data passing + + logical, parameter :: debug = .false. + contains !====================================================================== @@ -277,9 +280,9 @@ subroutine init_cpl ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - write(il_out,*) ' this block: iblock, jblock=', this_block%iblock, this_block%jblock -! write(il_out,*) ' block:', iblk, "ilo, jlo, ihi, jhi=", ilo, jlo, ihi, jhi - write(il_out,*) ' block:', iblk, "gilo, gjlo, gihi, gjhi=", this_block%i_glob(ilo), this_block%j_glob(jlo), this_block%i_glob(ihi), this_block%j_glob(jhi) + ! write(il_out,*) ' this block: iblock, jblock=', this_block%iblock, this_block%jblock + ! write(il_out,*) ' block:', iblk, "ilo, jlo, ihi, jhi=", ilo, jlo, ihi, jhi + ! 
write(il_out,*) ' block:', iblk, "gilo, gjlo, gihi, gjhi=", this_block%i_glob(ilo), this_block%j_glob(jlo), this_block%i_glob(ihi), this_block%j_glob(jhi) if (this_block%i_glob(ilo) < l_ilo) then l_ilo = this_block%i_glob(ilo) gh_ilo = this_block%i_glob(ilo-nghost) @@ -305,10 +308,10 @@ subroutine init_cpl endif end do - write(il_out,*) ' local partion, ilo, ihi, jlo, jhi=', l_ilo, l_ihi, l_jlo, l_jhi - write(il_out,*) ' partition x,y sizes:', l_ihi-l_ilo+1, l_jhi-l_jlo+1 + ! write(il_out,*) ' local partion, ilo, ihi, jlo, jhi=', l_ilo, l_ihi, l_jlo, l_jhi + ! write(il_out,*) ' partition x,y sizes:', l_ihi-l_ilo+1, l_jhi-l_jlo+1 !print ghost info - write(il_out,*) ' ghost global:',gh_ilo, gh_ihi, gh_jlo, gh_jhi + ! write(il_out,*) ' ghost global:',gh_ilo, gh_ihi, gh_jlo, gh_jhi !calculate partition using nprocsX and nprocsX l_ilo=mod(my_task,nprocsX)*nx_global/nprocsX+1 @@ -316,8 +319,8 @@ subroutine init_cpl l_jlo=int(my_task/nprocsX) * ny_global/nprocsY+1 l_jhi=l_jlo+ny_global/nprocsY - 1 - write(il_out,*) ' 2local partion, ilo, ihi, jlo, jhi=', l_ilo, l_ihi, l_jlo, l_jhi - write(il_out,*) ' 2partition x,y sizes:', l_ihi-l_ilo+1, l_jhi-l_jlo+1 + ! write(il_out,*) ' 2local partion, ilo, ihi, jlo, jhi=', l_ilo, l_ihi, l_jlo, l_jhi + ! write(il_out,*) ' 2partition x,y sizes:', l_ihi-l_ilo+1, l_jhi-l_jlo+1 call mpi_gather(l_ilo, 1, mpi_integer, vilo, 1, mpi_integer, 0, MPI_COMM_ICE, ierror) call broadcast_array(vilo, 0) @@ -350,10 +353,10 @@ subroutine init_cpl !disps(n) = ((vilo(n)-1)*ny_global + (vjlo(n)-1)) end do - write(il_out,*) ' vilo ', vilo - write(il_out,*) ' vjlo ', vjlo - write(il_out,*) ' counts ', counts - write(il_out,*) ' disps ', disps + ! write(il_out,*) ' vilo ', vilo + ! write(il_out,*) ' vjlo ', vjlo + ! write(il_out,*) ' counts ', counts + ! write(il_out,*) ' disps ', disps ! if ( ll_comparal ) then ! 
il_im = l_ihi-l_ilo+1 !nx_global @@ -382,7 +385,7 @@ subroutine init_cpl call decomp_def (il_part_id, il_length, il_imjm, & my_task, il_nbcplproc, ll_comparal, il_out) - write(il_out,*)'(init_cpl) called decomp_def, my_task, ierror = ',my_task, ierror + if (debug) write(il_out,*)'(init_cpl) called decomp_def, my_task, ierror = ',my_task, ierror ! ! PSMILe coupling fields declaration @@ -449,7 +452,6 @@ subroutine init_cpl cl_writ(nsend_i2a)='co2_i2' nsend_i2a = nsend_i2a + 1 cl_writ(nsend_i2a)='co2fx_i2' - if (my_task == 0) then write(il_out,*) 'init_cpl: Number of fields sent to atm: ',nsend_i2a endif @@ -492,10 +494,15 @@ subroutine init_cpl cl_writ(nsend_i2o)='co2_i1' nsend_i2o = nsend_i2o + 1 cl_writ(nsend_i2o)='wnd_i1' +!2 more added 20171024: + nsend_i2o = nsend_i2o + 1 + cl_writ(nsend_i2o)='lice_fw' + nsend_i2o = nsend_i2o + 1 + cl_writ(nsend_i2o)='lice_ht' if (my_task == 0 .or. ll_comparal) then - write(il_out,*) 'init_cpl: Number of fields sent to ocn: ',nsend_i2o - nsend_i2a + if (debug) write(il_out,*) 'init_cpl: Number of fields sent to ocn: ',nsend_i2o - nsend_i2a if (nsend_i2o /= jpfldout) then write(il_out,*) @@ -504,7 +511,7 @@ subroutine init_cpl call abort_ice('CICE: Number of outgoing coupling fields incorrect!') endif - write(il_out,*) 'init_cpl: Total number of fields sent from ice: ',jpfldout + if (debug) write(il_out,*) 'init_cpl: Total number of fields sent from ice: ',jpfldout !jpfldout == nsend_i2o! !---------------------! @@ -567,7 +574,7 @@ subroutine init_cpl cl_read(nrecv_a2i) = 'wnd_ai' if (my_task==0 .or. ll_comparal) then - write(il_out,*) 'init_cpl: Number of fields rcvd from atm: ',nrecv_a2i + if (debug) write(il_out,*) 'init_cpl: Number of fields rcvd from atm: ',nrecv_a2i endif ! @@ -596,7 +603,7 @@ subroutine init_cpl if (my_task==0 .or. 
ll_comparal) then - write(il_out,*) 'init_cpl: Number of fields rcvd from ocn: ',nrecv_o2i-nrecv_a2i + if (debug) write(il_out,*) 'init_cpl: Number of fields rcvd from ocn: ',nrecv_o2i-nrecv_a2i if (nrecv_o2i /= jpfldin) then write(il_out,*) @@ -607,7 +614,7 @@ subroutine init_cpl !jpfldin == nrecv_o2i! !--------------------! - write(il_out,*) 'init_cpl: Total number of fields rcvd by ice: ',jpfldin + if (debug) write(il_out,*) 'init_cpl: Total number of fields rcvd by ice: ',jpfldin do jf=1, jpfldin call prism_def_var_proto (il_var_id_in(jf), cl_read(jf), il_part_id, & @@ -646,7 +653,6 @@ subroutine init_cpl allocate (um_bmlt(nx_block,ny_block,ncat,max_blocks)); um_bmlt(:,:,:,:) = 0 allocate (um_co2(nx_block,ny_block,max_blocks)); um_co2(:,:,:) = 0 allocate (um_wnd(nx_block,ny_block,max_blocks)); um_wnd(:,:,:) = 0 - ! allocate ( core_runoff(nx_block,ny_block,max_blocks)); core_runoff(:,:,:) = 0. ! @@ -692,6 +698,9 @@ subroutine init_cpl allocate (io_form(nx_block,ny_block,max_blocks)); io_form(:,:,:) = 0 allocate (io_co2(nx_block,ny_block,max_blocks)); io_co2(:,:,:) = 0 allocate (io_wnd(nx_block,ny_block,max_blocks)); io_wnd(:,:,:) = 0 +!20171024: 2 more added + allocate (io_licefw(nx_block,ny_block,max_blocks)); io_licefw(:,:,:) = 0 + allocate (io_liceht(nx_block,ny_block,max_blocks)); io_liceht(:,:,:) = 0 ! temporary arrays: ! IO cpl int time-average @@ -712,15 +721,32 @@ subroutine init_cpl allocate (mssv(nx_block,ny_block,max_blocks)); mssv(:,:,:) = 0 allocate (mco2(nx_block,ny_block,max_blocks)); mco2(:,:,:) = 0 allocate (mco2fx(nx_block,ny_block,max_blocks)); mco2fx(:,:,:) = 0 -! IA cpl int time-average (4D) + ! 
IA cpl int time-average (4D) allocate (maicen(nx_block,ny_block,ncat,max_blocks)); maicen(:,:,:,:) = 0 allocate (msnown(nx_block,ny_block,ncat,max_blocks)); msnown(:,:,:,:) = 0 allocate (mthikn(nx_block,ny_block,ncat,max_blocks)); mthikn(:,:,:,:) = 0 + allocate (maicen_saved(nx_block,ny_block,ncat,max_blocks)); maicen_saved(:,:,:,:) = 0 + + allocate (ticeberg_s(12)); ticeberg_s(:) = 0 + allocate (ticeberg_n(12)); ticeberg_n(:) = 0 + allocate (gwork(nx_global,ny_global)); gwork(:,:) = 0 + + + if (my_task == master_task) then + !global domain runoff for iceberg runoff calcs + allocate (gicebergfw(nx_global,ny_global,12)); gicebergfw(:,:,:) = 0 + allocate (grunoff(nx_global,ny_global)); grunoff(:,:) = 0 + allocate (gtarea(nx_global,ny_global)); gtarea(:,:) = 0 + else + allocate (gicebergfw(1,1,12)); gicebergfw(:,:,:) = 0 + allocate (grunoff(1,1)); grunoff(:,:) = 0 + allocate (gtarea(1,1)); gtarea(:,:) = 0 + endif allocate (vwork(nx_block,ny_block,max_blocks)); vwork(:,:,:) = 0 - allocate (gwork(nx_global,ny_global)); gwork(:,:) = 0 allocate (sicemass(nx_block,ny_block,max_blocks)); sicemass(:,:,:) = 0. allocate (vwork2d(l_ilo:l_ihi, l_jlo:l_jhi)); vwork2d(:,:) = 0. !l_ihi-l_ilo+1, l_jhi-l_jlo+1 + end subroutine init_cpl !======================================================================= @@ -758,14 +784,13 @@ subroutine from_atm(isteps) write(il_out,*) '(from_atm) Total number of fields to be rcvd: ', nrecv_a2i endif - write(il_out,*) "prism_get from_atm at sec: ", isteps + if (debug) write(il_out,*) "prism_get from_atm at sec: ", isteps do jf = 1, nrecv_a2i if (my_task==0 .or. ll_comparal ) then !jf-th field in - write(il_out,*) - write(il_out,*) '*** receiving coupling field No. ', jf, cl_read(jf) + if (debug) write(il_out,*) '*** receiving coupling field No. 
', jf, cl_read(jf) !call flush(il_out) if (ll_comparal) then @@ -778,8 +803,7 @@ subroutine from_atm(isteps) write(il_out,*) 'Err in _get_ sst at time with error: ', isteps, ierror call prism_abort_proto(il_comp_id, 'cice from_atm','stop 1') else - write(il_out,*) - write(il_out,*)'(from_atm) rcvd at time with err: ',cl_read(jf),isteps,ierror + if (debug) write(il_out,*)'(from_atm) rcvd at time with err: ',cl_read(jf),isteps,ierror if (ll_comparal .and. chk_a2i_fields) then call mpi_gatherv(vwork2d(l_ilo:l_ihi, l_jlo:l_jhi),1,sendsubarray,gwork, & @@ -817,9 +841,6 @@ subroutine from_atm(isteps) um_runoff(1+nghost:nx_block-nghost,1+nghost:ny_block-nghost,1) =vwork2d(:,:) case ('wme_i'); um_wme(1+nghost:nx_block-nghost,1+nghost:ny_block-nghost,1) = vwork2d(:,:) -! case ('rain_i'); um_rain(:,:,:) = vwork(:,:,:) -! case ('snow_i'); um_snow(:,:,:) = vwork(:,:,:) -!---20100825 -- just be cauious: ------------------------- case ('rain_i'); um_rain(1+nghost:nx_block-nghost,1+nghost:ny_block-nghost,1) =max(0.0,vwork2d(:,:)) case ('snow_i'); @@ -848,12 +869,11 @@ subroutine from_atm(isteps) end select if (my_task == 0 .or. ll_comparal) then - write(il_out,*) - write(il_out,*)'(from_atm) done: ', jf, trim(cl_read(jf)) + if (debug) write(il_out,*)'(from_atm) done: ', jf, trim(cl_read(jf)) endif enddo - +!---------------------------------------------------------------------------------------------------- call ice_HaloUpdate(um_thflx, halo_info,field_loc_center,field_type_scalar) call ice_HaloUpdate(um_pswflx, halo_info,field_loc_center,field_type_scalar) call ice_HaloUpdate(um_runoff, halo_info,field_loc_center,field_type_scalar) @@ -944,14 +964,13 @@ subroutine from_ocn(isteps) endif endif - write(il_out,*) "prism_get from_ocn at sec: ", isteps + if (debug) write(il_out,*) "prism_get from_ocn at sec: ", isteps do jf = nrecv_a2i + 1, jpfldin if (my_task==0 .or. ll_comparal) then !jf-th field in - write(il_out,*) - write(il_out,*) '*** receiving coupling fields No. 
', jf, cl_read(jf) + if (debug) write(il_out,*) '*** receiving coupling fields No. ', jf, cl_read(jf) if(ll_comparal) then call prism_get_proto (il_var_id_in(jf), isteps, vwork2d(l_ilo:l_ihi, l_jlo:l_jhi), ierror) else @@ -962,8 +981,7 @@ subroutine from_ocn(isteps) write(il_out,*) 'Err in _get_ sst at time with error: ', isteps, ierror call prism_abort_proto(il_comp_id, 'cice from_ocn','stop 1') else - write(il_out,*) - write(il_out,*)'(from_ocn) rcvd at time with err: ',cl_read(jf),isteps,ierror + if (debug) write(il_out,*)'(from_ocn) rcvd at time with err: ',cl_read(jf),isteps,ierror if(ll_comparal .and. chk_o2i_fields) then call mpi_gatherv(vwork2d(l_ilo:l_ihi, l_jlo:l_jhi),1,sendsubarray,gwork, & counts,disps,resizedrecvsubarray, 0,MPI_COMM_ICE,ierror) @@ -1025,6 +1043,21 @@ subroutine from_ocn(isteps) !endif !------------------------------- + !------------------------------------------------------------------------------- + !B: calculate freezing point here (before "time_average_ocn_fields_4_i2a") ! + !----- should use eos formula to calculate Tf for "consistency" with GCx ----! + Tf (:,:,:) = -depressT*ocn_sss(:,:,:) ! (deg C) + ! + !May use different formula for Tf such as TEOS-10 formulation: + ! + !r1_S0 = 0.875/35.16504 + !zzs(:,:,:) = sqrt(abs(ocn_sss(:,:,:)) * r1_S0) + !Tf(:,:,:) = ((((1.46873e-03 * zzs(:,:,:) - 9.64972e-03) * zzs(:,:,:) + & + ! 2.28348e-02) * zzs(:,:,:) - 3.12775e-02) * zzs(:,:,:) + & + ! 2.07679e-02) * zzs(:,:,:) - 5.87701e-02 + !Tf(:,:,:) = Tf(:,:,:) * sss(:,:,:) ! - 7.53e-4 * 5.0 !!!5.0 is depth in meters + !--------------------------------------------------------------------------------- + if ( chk_o2i_fields .and. 
my_task == 0 ) then call ncheck(nf_close(ncid)) endif @@ -1049,8 +1082,7 @@ subroutine into_ocn(isteps) currstep=currstep+1 if (my_task == 0) then - write(il_out,*) - write(il_out,*) '(into_ocn) sending coupling fields at stime= ', isteps + if (debug) write(il_out,*) '(into_ocn) sending coupling fields at stime= ', isteps if (chk_i2o_fields) then if ( .not. file_exist('fields_i2o_in_ice.nc') ) then call create_ncfile('fields_i2o_in_ice.nc',ncid,il_im,il_jm,ll=1,ilout=il_out) @@ -1061,7 +1093,7 @@ subroutine into_ocn(isteps) endif endif - write(il_out,*) "prism_put into_ocn at sec: ", isteps + if (debug) write(il_out,*) "prism_put into_ocn at sec: ", isteps do jf = nsend_i2a + 1, jpfldout !CH: make sure the 'LIMITS' are to be released! @@ -1100,6 +1132,9 @@ subroutine into_ocn(isteps) case('form_io'); vwork = io_form case('co2_i1'); vwork = io_co2 case('wnd_i1'); vwork = io_wnd + !2 more added 20171024: + case('lice_fw'); vwork = io_licefw + case('lice_ht'); vwork = io_liceht end select if(.not. ll_comparal) then @@ -1107,27 +1142,11 @@ subroutine into_ocn(isteps) else call pack_global_dbl(gwork, vwork, master_task, distrb_info) vwork2d(l_ilo:l_ihi, l_jlo:l_jhi) = gwork(l_ilo:l_ihi, l_jlo:l_jhi) -! do iblk=1,nblocks_tot -! -! if (distrb_info%blockLocation(iblk) == my_task+1) then -! -! this_block = get_block(iblk,iblk) -! ilo = this_block%ilo -! ihi = this_block%ihi -! jlo = this_block%jlo -! jhi = this_block%jhi -! -! vwork2d(this_block%i_glob(ilo):this_block%i_glob(ihi), & -! this_block%j_glob(jlo):this_block%j_glob(jhi)) = & -! vwork(ilo:ihi,jlo:jhi,distrb_info%blockLocalID(iblk)) -! endif -! end do - endif if (my_task == 0 .or. ll_comparal) then - write(il_out,*) - write(il_out,*) '*** sending coupling field No. ', jf, cl_writ(jf) + if (debug) write(il_out,*) '*** sending coupling field No. 
', jf, cl_writ(jf) + if(ll_comparal) then call prism_put_proto(il_var_id_out(jf), isteps, vwork2d(l_ilo:l_ihi, l_jlo:l_jhi), ierror) else @@ -1138,8 +1157,7 @@ subroutine into_ocn(isteps) write(il_out,*) '(into_ocn) Err in _put_ ', cl_writ(jf), isteps, ierror call prism_abort_proto(il_comp_id, 'cice into_ocn','STOP 1') else - write(il_out,*) - write(il_out,*)'(into_ocn) sent: ', cl_writ(jf), isteps, ierror + if (debug) write(il_out,*)'(into_ocn) sent: ', cl_writ(jf), isteps, ierror if(chk_i2o_fields .and. ll_comparal) then call mpi_gatherv(vwork2d(l_ilo:l_ihi, l_jlo:l_jhi),1,sendsubarray,gwork, & counts,disps,resizedrecvsubarray, 0,MPI_COMM_ICE,ierror) @@ -1192,13 +1210,12 @@ subroutine into_atm(isteps) !end if if (my_task == 0) then - write(il_out,*) - write(il_out,*) '(into_atm) sending coupling fields at stime= ', isteps + if (debug) write(il_out,*) '(into_atm) sending coupling fields at stime= ', isteps if (chk_i2a_fields) then if ( .not. file_exist('fields_i2a_in_ice.nc') ) then call create_ncfile('fields_i2a_in_ice.nc',ncid,il_im,il_jm,ll=1,ilout=il_out) else - write(il_out,*) 'opening file fields_i2a_in_ice.nc at nstep = ', isteps + if (debug) write(il_out,*) 'opening file fields_i2a_in_ice.nc at nstep = ', isteps call ncheck( nf_open('fields_i2a_in_ice.nc',nf_write,ncid) ) end if call write_nc_1Dtime(real(isteps),currstep,'time',ncid) @@ -1234,7 +1251,7 @@ subroutine into_atm(isteps) call u2tgrid_vector(ia_uvel) call u2tgrid_vector(ia_vvel) - write(il_out,*) "prism_put into_atm at sec: ", isteps + if (debug) write(il_out,*) "prism_put into_atm at sec: ", isteps do jf = 1, nsend_i2a select case (trim(cl_writ(jf))) @@ -1285,13 +1302,11 @@ subroutine into_atm(isteps) end if if (my_task == 0 .or. ll_comparal) then - write(il_out,*) - write(il_out,*) '*** sending coupling field No. ', jf, cl_writ(jf) + if (debug) write(il_out,*) '*** sending coupling field No. 
', jf, cl_writ(jf) !call prism_put_inquire_proto(il_var_id_out(jf),isteps,ierror) - write(il_out,*) - write(il_out,*) '(into_atm) what to do with this var==> Err= ',ierror + if (debug) write(il_out,*) '(into_atm) what to do with this var==> Err= ',ierror if(ll_comparal) then call prism_put_proto(il_var_id_out(jf), isteps, vwork2d(l_ilo:l_ihi, l_jlo:l_jhi), ierror) else @@ -1302,8 +1317,7 @@ subroutine into_atm(isteps) write(il_out,*) '(into_atm) Err in _put_ ', cl_writ(jf), isteps, ierror call prism_abort_proto(il_comp_id, 'cice into_atm','STOP 1') else - write(il_out,*) - write(il_out,*)'(into_atm) sent: ', cl_writ(jf), isteps, ierror + if (debug) write(il_out,*)'(into_atm) sent: ', cl_writ(jf), isteps, ierror if(chk_i2a_fields .and. ll_comparal) then call mpi_gatherv(vwork2d(l_ilo:l_ihi, l_jlo:l_jhi),1,sendsubarray,gwork, & counts,disps,resizedrecvsubarray, 0,MPI_COMM_ICE,ierror) diff --git a/drivers/access/cpl_parameters.F90 b/drivers/access/cpl_parameters.F90 index f72bb193..88c24e8e 100644 --- a/drivers/access/cpl_parameters.F90 +++ b/drivers/access/cpl_parameters.F90 @@ -8,6 +8,12 @@ module cpl_parameters implicit none +#ifdef __INTEL_COMPILER +! for intel runtime errors +! see https://www.intel.com/content/www/us/en/docs/fortran-compiler/developer-guide-reference/2025-2/list-of-runtime-error-messages.html +include "for_iosdef.for" +#endif + integer(kind=int_kind) :: il_im, il_jm, il_imjm ! il_im=nx_global, il_jm=ny_global ! assigned in prism_init integer (kind=int_kind) :: xdim, ydim @@ -15,8 +21,8 @@ module cpl_parameters !integer(kind=int_kind), parameter :: nrecv = 50 ! maxium no of flds rcvd allowed integer(kind=int_kind) :: nsend_i2a, nsend_i2o integer(kind=int_kind) :: nrecv_a2i, nrecv_o2i -integer(kind=int_kind), parameter :: jpfldout = 37 ! actual number of fields sent -integer(kind=int_kind), parameter :: jpfldin = 35 ! actual umber of fields rcvd +integer(kind=int_kind), parameter :: jpfldout = 39 ! 
actual number of fields sent +integer(kind=int_kind), parameter :: jpfldin = 35 ! actual number of fields rcvd character(len=8), dimension(jpfldout) :: cl_writ ! Symb names fields sent character(len=8), dimension(jpfldin) :: cl_read ! Symb names fields rcvd @@ -32,8 +38,7 @@ module cpl_parameters !integer(kind=int_kind) :: il_master=0 ! master_task id integer(kind=int_kind) :: num_cpl_ai ! num of (a2i) cpl periods -integer(kind=int_kind) :: num_cpl_io ! num of (i2o) cpl periods -integer(kind=int_kind) :: num_ice_io ! ice time loop iter num per i2o cpl interval +integer(kind=int_kind) :: num_ice_ai ! ice time loop iter num per a2i cpl interval real(kind=dbl_kind) :: meltlimit = -200. !12/03/2008: set max melt real(kind=dbl_kind) :: ocn_albedo = 0.06 ! for compability with AusCOM @@ -60,33 +65,56 @@ module cpl_parameters chk_i2o_fields = .false. , & chk_o2i_fields = .false. integer(kind=int_kind) :: jobnum = 1 !1 for initial, >1 restart -integer(kind=int_kind) :: inidate = 01010101 !beginning date of this run (yyyymmdd) integer(kind=int_kind) :: init_date = 00010101 !beginning date of this EXP (yyyymmdd) +integer(kind=int_kind) :: iniday = 1, & ! beginning date of this run. Read from restart + inimon = 1, & + iniyear = 1 integer(kind=int_kind) :: dt_cice = 3600 !time step of this model (seconds) integer(kind=int_kind) :: dt_cpl_ai = 21600 !atm<==>ice coupling interval (seconds) -integer(kind=int_kind) :: dt_cpl_io = 21600 !ice<==>ocn coupling interval (seconds) -integer(kind=int_kind) :: caltype = 0 !calendar type: 0 (365daye/yr, 'Juilian' ) - ! 1 (365/366 days/yr, 'Gregorian') - ! n (n days/month) -!integer(kind=int_kind) :: runtime0 !accumulated run time by the end of last run (s) -real(kind=dbl_kind) :: runtime0 = 0.0 ! can be too large as int to read in correctly! +integer(kind=int_kind) :: dt_cpl_io = -99 !ice<==>ocn coupling interval (seconds). + !Hardwired to equal dt_cice and should not + !be set in namelist. 
+real(kind=dbl_kind) :: runtime0 = 0.0 !accumulated runtime from init_date to + !run start date integer(kind=int_kind) :: runtime = 86400 !the time length for this run segment (s) !20100305: Harry Henden suggests turning off ocean current into UM might reduce the ! tropical cooling bias: real(kind=dbl_kind) :: ocn_ssuv_factor = 1.0 ! 0.0 -- turn off the ocn_current into UM. real(kind=dbl_kind) :: iostress_factor = 1.0 ! 0.0 -- turn off stresses into MOM4. +! +!20171227: Adding options for land ice discharge as iceberg melt (0,1,2,3,4) +integer(kind=int_kind) :: iceberg = 2 +!Allow scaling: factor for the iceberg waterflux (won't change water mass budget) +real(kind=dbl_kind) :: & + iceberg_rate_s = 0.5, & !rate of "iceberg" taken from the runoff off Antarctica + iceberg_rate_n = 0.5, & !........................................... Greenland + iceberg_lh = 1.0 !iceberg latent heat (=0 if CABLE already calculated melting) + +logical :: runoff_lh = .true. !allow runoff to carry LH when discharged into ocean + !which would lead to ocean surface cooling, + !when .false., only carry LH to areas where + !runoff spread by the lice (iceberg) mask +integer(kind=int_kind) :: & + iceberg_je_s = 70, & !(iceberg_js_s=1, always) + runoff_je_s = 45, & !(runoff_js_s =1, always) + iceberg_js_n = 201, & !(iceberg_je_n=300, always) + runoff_is_n = 222, & !------ + runoff_ie_n = 270, & !These 4 indices define the + runoff_js_n = 230, & !Greenland runoff domain + runoff_je_n = 300 !----- +!202412: add option for "fixing" ocean water mass imbalance: ESM1.5 sees ~ 0.18543417E+08 kg/s +! (annual mean) water loss from ocean in a 100-year test run (liceA0G0), meaning a net +! loss rate of 0.18543417E+08/0.36133599E+15(ocean-surface-area) = 0.513190E-07 kg/m2/s. +! which will be compensated for by adding this much of waterflux to global lprec field-- +real(kind=dbl_kind) :: & + add_lprec = 0.513190E-07 !kg/m2/s. ==> set to 0.0 if no fixin! 
namelist/coupling/ & - caltype, & jobnum, & - inidate, & - init_date, & - runtime0, & runtime, & dt_cice, & dt_cpl_ai, & - dt_cpl_io, & inputdir, & restartdir, & pop_icediag, & @@ -104,6 +132,19 @@ module cpl_parameters do_scale_fluxes, & extreme_test, & imsk_evap, & + iceberg, & + iceberg_rate_s, & + iceberg_rate_n, & + iceberg_lh, & + iceberg_je_s, & + runoff_je_s, & + iceberg_js_n, & + runoff_is_n, & + runoff_ie_n, & + runoff_js_n, & + runoff_je_n, & + runoff_lh, & + add_lprec, & ocn_ssuv_factor,& iostress_factor,& chk_a2i_fields, & @@ -111,10 +152,7 @@ module cpl_parameters chk_i2o_fields, & chk_o2i_fields -integer(kind=int_kind) :: iniday, inimon, iniyear !from inidate -real(kind=dbl_kind) :: coef_io !dt_ice/dt_cpl_io, for i2o fields tavg -real(kind=dbl_kind) :: coef_ia !dt_ice/dt_cpl_ai, for i2a fields tavg -real(kind=dbl_kind) :: coef_cpl !dt_cpl_io/dt_cpl_ai, for ocn fields tavg +real(kind=dbl_kind) :: coef_ai !dt_ice/dt_cpl_ai, for i2a fields tavg real(kind=dbl_kind) :: frazil_factor = 0.5 !frazil_factor is associated with the difference between ocean @@ -126,6 +164,8 @@ module cpl_parameters ! the received frazil energy by multiplying 0.5... !--------------------------------------------------------------------------------------- +logical :: newstep_ai = .false. !20171024: for land ice availability control + contains !======================================================================================= @@ -137,74 +177,91 @@ subroutine get_cpl_timecontrol_simple open(unit=99,file="input_ice.nml",form="formatted",status="old") read (99, coupling) close(unit=99) + ! 
*** make sure dt_cpl_ai is multiple of dt_cpl_io, and dt_cpl_io if multiple of dt_ice *** -num_cpl_ai = runtime/dt_cpl_ai -num_cpl_io = dt_cpl_ai/dt_cpl_io -num_ice_io = dt_cpl_io/dt_cice -coef_io = float(dt_cice)/float(dt_cpl_io) -coef_ia = float(dt_cice)/float(dt_cpl_ai) -coef_cpl = float(dt_cpl_io)/float(dt_cpl_ai) +!hardwire dt_cpl_io == dt_cice +dt_cpl_io = dt_cice + +num_cpl_ai = runtime/dt_cpl_ai +num_ice_ai = dt_cpl_ai/dt_cice -iniday = mod(inidate, 100) -inimon = mod( (inidate - iniday)/100, 100) -iniyear = inidate / 10000 +coef_ai = float(dt_cice)/float(dt_cpl_ai) return end subroutine get_cpl_timecontrol_simple !=============================================================================== -subroutine get_cpl_timecontrol -use ice_exit -use ice_fileunits +subroutine get_cpl_timecontrol +use ice_exit, only: abort_ice +use ice_fileunits, only: nu_nml, ice_stderr, ice_stdout, get_fileunit, release_fileunit +use ice_communicate, only: my_task, master_task implicit none integer (int_kind) :: nml_error ! namelist read error flag +character (len=256) :: errstr, tmpstr ! For holding namelist read errors ! all processors read the namelist-- call get_fileunit(nu_nml) -open(unit=nu_nml,file="input_ice.nml",form="formatted",status="old",iostat=nml_error) +open(unit=nu_nml,file="input_ice.nml",form="formatted",status="old",iostat=nml_error, iomsg=errstr) ! -write(6,*)'CICE: input_ice.nml opened at unit = ', nu_nml +if (my_task == master_task) then + write(ice_stdout,*)'CICE: input_ice.nml opened at unit = ', nu_nml +endif ! if (nml_error /= 0) then - nml_error = -1 + write(tmpstr, '(a,i3,a)') 'CICE: ERROR failed to open input_ice.nml. Error code: ', nml_error, & + ' - ' // trim(errstr) + call abort_ice(trim(tmpstr)) else nml_error = 1 endif + do while (nml_error > 0) - read(nu_nml, nml=coupling,iostat=nml_error) - if (nml_error > 0) read(nu_nml,*) ! for Nagware compiler + read(nu_nml, nml=coupling,iostat=nml_error,iomsg=errstr) + ! 
check if error + if (nml_error /= 0) then + if (my_task == master_task) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt=*) tmpstr +#ifdef __INTEL_COMPILER + if (nml_error == FOR$IOS_INVREFVAR) then + write(ice_stderr,*)'CICE: Invalid reference to variable '//trim(tmpstr) + write(ice_stderr,*)'CICE: is '//trim(tmpstr)//' deprecated ?' + endif +#endif + call abort_ice('CICE ERROR in input_ice.nml when' // & + ' reading ' // trim(tmpstr) // ' - ' //errstr) + endif + endif end do if (nml_error == 0) close(nu_nml) -write(6,coupling) +if (my_task == master_task) then + write(6,coupling) +endif call release_fileunit(nu_nml) if (nml_error /= 0) then - !!!call abort_ice('ice: error reading coupling') - write(6, *) - write(6, *)'XXX Warning: after reading coupling, nml_error = ',nml_error - write(6, *) + if (my_task == master_task) then + call abort_ice('ice: error reading coupling namelist in "input_ice.nml"') + endif endif +!hardwire dt_cpl_io == dt_cice +dt_cpl_io = dt_cice + ! * make sure runtime is mutliple of dt_cpl_ai, dt_cpl_ai is mutliple of dt_cpl_io, ! * and dt_cpl_io is mutliple of dt_cice! num_cpl_ai = runtime/dt_cpl_ai -num_cpl_io = dt_cpl_ai/dt_cpl_io -num_ice_io = dt_cpl_io/dt_cice - -coef_io = float(dt_cice)/float(dt_cpl_io) -coef_ia = float(dt_cice)/float(dt_cpl_ai) -coef_cpl = float(dt_cpl_io)/float(dt_cpl_ai) +num_ice_ai = dt_cpl_ai/dt_cice -iniday = mod(inidate, 100) -inimon = mod( (inidate - iniday)/100, 100) -iniyear = inidate / 10000 +coef_ai = float(dt_cice)/float(dt_cpl_ai) return end subroutine get_cpl_timecontrol diff --git a/drivers/access/ice_constants.F90 b/drivers/access/ice_constants.F90 index 604a59f2..2b59c433 100644 --- a/drivers/access/ice_constants.F90 +++ b/drivers/access/ice_constants.F90 @@ -1,4 +1,4 @@ -! SVN:$Id: ice_constants.F90 726 2013-09-17 14:58:52Z eclare $ +! 
SVN:$Id: ice_constants.F90 700 2013-08-15 19:17:39Z eclare $ !======================================================================= ! ! This module defines a variety of physical and numerical constants @@ -21,15 +21,20 @@ module ice_constants real (kind=dbl_kind), parameter, public :: & rhos = 330.0_dbl_kind ,&! density of snow (kg/m^3) rhoi = 917.0_dbl_kind ,&! density of ice (kg/m^3) +!#ifdef AusCOM +! rhow = 1035.0_dbl_kind ,&! density of seawater (kg/m^3) +! !mom uses this value---arguable for sea ice--- +!#else rhow = 1026.0_dbl_kind ,&! density of seawater (kg/m^3) +!#endif cp_air = 1005.0_dbl_kind ,&! specific heat of air (J/kg/K) ! (Briegleb JGR 97 11475-11485 July 1992) emissivity= 0.95_dbl_kind ,&! emissivity of snow and ice cp_ice = 2106._dbl_kind ,&! specific heat of fresh ice (J/kg/K) !ars599: 11042014: add AusCOM #ifdef AusCOM - cp_ocn = 3989._dbl_kind ,&! specific heat of ocn (J/kg/K) - ! freshwater value needed for enthalpy + cp_ocn = 3989.24495292815_dbl_kind, & ! mom5 constant + ! cp_ocn = 3992.10322329649_dbl_kind,& ! used for cm2 #else cp_ocn = 4218._dbl_kind ,&! specific heat of ocn (J/kg/K) ! freshwater value needed for enthalpy @@ -38,23 +43,17 @@ module ice_constants !ars599: 26032014 new code (CODE: dragio) ! use new code, mark out #ifndef AusCOM -#ifndef AusCOM - dragio = 0.00536_dbl_kind ,&! ice-ocn drag coefficient -#endif + albocn = 0.06_dbl_kind ! ocean albedo real (kind=dbl_kind), parameter, public :: & - gravit = 9.80616_dbl_kind ,&! gravitational acceleration (m/s^2) - omega = 7.292e-5_dbl_kind ,&! angular velocity of earth (rad/sec) - radius = 6.371e6_dbl_kind ! earth radius (m) + gravit = 9.80665_dbl_kind ,&! gravitational acceleration (m/s^2) + omega = 7.292116e-5_dbl_kind,&! angular velocity of earth (rad/sec) + radius = 6.371229e6_dbl_kind ! earth radius (m) real (kind=dbl_kind), parameter, public :: & secday = 86400.0_dbl_kind ,&! seconds in calendar day viscosity_dyn = 1.79e-3_dbl_kind, & ! 
dynamic viscosity of brine (kg/m/s) -#ifndef AusCOM - Tocnfrz = -1.8_dbl_kind ,&! freezing temp of seawater (C), - ! used as Tsfcn for open water -#endif rhofresh = 1000.0_dbl_kind ,&! density of fresh water (kg/m^3) zvir = 0.606_dbl_kind ,&! rh2o/rair - 1.0 vonkar = 0.4_dbl_kind ,&! von Karman constant @@ -66,7 +65,6 @@ module ice_constants Lfresh = Lsub-Lvap ,&! latent heat of melting of fresh ice (J/kg) Timelt = 0.0_dbl_kind ,&! melting temperature, ice top surface (C) Tsmelt = 0.0_dbl_kind ,&! melting temperature, snow top surface (C) - ice_ref_salinity = 4._dbl_kind ,&! (ppt) ! ocn_ref_salinity = 34.7_dbl_kind,&! (ppt) spval_dbl = 1.0e30_dbl_kind ! special value (double precision) @@ -74,35 +72,37 @@ module ice_constants spval = 1.0e30_real_kind ! special value for netCDF output real (kind=dbl_kind), parameter, public :: & -#ifndef AusCOM - iceruf = 0.0005_dbl_kind ,&! ice surface roughness (m) -#endif - ! (Ebert, Schramm and Curry JGR 100 15965-15975 Aug 1995) kappav = 1.4_dbl_kind ,&! vis extnctn coef in ice, wvlngth<700nm (1/m) !kappan = 17.6_dbl_kind,&! vis extnctn coef in ice, wvlngth<700nm (1/m) ! kice is not used for mushy thermo kice = 2.03_dbl_kind ,&! thermal conductivity of fresh ice(W/m/deg) + !!!kice = 2.63_dbl_kind ,&!!! !20170922: spo suggests to test new kice and ksno ! kseaice is used only for zero-layer thermo kseaice= 2.00_dbl_kind ,&! thermal conductivity of sea ice (W/m/deg) ! (used in zero layer thermodynamics option) - ksno = 0.30_dbl_kind ,&! thermal conductivity of snow (W/m/deg) - hs_min = 1.e-4_dbl_kind ,&! min snow thickness for computing zTsn (m) #ifndef AusCOM - snowpatch = 0.02_dbl_kind,&! parameter for fractional snow area (m) + snowpatch = 0.02_dbl_kind, & ! parameter for fractional snow area (m) #endif - zref = 10._dbl_kind ! reference height for stability (m) - -#ifdef AusCOM - ! in namelist therefore not parameter, which is counterintuitive, - ! 
since this modules name is ice_constants -!ars599: 26032014: change to public -!ars599: 24042015: remove dragio!! + zref = 10._dbl_kind ! reference height for stability (m) +#ifndef AusCOM + real (kind=dbl_kind), parameter, public :: & + !!! dragio = 0.00536_dbl_kind ,&! ice-ocn drag coefficient + dragio = 0.01_dbl_kind ,&!!! 20170922 test new value as per spo + Tocnfrz = -1.8_dbl_kind ,&! freezing temp of seawater (C), + ! used as Tsfcn for open water + ice_ref_salinity = 5._dbl_kind, & ! reference salinity for ice–ocean exchanges (ppt) + ! n.b. CICE6 uses 4 ppt + ksno = 0.3_dbl_kind ! thermal conductivity of snow (W/m/deg) +#else + ! get these in ice_init from namelist real (kind=dbl_kind), public :: & - dragio , & ! ice-ocn drag coefficient - Tocnfrz ! freezing temp of seawater (C), - ! used as Tsfcn for open water + dragio , & ! ice-ocn drag coefficient + Tocnfrz , & ! freezing temp of seawater (C), + ! used as Tsfcn for open water + ice_ref_salinity, & ! reference salinity for ice–ocean exchanges (ppt) + ksno ! thermal conductivity of snow (W/m/deg) #endif ! weights for albedos @@ -158,6 +158,7 @@ module ice_constants c20 = 20.0_dbl_kind, & c25 = 25.0_dbl_kind, & c30 = 30.0_dbl_kind, & + c60 = 60.0_dbl_kind, & c100 = 100.0_dbl_kind, & c180 = 180.0_dbl_kind, & c360 = 360.0_dbl_kind, & diff --git a/drivers/access/ice_coupling.F90 b/drivers/access/ice_coupling.F90 new file mode 100644 index 00000000..732e8e78 --- /dev/null +++ b/drivers/access/ice_coupling.F90 @@ -0,0 +1,484 @@ +!======================================================================= +! +!BOP +! +! !MODULE: ice_coupling - contains coupling related routines used by Met Office +! +! !DESCRIPTION: +! +! Contains routines relating to coupling fields used by Met Office +! +! !REVISION HISTORY: +! SVN:$Id: +! +! authors: Alison McLaren, Met Office +! Feb 2014: Amended by Alex West for use in CICE 5.0. +! +! !INTERFACE: +! + module ice_coupling +! +! !USES: +! + use ice_constants + use ice_kinds_mod +! 
+!EOP +! + implicit none + save + +! !PUBLIC MEMBER FUNCTIONS: + + public :: sfcflux_to_ocn, set_sfcflux, top_layer_Tandk_init, top_layer_Tandk_run +! +!EOP +! +!======================================================================= + + contains + +!======================================================================= +!BOP +! +! !IROUTINE: sfcflux_to_ocn +! +! !DESCRIPTION: +! +! If surface heat fluxes are provided to CICE instead of CICE calculating +! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can +! be provided at points which do not have ice. (This is could be due to +! the heat fluxes being calculated on a lower resolution grid or the +! heat fluxes not recalculated at every CICE timestep.) At ice free points, +! conserve energy and water by passing these fluxes to the ocean. +! +! !INTERFACE: +! + subroutine sfcflux_to_ocn(nx_block, ny_block, & + tmask, aice, & + fsurfn_f, flatn_f, & + fresh, fhocn) +! +! !REVISION HISTORY: +! +! authors: A. McLaren, Met Office +! +! !USES: +! + use ice_domain_size, only: ncat +! +! !INPUT/OUTPUT PARAMETERS: + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block ! block dimensions + + logical (kind=log_kind), dimension (nx_block,ny_block), & + intent(in) :: & + tmask ! land/boundary mask, thickness (T-cell) + + real (kind=dbl_kind), dimension(nx_block,ny_block), & + intent(in):: & + aice ! initial ice concentration + + real (kind=dbl_kind), dimension(nx_block,ny_block,ncat), & + intent(in) :: & + fsurfn_f, & ! net surface heat flux (provided as forcing) + flatn_f ! latent heat flux (provided as forcing) + + real (kind=dbl_kind), dimension(nx_block,ny_block), & + intent(inout):: & + fresh , & ! fresh water flux to ocean (kg/m2/s) + fhocn ! actual ocn/ice heat flx (W/m**2) +! +!EOP +! +!#ifdef CICE_IN_NEMO +#ifdef ACCESS + integer (kind=int_kind) :: & + i, j, n ! horizontal indices + + real (kind=dbl_kind) :: & + rLsub ! 
1/Lsub + + rLsub = c1 / Lsub + + do n = 1, ncat + do j = 1, ny_block + do i = 1, nx_block + if (tmask(i,j) .and. aice(i,j) <= puny) then + fhocn(i,j) = fhocn(i,j) & + + fsurfn_f(i,j,n) + flatn_f(i,j,n) + fresh(i,j) = fresh(i,j) & + + flatn_f(i,j,n) * rLsub + endif + enddo ! i + enddo ! j + enddo ! n + +#endif + end subroutine sfcflux_to_ocn + +!======================================================================= + +! If model is not calculating surface temperature, set the surface +! flux values using values read in from forcing data or supplied via +! coupling (stored in ice_flux). +! +! If CICE is running in NEMO environment, convert fluxes from GBM values +! to per unit ice area values. If model is not running in NEMO environment, +! the forcing is supplied as per unit ice area values. +! +! authors Alison McLaren, Met Office + + subroutine set_sfcflux (nx_block, ny_block, & + n, iblk, & + icells, & + indxi, indxj, & + aicen, & + flatn, & + fsensn, & + fsurfn, & + fcondtopn) + + use ice_fileunits, only: nu_diag + use ice_flux, only: fsurfn_f, fcondtopn_f, flatn_f, fsensn_f + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + n, & ! thickness category index + iblk, & ! block index + icells ! number of cells with aicen > puny + + integer (kind=int_kind), dimension(nx_block*ny_block), & + intent(in) :: & + indxi, indxj ! compressed indices for cells with aicen > puny + + ! ice state variables + real (kind=dbl_kind), dimension (nx_block,ny_block), & + intent(in) :: & + aicen ! concentration of ice + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out):: & + flatn , & ! latent heat flux (W/m^2) + fsensn , & ! sensible heat flux (W/m^2) + fsurfn , & ! net flux to top surface, not including fcondtopn + fcondtopn ! downward cond flux at top surface (W m-2) + + ! local variables + + integer (kind=int_kind) :: & + i, j , & ! horizontal indices + ij ! 
horizontal indices, combine i and j loops + + real (kind=dbl_kind) :: & + raicen ! 1 or 1/aicen + + logical (kind=log_kind) :: & + extreme_flag ! flag for extreme forcing values + + logical (kind=log_kind), parameter :: & + extreme_test=.false. ! test and write out extreme forcing data + + raicen = c1 + do ij = 1, icells + i = indxi(ij) + j = indxj(ij) + +!#ifdef CICE_IN_NEMO +#ifdef ACCESS +!---------------------------------------------------------------------- +! Convert fluxes from GBM values to per ice area values when +! running in NEMO environment. (When in standalone mode, fluxes +! are input as per ice area.) +!---------------------------------------------------------------------- + raicen = c1 / aicen(i,j) +#endif + fsurfn(i,j) = fsurfn_f(i,j,n,iblk)*raicen + fcondtopn(i,j)= fcondtopn_f(i,j,n,iblk)*raicen + flatn(i,j) = flatn_f(i,j,n,iblk)*raicen + fsensn(i,j) = fsensn_f(i,j,n,iblk)*raicen + + enddo + +!---------------------------------------------------------------- +! Flag up any extreme fluxes +!--------------------------------------------------------------- + + if (extreme_test) then + extreme_flag = .false. + + do ij = 1, icells + i = indxi(ij) + j = indxj(ij) + + if (fcondtopn(i,j) < -100.0_dbl_kind & + .or. fcondtopn(i,j) > 20.0_dbl_kind) then + extreme_flag = .true. + endif + + if (fsurfn(i,j) < -100.0_dbl_kind & + .or. fsurfn(i,j) > 80.0_dbl_kind) then + extreme_flag = .true. + endif + + if (flatn(i,j) < -20.0_dbl_kind & + .or. flatn(i,j) > 20.0_dbl_kind) then + extreme_flag = .true. + endif + + enddo ! ij + + if (extreme_flag) then + do ij = 1, icells + i = indxi(ij) + j = indxj(ij) + + if (fcondtopn(i,j) < -100.0_dbl_kind & + .or. fcondtopn(i,j) > 20.0_dbl_kind) then + write(nu_diag,*) & + 'Extreme forcing: -100 > fcondtopn > 20' + write(nu_diag,*) & + 'i,j,n,iblk,aicen,fcondtopn = ', & + i,j,n,iblk,aicen(i,j),fcondtopn(i,j) + endif + + if (fsurfn(i,j) < -100.0_dbl_kind & + .or. 
fsurfn(i,j) > 80.0_dbl_kind) then + write(nu_diag,*) & + 'Extreme forcing: -100 > fsurfn > 40' + write(nu_diag,*) & + 'i,j,n,iblk,aicen,fsurfn = ', & + i,j,n,iblk,aicen(i,j),fsurfn(i,j) + endif + + if (flatn(i,j) < -20.0_dbl_kind & + .or. flatn(i,j) > 20.0_dbl_kind) then + write(nu_diag,*) & + 'Extreme forcing: -20 > flatn > 20' + write(nu_diag,*) & + 'i,j,n,iblk,aicen,flatn = ', & + i,j,n,iblk,aicen(i,j),flatn(i,j) + endif + + enddo ! ij + + endif ! extreme_flag + endif ! extreme_test + + + end subroutine set_sfcflux + +!======================================================================= +!======================================================================= +!BOP +! +! !ROUTINE: top_layer_Tandk_init +! +! !DESCRIPTION: +! +! Hacked version to be called upon initialisation (when we're not +! parallelised) +! Calculate the top layer temperature and conductivity for passing +! to atmosphere model or calculating Tsfc explicitly. +! +! This routine is only called if calc_Tsfc = F and heat_capacity = T. +! +! !REVISION HISTORY: +! +! authors: Alison McLaren, Met Office +! Feb 2014: Modified by Alex West to work in CICE 5.0 +! +! !INTERFACE: + + subroutine top_layer_Tandk_init +! +! !USES: +! + use ice_blocks + use ice_constants + use ice_domain, only: nblocks + use ice_domain_size + use ice_fileunits, only: nu_diag + use ice_flux, only: Tn_top, keffn_top + use ice_itd, only: hs_min + use ice_state, only: aicen, vicen, vsnon, trcrn, nt_qice, nt_sice, nt_qsno + use ice_therm_mushy, only: liquidus_temperature_mush + use ice_therm_shared, only: calculate_ki_from_Tin, calculate_Tin_from_qin, ktherm, Tmlt, conduct + +! +! !INPUT/OUTPUT PARAMETERS: +! +!EOP +! + + integer (kind=int_kind) :: & + iblk , & ! block index + n , & ! thickness category index + i,j ! horizontal indices + + real (kind=dbl_kind) :: & + rnslyr , & ! real(nslyr) + rnilyr , & ! real(nilyr) + hs1 , & ! thickness of top snow layer + ! (so we know whether the top layer is snow or ice) + hi1 , & ! 
thickness of top ice layer + Tmlt1 , & ! melting temperature of top ice layer + ki ! top ice layer conductivity + + + real (kind=dbl_kind) :: & + ki_hold, & + Ti_hold ! debugging variables + + keffn_top(:,:,:,:) = c0 ! initialise + Tn_top(:,:,:,:) = c0 + rnslyr = real(nslyr,kind=dbl_kind) + rnilyr = real(nilyr,kind=dbl_kind) + + do iblk = 1, max_blocks + do n = 1, ncat + do j = 1, ny_block + do i = 1, nx_block + + if (aicen(i,j,n,iblk) > puny) then + + hs1 = vsnon(i,j,n,iblk)/(aicen(i,j,n,iblk)*rnslyr) + + if (hs1 > hs_min/rnslyr) then + + !snow is top layer + Tn_top(i,j,n,iblk) = (Lfresh + trcrn(i,j,nt_qsno,n,iblk) / rhos)/cp_ice + keffn_top(i,j,n,iblk) = c2 * ksno / hs1 + + else + !ice is top layer + hi1 = vicen(i,j,n,iblk)/(aicen(i,j,n,iblk)*rnilyr) + if (ktherm == 2) then + Tmlt1 = liquidus_temperature_mush(trcrn(i,j,nt_sice,n,iblk)) + else + Tmlt1 = - trcrn(i,j,nt_sice,n,iblk) * depressT + endif + + Tn_top(i,j,n,iblk) = & + calculate_Tin_from_qin(trcrn(i,j,nt_qice,n,iblk),Tmlt1) + Ti_hold = calculate_Tin_from_qin(trcrn(i,j,nt_qice,n,iblk),Tmlt1) + ki_hold = calculate_ki_from_Tin(Tn_top(i,j,n,iblk),trcrn(i,j,nt_sice,n,iblk)) + ki = calculate_ki_from_Tin(Tn_top(i,j,n,iblk),trcrn(i,j,nt_sice,n,iblk)) + keffn_top(i,j,n,iblk) = c2 * ki / hi1 + endif + + endif ! aice > puny + enddo ! i + enddo ! i + enddo ! n + enddo ! iblk + + end subroutine top_layer_Tandk_init + +!======================================================================= +!BOP +! +! !ROUTINE: top_layer_Tandk_run +! +! !DESCRIPTION: +! +! Calculate the top layer temperature and conductivity for passing +! to atmosphere model or calculating Tsfc explicitly. +! +! This routine is only called if calc_Tsfc = F and heat_capacity = T. +! +! !REVISION HISTORY: +! +! authors: Alison McLaren, Met Office +! Feb 2014: Modified by Alex West to work in CICE 5.0 +! +! !INTERFACE: + + subroutine top_layer_Tandk_run (iblk) +! +! !USES: +! 
+ use ice_blocks + use ice_constants + use ice_domain, only: nblocks + use ice_domain_size + use ice_fileunits, only: nu_diag + use ice_flux, only: Tn_top, keffn_top + use ice_itd, only: hs_min + use ice_state, only: aicen, vicen, vsnon, trcrn, nt_qice, nt_sice, nt_qsno + use ice_therm_mushy, only: liquidus_temperature_mush + use ice_therm_shared, only: calculate_ki_from_Tin, calculate_Tin_from_qin, ktherm, Tmlt, conduct + +! +! !INPUT/OUTPUT PARAMETERS: +! +!EOP +! + + integer (kind=int_kind), intent(in) :: & + iblk ! block index + integer (kind=int_kind) :: & + n , & ! thickness category index + i,j ! horizontal indices + + real (kind=dbl_kind) :: & + rnslyr , & ! real(nslyr) + rnilyr , & ! real(nilyr) + hs1 , & ! thickness of top snow layer + ! (so we know whether the top layer is snow or ice) + hi1 , & ! thickness of top ice layer + Tmlt1 , & ! melting temperature of top ice layer + ki ! top ice layer conductivity + + + real (kind=dbl_kind) :: & + ki_hold, & + Ti_hold ! debugging variables + + keffn_top(:,:,:,:) = c0 ! 
initialise + Tn_top(:,:,:,:) = c0 + rnslyr = real(nslyr,kind=dbl_kind) + rnilyr = real(nilyr,kind=dbl_kind) + + do n = 1, ncat + do j = 1, ny_block + do i = 1, nx_block + + if (aicen(i,j,n,iblk) > puny) then + + hs1 = vsnon(i,j,n,iblk)/(aicen(i,j,n,iblk)*rnslyr) + + if (hs1 > hs_min/rnslyr) then + + !snow is top layer + Tn_top(i,j,n,iblk) = (Lfresh + trcrn(i,j,nt_qsno,n,iblk) / rhos)/cp_ice + keffn_top(i,j,n,iblk) = c2 * ksno / hs1 + + else + !ice is top layer + hi1 = vicen(i,j,n,iblk)/(aicen(i,j,n,iblk)*rnilyr) + if (ktherm == 2) then + Tmlt1 = liquidus_temperature_mush(trcrn(i,j,nt_sice,n,iblk)) + else + Tmlt1 = - trcrn(i,j,nt_sice,n,iblk) * depressT + endif + + Tn_top(i,j,n,iblk) = & + calculate_Tin_from_qin(trcrn(i,j,nt_qice,n,iblk),Tmlt1) + Ti_hold = calculate_Tin_from_qin(trcrn(i,j,nt_qice,n,iblk),Tmlt1) + ki_hold = calculate_ki_from_Tin(Tn_top(i,j,n,iblk),trcrn(i,j,nt_sice,n,iblk)) + ki = calculate_ki_from_Tin(Tn_top(i,j,n,iblk),trcrn(i,j,nt_sice,n,iblk)) + keffn_top(i,j,n,iblk) = c2 * ki / hi1 + endif + + endif ! aice > puny + enddo ! i + enddo ! i + enddo ! n + + end subroutine top_layer_Tandk_run + +!====================================================================== + + end module ice_coupling + +!====================================================================== diff --git a/drivers/auscom/CICE_InitMod.F90 b/drivers/auscom/CICE_InitMod.F90 index e22646d8..7bf28170 100644 --- a/drivers/auscom/CICE_InitMod.F90 +++ b/drivers/auscom/CICE_InitMod.F90 @@ -108,7 +108,7 @@ subroutine cice_init(accessom2) #ifdef popcice use drv_forcing, only: sst_sss #endif - use version_mod, only: CICE_COMMIT_HASH + use version_mod, only: CICE_VERSION type(accessom2_type), intent(inout) :: accessom2 @@ -268,7 +268,7 @@ subroutine cice_init(accessom2) ! 
Print out my version if (my_task == master_task) then - print*, CICE_COMMIT_HASH + print*, CICE_VERSION call accessom2%print_version_info() endif diff --git a/drivers/auscom/cpl_interface.F90 b/drivers/auscom/cpl_interface.F90 index 8d42679a..969db86f 100644 --- a/drivers/auscom/cpl_interface.F90 +++ b/drivers/auscom/cpl_interface.F90 @@ -149,7 +149,7 @@ subroutine sort_segments(seg_list) do i=1, size(seg_list) - 1 do j=i+1, size(seg_list) - if (seg_list(i).global_offset > seg_list(j).global_offset) then + if (seg_list(i)%global_offset > seg_list(j)%global_offset) then tmp = seg_list(i) seg_list(i) = seg_list(j) seg_list(j) = tmp @@ -199,8 +199,8 @@ subroutine init_cpl(runtime_seconds, coupling_field_timesteps) do j = jlo, jhi ! Oasis uses zero-indexing to define the global offset, hence the final - 1 - part_def(part_idx).global_offset = ((this_block%j_glob(j) - 1) * nx_global) + this_block%i_glob(ilo) - 1 - part_def(part_idx).block_index = n + part_def(part_idx)%global_offset = ((this_block%j_glob(j) - 1) * nx_global) + this_block%i_glob(ilo) - 1 + part_def(part_idx)%block_index = n part_idx = part_idx + 1 enddo enddo @@ -217,7 +217,7 @@ subroutine init_cpl(runtime_seconds, coupling_field_timesteps) oasis_part_def(1) = ORANGE ! The total number of segments oasis_part_def(2) = block_size_y*nblocks - oasis_part_def(3::2) = part_def(:).global_offset + oasis_part_def(3::2) = part_def(:)%global_offset oasis_part_def(4::2) = block_size_x call oasis_def_partition(part_id, oasis_part_def, err, nx_global * ny_global) @@ -425,7 +425,7 @@ subroutine unpack_coupling_array(input, output) offset = 0 do iseg=1, size(part_def) - iblk = part_def(iseg).block_index + iblk = part_def(iseg)%block_index output(isc:iec, blk_seg_num(iblk), iblk) = input((offset + 1):(offset + block_size_x)) @@ -457,7 +457,7 @@ subroutine pack_coupling_array(input, output) ! part_def being sorted so simply incrementing the blk_seg_num gets data ! from the next segmennt. 
do iseg=1, size(part_def) - iblk = part_def(iseg).block_index + iblk = part_def(iseg)%block_index output((offset + 1):(offset + block_size_x)) = input(isc:iec, blk_seg_num(iblk), iblk) diff --git a/drivers/auscom/sat_vapor_pres_mod.F90 b/drivers/auscom/sat_vapor_pres_mod.F90 index eebd4fb6..72fb294e 100644 --- a/drivers/auscom/sat_vapor_pres_mod.F90 +++ b/drivers/auscom/sat_vapor_pres_mod.F90 @@ -711,8 +711,8 @@ subroutine show_all_bad_0d ( temp ) ind = int(dtinv*(temp-tmin+teps)) if (ind < 0 .or. ind > nlim) then -!! write(stdout(),'(a,e,a,i4)') 'Bad temperature=',temp,' pe=',mpp_pe() - write(6,'(a,e,a,i4)') 'ice: Bad temperature=',temp,' pe=',my_task +!! write(stdout(),'(a,e14.7,a,i4)') 'Bad temperature=',temp,' pe=',mpp_pe() + write(6,'(a,e14.7,a,i4)') 'ice: Bad temperature=',temp,' pe=',my_task endif end subroutine show_all_bad_0d @@ -726,8 +726,8 @@ subroutine show_all_bad_1d ( temp ) do i=1,size(temp) ind = int(dtinv*(temp(i)-tmin+teps)) if (ind < 0 .or. ind > nlim) then -!! write(stdout(),'(a,e,a,i4,a,i4)') 'Bad temperature=',temp(i),' at i=',i,' pe=',mpp_pe() - write(6,'(a,e,a,i4,a,i4)') 'ice: Bad temperature=',temp(i),' at i=',i,' pe=',my_task +!! write(stdout(),'(a,e14.7,a,i4,a,i4)') 'Bad temperature=',temp(i),' at i=',i,' pe=',mpp_pe() + write(6,'(a,e14.7,a,i4,a,i4)') 'ice: Bad temperature=',temp(i),' at i=',i,' pe=',my_task endif enddo @@ -743,8 +743,8 @@ subroutine show_all_bad_2d ( temp ) do i=1,size(temp,1) ind = int(dtinv*(temp(i,j)-tmin+teps)) if (ind < 0 .or. ind > nlim) then -!! write(stdout(),'(a,e,a,i4,a,i4,a,i4)') 'Bad temperature=',temp(i,j),' at i=',i,' j=',j,' pe=',mpp_pe() - write(6,'(a,e,a,i4,a,i4,a,i4)') 'ice: Bad temperature=',temp(i,j),' at i=',i,' j=',j,' pe=',my_task +!! 
write(stdout(),'(a,e14.7,a,i4,a,i4,a,i4)') 'Bad temperature=',temp(i,j),' at i=',i,' j=',j,' pe=',mpp_pe() + write(6,'(a,e14.7,a,i4,a,i4,a,i4)') 'ice: Bad temperature=',temp(i,j),' at i=',i,' j=',j,' pe=',my_task endif enddo enddo @@ -762,8 +762,8 @@ subroutine show_all_bad_3d ( temp ) do i=1,size(temp,1) ind = int(dtinv*(temp(i,j,k)-tmin+teps)) if (ind < 0 .or. ind > nlim) then -!! write(stdout(),'(a,e,a,i4,a,i4,a,i4,a,i4)') 'Bad temperature=',temp(i,j,k),' at i=',i,' j=',j,' k=',k,' pe=',mpp_pe() - write(6,'(a,e,a,i4,a,i4,a,i4,a,i4)') 'ice: Bad temperature=',temp(i,j,k),' at i=',i,' j=',j,' k=',k,' pe=',my_task +!! write(stdout(),'(a,e14.7,a,i4,a,i4,a,i4,a,i4)') 'Bad temperature=',temp(i,j,k),' at i=',i,' j=',j,' k=',k,' pe=',mpp_pe() + write(6,'(a,e14.7,a,i4,a,i4,a,i4,a,i4)') 'ice: Bad temperature=',temp(i,j,k),' at i=',i,' j=',j,' k=',k,' pe=',my_task endif enddo enddo diff --git a/drivers/auscom/version.F90.template b/drivers/auscom/version.F90.template index ccfa8c28..f3bb651a 100644 --- a/drivers/auscom/version.F90.template +++ b/drivers/auscom/version.F90.template @@ -6,13 +6,13 @@ module version_mod ! used to build this executable. ! ! This can be read from the command line with the following command: -! $ strings | grep 'CICE_COMMIT_HASH=' +! $ strings | grep 'CICE_VERSION=' ! implicit none private -character (len=*), parameter, public :: CICE_COMMIT_HASH = "CICE_COMMIT_HASH={CICE_COMMIT_HASH}" +character (len=*), parameter, public :: CICE_VERSION = "CICE_VERSION={CICE_VERSION}" contains diff --git a/io_netcdf/ice_history_write.F90 b/io_netcdf/ice_history_write.F90 index 14e27351..d7fc0c06 100644 --- a/io_netcdf/ice_history_write.F90 +++ b/io_netcdf/ice_history_write.F90 @@ -7,7 +7,7 @@ ! Elizabeth C. Hunke and William H. Lipscomb, LANL ! C. M. Bitz, UW ! -! 2004 WHL: Block structure added +! 2004 WHL: Block structure added ! 2006 ECH: Accepted some CCSM code into mainstream CICE ! Added ice_present, aicen, vicen; removed aice1...10, vice1...1. ! 
Added histfreq_n and histfreq='h' options, removed histfreq='w' @@ -38,7 +38,7 @@ module ice_history_write use ice_history_shared use ice_itd, only: hin_max use ice_calendar, only: write_ic, histfreq - + use ice_fileunits, only: nu_diag, ice_stderr, ice_stdout implicit none private @@ -74,7 +74,11 @@ subroutine check(status, msg) character(len=*), intent (in) :: msg if(status /= nf90_noerr) then - call abort_ice('ice: NetCDF error '//trim(nf90_strerror(status)//' '//trim(msg))) + !sometimes the netcdf error string is quite long, so print seperately to prevent overrun + write(nu_diag,*) trim(nf90_strerror(status)) + write (ice_stdout,*) trim(nf90_strerror(status)) + write (ice_stderr,*) trim(nf90_strerror(status)) + call abort_ice('ice: NetCDF error '//trim(msg)) end if end subroutine check @@ -85,244 +89,377 @@ end subroutine check ! ! author: Elizabeth C. Hunke, LANL -subroutine ice_write_hist (ns) + subroutine ice_write_hist (ns) - use ice_calendar, only: time, sec, idate, idate0, & + use ice_calendar, only: time, sec, idate, idate0, & +#ifdef ACCESS + month, daymo, & +#endif dayyr, days_per_year, use_leap_years - use ice_fileunits, only: nu_diag - use ice_restart_shared, only: runid - - integer (kind=int_kind), intent(in) :: ns - - ! 
local variables - - real (kind=dbl_kind), dimension(:,:), allocatable :: work_g1 - real (kind=real_kind), dimension(:,:), allocatable :: work_gr - real (kind=real_kind), dimension(:,:,:), allocatable :: work_gr3 - real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks) :: & - work1 - - integer (kind=int_kind) :: i,k,ic,n,nn, & - ncid,status,imtid,jmtid,kmtidi,kmtids,kmtidb, cmtid,timid,varid, & - nvertexid,ivertex - integer (kind=int_kind), dimension(3) :: dimid - integer (kind=int_kind), dimension(4) :: dimidz - integer (kind=int_kind), dimension(5) :: dimidcz - integer (kind=int_kind), dimension(3) :: dimid_nverts - integer (kind=int_kind), dimension(4) :: dimidex - real (kind=real_kind) :: ltime - character (char_len) :: title - character (char_len_long) :: ncfile(max_nstrm) - - integer (kind=int_kind) :: shuffle, deflate, deflate_level - - integer (kind=int_kind) :: ind,boundid - - character (char_len) :: start_time,current_date,current_time - character (len=8) :: cdate - - TYPE(req_attributes), dimension(nvar) :: var - TYPE(coord_attributes), dimension(ncoord) :: coord_var - TYPE(coord_attributes), dimension(nvar_verts) :: var_nverts - TYPE(coord_attributes), dimension(nvarz) :: var_nz - CHARACTER (char_len), dimension(ncoord) :: coord_bounds - - ! We leave shuffle at 0, this is only useful for integer data. - shuffle = 0 - - ! If history_deflate_level < 0 then don't do deflation, - ! otherwise it sets the deflate level - if (history_deflate_level < 0) then - deflate = 0 - deflate_level = 0 - else - deflate = 1 - deflate_level = history_deflate_level - endif - - if (my_task == master_task .or. history_parallel_io) then + integer (kind=int_kind), intent(in) :: ns !history stream number + + ! 
local variables + + real (kind=real_kind) :: ltime !history timestamp in seconds + character (char_len_long) :: ncfile(max_nstrm) !filenames + character (char_len) :: time_string !model time for logging + logical :: file_exists + integer (kind=int_kind) :: & + ncid, & ! netcdf id + varid, & + i_time, & ! time index + timid ! time var id + + TYPE(req_attributes), dimension(nvar) :: var + TYPE(coord_attributes), dimension(ncoord) :: coord_var + TYPE(coord_attributes), dimension(nvar_verts) :: var_nverts + TYPE(coord_attributes), dimension(nvarz) :: var_nz + + if (my_task == master_task .or. history_parallel_io) then +#if defined(ACCESS) + ! set timestamp in middle of time interval + if (histfreq(ns) == 'm' .or. histfreq(ns) == 'M') then + if (month /= 1) then + ltime=time/int(secday)-real(daymo(month-1))/2.0 + else + ltime=time/int(secday)-real(daymo(12))/2.0 + endif + else if(histfreq(ns) == 'd' .or. histfreq(ns) == 'D') then + ltime=time/int(secday) - 0.5 + else + ltime=time/int(secday) + endif +#else ltime=time/int(secday) +#endif - call construct_filename(ncfile(ns),'nc',ns) + call construct_filename(ncfile(ns),'nc',ns,time_string) ! add local directory path name to ncfile if (write_ic) then - ncfile(ns) = trim(incond_dir)//ncfile(ns) + ncfile(ns) = trim(incond_dir)//ncfile(ns) else - ncfile(ns) = trim(history_dir)//ncfile(ns) + ncfile(ns) = trim(history_dir)//ncfile(ns) endif - ! create file - if (history_parallel_io) then - call check(nf90_create(ncfile(ns), ior(NF90_NETCDF4, NF90_MPIIO), ncid, & - comm=MPI_COMM_ICE, info=MPI_INFO_NULL), & - 'create history ncfile '//ncfile(ns)) - if (.not. equal_num_blocks_per_cpu) then - call abort_ice('ice: error history_parallel_io needs equal_num_blocks_per_cpu') - endif + inquire(file=trim(ncfile(ns)),exist=file_exists) + if (.not. 
file_exists) then + call ice_hist_create(ns, ncfile(ns), ncid, var, coord_var, var_nverts, var_nz) + write(nu_diag,*) 'Created:'//trim(ncfile(ns)) else - call check(nf90_create(ncfile(ns), ior(NF90_CLASSIC_MODEL, NF90_HDF5), ncid), & - 'create history ncfile '//ncfile(ns)) + if (history_parallel_io) then + call check(nf90_open(trim(ncfile(ns)), NF90_WRITE, ncid, & + comm=MPI_COMM_ICE, info=MPI_INFO_NULL), & + 'parallel open existing history file '//ncfile(ns)) + else + call check(nf90_open(trim(ncfile(ns)), NF90_WRITE, ncid), & + "opening existing history file "//ncfile(ns)) + endif endif - !----------------------------------------------------------------- - ! define dimensions - !----------------------------------------------------------------- + !----------------------------------------------------------------- + ! write time variable + !----------------------------------------------------------------- + call check(nf90_inq_dimid(ncid, 'time', timid), & + 'inq dimid time') + call check(nf90_inquire_dimension(ncid, timid, len=i_time), & + 'inquire dim time') + call check(nf90_inq_varid(ncid,'time',varid), & + 'inq varid time') + if (history_parallel_io) then + ! unlimited dimensions need to have collective access set + call check(nf90_var_par_access(ncid, varid, NF90_COLLECTIVE), & + 'parallel access time') + endif + i_time = i_time + 1 ! index of the current history time + call check(nf90_put_var(ncid,varid,ltime,start=(/i_time/)), & + 'put var time') + + !----------------------------------------------------------------- + ! 
write time_bounds info + !----------------------------------------------------------------- if (hist_avg) then - call check(nf90_def_dim(ncid,'d2',2,boundid), 'def dim d2') + call check(nf90_inq_varid(ncid,'time_bounds',varid), & + 'inq varid time_bounds') + if (history_parallel_io) then + call check(nf90_var_par_access(ncid, varid, NF90_COLLECTIVE), & + 'parallel access time_bounds') + endif + call check(nf90_put_var(ncid,varid,time_beg(ns),start=(/1,i_time/)), & + 'put var time_bounds beginning') + call check(nf90_put_var(ncid,varid,time_end(ns),start=(/2,i_time/)), & + 'put var time_bounds end') endif + endif ! master_task or history_parallel_io + + call broadcast_scalar(i_time, master_task) !we need this on every processor for parallel writes + if (i_time == 1) then + ! these variables are time-invariant, only write once per file + ! ice_hist_create is only run on master task, but these variables are distributed, so call on all tasks + !----------------------------------------------------------------- + ! write coordinate variables + !----------------------------------------------------------------- + if (history_parallel_io) then + call write_coordinate_variables_parallel(ncid, coord_var, var_nz) + else + call write_coordinate_variables(ncid, coord_var, var_nz) + endif + + !----------------------------------------------------------------- + ! 
write grid masks, area and rotation angle + !----------------------------------------------------------------- + if (history_parallel_io) then + call write_grid_variables_parallel(ncid, var, var_nverts) + else + call write_grid_variables(ncid, var, var_nverts) + endif - call check(nf90_def_dim(ncid, 'ni', nx_global, imtid), & - 'def dim ni') - call check(nf90_def_dim(ncid, 'nj', ny_global, jmtid), & - 'def dim nj') - call check(nf90_def_dim(ncid, 'nc', ncat_hist, cmtid), & - 'def dim nc') - call check(nf90_def_dim(ncid, 'nkice', nzilyr, kmtidi), & - 'def dim nkice') - call check(nf90_def_dim(ncid, 'nksnow', nzslyr, kmtids), & - 'def dim nksnow') - call check(nf90_def_dim(ncid, 'nkbio', nzblyr, kmtidb), & - 'def dim nkbio') - call check(nf90_def_dim(ncid, 'time', 1, timid), & - 'def dim time') - call check(nf90_def_dim(ncid, 'nvertices', nverts, nvertexid), & - 'def dim nverts') + endif + !----------------------------------------------------------------- + ! write 2d variable data + !----------------------------------------------------------------- - !----------------------------------------------------------------- - ! 
define coordinate variables - !----------------------------------------------------------------- + if (history_parallel_io) then + call write_2d_variables_parallel(ns, ncid, i_time) + else + call write_2d_variables(ns, ncid, i_time) + endif - call check(nf90_def_var(ncid,'time',nf90_float,timid,varid), & - 'def var time') - call check(nf90_put_att(ncid,varid,'long_name','model time'), & - 'put_att long_name') + if (history_parallel_io) then + call write_3d_and_4d_variables_parallel(ns, ncid, i_time) + else + call write_3d_and_4d_variables(ns, ncid, i_time) + endif - write(cdate,'(i8.8)') idate0 - write(title,'(a,a,a,a,a,a,a,a)') 'days since ', & - cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' - call check(nf90_put_att(ncid,varid,'units',title), & - 'put_att time units') - - if (days_per_year == 360) then - call check(nf90_put_att(ncid,varid,'calendar','360_day'), & - 'att time calendar') - elseif (days_per_year == 365 .and. .not.use_leap_years ) then - call check(nf90_put_att(ncid,varid,'calendar','NoLeap'), & - 'att time calendar') - elseif (use_leap_years) then - call check(nf90_put_att(ncid,varid,'calendar','Gregorian'), & - 'att time calendar') - else - call abort_ice( 'ice Error: invalid calendar settings') - endif + !----------------------------------------------------------------- + ! close output dataset + !----------------------------------------------------------------- - if (hist_avg) then - call check(nf90_put_att(ncid,varid,'bounds','time_bounds'), & - 'att time bounds') - endif + if (my_task == master_task .or. history_parallel_io) then + call check(nf90_close(ncid), 'closing netCDF history file') + write(nu_diag,*) 'Wrote ',trim(ncfile(ns)),' at time ',trim(time_string) + endif - !----------------------------------------------------------------- - ! 
Define attributes for time bounds if hist_avg is true - !----------------------------------------------------------------- +end subroutine ice_write_hist - if (hist_avg) then - dimid(1) = boundid - dimid(2) = timid - call check(nf90_def_var(ncid, 'time_bounds', & - nf90_float,dimid(1:2),varid), & - 'def var time_bounds') - - call check(nf90_put_att(ncid,varid,'long_name', & - 'boundaries for time-averaging interval'), & - 'att time_bounds long_name') - write(cdate,'(i8.8)') idate0 - write(title,'(a,a,a,a,a,a,a,a)') 'days since ', & - cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' - call check(nf90_put_att(ncid,varid,'units',title), & - 'att time_bounds units') - endif +subroutine ice_hist_create(ns, ncfile, ncid, var, coord_var, var_nverts, var_nz) - !----------------------------------------------------------------- - ! define information for required time-invariant variables - !----------------------------------------------------------------- + use ice_calendar, only: idate, idate0, & + dayyr, days_per_year, use_leap_years + use ice_restart_shared, only: runid - ind = 0 - ind = ind + 1 - coord_var(ind) = coord_attributes('TLON', & - 'T grid center longitude', 'degrees_east') - coord_bounds(ind) = 'lont_bounds' - ind = ind + 1 - coord_var(ind) = coord_attributes('TLAT', & - 'T grid center latitude', 'degrees_north') - coord_bounds(ind) = 'latt_bounds' - ind = ind + 1 - coord_var(ind) = coord_attributes('ULON', & - 'U grid center longitude', 'degrees_east') - coord_bounds(ind) = 'lonu_bounds' - ind = ind + 1 - coord_var(ind) = coord_attributes('ULAT', & - 'U grid center latitude', 'degrees_north') - coord_bounds(ind) = 'latu_bounds' - - var_nz(1) = coord_attributes('NCAT', 'category maximum thickness', 'm') - var_nz(2) = coord_attributes('VGRDi', 'vertical ice levels', '1') - var_nz(3) = coord_attributes('VGRDs', 'vertical snow levels', '1') - var_nz(4) = coord_attributes('VGRDb', 'vertical ice-bio levels', '1') + integer (kind=int_kind), intent(in) :: ns + 
character (char_len_long), intent(in) :: ncfile + integer (kind=int_kind), intent(out) :: ncid + TYPE(req_attributes), dimension(nvar), intent(inout) :: var + TYPE(coord_attributes), dimension(ncoord), intent(inout) :: coord_var + TYPE(coord_attributes), dimension(nvar_verts), intent(inout) :: var_nverts + TYPE(coord_attributes), dimension(nvarz), intent(inout) :: var_nz - !----------------------------------------------------------------- - ! define information for optional time-invariant variables - !----------------------------------------------------------------- + ! local variables - var(n_tarea)%req = coord_attributes('tarea', & - 'area of T grid cells', 'm^2') - var(n_tarea)%coordinates = 'TLON TLAT' - var(n_uarea)%req = coord_attributes('uarea', & - 'area of U grid cells', 'm^2') - var(n_uarea)%coordinates = 'ULON ULAT' - var(n_dxt)%req = coord_attributes('dxt', & - 'T cell width through middle', 'm') - var(n_dxt)%coordinates = 'TLON TLAT' - var(n_dyt)%req = coord_attributes('dyt', & - 'T cell height through middle', 'm') - var(n_dyt)%coordinates = 'TLON TLAT' - var(n_dxu)%req = coord_attributes('dxu', & - 'U cell width through middle', 'm') - var(n_dxu)%coordinates = 'ULON ULAT' - var(n_dyu)%req = coord_attributes('dyu', & - 'U cell height through middle', 'm') - var(n_dyu)%coordinates = 'ULON ULAT' - var(n_HTN)%req = coord_attributes('HTN', & - 'T cell width on North side','m') - var(n_HTN)%coordinates = 'TLON TLAT' - var(n_HTE)%req = coord_attributes('HTE', & - 'T cell width on East side', 'm') - var(n_HTE)%coordinates = 'TLON TLAT' - var(n_ANGLE)%req = coord_attributes('ANGLE', & - 'angle grid makes with latitude line on U grid', & - 'radians') - var(n_ANGLE)%coordinates = 'ULON ULAT' - var(n_ANGLET)%req = coord_attributes('ANGLET', & - 'angle grid makes with latitude line on T grid', & - 'radians') - var(n_ANGLET)%coordinates = 'TLON TLAT' - - ! These fields are required for CF compliance - ! 
dimensions (nx,ny,nverts) - var_nverts(n_lont_bnds) = coord_attributes('lont_bounds', & - 'longitude boundaries of T cells', 'degrees_east') - var_nverts(n_latt_bnds) = coord_attributes('latt_bounds', & - 'latitude boundaries of T cells', 'degrees_north') - var_nverts(n_lonu_bnds) = coord_attributes('lonu_bounds', & - 'longitude boundaries of U cells', 'degrees_east') - var_nverts(n_latu_bnds) = coord_attributes('latu_bounds', & - 'latitude boundaries of U cells', 'degrees_north') + integer (kind=int_kind) :: i,k,ic,n,nn, & + status,imtid,jmtid,kmtidi,kmtids,kmtidb, cmtid,timid,varid, & + nvertexid,ivertex + integer (kind=int_kind), dimension(3) :: dimid, dimid_nverts + integer (kind=int_kind), dimension(4) :: dimidz, dimidex + integer (kind=int_kind), dimension(5) :: dimidcz + + integer (kind=int_kind) :: shuffle, deflate, deflate_level ! compression settings + + integer (kind=int_kind) :: ind,boundid + + character (char_len) :: title, start_time,current_date,current_time + character (len=8) :: cdate - !----------------------------------------------------------------- - ! define attributes for time-invariant variables - !----------------------------------------------------------------- + + CHARACTER (char_len), dimension(ncoord) :: coord_bounds + + ! We leave shuffle at 0, this is only useful for integer data. + shuffle = 0 + + ! If history_deflate_level < 0 then don't do deflation, + ! otherwise it sets the deflate level + if (history_deflate_level < 0) then + deflate = 0 + deflate_level = 0 + else + deflate = 1 + deflate_level = history_deflate_level + endif + + ! create file + if (history_parallel_io) then + call check(nf90_create(ncfile, ior(NF90_NETCDF4, NF90_MPIIO), ncid, & + comm=MPI_COMM_ICE, info=MPI_INFO_NULL), & + 'create history ncfile '//ncfile) + if (.not. 
equal_num_blocks_per_cpu) then + call abort_ice('ice: error history_parallel_io needs equal_num_blocks_per_cpu') + endif + else + call check(nf90_create(ncfile, ior(NF90_CLASSIC_MODEL, NF90_HDF5), ncid), & + 'create history ncfile '//ncfile) + endif + + !----------------------------------------------------------------- + ! define dimensions + !----------------------------------------------------------------- + + if (hist_avg) then + call check(nf90_def_dim(ncid,'d2',2,boundid), 'def dim d2') + endif + + call check(nf90_def_dim(ncid, 'ni', nx_global, imtid), & + 'def dim ni') + call check(nf90_def_dim(ncid, 'nj', ny_global, jmtid), & + 'def dim nj') + call check(nf90_def_dim(ncid, 'nc', ncat_hist, cmtid), & + 'def dim nc') + call check(nf90_def_dim(ncid, 'nkice', nzilyr, kmtidi), & + 'def dim nkice') + call check(nf90_def_dim(ncid, 'nksnow', nzslyr, kmtids), & + 'def dim nksnow') + call check(nf90_def_dim(ncid, 'nkbio', nzblyr, kmtidb), & + 'def dim nkbio') + call check(nf90_def_dim(ncid, 'time', NF90_UNLIMITED, timid), & + 'def dim time') + call check(nf90_def_dim(ncid, 'nvertices', nverts, nvertexid), & + 'def dim nverts') + + !----------------------------------------------------------------- + ! define coordinate variables + !----------------------------------------------------------------- + + call check(nf90_def_var(ncid,'time',nf90_float,timid,varid), & + 'def var time') + call check(nf90_put_att(ncid,varid,'long_name','model time'), & + 'put_att long_name') + + write(cdate,'(i8.8)') idate0 + write(title,'(a,a,a,a,a,a,a,a)') 'days since ', & + cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' + call check(nf90_put_att(ncid,varid,'units',title), & + 'put_att time units') + + if (days_per_year == 360) then + call check(nf90_put_att(ncid,varid,'calendar','360_day'), & + 'att time calendar') + elseif (days_per_year == 365 .and. 
.not.use_leap_years ) then + call check(nf90_put_att(ncid,varid,'calendar','NoLeap'), & + 'att time calendar') + elseif (use_leap_years) then + call check(nf90_put_att(ncid,varid,'calendar','proleptic_gregorian'), & + 'att time calendar') + else + call abort_ice( 'ice Error: invalid calendar settings') + endif + + if (hist_avg) then + call check(nf90_put_att(ncid,varid,'bounds','time_bounds'), & + 'att time bounds') + endif + + !----------------------------------------------------------------- + ! Define attributes for time bounds if hist_avg is true + !----------------------------------------------------------------- + + if (hist_avg) then + dimid(1) = boundid + dimid(2) = timid + call check(nf90_def_var(ncid, 'time_bounds', & + nf90_float,dimid(1:2),varid), & + 'def var time_bounds') + + call check(nf90_put_att(ncid,varid,'long_name', & + 'boundaries for time-averaging interval'), & + 'att time_bounds long_name') + write(cdate,'(i8.8)') idate0 + write(title,'(a,a,a,a,a,a,a,a)') 'days since ', & + cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' + call check(nf90_put_att(ncid,varid,'units',title), & + 'att time_bounds units') + endif + + !----------------------------------------------------------------- + ! 
define information for required time-invariant variables + !----------------------------------------------------------------- + + ind = 0 + ind = ind + 1 + coord_var(ind) = coord_attributes('TLON', & + 'T grid center longitude', 'degrees_east') + coord_bounds(ind) = 'lont_bounds' + ind = ind + 1 + coord_var(ind) = coord_attributes('TLAT', & + 'T grid center latitude', 'degrees_north') + coord_bounds(ind) = 'latt_bounds' + ind = ind + 1 + coord_var(ind) = coord_attributes('ULON', & + 'U grid center longitude', 'degrees_east') + coord_bounds(ind) = 'lonu_bounds' + ind = ind + 1 + coord_var(ind) = coord_attributes('ULAT', & + 'U grid center latitude', 'degrees_north') + coord_bounds(ind) = 'latu_bounds' + + var_nz(1) = coord_attributes('NCAT', 'category maximum thickness', 'm') + var_nz(2) = coord_attributes('VGRDi', 'vertical ice levels', '1') + var_nz(3) = coord_attributes('VGRDs', 'vertical snow levels', '1') + var_nz(4) = coord_attributes('VGRDb', 'vertical ice-bio levels', '1') + + !----------------------------------------------------------------- + ! 
define information for optional time-invariant variables + !----------------------------------------------------------------- + + var(n_tarea)%req = coord_attributes('tarea', & + 'area of T grid cells', 'm^2') + var(n_tarea)%coordinates = 'TLON TLAT' + var(n_uarea)%req = coord_attributes('uarea', & + 'area of U grid cells', 'm^2') + var(n_uarea)%coordinates = 'ULON ULAT' + var(n_dxt)%req = coord_attributes('dxt', & + 'T cell width through middle', 'm') + var(n_dxt)%coordinates = 'TLON TLAT' + var(n_dyt)%req = coord_attributes('dyt', & + 'T cell height through middle', 'm') + var(n_dyt)%coordinates = 'TLON TLAT' + var(n_dxu)%req = coord_attributes('dxu', & + 'U cell width through middle', 'm') + var(n_dxu)%coordinates = 'ULON ULAT' + var(n_dyu)%req = coord_attributes('dyu', & + 'U cell height through middle', 'm') + var(n_dyu)%coordinates = 'ULON ULAT' + var(n_HTN)%req = coord_attributes('HTN', & + 'T cell width on North side','m') + var(n_HTN)%coordinates = 'TLON TLAT' + var(n_HTE)%req = coord_attributes('HTE', & + 'T cell width on East side', 'm') + var(n_HTE)%coordinates = 'TLON TLAT' + var(n_ANGLE)%req = coord_attributes('ANGLE', & + 'angle grid makes with latitude line on U grid', & + 'radians') + var(n_ANGLE)%coordinates = 'ULON ULAT' + var(n_ANGLET)%req = coord_attributes('ANGLET', & + 'angle grid makes with latitude line on T grid', & + 'radians') + var(n_ANGLET)%coordinates = 'TLON TLAT' + + ! These fields are required for CF compliance + ! 
dimensions (nx,ny,nverts) + var_nverts(n_lont_bnds) = coord_attributes('lont_bounds', & + 'longitude boundaries of T cells', 'degrees_east') + var_nverts(n_latt_bnds) = coord_attributes('latt_bounds', & + 'latitude boundaries of T cells', 'degrees_north') + var_nverts(n_lonu_bnds) = coord_attributes('lonu_bounds', & + 'longitude boundaries of U cells', 'degrees_east') + var_nverts(n_latu_bnds) = coord_attributes('latu_bounds', & + 'latitude boundaries of U cells', 'degrees_north') + + !----------------------------------------------------------------- + ! define attributes for time-invariant variables + !----------------------------------------------------------------- dimid(1) = imtid dimid(2) = jmtid @@ -366,11 +503,11 @@ subroutine ice_write_hist (ns) enddo - ! Extra dimensions (NCAT, NZILYR, NZSLYR, NZBLYR) - dimidex(1)=cmtid - dimidex(2)=kmtidi - dimidex(3)=kmtids - dimidex(4)=kmtidb + ! Extra dimensions (NCAT, NZILYR, NZSLYR, NZBLYR) + dimidex(1)=cmtid + dimidex(2)=kmtidi + dimidex(3)=kmtids + dimidex(4)=kmtidb do i = 1, nvarz if (igrdz(i)) then @@ -382,9 +519,9 @@ subroutine ice_write_hist (ns) deflate_level), & 'deflate '//var_nz(i)%short_name) - call check(nf90_put_att(ncid,varid,'long_name',var_nz(i)%long_name), & + call check(nf90_put_att(ncid,varid,'long_name',var_nz(i)%long_name), & 'put att long_name '//var_nz(i)%short_name) - call check(nf90_put_att(ncid, varid, 'units', var_nz(i)%units), & + call check(nf90_put_att(ncid, varid, 'units', var_nz(i)%units), & 'for att units '//var_nz(i)%short_name) endif enddo @@ -940,25 +1077,21 @@ subroutine ice_write_hist (ns) call check(nf90_put_att(ncid,nf90_global,'source',title), & 'global attribute source') -#ifdef AusCOM +#if defined(AUSCOM) && !defined(ACCESS) write(title,'(a,i3,a)') 'This Year Has ',int(dayyr),' days' #else if (use_leap_years) then - write(title,'(a,i3,a)') 'This year has ',int(dayyr),' days' + write(title,'(a,i3,a)') 'This year has ',int(dayyr),' days' else - write(title,'(a,i3,a)') 'All 
years have exactly ',int(dayyr),' days' + write(title,'(a,i3,a)') 'All years have exactly ',int(dayyr),' days' endif #endif call check(nf90_put_att(ncid,nf90_global,'comment',title), & 'global attribute comment') - write(title,'(a,i8.8)') 'File written on model date ',idate + write(title,'(a,i8.8)') 'File started on model date ',idate call check(nf90_put_att(ncid,nf90_global,'comment2',title), & - 'global attribute date1') - - write(title,'(a,i6)') 'seconds elapsed into model date: ',sec - call check(nf90_put_att(ncid,nf90_global,'comment3',title), & - 'global attribute date2') + 'global attribute comment2') title = 'CF-1.0' call check(nf90_put_att(ncid,nf90_global,'conventions',title), & @@ -983,77 +1116,8 @@ subroutine ice_write_hist (ns) call check(nf90_enddef(ncid), 'enddef') - !----------------------------------------------------------------- - ! write time variable - !----------------------------------------------------------------- - - call check(nf90_inq_varid(ncid,'time',varid), & - 'inq varid time') - call check(nf90_put_var(ncid,varid,ltime), & - 'put var time') +end subroutine ice_hist_create - !----------------------------------------------------------------- - ! write time_bounds info - !----------------------------------------------------------------- - - if (hist_avg) then - call check(nf90_inq_varid(ncid,'time_bounds',varid), & - 'inq varid time_bounds') - call check(nf90_put_var(ncid,varid,time_beg(ns),start=(/1/)), & - 'put var time_bounds beginning') - call check(nf90_put_var(ncid,varid,time_end(ns),start=(/2/)), & - 'put var time_bounds end') - endif - endif ! master_task or history_parallel_io - - !----------------------------------------------------------------- - ! 
write coordinate variables - !----------------------------------------------------------------- - - if (history_parallel_io) then - call write_coordinate_variables_parallel(ncid, coord_var, var_nz) - else - call write_coordinate_variables(ncid, coord_var, var_nz) - endif - - !----------------------------------------------------------------- - ! write grid masks, area and rotation angle - !----------------------------------------------------------------- - - if (history_parallel_io) then - call write_grid_variables_parallel(ncid, var, var_nverts) - else - call write_grid_variables(ncid, var, var_nverts) - endif - - - !----------------------------------------------------------------- - ! write 2d variable data - !----------------------------------------------------------------- - - if (history_parallel_io) then - call write_2d_variables_parallel(ns, ncid) - else - call write_2d_variables(ns, ncid) - endif - - if (history_parallel_io) then - call write_3d_and_4d_variables_parallel(ns, ncid) - else - call write_3d_and_4d_variables(ns, ncid) - endif - - !----------------------------------------------------------------- - ! close output dataset - !----------------------------------------------------------------- - - if (my_task == master_task .or. history_parallel_io) then - call check(nf90_close(ncid), 'closing netCDF history file') - write(nu_diag,*) ' ' - write(nu_diag,*) 'Finished writing ',trim(ncfile(ns)) - endif - -end subroutine ice_write_hist subroutine write_coordinate_variables(ncid, coord_var, var_nz) @@ -1069,69 +1133,70 @@ subroutine write_coordinate_variables(ncid, coord_var, var_nz) integer :: varid character (len=len(coord_var(1)%short_name)) :: coord_var_name - if (my_task==master_task) then - allocate(work_g1(nx_global,ny_global)) - allocate(work_gr(nx_global,ny_global)) - else - allocate(work_g1(1,1)) - allocate(work_gr(1,1)) ! 
to save memory - endif + if (my_task==master_task) then + allocate(work_g1(nx_global,ny_global)) + allocate(work_gr(nx_global,ny_global)) + else + allocate(work_g1(1,1)) + allocate(work_gr(1,1)) ! to save memory + endif - work_g1(:,:) = c0 + work_g1(:,:) = c0 - do i = 1,ncoord - coord_var_name = coord_var(i)%short_name + do i = 1,ncoord + if (my_task == master_task) coord_var_name = coord_var(i)%short_name call broadcast_scalar(coord_var_name, master_task) SELECT CASE (coord_var_name) - CASE ('TLON') - ! Convert T grid longitude from -180 -> 180 to 0 to 360 - work1 = TLON*rad_to_deg + c360 - where (work1 > c360) work1 = work1 - c360 - where (work1 < c0 ) work1 = work1 + c360 - call gather_global(work_g1,work1,master_task,distrb_info) - CASE ('TLAT') - work1 = TLAT*rad_to_deg - call gather_global(work_g1,work1,master_task,distrb_info) - CASE ('ULON') - work1 = ULON*rad_to_deg - call gather_global(work_g1,work1,master_task,distrb_info) - CASE ('ULAT') - work1 = ULAT*rad_to_deg - call gather_global(work_g1,work1,master_task,distrb_info) - END SELECT - - if (my_task == master_task) then + CASE ('TLON') + ! Convert T grid longitude from -180 -> 180 to 0 to 360 + work1 = TLON*rad_to_deg + c360 + where (work1 > c360) work1 = work1 - c360 + where (work1 < c0 ) work1 = work1 + c360 + call gather_global(work_g1,work1,master_task,distrb_info) + CASE ('TLAT') + work1 = TLAT*rad_to_deg + call gather_global(work_g1,work1,master_task,distrb_info) + CASE ('ULON') + work1 = ULON*rad_to_deg + call gather_global(work_g1,work1,master_task,distrb_info) + CASE ('ULAT') + work1 = ULAT*rad_to_deg + call gather_global(work_g1,work1,master_task,distrb_info) + END SELECT + + if (my_task == master_task) then work_gr = work_g1 call check(nf90_inq_varid(ncid, coord_var_name, varid), & 'inq varid '//coord_var_name) call check(nf90_put_var(ncid,varid,work_gr), & 'put var '//coord_var_name) - endif - enddo + endif + enddo - ! Extra dimensions (NCAT, VGRD*) + ! 
Extra dimensions (NCAT, VGRD*) - do i = 1, nvarz - if (igrdz(i)) then - call broadcast_scalar(var_nz(i)%short_name,master_task) - if (my_task == master_task) then - call check(nf90_inq_varid(ncid, var_nz(i)%short_name, varid), & - 'inq varid '//var_nz(i)%short_name) - SELECT CASE (var_nz(i)%short_name) - CASE ('NCAT') - status = nf90_put_var(ncid,varid,hin_max(1:ncat_hist)) - CASE ('VGRDi') ! index - needed for Met Office analysis code - status = nf90_put_var(ncid,varid,(/(k, k=1,nzilyr)/)) - CASE ('VGRDs') ! index - needed for Met Office analysis code - status = nf90_put_var(ncid,varid,(/(k, k=1,nzslyr)/)) - CASE ('VGRDb') - status = nf90_put_var(ncid,varid,(/(k, k=1,nzblyr)/)) - END SELECT - if (status /= nf90_noerr) call abort_ice( & - 'ice: Error writing'//var_nz(i)%short_name) - endif - endif - enddo + do i = 1, nvarz + if (my_task == master_task) coord_var_name = var_nz(i)%short_name + if (igrdz(i)) then + call broadcast_scalar(coord_var_name,master_task) + if (my_task == master_task) then + call check(nf90_inq_varid(ncid, coord_var_name, varid), & + 'inq varid '//coord_var_name) + SELECT CASE (coord_var_name) + CASE ('NCAT') + status = nf90_put_var(ncid,varid,hin_max(1:ncat_hist)) + CASE ('VGRDi') ! index - needed for Met Office analysis code + status = nf90_put_var(ncid,varid,(/(k, k=1,nzilyr)/)) + CASE ('VGRDs') ! 
index - needed for Met Office analysis code + status = nf90_put_var(ncid,varid,(/(k, k=1,nzslyr)/)) + CASE ('VGRDb') + status = nf90_put_var(ncid,varid,(/(k, k=1,nzblyr)/)) + END SELECT + if (status /= nf90_noerr) call abort_ice( & + 'ice: Error writing'//coord_var_name) + endif + endif + enddo deallocate(work_g1) deallocate(work_gr) @@ -1139,7 +1204,6 @@ subroutine write_coordinate_variables(ncid, coord_var, var_nz) end subroutine write_coordinate_variables - subroutine write_grid_variables(ncid, var, var_nverts) integer, intent(in) :: ncid @@ -1170,9 +1234,9 @@ subroutine write_grid_variables(ncid, var, var_nverts) work_gr(:,:) = c0 work_gr3(:,:,:) = c0 - if (igrd(n_tmask)) then - call gather_global(work_g1, hm, master_task, distrb_info) - if (my_task == master_task) then + if (igrd(n_tmask)) then + call gather_global(work_g1, hm, master_task, distrb_info) + if (my_task == master_task) then work_gr = work_g1 call check(nf90_inq_varid(ncid, 'tmask', varid), & 'inq var tmask') @@ -1284,10 +1348,9 @@ subroutine write_grid_variables(ncid, var, var_nverts) end subroutine write_grid_variables -subroutine write_2d_variables(ns, ncid) +subroutine write_2d_variables(ns, ncid, i_time) - integer, intent(in) :: ns - integer, intent(in) :: ncid + integer, intent(in) :: ns, ncid, i_time real (kind=dbl_kind), dimension(:,:), allocatable :: work_g1 real (kind=real_kind), dimension(:,:), allocatable :: work_gr @@ -1297,29 +1360,30 @@ subroutine write_2d_variables(ns, ncid) if (my_task == master_task) then allocate(work_g1(nx_global,ny_global)) - allocate(work_gr(nx_global,ny_global)) - else + allocate(work_gr(nx_global,ny_global)) + else allocate(work_g1(1,1)) - allocate(work_gr(1,1)) ! to save memory - endif + allocate(work_gr(1,1)) ! to save memory + endif - work_gr(:,:) = c0 - work_g1(:,:) = c0 + work_gr(:,:) = c0 + work_g1(:,:) = c0 do n=1, num_avail_hist_fields_2D if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. 
write_ic) then - call gather_global(work_g1, a2D(:,:,n,:), & - master_task, distrb_info) - if (my_task == master_task) then - work_gr(:,:) = work_g1(:,:) + call gather_global(work_g1, a2D(:,:,n,:), & + master_task, distrb_info) + if (my_task == master_task) then + work_gr(:,:) = work_g1(:,:) call check(nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid), & 'inq varid '//avail_hist_fields(n)%vname) call check(nf90_put_var(ncid,varid,work_gr(:,:), & + start=(/1,1,i_time/), & count=(/nx_global,ny_global/)), & 'put var '//avail_hist_fields(n)%vname) - endif + endif endif - enddo ! num_avail_hist_fields_2D + enddo ! num_avail_hist_fields_2D deallocate(work_g1) deallocate(work_gr) @@ -1327,10 +1391,9 @@ subroutine write_2d_variables(ns, ncid) end subroutine write_2d_variables -subroutine write_3d_and_4d_variables(ns, ncid) +subroutine write_3d_and_4d_variables(ns, ncid, i_time) - integer, intent(in) :: ns - integer, intent(in) :: ncid + integer, intent(in) :: ns, ncid, i_time real (kind=dbl_kind), dimension(:,:), allocatable :: work_g1 real (kind=real_kind), dimension(:,:), allocatable :: work_gr @@ -1346,161 +1409,161 @@ subroutine write_3d_and_4d_variables(ns, ncid) allocate(work_gr(1,1)) ! to save memory endif - work_gr(:,:) = c0 - work_g1(:,:) = c0 + work_gr(:,:) = c0 + work_g1(:,:) = c0 - do n = n2D + 1, n3Dccum + do n = n2D + 1, n3Dccum nn = n - n2D if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. 
write_ic) then - if (my_task == master_task) then + if (my_task == master_task) then call check(nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid), & 'inq varid '//avail_hist_fields(n)%vname) - endif - do k = 1, ncat_hist - call gather_global(work_g1, a3Dc(:,:,k,nn,:), & - master_task, distrb_info) - work_gr(:,:) = work_g1(:,:) + endif + do k = 1, ncat_hist + call gather_global(work_g1, a3Dc(:,:,k,nn,:), & + master_task, distrb_info) + work_gr(:,:) = work_g1(:,:) - if (my_task == master_task) then + if (my_task == master_task) then call check(nf90_put_var(ncid,varid,work_gr(:,:), & - start=(/ 1, 1, k/), & + start=(/1,1,k,i_time/), & count=(/nx_global,ny_global, 1/)), & 'put var '//avail_hist_fields(n)%vname) - endif - enddo ! k + endif + enddo ! k endif - enddo ! num_avail_hist_fields_3Dc + enddo ! num_avail_hist_fields_3Dc - work_gr(:,:) = c0 - work_g1(:,:) = c0 + work_gr(:,:) = c0 + work_g1(:,:) = c0 - do n = n3Dccum+1, n3Dzcum + do n = n3Dccum+1, n3Dzcum nn = n - n3Dccum if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then - if (my_task == master_task) then + if (my_task == master_task) then call check(nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid), & 'inq varid '//avail_hist_fields(n)%vname) - endif - do k = 1, nzilyr - call gather_global(work_g1, a3Dz(:,:,k,nn,:), & - master_task, distrb_info) - work_gr(:,:) = work_g1(:,:) + endif + do k = 1, nzilyr + call gather_global(work_g1, a3Dz(:,:,k,nn,:), & + master_task, distrb_info) + work_gr(:,:) = work_g1(:,:) - if (my_task == master_task) then + if (my_task == master_task) then call check(nf90_put_var(ncid,varid,work_gr(:,:), & - start=(/ 1, 1,k/), & + start=(/1,1,k,i_time/), & count=(/nx_global,ny_global,1/)), & 'put var '//avail_hist_fields(n)%vname) - endif - enddo ! k + endif + enddo ! k endif - enddo ! num_avail_hist_fields_3Dz + enddo ! 
num_avail_hist_fields_3Dz - work_gr(:,:) = c0 - work_g1(:,:) = c0 + work_gr(:,:) = c0 + work_g1(:,:) = c0 - do n = n3Dzcum+1, n3Dbcum + do n = n3Dzcum+1, n3Dbcum nn = n - n3Dzcum if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then - if (my_task == master_task) then + if (my_task == master_task) then call check(nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid), & 'inq varid '//avail_hist_fields(n)%vname) - endif - do k = 1, nzblyr - call gather_global(work_g1, a3Db(:,:,k,nn,:), & - master_task, distrb_info) - work_gr(:,:) = work_g1(:,:) + endif + do k = 1, nzblyr + call gather_global(work_g1, a3Db(:,:,k,nn,:), & + master_task, distrb_info) + work_gr(:,:) = work_g1(:,:) - if (my_task == master_task) then + if (my_task == master_task) then call check(nf90_put_var(ncid,varid,work_gr(:,:), & - start=(/ 1, 1,k/), & + start=(/1,1,k,i_time/), & count=(/nx_global,ny_global,1/)), & 'put var '//avail_hist_fields(n)%vname) - endif - enddo ! k + endif + enddo ! k endif - enddo ! num_avail_hist_fields_3Db + enddo ! num_avail_hist_fields_3Db - work_gr(:,:) = c0 - work_g1(:,:) = c0 + work_gr(:,:) = c0 + work_g1(:,:) = c0 - do n = n3Dbcum+1, n4Dicum + do n = n3Dbcum+1, n4Dicum nn = n - n3Dbcum if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. 
write_ic) then - if (my_task == master_task) then + if (my_task == master_task) then call check(nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid), & 'inq varid '//avail_hist_fields(n)%vname) - endif - do ic = 1, ncat_hist - do k = 1, nzilyr - call gather_global(work_g1, a4Di(:,:,k,ic,nn,:), & - master_task, distrb_info) - work_gr(:,:) = work_g1(:,:) - if (my_task == master_task) then + endif + do ic = 1, ncat_hist + do k = 1, nzilyr + call gather_global(work_g1, a4Di(:,:,k,ic,nn,:), & + master_task, distrb_info) + work_gr(:,:) = work_g1(:,:) + if (my_task == master_task) then call check(nf90_put_var(ncid,varid,work_gr(:,:), & - start=(/ 1, 1,k,ic/), & + start=(/1,1,k,ic,i_time/), & count=(/nx_global,ny_global,1, 1/)), & 'put var '//avail_hist_fields(n)%vname) - endif - enddo ! k - enddo ! ic + endif + enddo ! k + enddo ! ic endif - enddo ! num_avail_hist_fields_4Di + enddo ! num_avail_hist_fields_4Di - work_gr(:,:) = c0 - work_g1(:,:) = c0 + work_gr(:,:) = c0 + work_g1(:,:) = c0 - do n = n4Dicum+1, n4Dscum + do n = n4Dicum+1, n4Dscum nn = n - n4Dicum if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then - if (my_task == master_task) then + if (my_task == master_task) then call check(nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid), & 'inq var '//avail_hist_fields(n)%vname) - endif - do ic = 1, ncat_hist - do k = 1, nzslyr - call gather_global(work_g1, a4Ds(:,:,k,ic,nn,:), & - master_task, distrb_info) - work_gr(:,:) = work_g1(:,:) - if (my_task == master_task) then + endif + do ic = 1, ncat_hist + do k = 1, nzslyr + call gather_global(work_g1, a4Ds(:,:,k,ic,nn,:), & + master_task, distrb_info) + work_gr(:,:) = work_g1(:,:) + if (my_task == master_task) then call check(nf90_put_var(ncid,varid,work_gr(:,:), & - start=(/ 1, 1,k,ic/), & + start=(/1,1,k,ic,i_time/), & count=(/nx_global,ny_global,1, 1/)), & 'put var '//avail_hist_fields(n)%vname) - endif - enddo ! k - enddo ! ic + endif + enddo ! k + enddo ! ic endif - enddo ! 
num_avail_hist_fields_4Ds + enddo ! num_avail_hist_fields_4Ds - work_gr(:,:) = c0 - work_g1(:,:) = c0 + work_gr(:,:) = c0 + work_g1(:,:) = c0 - do n = n4Dscum+1, n4Dbcum + do n = n4Dscum+1, n4Dbcum nn = n - n4Dscum if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then - if (my_task == master_task) then + if (my_task == master_task) then call check(nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid), & 'inq varid '//avail_hist_fields(n)%vname) - endif - do ic = 1, ncat_hist - do k = 1, nzblyr - call gather_global(work_g1, a4Db(:,:,k,ic,nn,:), & - master_task, distrb_info) - work_gr(:,:) = work_g1(:,:) - if (my_task == master_task) then + endif + do ic = 1, ncat_hist + do k = 1, nzblyr + call gather_global(work_g1, a4Db(:,:,k,ic,nn,:), & + master_task, distrb_info) + work_gr(:,:) = work_g1(:,:) + if (my_task == master_task) then call check(nf90_put_var(ncid,varid,work_gr(:,:), & - start=(/ 1, 1,k,ic/), & + start=(/1,1,k,ic,i_time/), & count=(/nx_global,ny_global,1, 1/)), & 'put var '//avail_hist_fields(n)%vname) - endif - enddo ! k - enddo ! ic + endif + enddo ! k + enddo ! ic endif - enddo ! num_avail_hist_fields_4Db + enddo ! num_avail_hist_fields_4Db - deallocate(work_gr) - deallocate(work_g1) + deallocate(work_gr) + deallocate(work_g1) end subroutine write_3d_and_4d_variables @@ -1530,7 +1593,7 @@ subroutine write_coordinate_variables_parallel(ncid, coord_var, var_nz) work1 = ULAT*rad_to_deg END SELECT - call put_2d_with_blocks(ncid, coord_var(i)%short_name, work1) + call put_2d_with_blocks(ncid, 1, coord_var(i)%short_name, work1) enddo ! 
Extra dimensions (NCAT, VGRD*) @@ -1538,6 +1601,8 @@ subroutine write_coordinate_variables_parallel(ncid, coord_var, var_nz) if (igrdz(i)) then call check(nf90_inq_varid(ncid, var_nz(i)%short_name, varid), & 'inq_varid '//var_nz(i)%short_name) + call check(nf90_var_par_access(ncid, varid, NF90_COLLECTIVE), & + 'parallel access '//var_nz(i)%short_name) SELECT CASE (var_nz(i)%short_name) CASE ('NCAT') call check(nf90_put_var(ncid, varid, hin_max(1:ncat_hist)), & @@ -1552,7 +1617,7 @@ subroutine write_coordinate_variables_parallel(ncid, coord_var, var_nz) call check(nf90_put_var(ncid, varid, (/(k, k=1, nzblyr)/)), & 'put var VGRDb') END SELECT - endif + endif enddo end subroutine write_coordinate_variables_parallel @@ -1576,11 +1641,11 @@ subroutine write_grid_variables_parallel(ncid, var, var_nverts) integer :: varid if (igrd(n_tmask)) then - call put_2d_with_blocks(ncid, 'tmask', hm) + call put_2d_with_blocks(ncid, 1, 'tmask', hm) endif if (igrd(n_blkmask)) then - call put_2d_with_blocks(ncid, 'blkmask', bm) + call put_2d_with_blocks(ncid, 1, 'blkmask', bm) endif do i = 3, nvar ! 
note n_tmask=1, n_blkmask=2 @@ -1608,7 +1673,7 @@ subroutine write_grid_variables_parallel(ncid, var, var_nverts) work1 = ANGLET END SELECT - call put_2d_with_blocks(ncid, var(i)%req%short_name, work1) + call put_2d_with_blocks(ncid, 1, var(i)%req%short_name, work1) endif enddo @@ -1631,6 +1696,8 @@ subroutine write_grid_variables_parallel(ncid, var, var_nverts) call check(nf90_inq_varid(ncid, var_nverts(i)%short_name, varid), & 'inq varid '//var_nverts(i)%short_name) + call check(nf90_var_par_access(ncid, varid, NF90_COLLECTIVE), & + 'parallel access '//var_nverts(i)%short_name) do iblk=1, nblocks the_block = get_block(blocks_ice(iblk), iblk) @@ -1657,17 +1724,16 @@ subroutine write_grid_variables_parallel(ncid, var, var_nverts) end subroutine write_grid_variables_parallel -subroutine write_2d_variables_parallel(ns, ncid) +subroutine write_2d_variables_parallel(ns, ncid, i_time) - integer, intent(in) :: ns - integer, intent(in) :: ncid + integer, intent(in) :: ns, ncid, i_time integer :: varid integer :: n do n=1, num_avail_hist_fields_2D if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then - call put_2d_with_blocks(ncid, avail_hist_fields(n)%vname, & + call put_2d_with_blocks(ncid, i_time, avail_hist_fields(n)%vname, & a2D(:, :, n, :)) endif enddo ! num_avail_hist_fields_2D @@ -1676,10 +1742,9 @@ end subroutine write_2d_variables_parallel -subroutine write_3d_and_4d_variables_parallel(ns, ncid) +subroutine write_3d_and_4d_variables_parallel(ns, ncid, i_time) - integer, intent(in) :: ns - integer, intent(in) :: ncid + integer, intent(in) :: ns, ncid, i_time integer :: varid integer :: n, nn, k, ic @@ -1687,7 +1752,7 @@ subroutine write_3d_and_4d_variables_parallel(ns, ncid) do n = n2D + 1, n3Dccum nn = n - n2D if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. 
write_ic) then - call put_3d_with_blocks(ncid, avail_hist_fields(n)%vname, & + call put_3d_with_blocks(ncid, i_time, avail_hist_fields(n)%vname, & ncat_hist, a3Dc(:, :, :, nn, :)) endif enddo ! num_avail_hist_fields_3Dc @@ -1696,7 +1761,7 @@ subroutine write_3d_and_4d_variables_parallel(ns, ncid) do n = n3Dccum+1, n3Dzcum nn = n - n3Dccum if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then - call put_3d_with_blocks(ncid, avail_hist_fields(n)%vname, & + call put_3d_with_blocks(ncid, i_time, avail_hist_fields(n)%vname, & nzilyr, a3Dz(:, :, :, nn, :)) endif enddo ! num_avail_hist_fields_3Dz @@ -1705,7 +1770,7 @@ subroutine write_3d_and_4d_variables_parallel(ns, ncid) do n = n3Dzcum+1, n3Dbcum nn = n - n3Dzcum if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then - call put_3d_with_blocks(ncid, avail_hist_fields(n)%vname, & + call put_3d_with_blocks(ncid, i_time, avail_hist_fields(n)%vname, & nzblyr, a3Db(:, :, :, nn, :)) endif enddo ! num_avail_hist_fields_3Db @@ -1714,7 +1779,7 @@ subroutine write_3d_and_4d_variables_parallel(ns, ncid) do n = n3Dbcum+1, n4Dicum nn = n - n3Dbcum if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then - call put_4d_with_blocks(ncid, avail_hist_fields(n)%vname, & + call put_4d_with_blocks(ncid, i_time, avail_hist_fields(n)%vname, & nzilyr, ncat_hist, a4Di(:, :, :, :, nn, :)) endif enddo ! num_avail_hist_fields_4Di @@ -1723,7 +1788,7 @@ subroutine write_3d_and_4d_variables_parallel(ns, ncid) do n = n4Dicum+1, n4Dscum nn = n - n4Dicum if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then - call put_4d_with_blocks(ncid, avail_hist_fields(n)%vname, & + call put_4d_with_blocks(ncid, i_time, avail_hist_fields(n)%vname, & nzslyr, ncat_hist, a4Ds(:, :, :, :, nn, :)) endif enddo ! 
num_avail_hist_fields_4Ds @@ -1731,7 +1796,7 @@ subroutine write_3d_and_4d_variables_parallel(ns, ncid) do n = n4Dscum+1, n4Dbcum nn = n - n4Dscum if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then - call put_4d_with_blocks(ncid, avail_hist_fields(n)%vname, & + call put_4d_with_blocks(ncid, i_time, avail_hist_fields(n)%vname, & nzblyr, ncat_hist, a4Db(:, :, :, :, nn, :)) endif enddo ! num_avail_hist_fields_4Db @@ -1739,20 +1804,26 @@ subroutine write_3d_and_4d_variables_parallel(ns, ncid) end subroutine write_3d_and_4d_variables_parallel -subroutine put_2d_with_blocks(ncid, var_name, data) +subroutine put_2d_with_blocks(ncid, i_start, var_name, data) - integer, intent(in) :: ncid + ! by convention only, 2d variables are actually 3d if you consider time + ! sometimes the third array is a different index (e.g. number of bounds ) + ! typically i_start is the current time index, but can be different + + integer, intent(in) :: ncid, i_start character(len=*), intent(in) :: var_name real(kind=dbl_kind), dimension(nx_block, ny_block, max_blocks), intent(in) :: data integer :: varid integer :: iblk integer :: ilo, jlo, ihi, jhi, gilo, gjlo, gihi, gjhi - integer, dimension(2) :: start, count + integer, dimension(3) :: start, count type(block) :: the_block call check(nf90_inq_varid(ncid, var_name, varid), & 'inq varid for '//var_name) + call check(nf90_var_par_access(ncid, varid, NF90_COLLECTIVE), & + 'parallel access '//var_name) do iblk=1, nblocks the_block = get_block(blocks_ice(iblk), iblk) @@ -1766,8 +1837,8 @@ subroutine put_2d_with_blocks(ncid, var_name, data) gihi = the_block%i_glob(ihi) gjhi = the_block%j_glob(jhi) - start = (/ gilo, gjlo /) - count = (/ gihi - gilo + 1, gjhi - gjlo + 1 /) + start = (/ gilo, gjlo,i_start /) + count = (/ gihi - gilo + 1, gjhi - gjlo + 1 , 1/) call check(nf90_put_var(ncid, varid, real(data(ilo:ihi, jlo:jhi, iblk)), & start=start, count=count), & 'put_2d_with_blocks put '//trim(var_name)) @@ -1775,20 +1846,25 @@ 
subroutine put_2d_with_blocks(ncid, var_name, data) end subroutine put_2d_with_blocks -subroutine put_3d_with_blocks(ncid, var_name, len_3dim, data) +subroutine put_3d_with_blocks(ncid, i_time, var_name, len_3dim, data) + + ! by convention only, 3d variables are actually 4d if you consider time + - integer, intent(in) :: ncid, len_3dim + integer, intent(in) :: ncid, i_time, len_3dim character(len=*), intent(in) :: var_name real(kind=dbl_kind), dimension(nx_block, ny_block, len_3dim, max_blocks), intent(in) :: data integer :: varid integer :: iblk integer :: ilo, jlo, ihi, jhi, gilo, gjlo, gihi, gjhi - integer, dimension(3) :: start, count + integer, dimension(4) :: start, count type(block) :: the_block call check(nf90_inq_varid(ncid, var_name, varid), & 'inq varid for '//var_name) + call check(nf90_var_par_access(ncid, varid, NF90_COLLECTIVE), & + 'parallel access '//var_name) do iblk=1, nblocks the_block = get_block(blocks_ice(iblk), iblk) @@ -1802,8 +1878,8 @@ subroutine put_3d_with_blocks(ncid, var_name, len_3dim, data) gihi = the_block%i_glob(ihi) gjhi = the_block%j_glob(jhi) - start = (/ gilo, gjlo, 1 /) - count = (/ gihi - gilo + 1, gjhi - gjlo + 1, len_3dim /) + start = (/ gilo, gjlo, 1 , i_time/) + count = (/ gihi - gilo + 1, gjhi - gjlo + 1, len_3dim, 1 /) call check(nf90_put_var(ncid, varid, & real(data(ilo:ihi, jlo:jhi, 1:len_3dim, iblk)), & start=start, count=count), & @@ -1813,9 +1889,11 @@ subroutine put_3d_with_blocks(ncid, var_name, len_3dim, data) end subroutine put_3d_with_blocks -subroutine put_4d_with_blocks(ncid, var_name, len_3dim, len_4dim, data) +subroutine put_4d_with_blocks(ncid, i_time, var_name, len_3dim, len_4dim, data) + ! 
by convention only, 4d variables are actually 5d if you consider time - integer, intent(in) :: ncid, len_3dim, len_4dim + + integer, intent(in) :: ncid, i_time, len_3dim, len_4dim character(len=*), intent(in) :: var_name real(kind=dbl_kind), dimension(nx_block, ny_block, len_3dim, & len_4dim, max_blocks), intent(in) :: data @@ -1823,11 +1901,13 @@ subroutine put_4d_with_blocks(ncid, var_name, len_3dim, len_4dim, data) integer :: varid integer :: iblk integer :: ilo, jlo, ihi, jhi, gilo, gjlo, gihi, gjhi - integer, dimension(4) :: start, count + integer, dimension(5) :: start, count type(block) :: the_block call check(nf90_inq_varid(ncid, var_name, varid), & 'inq varid for '//var_name) + call check(nf90_var_par_access(ncid, varid, NF90_COLLECTIVE), & + 'parallel access '//var_name) do iblk=1, nblocks the_block = get_block(blocks_ice(iblk), iblk) @@ -1841,8 +1921,8 @@ subroutine put_4d_with_blocks(ncid, var_name, len_3dim, len_4dim, data) gihi = the_block%i_glob(ihi) gjhi = the_block%j_glob(jhi) - start = (/ gilo, gjlo, 1, 1 /) - count = (/ gihi - gilo + 1, gjhi - gjlo + 1, len_3dim, len_4dim /) + start = (/ gilo, gjlo, 1, 1 , i_time/) + count = (/ gihi - gilo + 1, gjhi - gjlo + 1, len_3dim, len_4dim, 1 /) call check(nf90_put_var(ncid, varid, & real(data(ilo:ihi, jlo:jhi, 1:len_3dim, 1:len_4dim, iblk)), & start=start, count=count), & @@ -1852,4 +1932,4 @@ subroutine put_4d_with_blocks(ncid, var_name, len_3dim, len_4dim, data) end subroutine put_4d_with_blocks -end module ice_history_write +end module ice_history_write \ No newline at end of file diff --git a/io_netcdf/ice_restart.F90 b/io_netcdf/ice_restart.F90 index e708d141..d37494f3 100644 --- a/io_netcdf/ice_restart.F90 +++ b/io_netcdf/ice_restart.F90 @@ -35,6 +35,10 @@ subroutine init_restart_read(ice_ic) use ice_calendar, only: sec, month, mday, nyr, istep0, istep1, & time, time_forc, year_init, npt +#ifdef ACCESS + use cpl_parameters, only: iniyear, inimon, iniday + use ice_calendar, only: check_start_date 
+#endif use ice_communicate, only: my_task, master_task use ice_domain, only: nblocks use ice_fileunits, only: nu_diag, nu_rst_pointer @@ -47,6 +51,7 @@ subroutine init_restart_read(ice_ic) filename, filename0 integer (kind=int_kind) :: status + integer (kind=int_kind) :: year if (present(ice_ic)) then filename = trim(ice_ic) @@ -55,14 +60,6 @@ subroutine init_restart_read(ice_ic) open(nu_rst_pointer,file=pointer_file) read(nu_rst_pointer,'(a)') filename0 filename = trim(filename0) -#ifdef AusCOM - write(nu_diag,*) 'XXX: restart_dir = ', restart_dir - write(nu_diag,*) 'XXX: org restart file => ', filename -!ars599: 28042015 restart issue -! filename = trim(restart_dir)//trim(filename) - filename = trim(filename) - write(nu_diag,*) 'XXX: restart file => ', filename -#endif close(nu_rst_pointer) write(nu_diag,*) 'Read ',pointer_file(1:lenstr(pointer_file)) endif @@ -93,17 +90,31 @@ subroutine init_restart_read(ice_ic) call assert(status == NF90_NOERR, & 'in init_restart_read, on nf90_get_att(nyr)', status) - status = nf90_get_att(ncid, nf90_global, 'month', month) +#ifdef ACCESS + status = nf90_get_att(ncid, nf90_global, 'year', year) call assert(status == NF90_NOERR, & + ' reading year attribute from ncfile '//trim(filename), status) +#endif + + if (status == nf90_noerr) then + status = nf90_get_att(ncid, nf90_global, 'month', month) + call assert(status == NF90_NOERR, & 'in init_restart_read, on nf90_get_att(month)', status) - status = nf90_get_att(ncid, nf90_global, 'mday', mday) - call assert(status == NF90_NOERR, & + status = nf90_get_att(ncid, nf90_global, 'mday', mday) + call assert(status == NF90_NOERR, & 'in init_restart_read, on nf90_get_att(mday)', status) - status = nf90_get_att(ncid, nf90_global, 'sec', sec) - call assert(status == NF90_NOERR, & + status = nf90_get_att(ncid, nf90_global, 'sec', sec) + call assert(status == NF90_NOERR, & 'in init_restart_read, on nf90_get_att(sec)', status) + endif + + if ( sec .ne. 
0 ) then + call abort_ice('ice: restart ncfile '//trim(filename)//' has '//& + 'restart "sec" attribute not set to 0. This is not supported '//& + 'as a start time.') + endif endif ! use namelist values if use_restart_time = F @@ -113,7 +124,20 @@ subroutine init_restart_read(ice_ic) call broadcast_scalar(istep0,master_task) call broadcast_scalar(time,master_task) call broadcast_scalar(time_forc,master_task) - + +#ifdef ACCESS + ! Set run start date + call broadcast_scalar(year,master_task) + call broadcast_scalar(month,master_task) + call broadcast_scalar(mday,master_task) + iniyear = year + inimon = month + iniday = mday + + ! Check starting date and time are consistent + call check_start_date +#endif + istep1 = istep0 ! if runid is bering then need to correct npt for istep0 @@ -146,12 +170,6 @@ subroutine init_restart_write(filename_spec) tr_bgc_chl_sk, tr_bgc_DMSPd_sk, tr_bgc_Am_sk, & skl_bgc -!ars599: 26032014 -! since need to output idate so use ice_calendar -#ifdef AusCOM - use ice_calendar, only: idate -#endif - character(len=char_len_long), intent(in), optional :: filename_spec ! local variables @@ -174,13 +192,12 @@ subroutine init_restart_write(filename_spec) character (len=3) :: nchar ! 
construct path/file + iyear = nyr + year_init - 1 if (present(filename_spec)) then filename = trim(filename_spec) else - iyear = nyr + year_init - 1 imonth = month iday = mday - write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.', & @@ -194,8 +211,7 @@ subroutine init_restart_write(filename_spec) write(nu_rst_pointer,'(a)') filename close(nu_rst_pointer) - status = nf90_create(trim(filename), & - ior(NF90_CLASSIC_MODEL, NF90_HDF5), ncid) + status = nf90_create(trim(filename), NF90_NETCDF4, ncid) call assert(status == NF90_NOERR, & 'in init_restart_write on nf90_create '//trim(filename), status) @@ -211,10 +227,14 @@ subroutine init_restart_write(filename_spec) call assert(status == NF90_NOERR, & 'in init_restart_write on nf90_put_att(time_forc)', status) - status = nf90_put_att(ncid,nf90_global,'nyr',nyr) + status = nf90_put_att(ncid,nf90_global,'nyr',nyr) ! year count since year_init call assert(status == NF90_NOERR, & 'in init_restart_write on nf90_put_att(nyr)', status) + status = nf90_put_att(ncid,nf90_global,'year',iyear) ! calendar year + call assert(status == NF90_NOERR, & + 'in init_restart_write on nf90_put_att(year)', status) + status = nf90_put_att(ncid,nf90_global,'month',month) call assert(status == NF90_NOERR, & 'in init_restart_write on nf90_put_att(month)', status) @@ -470,7 +490,8 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3, & integer (kind=int_kind) :: & n, & ! number of dimensions for variable - varid ! variable id + varid, & ! variable id + status ! status variable from netCDF routine real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks) :: & work2 ! 
input array (real, 8-byte) @@ -594,9 +615,9 @@ subroutine final_restart() integer (kind=int_kind) :: status if (my_task == master_task) then - status = nf90_close(ncid) - call assert(status == NF90_NOERR, 'in final_restart', status) - write(nu_diag,*) 'Restart read/written ',istep1,time,time_forc + status = nf90_close(ncid) + call assert(status == NF90_NOERR, 'in final_restart', status) + write(nu_diag,*) 'Restart read/written ',istep1,time,time_forc endif end subroutine final_restart diff --git a/mpi/ice_boundary.F90 b/mpi/ice_boundary.F90 index 28fa1e73..9198dcf3 100644 --- a/mpi/ice_boundary.F90 +++ b/mpi/ice_boundary.F90 @@ -593,7 +593,6 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & ! check to see if they need to be re-sized ! !----------------------------------------------------------------------- - maxTmp = maxval(sendCount) maxSizeSend = global_maxval(maxTmp, dist) maxTmp = maxval(recvCount) @@ -1192,7 +1191,6 @@ subroutine ice_HaloMask(halo, basehalo, mask) elseif (mask(icel,jcel,abs(nblock)) /= 0) then tmpflag = .true. endif - if (tmpflag) then scnt = scnt + 1 if (scnt == 1) then @@ -1224,7 +1222,6 @@ subroutine ice_HaloMask(halo, basehalo, mask) elseif (mask(icel,jcel,abs(nblock)) /= 0) then tmpflag = .true. 
endif - if (tmpflag) then scnt = scnt + 1 if (scnt == 1) then @@ -1589,7 +1586,6 @@ subroutine ice_HaloUpdate2DR8(array, halo, & bufTripoleR8(i ,halo%tripoleRows) = xavg bufTripoleR8(iDst,halo%tripoleRows) = isign*xavg end do - case default call abort_ice( & 'ice_HaloUpdate2DR8: Unknown field location') @@ -1984,7 +1980,6 @@ subroutine ice_HaloUpdate2DR4(array, halo, & bufTripoleR4(i ,halo%tripoleRows) = xavg bufTripoleR4(iDst,halo%tripoleRows) = isign*xavg end do - case default call abort_ice( & 'ice_HaloUpdate2DR4: Unknown field location') @@ -2379,7 +2374,6 @@ subroutine ice_HaloUpdate2DI4(array, halo, & bufTripoleI4(i ,halo%tripoleRows) = xavg bufTripoleI4(iDst,halo%tripoleRows) = isign*xavg end do - case default call abort_ice( & 'ice_HaloUpdate2DI4: Unknown field location') @@ -3748,7 +3742,6 @@ subroutine ice_HaloUpdate3DI4(array, halo, & !----------------------------------------------------------------------- if (nxGlobal > 0) then - select case (fieldKind) case (field_type_scalar) isign = 1 @@ -3880,7 +3873,6 @@ subroutine ice_HaloUpdate3DI4(array, halo, & call abort_ice( & 'ice_HaloUpdate3DI4: Unknown field location') end select - endif !*** copy out of global tripole buffer into local @@ -4268,7 +4260,6 @@ subroutine ice_HaloUpdate4DR8(array, halo, & !----------------------------------------------------------------------- if (nxGlobal > 0) then - select case (fieldKind) case (field_type_scalar) isign = 1 @@ -4408,7 +4399,6 @@ subroutine ice_HaloUpdate4DR8(array, halo, & call abort_ice( & 'ice_HaloUpdate4DR8: Unknown field location') end select - endif !*** copy out of global tripole buffer into local @@ -4797,7 +4787,6 @@ subroutine ice_HaloUpdate4DR4(array, halo, & !----------------------------------------------------------------------- if (nxGlobal > 0) then - select case (fieldKind) case (field_type_scalar) isign = 1 @@ -4937,7 +4926,6 @@ subroutine ice_HaloUpdate4DR4(array, halo, & call abort_ice( & 'ice_HaloUpdate4DR4: Unknown field location') end 
select - endif !*** copy out of global tripole buffer into local @@ -5467,7 +5455,6 @@ subroutine ice_HaloUpdate4DI4(array, halo, & call abort_ice( & 'ice_HaloUpdate4DI4: Unknown field location') end select - endif !*** copy out of global tripole buffer into local @@ -5824,7 +5811,6 @@ subroutine ice_HaloUpdate_stress(array1, array2, halo, & iSrc = iSrc - ioffset jSrc = jSrc - joffset if (iSrc == 0) iSrc = nxGlobal - !*** for center and Eface, do not need to replace !*** top row of physical domain, so jSrc should be !*** out of range and skipped diff --git a/mpi/ice_exit.F90 b/mpi/ice_exit.F90 index 8b77e065..342ff923 100644 --- a/mpi/ice_exit.F90 +++ b/mpi/ice_exit.F90 @@ -23,14 +23,11 @@ subroutine abort_ice(error_message) ! This routine aborts the ice model and prints an error message. -!ars599: 14042014: change ice_fileunits to ice_fileunits and ice_communicate -! use ice_fileunits, only: nu_diag, ice_stderr, flush_fileunit - use ice_fileunits - use ice_communicate -#if (defined CCSM) || (defined SEQ_MCT) +#if (defined CCSMCOUPLED) use shr_sys_mod #else - use ice_fileunits, only: nu_diag, ice_stderr, flush_fileunit + use ice_fileunits, only: nu_diag, ice_stderr, ice_stdout, & + flush_fileunit include 'mpif.h' ! MPI Fortran include file #endif @@ -39,8 +36,8 @@ subroutine abort_ice(error_message) ! local variables #ifndef CCSMCOUPLED - ! MPI error flag, default to non-zero error - integer (int_kind) :: errorcode = 1 + ! MPI error flag, + integer (int_kind) :: errorcode ! 
MPI return value integer (int_kind) :: ierr #endif @@ -53,16 +50,13 @@ subroutine abort_ice(error_message) #else call flush_fileunit(nu_diag) + write (ice_stdout,*) error_message + call flush_fileunit(ice_stdout) write (ice_stderr,*) error_message call flush_fileunit(ice_stderr) -#if defined(__INTEL_COMPILER) - call TRACEBACKQQ(USER_EXIT_CODE=-1) -#elif defined(__GFORTRAN__) - call BACKTRACE() -#endif - call MPI_ABORT(MPI_COMM_WORLD, errorcode, ierr) - + error_code = 1, !default to non-zero error + call MPI_ABORT(MPI_COMM_WORLD, error_code, ierr) stop #endif diff --git a/mpi/ice_global_reductions.F90 b/mpi/ice_global_reductions.F90 index a913c472..275012e9 100644 --- a/mpi/ice_global_reductions.F90 +++ b/mpi/ice_global_reductions.F90 @@ -24,6 +24,9 @@ module ice_global_reductions implicit none private +#ifdef ACCESS + save +#endif include 'mpif.h' diff --git a/mpi/ice_timers.F90 b/mpi/ice_timers.F90 index d5035ad9..8b035a14 100644 --- a/mpi/ice_timers.F90 +++ b/mpi/ice_timers.F90 @@ -57,7 +57,12 @@ module ice_timers timer_cplsend, &! send to coupled timer_sndrcv, &! time between send to receive #endif -#ifdef AusCOM +#ifdef ACCESS + timer_from_atm, & + timer_into_atm, & + timer_from_ocn, & + timer_into_ocn, & +#elif defined(AusCOM) timer_from_ocn, &! timer_waiting_ocn, &! timer_into_ocn, &! @@ -69,6 +74,7 @@ module ice_timers timer_runoff_remap, &! #endif +#endif timer_bound, &! boundary updates timer_bgc ! biogeochemistry ! timer_tmp ! 
for temporary timings @@ -191,7 +197,12 @@ subroutine init_ice_timers call get_ice_timer(timer_cplsend, 'Cpl-Send', nblocks,distrb_info%nprocs) call get_ice_timer(timer_sndrcv, 'Snd->Rcv', nblocks,distrb_info%nprocs) #endif -#ifdef AusCOM +#ifdef ACCESS + call get_ice_timer(timer_from_atm, 'Cpl_fromA', nblocks,distrb_info%nprocs) + call get_ice_timer(timer_into_atm, 'Cpl_toA', nblocks,distrb_info%nprocs) + call get_ice_timer(timer_from_ocn, 'Cpl_fromO', nblocks,distrb_info%nprocs) + call get_ice_timer(timer_into_ocn, 'Cpl_toO', nblocks,distrb_info%nprocs) +#elif defined(AusCOM) call get_ice_timer(timer_from_ocn, 'from_ocn', nblocks,distrb_info%nprocs) call get_ice_timer(timer_waiting_ocn, 'waiting_ocn', nblocks,distrb_info%nprocs) call get_ice_timer(timer_into_ocn, 'into_ocn', nblocks,distrb_info%nprocs) diff --git a/source/ice_aerosol.F90 b/source/ice_aerosol.F90 index f0bba85c..e4e40999 100644 --- a/source/ice_aerosol.F90 +++ b/source/ice_aerosol.F90 @@ -176,6 +176,7 @@ subroutine update_aerosol (nx_block, ny_block, & faero_atm, faero_ocn) use ice_domain_size, only: max_ntrcr, nilyr, nslyr, n_aero, max_aero + use ice_itd, only: hs_min use ice_state, only: nt_aero use ice_shortwave, only: hi_ssl, hs_ssl diff --git a/source/ice_atmo.F90 b/source/ice_atmo.F90 index ae00446c..8f7b95b4 100644 --- a/source/ice_atmo.F90 +++ b/source/ice_atmo.F90 @@ -57,10 +57,8 @@ module ice_atmo Cdn_atm_ratio ! ratio drag atm / neutral drag atm !ars599: 24092014 (CODE: petteri) ! tuning parameters, set in namelist -#ifdef AusCOM real (kind=dbl_kind), public :: & iceruf ! ice surface roughness (m) -#endif !======================================================================= @@ -768,7 +766,7 @@ subroutine neutral_drag_coeffs (nx_block, ny_block, & real (kind=dbl_kind), parameter :: & ocnruf = 0.000327_dbl_kind, & ! ocean surface roughness (m) !ars599: 24092014 (CODE: petteri) -#ifdef AusCOM +#if defined(AusCOM) && !defined(ACCESS) ocnrufi = c1/ocnruf ! 
inverse ocean roughness #else ocnrufi = c1/ocnruf, & ! inverse ocean roughness @@ -922,7 +920,7 @@ subroutine neutral_drag_coeffs (nx_block, ny_block, & sca = c1 - exp(-sHGB*distrdg(i,j)/tmp1) ! see Eq. 9 ctecar = cra*p5 !ars599: 24092014 (CODE: petteri) -#ifdef AusCOM +#if defined(AusCOM) && !defined(ACCESS) Cdn_atm_rdg(i,j) = ai * ctecar*tmp1/distrdg(i,j)*sca* & (log(tmp1/iceruf)/log(zref/iceruf))**c2 #else @@ -956,7 +954,7 @@ subroutine neutral_drag_coeffs (nx_block, ny_block, & ctecwk = crw*p5 !ars599: 24092014 (CODE: petteri) -#ifdef AusCOM +#if defined(AusCOM) && !defined(ACCESS) Cdn_ocn_keel(i,j) = ctecwk*ai*tmp1/dkeel(i,j)*scw* & (log(tmp1/iceruf)/log(zref/iceruf))**c2 #else diff --git a/source/ice_brine.F90 b/source/ice_brine.F90 index 5edabb08..d199ea5c 100644 --- a/source/ice_brine.F90 +++ b/source/ice_brine.F90 @@ -1,4 +1,4 @@ -! SVN:$Id: ice_brine.F90 925 2015-03-04 00:34:27Z eclare $ +! SVN:$Id: ice_brine.F90 744 2013-09-27 22:53:24Z eclare $ !======================================================================= ! ! Computes ice microstructural information for use in biogeochemistry diff --git a/source/ice_calendar.F90 b/source/ice_calendar.F90 index a1bad8c5..b1cdff14 100644 --- a/source/ice_calendar.F90 +++ b/source/ice_calendar.F90 @@ -19,12 +19,20 @@ module ice_calendar c4, c400, secday use ice_domain_size, only: max_nstrm use ice_exit, only: abort_ice +#ifdef ACCESS + use cpl_parameters, only : iniday, inimon, iniyear, init_date + use cpl_parameters, only : il_out + use cpl_parameters, only : runtime0 !accumulated runtime by the end of last run +#endif implicit none private save public :: init_calendar, calendar, time2sec, sec2time +#ifdef ACCESS + public :: check_start_date +#endif integer (kind=int_kind), public :: & days_per_year , & ! number of days in one year @@ -103,8 +111,9 @@ module ice_calendar write_history(max_nstrm) ! write history now character (len=1), public :: & - histfreq(max_nstrm), & ! 
history output frequency, 'y','m','d','h','1' - dumpfreq ! restart frequency, 'y','m','d' + histfreq(max_nstrm), & ! history output frequency, 'y','m','d','h','1' + hist_file_freq(max_nstrm), & ! history output file save frequency, 'y','m','d','h','1' + dumpfreq ! restart frequency, 'y','m','d' character (len=char_len),public :: calendar_type @@ -146,6 +155,12 @@ subroutine init_calendar ' because use_leap_years = .true.' end if +#ifdef ACCESS + if (days_year(year_init) == 366) days_per_year = 366 +#endif + + write(*,*)'CICE (calendar) days_per_year = ', days_per_year + dayyr = real(days_per_year, kind=dbl_kind) if (days_per_year == 360) then daymo = daymo360 @@ -153,8 +168,17 @@ subroutine init_calendar elseif (days_per_year == 365) then daymo = daymo365 daycal = daycal365 +#ifdef ACCESS + elseif (days_per_year == 366) then + daymo = daymo366 + daycal = daycal366 +#endif else +#ifdef ACCESS + call abort_ice('ice: days_per_year must be 360, 365 or 366') +#else call abort_ice('ice: days_per_year must be 360 or 365') +#endif endif ! Get the time in seconds from calendar zero to start of initial year @@ -172,6 +196,16 @@ subroutine init_calendar nyr = nyr - year_init + 1 ! year number idate0 = (nyr+year_init-1)*10000 + month*100 + mday ! date (yyyymmdd) + +#ifdef ACCESS + write(il_out,*) '(init_calendar) istep0, dt, time, sec = ', istep0, dt, time, sec + write(il_out,*) '(init_calendar) tday, yday, mday, nyr = ', tday, yday, mday, nyr + write(il_out,*) '(init_calendar) idate0 = ', idate0 + + idate0 = init_date + write(il_out,*) '(init_calendar) idate0 (-corrected-) = ',idate0 + print *, 'CICE (init_calendar) idate0 = ', idate0 +#endif end subroutine init_calendar !======================================================================= @@ -200,6 +234,11 @@ subroutine calendar(ttime) elapsed_hours , & ! 
since beginning this run month0 +#ifdef ACCESS + integer (kind=int_kind) :: & + newh, newd, newm, newy !date by the end of this step +#endif + nyrp=nyr monthp=month mdayp=mday @@ -213,7 +252,23 @@ subroutine calendar(ttime) sec = mod(ttime,secday) ! elapsed seconds into date at ! end of dt - +#ifdef ACCESS + call get_idate(ttime, newh, newd, newm, newy) + ! + !note ttime is seconds accumulated from the beginning of this run only. + !the following stuff is required here or there in other routines ... + ! + yday = (ttime-sec)/secday + c1 ! day of the year + hour = newh + mday = newd + month = newm + nyr = newy - year_init + 1 + ! + elapsed_months = (nyr - 1)*12 + month - 1 + tday = (ttime+runtime0 - mod(ttime+runtime0,secday))/secday + c1 + elapsed_days = int(yday) - 1 + elapsed_hours = int(ttime/3600) +#else tday = (ttime-sec)/secday + c1 ! absolute day number ! Deterime the current date from the timestep @@ -221,7 +276,6 @@ subroutine calendar(ttime) yday = mday + daycal(month) ! day of the year nyr = nyr - year_init + 1 ! year number - hour = int((ttime)/c3600) + c1 ! hour month0 = int((idate0 - int(idate0 / 10000) * 10000) / 100) @@ -229,11 +283,20 @@ subroutine calendar(ttime) elapsed_months = (nyr - 1)*12 + (month - month0) elapsed_days = int((istep * dt) / secday) elapsed_hours = int(ttime/3600) +#endif - idate = (nyr+year_init-1)*10000 + month*100 + mday ! date (yyyymmdd) + idate = (nyr+year_init-1)*10000 + month*100 + mday ! date (yyyymmdd) -#ifndef CCSMCOUPLED +#ifdef ACCESS + ! Need this extra call to set_calendar to handle history + ! file naming in leap years properly + call set_calendar(nyr+year_init-1) + ! write(il_out,*) '(calendar) runtime0 = ', runtime0 + ! write(il_out,*) '(calendar) nyr, year_init, month, mday = ', nyr, year_init, month, mday + ! write(il_out,*) '(calendar) idate = ', idate +#endif if (istep >= npt+1) stop_now = 1 +#ifndef ACCESS if (istep == npt .and. dump_last) write_restart = 1 ! 
last timestep #endif if (nyr /= nyrp) new_year = .true. @@ -290,7 +353,6 @@ subroutine calendar(ttime) end select if (force_restart_now) write_restart = 1 - endif ! istep > 1 if (my_task == master_task .and. mod(istep,diagfreq) == 0 & @@ -477,7 +539,7 @@ subroutine set_calendar(year) if (mod(year,400) == 0) isleap = .true. ! Ensure the calendar is set correctly - if (isleap) then + if (isleap .and. use_leap_years) then daycal = daycal366 daymo = daymo366 dayyr=real(daycal(13), kind=dbl_kind) @@ -491,6 +553,168 @@ subroutine set_calendar(year) end subroutine set_calendar +#ifdef AusCOM +!======================================================================= +subroutine get_idate(ttime, khfin, kdfin, kmfin, kyfin) +! Calculate the date ttime seconds from the run start date given by iniyear +! inimon and iniday. + +use cpl_parameters + +implicit none + +real (kind=dbl_kind), intent(in) :: ttime +integer, intent(out) :: khfin, kdfin, kmfin, kyfin + +integer :: klmo(12) !length of the months +integer :: inc_day !increment of days since the beginning of this run +integer :: jm, jd + +logical :: lleap + +! Initialise date +inc_day = int ((ttime + 0.5)/86400. ) +khfin = (ttime - inc_day*86400)/3600 +kdfin = iniday +kmfin = inimon +kyfin = iniyear + + +IF (days_per_year == 365 .or. days_per_year == 366) THEN + + ! + ! 1. Length of the months in initial year + ! + DO jm = 1, 12 + klmo(jm) = 31 + if ( (jm-4)*(jm-6)*(jm-9)*(jm-11) == 0) klmo(jm) = 30 + IF (jm .eq. 2) THEN + ! + !* Leap years + ! + lleap = .FALSE. + IF (use_leap_years) THEN + IF (MOD(iniyear, 4) .eq. 0) lleap = .TRUE. + IF (MOD(iniyear,100) .eq. 0) lleap = .FALSE. + IF (MOD(iniyear,400) .eq. 0) lleap = .TRUE. + ENDIF + klmo(jm) = 28 + if (lleap) klmo(jm) = 29 + ENDIF + ENDDO !jm=1,12 + + ! + ! 2. Loop on the days + ! + + DO 210 jd = 1, inc_day + kdfin = kdfin + 1 + IF (kdfin .le. klmo(kmfin)) GOTO 210 + kdfin = 1 + kmfin = kmfin + 1 + IF (kmfin .le. 12) GOTO 210 + kmfin = 1 + kyfin = kyfin + 1 + ! 
+ !* Leap years + ! + lleap = .FALSE. + IF (use_leap_years) THEN + IF (MOD(kyfin, 4) .eq. 0) lleap = .TRUE. + IF (MOD(kyfin,100) .eq. 0) lleap = .FALSE. + IF (MOD(kyfin,400) .eq. 0) lleap = .TRUE. + ENDIF + klmo(2) = 28 + if (lleap) klmo(2) = 29 +210 CONTINUE + +ELSEIF(days_per_year == 360) THEN + + ! + ! 1. Calculate month lengths for current year + ! + DO jm = 1, 12 + klmo(jm) = 30 + ENDDO + + ! + ! 2. Loop on the days + ! + + DO 410 jd = 1, inc_day + kdfin = kdfin + 1 + IF (kdfin .le. klmo(kmfin)) GOTO 410 + kdfin = 1 + kmfin = kmfin + 1 + IF (kmfin .le. 12) GOTO 410 + kmfin = 1 + kyfin = kyfin + 1 +410 CONTINUE + +ENDIF + + +end subroutine get_idate + +!======================================================================= +function days_year(year) + +implicit none + +integer, intent(in) :: year +real (kind=dbl_kind) :: days_year +logical :: lleap + +IF (days_per_year == 365 .or. days_per_year == 366) THEN + lleap = .FALSE. + days_year = 365. + IF (use_leap_years) THEN + IF (MOD(year, 4) .eq. 0) lleap = .TRUE. + IF (MOD(year,100) .eq. 0) lleap = .FALSE. + IF (MOD(year,400) .eq. 0) lleap = .TRUE. + ENDIF + if (lleap) days_year = 366. + +ELSEIF (days_per_year == 360) THEN + days_year = 360. +ENDIF +return +end function days_year +#endif + +!======================================================================= + +#ifdef ACCESS + subroutine check_start_date + ! Check that the start date and time variables from the restart file + ! are consistent. 
+ use ice_communicate, only: my_task, master_task + implicit none + + integer(kind=int_kind) :: init_year, init_mon, init_day + real (kind=dbl_kind) :: sec_init_date, sec_start_date, sec_init_to_start + + init_day = mod(init_date, 100) + init_mon = mod( (init_date - init_day)/100, 100) + init_year = init_date / 10000 + + call time2sec(init_year, init_mon, init_day, sec_init_date) + call time2sec(iniyear, inimon, iniday, sec_start_date) + + sec_init_to_start = sec_start_date - sec_init_date + + if (sec_init_to_start /= time) then + if (my_task == master_task) then + write(il_out,*) 'CICE: ERROR restart time: ', time, ' and date: ', & + iniyear, inimon, iniday, ' are inconsistent' + call abort_ice('CICE: ERROR Restart file time and date variables are inconsistent') + endif + endif + + end subroutine check_start_date +#endif +!======================================================================= + end module ice_calendar !======================================================================= diff --git a/source/ice_distribution.F90 b/source/ice_distribution.F90 index 0a3a31e5..1f46e40a 100644 --- a/source/ice_distribution.F90 +++ b/source/ice_distribution.F90 @@ -50,6 +50,7 @@ module ice_distribution ! 'slenderX1' (NPX x 1) ! 'slenderX2' (NPX x 2) +!ars599: 04042016: should we keep or not? Refer to fn: create_distrb_cart !ars599: 26032014: will call from cpl_interface ! from function create_distrb_cart ! so change to public diff --git a/source/ice_dyn_evp.F90 b/source/ice_dyn_evp.F90 index b59154b0..2080f146 100644 --- a/source/ice_dyn_evp.F90 +++ b/source/ice_dyn_evp.F90 @@ -505,8 +505,12 @@ subroutine evp (dt) call u2tgrid_vector(strocnxT) ! shift call u2tgrid_vector(strocnyT) - call ice_timer_stop(timer_dynamics) ! dynamics + call ice_HaloUpdate(strocnxT, halo_info, & + field_loc_center, field_type_vector) + call ice_HaloUpdate(strocnyT, halo_info, & + field_loc_center, field_type_vector) + call ice_timer_stop(timer_dynamics) ! 
dynamics end subroutine evp !======================================================================= diff --git a/source/ice_dyn_shared.F90 b/source/ice_dyn_shared.F90 index 9c997408..e820748e 100644 --- a/source/ice_dyn_shared.F90 +++ b/source/ice_dyn_shared.F90 @@ -688,6 +688,10 @@ subroutine stepu (nx_block, ny_block, & cca,ccb,ab2,cc1,cc2,& ! intermediate variables taux, tauy ! part of ocean stress term +#ifdef ACCESS + real :: vel_max = 5.0 !m/s. Dave: set velocity limit to uvel and vvel. +#endif + !----------------------------------------------------------------- ! integrate the momentum equation !----------------------------------------------------------------- @@ -736,6 +740,13 @@ subroutine stepu (nx_block, ny_block, & uvel(i,j) = (cca*cc1 + ccb*cc2) / ab2 ! m/s vvel(i,j) = (cca*cc2 - ccb*cc1) / ab2 +#ifdef ACCESS +!20160624 -- Siobhan and Dave's idea to set ice velocity limit to avoid +!transport remap "departure point error": + uvel(i,j) = sign(min(abs(uvel(i,j)),vel_max),uvel(i,j)) + vvel(i,j) = sign(min(abs(vvel(i,j)),vel_max),vvel(i,j)) +#endif + !----------------------------------------------------------------- ! ocean-ice stress for coupling ! here, strocn includes the factor of aice diff --git a/source/ice_fileunits.F90 b/source/ice_fileunits.F90 index d27bea63..7339affa 100644 --- a/source/ice_fileunits.F90 +++ b/source/ice_fileunits.F90 @@ -105,7 +105,12 @@ module ice_fileunits subroutine init_fileunits - nu_diag = ice_stderr ! default +#ifndef ACCESS + nu_diag = ice_stdout ! default +#else + nu_diag = 111 + open(nu_diag,file='ice_diag_out',form='formatted') ! status='new') +#endif ice_IOUnitsInUse = .false. ice_IOUnitsInUse(ice_stdin) = .true. ! 
reserve unit 5 @@ -217,7 +222,7 @@ subroutine release_all_fileunits call release_fileunit(nu_rst_pointer) call release_fileunit(nu_history) call release_fileunit(nu_hdr) -#ifndef AusCOM +#if !defined(AusCOM) || defined(ACCESS) if (nu_diag /= ice_stdout) call release_fileunit(nu_diag) #else close(nu_diag) @@ -242,7 +247,12 @@ subroutine release_fileunit(iunit) #else ! check for proper unit number if (iunit < 1 .or. iunit > ice_IOUnitsMaxUnit) then +#ifdef ACCESS + write (*,*) 'XXX Warning -- bad unit: iunit = ', iunit + !stop 'release_fileunit: bad unit' +#else stop 'release_fileunit: bad unit' +#endif endif ! mark the unit as not in use diff --git a/source/ice_flux.F90 b/source/ice_flux.F90 index c746d0df..474eced1 100644 --- a/source/ice_flux.F90 +++ b/source/ice_flux.F90 @@ -23,8 +23,15 @@ module ice_flux implicit none private public :: init_coupler_flux, init_history_therm, init_history_dyn, & +#ifdef ACCESS + init_flux_ocn, init_flux_atm, scale_fluxes, merge_fluxes!, & +!ars599: 06042016: something wrong with set_sfcflux can't recon. so +! markout from ice_flux +! set_sfcflux +#else init_flux_ocn, init_flux_atm, scale_fluxes, merge_fluxes, & set_sfcflux +#endif save !----------------------------------------------------------------- @@ -67,11 +74,15 @@ module ice_flux strinty , & ! divergence of internal ice stress, y (N/m^2) daidtd , & ! ice area tendency due to transport (1/s) dvidtd , & ! ice volume tendency due to transport (m/s) + dvsdtd , & ! snow volume tendency due to transport (m/s) dagedtd , & ! ice age tendency due to transport (s/s) dardg1dt, & ! rate of area loss by ridging ice (1/s) dardg2dt, & ! rate of area gain by new ridges (1/s) dvirdgdt, & ! rate of ice volume ridged (m/s) - opening ! rate of opening due to divergence/shear (1/s) + opening , & ! rate of opening due to divergence/shear (1/s) + ice_freeboard ! height of ice surface (i.e. not snow surface) + ! 
above sea level (m) + real (kind=dbl_kind), & dimension (nx_block,ny_block,ncat,max_blocks), public :: & @@ -86,7 +97,10 @@ module ice_flux araftn, & ! rafting ice area vraftn, & ! rafting ice volume aredistn, & ! redistribution function: fraction of new ridge area - vredistn ! redistribution function: fraction of new ridge volume + vredistn , & ! redistribution function: fraction of new ridge volume + ice_freeboardn ! category height of ice surface (i.e. not snow + ! surface) above sea level (m) + ! restart @@ -174,7 +188,10 @@ module ice_flux Tref , & ! 2m atm reference temperature (K) Qref , & ! 2m atm reference spec humidity (kg/kg) Uref , & ! 10m atm reference wind speed (m/s) - evap ! evaporative water flux (kg/m^2/s) + evap , & ! evaporative water flux (kg/m^2/s) + evap_ice, & ! evaporative water flux over ice only (kg/m^2/s) + evap_snow ! evaporative water flux over snow only (kg/m^2/s) + ! albedos aggregated over categories (if calc_Tsfc) real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks), public :: & @@ -232,8 +249,8 @@ module ice_flux snoicen ! snow-ice formation in category n (m) real (kind=dbl_kind), dimension (nx_block,ny_block,ncat,max_blocks), public :: & - keffn_top ! effective thermal conductivity of the top ice layer - ! on categories (W/m^2/K) + keffn_top , & ! effective thermal conductivity of the top ice layer + Tn_top ! on categories (W/m^2/K) ! for biogeochemistry real (kind=dbl_kind), dimension (nx_block,ny_block,ncat,max_blocks), public :: & @@ -266,6 +283,7 @@ module ice_flux real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), public :: & fsurf , & ! net surface heat flux (excluding fcondtop)(W/m^2) fcondtop,&! top surface conductive flux (W/m^2) + fcondbot,&! bottom surface conductive flux (W/m^2) congel, & ! basal ice growth (m/step-->cm/day) frazil, & ! frazil ice growth (m/step-->cm/day) snoice, & ! snow-ice formation (m/step-->cm/day) @@ -276,6 +294,7 @@ module ice_flux dsnow, & ! 
change in snow thickness (m/step-->cm/day) daidtt, & ! ice area tendency thermo. (s^-1) dvidtt, & ! ice volume tendency thermo. (m/s) + dvsdtt, & ! snow volume tendency thermo. (m/s) dagedtt,& ! ice age tendency thermo. (s/s) mlt_onset, &! day of year that sfc melting begins frz_onset ! day of year that freezing begins (congel or frazil) @@ -284,9 +303,14 @@ module ice_flux dimension (nx_block,ny_block,ncat,max_blocks), public :: & fsurfn, & ! category fsurf fcondtopn,& ! category fcondtop + fcondbotn,& ! category fcondbot fsensn, & ! category sensible heat flux flatn ! category latent heat flux + real (kind=dbl_kind), & + dimension (nx_block,ny_block,ncat,max_blocks), public :: & + snowfracn + ! As above but these remain grid box mean values i.e. they are not ! divided by aice at end of ice_dynamics. These are used in ! CICE_IN_NEMO for coupling and also for generating @@ -439,9 +463,7 @@ subroutine init_coupler_flux else Tf (:,:,:) = -depressT*sss(:,:,:) endif -#ifndef CICE_IN_NEMO sst (:,:,:) = Tf(:,:,:) ! sea surface temp (C) -#endif qdp (:,:,:) = c0 ! deep ocean heat flux (W/m^2) hmix (:,:,:) = c20 ! ocean mixed layer depth daice_da(:,:,:) = c0 ! data assimilation increment rate @@ -466,6 +488,8 @@ subroutine init_coupler_flux alidr (:,:,:) = c0 alvdf (:,:,:) = c0 alidf (:,:,:) = c0 + keffn_top(:,:,:,:) = c0 + Tn_top (:,:,:,:) = c0 !----------------------------------------------------------------- ! 
fluxes sent to ocean @@ -566,12 +590,18 @@ subroutine init_history_therm dkeel, lfloe, dfloe, Cdn_atm, Cdn_atm_rdg, & Cdn_atm_floe, Cdn_atm_pond, Cdn_atm_skin, & Cdn_atm_ratio, Cdn_ocn, Cdn_ocn_keel, & - Cdn_ocn_floe, Cdn_ocn_skin, formdrag, iceruf - use ice_state, only: aice, vice, trcr, tr_iage, nt_iage - use ice_constants, only: vonkar,zref !,iceruf + Cdn_ocn_floe, Cdn_ocn_skin, formdrag + use ice_constants, only: vonkar,zref +#ifdef ACCESS + use ice_constants, only: iceruf +#else + use ice_atmo, only: iceruf +#endif + use ice_state, only: aice, vice, vsno, trcr, tr_iage, nt_iage fsurf (:,:,:) = c0 fcondtop(:,:,:)= c0 + fcondbot(:,:,:)= c0 congel (:,:,:) = c0 frazil (:,:,:) = c0 snoice (:,:,:) = c0 @@ -580,8 +610,10 @@ subroutine init_history_therm melts (:,:,:) = c0 meltb (:,:,:) = c0 meltl (:,:,:) = c0 + ice_freeboard (:,:,:) = c0 daidtt (:,:,:) = aice(:,:,:) ! temporary initial area dvidtt (:,:,:) = vice(:,:,:) ! temporary initial volume + dvsdtt (:,:,:) = vsno(:,:,:) ! temporary initial volume if (tr_iage) then dagedtt(:,:,:) = trcr(:,:,nt_iage,:) ! temporary initial age else @@ -589,6 +621,7 @@ subroutine init_history_therm endif fsurfn (:,:,:,:) = c0 fcondtopn (:,:,:,:) = c0 + fcondbotn (:,:,:,:) = c0 flatn (:,:,:,:) = c0 fsensn (:,:,:,:) = c0 fpond (:,:,:) = c0 @@ -600,6 +633,7 @@ subroutine init_history_therm albsno (:,:,:) = c0 albpnd (:,:,:) = c0 apeff_ai (:,:,:) = c0 + snowfracn (:,:,:,:) = c0 snowfrac (:,:,:) = c0 ! drag coefficients are computed prior to the atmo_boundary call, @@ -638,7 +672,7 @@ end subroutine init_history_therm subroutine init_history_dyn - use ice_state, only: aice, vice, trcr, tr_iage, nt_iage + use ice_state, only: aice, vice, vsno, trcr, tr_iage, nt_iage sig1 (:,:,:) = c0 sig2 (:,:,:) = c0 @@ -656,6 +690,7 @@ subroutine init_history_dyn opening (:,:,:) = c0 daidtd (:,:,:) = aice(:,:,:) ! temporary initial area dvidtd (:,:,:) = vice(:,:,:) ! temporary initial volume + dvsdtd (:,:,:) = vsno(:,:,:) ! 
temporary initial volume if (tr_iage) & dagedtd (:,:,:) = trcr(:,:,nt_iage,:) ! temporary initial age fm (:,:,:) = c0 @@ -686,18 +721,24 @@ subroutine merge_fluxes (nx_block, ny_block, & strairxn, strairyn, & Cdn_atm_ratio_n, & fsurfn, fcondtopn, & + fcondbotn, & fsensn, flatn, & fswabsn, flwoutn, & evapn, & + evapn_ice,evapn_snow, & + ice_freeboardn, & Trefn, Qrefn, & freshn, fsaltn, & fhocnn, fswthrun, & strairxT, strairyT, & Cdn_atm_ratio, & fsurf, fcondtop, & + fcondbot, & fsens, flat, & fswabs, flwout, & evap, & + evap_ice, evap_snow, & + ice_freeboard, & Tref, Qref, & fresh, fsalt, & fhocn, fswthru, & @@ -725,6 +766,7 @@ subroutine merge_fluxes (nx_block, ny_block, & Cdn_atm_ratio_n, & ! ratio of total drag over neutral drag (atm) fsurfn , & ! net heat flux to top surface (W/m**2) fcondtopn,& ! downward cond flux at top sfc (W/m**2) + fcondbotn,& ! downward cond flux at bottom sfc (W/m**2) fsensn , & ! sensible heat flx (W/m**2) flatn , & ! latent heat flx (W/m**2) fswabsn , & ! shortwave absorbed heat flx (W/m**2) @@ -740,8 +782,11 @@ subroutine merge_fluxes (nx_block, ny_block, & meltbn , & ! bottom ice melt (m) meltsn , & ! snow melt (m) congeln , & ! congelation ice growth (m) - snoicen ! snow-ice growth (m) - + snoicen , & ! snow-ice growth (m) + ice_freeboardn , & ! ice freeboard (m) + evapn_ice, & ! evaporation over ice only (kg/m2/s) + evapn_snow ! evaporation over snow only (kg/m2/s) + real (kind=dbl_kind), dimension(nx_block,ny_block), optional, intent(in):: & Urefn ! air speed reference level (m/s) @@ -753,6 +798,7 @@ subroutine merge_fluxes (nx_block, ny_block, & Cdn_atm_ratio, & ! ratio of total drag over neutral drag (atm) fsurf , & ! net heat flux to top surface (W/m**2) fcondtop, & ! downward cond flux at top sfc (W/m**2) + fcondbot, & ! downward cond flux at bottom sfc (W/m**2) fsens , & ! sensible heat flx (W/m**2) flat , & ! latent heat flx (W/m**2) fswabs , & ! 
shortwave absorbed heat flx (W/m**2) @@ -768,7 +814,11 @@ subroutine merge_fluxes (nx_block, ny_block, & meltb , & ! bottom ice melt (m) melts , & ! snow melt (m) congel , & ! congelation ice growth (m) - snoice ! snow-ice growth (m) + snoice , & ! snow-ice growth (m) + ice_freeboard, & ! ice freeboard + evap_ice, & ! evaporation over ice only + evap_snow ! evaporation over snow only + real (kind=dbl_kind), dimension(nx_block,ny_block), optional, & intent(inout):: & @@ -799,12 +849,17 @@ subroutine merge_fluxes (nx_block, ny_block, & Cdn_atm_ratio_n (i,j)*aicen(i,j) fsurf (i,j) = fsurf (i,j) + fsurfn (i,j)*aicen(i,j) fcondtop (i,j) = fcondtop(i,j) + fcondtopn(i,j)*aicen(i,j) + fcondbot (i,j) = fcondbot(i,j) + fcondbotn(i,j)*aicen(i,j) fsens (i,j) = fsens (i,j) + fsensn (i,j)*aicen(i,j) flat (i,j) = flat (i,j) + flatn (i,j)*aicen(i,j) fswabs (i,j) = fswabs (i,j) + fswabsn (i,j)*aicen(i,j) flwout (i,j) = flwout (i,j) & + (flwoutn(i,j) - (c1-emissivity)*flw(i,j))*aicen(i,j) evap (i,j) = evap (i,j) + evapn (i,j)*aicen(i,j) + evap_ice (i,j) = evap_ice(i,j) + evapn_ice(i,j)*aicen(i,j) + evap_snow (i,j) = evap_snow(i,j) + evapn_snow(i,j)*aicen(i,j) + ice_freeboard (i,j) = ice_freeboard(i,j) + & + ice_freeboardn(i,j)*aicen(i,j) Tref (i,j) = Tref (i,j) + Trefn (i,j)*aicen(i,j) Qref (i,j) = Qref (i,j) + Qrefn (i,j)*aicen(i,j) if (present(Urefn) .and. present(Uref)) then @@ -845,6 +900,7 @@ subroutine scale_fluxes (nx_block, ny_block, & fsens, flat, & fswabs, flwout, & evap, & + evap_ice, evap_snow,& Tref, Qref, & fresh, fsalt, & fhocn, fswthru, & @@ -884,6 +940,8 @@ subroutine scale_fluxes (nx_block, ny_block, & fswabs , & ! shortwave absorbed heat flx (W/m**2) flwout , & ! upwd lw emitted heat flx (W/m**2) evap , & ! evaporation (kg/m2/s) + evap_ice, & ! evaporation over ice only (kg/m2/s) + evap_snow,& ! evaporation over snow only (kg/m2/s) Tref , & ! air tmp reference level (K) Qref , & ! air sp hum reference level (kg/kg) fresh , & ! 
fresh water flux to ocean (kg/m2/s) @@ -934,6 +992,8 @@ subroutine scale_fluxes (nx_block, ny_block, & fswabs (i,j) = fswabs (i,j) * ar flwout (i,j) = flwout (i,j) * ar evap (i,j) = evap (i,j) * ar + evap_ice(i,j) = evap_ice(i,j) * ar + evap_snow(i,j) = evap_snow(i,j) * ar Tref (i,j) = Tref (i,j) * ar Qref (i,j) = Qref (i,j) * ar if (present(Uref)) then @@ -1002,6 +1062,7 @@ end subroutine scale_fluxes !======================================================================= +#ifndef ACCESS ! If model is not calculating surface temperature, set the surface ! flux values using values read in from forcing data or supplied via ! coupling (stored in ice_flux). @@ -1155,7 +1216,7 @@ subroutine set_sfcflux (nx_block, ny_block, & endif ! extreme_test end subroutine set_sfcflux - +#endif !======================================================================= end module ice_flux diff --git a/source/ice_grid.F90 b/source/ice_grid.F90 index 00a1d26b..da5a4674 100644 --- a/source/ice_grid.F90 +++ b/source/ice_grid.F90 @@ -177,31 +177,19 @@ subroutine init_grid1 ! root and then doing a broadcast do_broadcast_array = .false. - fieldname='ulat' - call check(nf90_open(grid_file, NF90_NOWRITE, fid_grid), & - 'open: '//trim(grid_file)) - - call check(nf90_inq_varid(fid_grid, trim(fieldname), & - varid), & - 'inq var: '//trim(fieldname)) - - call check(nf90_get_var(fid_grid, varid, work_g1, & - start=(/1,1,1/), & - count=(/nx_global,ny_global,1/)), & - 'get var: '//trim(fieldname)) + call ice_open_nc(grid_file,fid_grid) + call ice_open_nc(kmt_file,fid_kmt) + fieldname='ulat' + call ice_read_global_nc(fid_grid,1,fieldname,work_g1,.true.) fieldname='kmt' - call check(nf90_open(kmt_file, NF90_NOWRITE, fid_kmt), & - 'open: '//trim(kmt_file)) + call ice_read_global_nc(fid_kmt,1,fieldname,work_g2,.true.) 
- call check(nf90_inq_varid(fid_kmt, trim(fieldname), & - varid), & - 'inq var: '//trim(fieldname)) + if (my_task == master_task) then + call ice_close_nc(fid_grid) + call ice_close_nc(fid_kmt) + endif - call check(nf90_get_var(fid_kmt, varid, work_g2, & - start=(/1,1,1/), & - count=(/nx_global,ny_global,1/)), & - 'get var: '//trim(fieldname)) else call ice_open(nu_grid,grid_file,64) ! ULAT @@ -249,19 +237,19 @@ subroutine init_grid1 end subroutine init_grid1 - subroutine check(status, msg) + ! subroutine check(status, msg) - use netcdf, only: nf90_noerr, nf90_strerror - use ice_exit, only: abort_ice + ! use netcdf, only: nf90_noerr, nf90_strerror + ! use ice_exit, only: abort_ice - integer, intent (in) :: status - character(len=*), intent (in) :: msg + ! integer, intent (in) :: status + ! character(len=*), intent (in) :: msg - if(status /= nf90_noerr) then - call abort_ice('ice: NetCDF error '//trim(nf90_strerror(status)//' '//trim(msg))) - end if + ! if(status /= nf90_noerr) then + ! call abort_ice('ice: NetCDF error '//trim(nf90_strerror(status)//' '//trim(msg))) + ! end if - end subroutine check + ! end subroutine check !======================================================================= diff --git a/source/ice_history.F90 b/source/ice_history.F90 index ba1f12ba..6c151d7f 100644 --- a/source/ice_history.F90 +++ b/source/ice_history.F90 @@ -76,6 +76,7 @@ subroutine init_hist (dt) use ice_history_drag, only: init_hist_drag_2D use ice_restart_shared, only: restart use ice_state, only: tr_iage, tr_FY, tr_lvl, tr_pond, tr_aero, tr_brine + use ice_therm_shared, only: calc_Tsfc, heat_capacity use ice_zbgc_shared, only: skl_bgc real (kind=dbl_kind), intent(in) :: & @@ -171,6 +172,13 @@ subroutine init_hist (dt) ! to prevent array-out-of-bounds when aggregating if (f_fmeltt_ai(1:1) /= 'x') f_fmelttn_ai = f_fmeltt_ai + ! AEW: These are only calculated under certain circumstances + ! (if using multilayers with UM-style coupling) + if (calc_Tsfc .or. .not. 
heat_capacity) then + f_Tn_top = 'x' + f_keffn_top = 'x' + endif + #ifndef ncdf f_bounds = .false. #endif @@ -249,6 +257,8 @@ subroutine init_hist (dt) call broadcast_scalar (f_flwup_ai, master_task) call broadcast_scalar (f_evap, master_task) call broadcast_scalar (f_evap_ai, master_task) + call broadcast_scalar (f_evap_ice_ai, master_task) + call broadcast_scalar (f_evap_snow_ai, master_task) call broadcast_scalar (f_Tair, master_task) call broadcast_scalar (f_Tref, master_task) call broadcast_scalar (f_Qref, master_task) @@ -284,7 +294,9 @@ subroutine init_hist (dt) call broadcast_scalar (f_sig1, master_task) call broadcast_scalar (f_sig2, master_task) call broadcast_scalar (f_dvidtt, master_task) + call broadcast_scalar (f_dvsdtt, master_task) call broadcast_scalar (f_dvidtd, master_task) + call broadcast_scalar (f_dvsdtd, master_task) call broadcast_scalar (f_daidtt, master_task) call broadcast_scalar (f_daidtd, master_task) call broadcast_scalar (f_dagedtt, master_task) @@ -293,6 +305,61 @@ subroutine init_hist (dt) call broadcast_scalar (f_frz_onset, master_task) call broadcast_scalar (f_aisnap, master_task) call broadcast_scalar (f_hisnap, master_task) + call broadcast_scalar (f_sithick, master_task) + call broadcast_scalar (f_siage, master_task) + call broadcast_scalar (f_sisnconc, master_task) + call broadcast_scalar (f_sisnthick, master_task) + call broadcast_scalar (f_sitemptop, master_task) + call broadcast_scalar (f_sitempsnic, master_task) + call broadcast_scalar (f_sitempbot, master_task) + call broadcast_scalar (f_siu, master_task) + call broadcast_scalar (f_siv, master_task) + call broadcast_scalar (f_sidmasstranx, master_task) + call broadcast_scalar (f_sidmasstrany, master_task) + call broadcast_scalar (f_sifb, master_task) + call broadcast_scalar (f_sistrxdtop, master_task) + call broadcast_scalar (f_sistrydtop, master_task) + call broadcast_scalar (f_sistrxubot, master_task) + call broadcast_scalar (f_sistryubot, master_task) + call 
broadcast_scalar (f_siforcetiltx, master_task) + call broadcast_scalar (f_siforcetilty, master_task) + call broadcast_scalar (f_siforcecoriolx, master_task) + call broadcast_scalar (f_siforcecorioly, master_task) + call broadcast_scalar (f_siforceintstrx, master_task) + call broadcast_scalar (f_siforceintstry, master_task) + call broadcast_scalar (f_sicompstren, master_task) + call broadcast_scalar (f_sispeed, master_task) + call broadcast_scalar (f_sialb, master_task) + call broadcast_scalar (f_sidivvel, master_task) + call broadcast_scalar (f_sihc, master_task) + call broadcast_scalar (f_sisnhc, master_task) + call broadcast_scalar (f_sidconcth, master_task) + call broadcast_scalar (f_sidconcdyn, master_task) + call broadcast_scalar (f_sidmassth, master_task) + call broadcast_scalar (f_sidmassdyn, master_task) + call broadcast_scalar (f_sidmassgrowthwat, master_task) + call broadcast_scalar (f_sidmassgrowthbot, master_task) + call broadcast_scalar (f_sidmasssi, master_task) + call broadcast_scalar (f_sidmassevapsubl, master_task) + call broadcast_scalar (f_sidmassmelttop, master_task) + call broadcast_scalar (f_sidmassmeltbot, master_task) + call broadcast_scalar (f_sidmasslat, master_task) + call broadcast_scalar (f_sndmasssnf, master_task) + call broadcast_scalar (f_sndmassmelt, master_task) + call broadcast_scalar (f_siflswdtop, master_task) + call broadcast_scalar (f_siflswutop, master_task) + call broadcast_scalar (f_siflswdbot, master_task) + call broadcast_scalar (f_sifllwdtop, master_task) + call broadcast_scalar (f_sifllwutop, master_task) + call broadcast_scalar (f_siflsenstop, master_task) + call broadcast_scalar (f_siflsensupbot, master_task) + call broadcast_scalar (f_sifllatstop, master_task) + call broadcast_scalar (f_siflcondtop, master_task) + call broadcast_scalar (f_siflcondbot, master_task) + call broadcast_scalar (f_sipr, master_task) + call broadcast_scalar (f_siflsaltbot, master_task) + call broadcast_scalar (f_siflfwbot, master_task) + 
call broadcast_scalar (f_sisaltmass, master_task) call broadcast_scalar (f_aicen, master_task) call broadcast_scalar (f_vicen, master_task) call broadcast_scalar (f_vsnon, master_task) @@ -308,6 +375,7 @@ subroutine init_hist (dt) call broadcast_scalar (f_fsensn_ai, master_task) ! call broadcast_scalar (f_field3dz, master_task) + call broadcast_scalar (f_Tn_top, master_task) call broadcast_scalar (f_keffn_top, master_task) call broadcast_scalar (f_Tinz, master_task) call broadcast_scalar (f_Sinz, master_task) @@ -348,12 +416,19 @@ subroutine init_hist (dt) "grid cell mean snow thickness", & "snow volume per unit grid cell area", c1, c0, & ns1, f_hs) + + + call define_hist_field(n_snowfrac,"snowfrac","1",tstr2D, tcstr, & "grid cell mean snow fraction", & "snow fraction per unit grid cell area", c1, c0, & ns1, f_snowfrac) + + + + call define_hist_field(n_Tsfc,"Tsfc","C",tstr2D, tcstr, & "snow/ice surface temperature", & "averaged with Tf if no ice is present", c1, c0, & @@ -424,7 +499,7 @@ subroutine init_hist (dt) "weighted by ice area", mps_to_cmpdy/rhofresh, c0, & ns1, f_rain_ai) - call define_hist_field(n_sst,"sst","C",tstr2D, tcstr, & + call define_hist_field(n_sst,"sst","degC",tstr2D, tcstr, & "sea surface temperature", & "none", c1, c0, & ns1, f_sst) @@ -574,6 +649,16 @@ subroutine init_hist (dt) "weighted by ice area", mps_to_cmpdy/rhofresh, c0, & ns1, f_evap_ai) + call define_hist_field(n_evap_ice_ai,"evap_ice_ai","cm/day",tstr2D, tcstr, & + "evaporative water flux over ice only", & + "weighted by ice area", mps_to_cmpdy/rhofresh, c0, & + ns1, f_evap_ice_ai) + + call define_hist_field(n_evap_snow_ai,"evap_snow_ai","cm/day",tstr2D, tcstr, & + "evaporative water flux over snow only", & + "weighted by ice area", mps_to_cmpdy/rhofresh, c0, & + ns1, f_evap_snow_ai) + call define_hist_field(n_Tair,"Tair","C",tstr2D, tcstr, & "air temperature", & "none", c1, -Tffresh, & @@ 
-746,15 +831,25 @@ subroutine init_hist (dt) ns1, f_sig2) call define_hist_field(n_dvidtt,"dvidtt","cm/day",tstr2D, tcstr, & - "volume tendency thermo", & + "ice volume tendency thermo", & "none", mps_to_cmpdy, c0, & ns1, f_dvidtt) + call define_hist_field(n_dvsdtt,"dvsdtt","cm/day",tstr2D, tcstr, & + "snow volume tendency thermo", & + "none", mps_to_cmpdy, c0, & + ns1, f_dvsdtt) + call define_hist_field(n_dvidtd,"dvidtd","cm/day",tstr2D, tcstr, & - "volume tendency dynamics", & + "ice volume tendency dynamics", & "none", mps_to_cmpdy, c0, & ns1, f_dvidtd) + call define_hist_field(n_dvsdtd,"dvsdtd","cm/day",tstr2D, tcstr, & + "snow volume tendency dynamics", & + "none", mps_to_cmpdy, c0, & + ns1, f_dvsdtd) + call define_hist_field(n_daidtt,"daidtt","%/day",tstr2D, tcstr, & "area tendency thermo", & "none", secday*c100, c0, & @@ -889,12 +984,290 @@ subroutine init_hist (dt) "first-year ice area", & "weighted by ice area", c1, c0, & ns1, f_FY) + ! CMIP6 2D variables + + call define_hist_field(n_sithick,"sithick","m",tstr2D, tcstr, & + "sea ice thickness", & + "volume divided by area", c1, c0, & + ns1, f_sithick) + + call define_hist_field(n_siage,"siage","s",tstr2D, tcstr, & + "sea ice age", & + "none", c1, c0, & + ns1, f_siage) + call define_hist_field(n_sifb,"sifb","m",tstr2D, tcstr, & + "sea ice freeboard", & + "none", c1, c0, & + ns1, f_sifb) + call define_hist_field(n_sisnconc,"sisnconc","1",tstr2D, tcstr, & + "snow area fraction", & + "none", c1, c0, & + ns1, f_sisnconc) + call define_hist_field(n_sisnthick,"sisnthick","m",tstr2D, tcstr, & + "sea ice snow thickness", & + "snow volume divided by area", c1, c0, & + ns1, f_sisnthick) + call define_hist_field(n_sitemptop,"sitemptop","degC",tstr2D, tcstr, & + "sea ice surface temperature", & + "none", c1, c0, & + ns1, f_sitemptop) + call define_hist_field(n_sitempsnic,"sitempsnic","degC",tstr2D, tcstr, & + "snow ice interface temperature", & + "surface temperature when no snow present", c1, c0, & + ns1, 
f_sitempsnic) + call define_hist_field(n_sitempbot,"sitempbot","degK",tstr2D, tcstr, & + "sea ice bottom temperature", & + "none", c1, c0, & + ns1, f_sitempbot) + call define_hist_field(n_siu,"siu","m/s",ustr2D, ucstr, & + "ice x velocity component", & + "none", c1, c0, & + ns1, f_siu) + call define_hist_field(n_siv,"siv","m/s",ustr2D, ucstr, & + "ice y velocity component", & + "none", c1, c0, & + ns1, f_siv) + + call define_hist_field(n_sidmasstranx,"sidmasstranx","kg/s",ustr2D, ucstr, & + "x component of snow and sea ice mass transport", & + "none", c1, c0, & + ns1, f_sidmasstranx) + + call define_hist_field(n_sidmasstrany,"sidmasstrany","kg/s",ustr2D, ucstr, & + "y component of snow and sea ice mass transport", & + "none", c1, c0, & + ns1, f_sidmasstrany) + + call define_hist_field(n_sistrxdtop,"sistrxdtop","N m^-2",ustr2D, ucstr, & + "x component of atmospheric stress on sea ice", & + "none", c1, c0, & + ns1, f_sistrxdtop) + + call define_hist_field(n_sistrydtop,"sistrydtop","N m^-2",ustr2D, ucstr, & + "y component of atmospheric stress on sea ice", & + "none", c1, c0, & + ns1, f_sistrydtop) + + + call define_hist_field(n_sistrxubot,"sistrxubot","N m^-2",ustr2D, ucstr, & + "x component of ocean stress on sea ice", & + "none", c1, c0, & + ns1, f_sistrxubot) + + call define_hist_field(n_sistryubot,"sistryubot","N m^-2",ustr2D, ucstr, & + "y component of ocean stress on sea ice", & + "none", c1, c0, & + ns1, f_sistryubot) + + call define_hist_field(n_siforcetiltx,"siforcetiltx","N m^-2",ustr2D, ucstr, & + "x component of sea surface tilt force", & + "none", c1, c0, & + ns1, f_siforcetiltx) + + call define_hist_field(n_siforcetilty,"siforcetilty","N m^-2",ustr2D, ucstr, & + "y component of sea surface tilt force", & + "none", c1, c0, & + ns1, f_siforcetilty) + + call define_hist_field(n_siforcecoriolx,"siforcecoriolx","N m^-2",ustr2D, ucstr, & + "x component of Coriolis force", & + "none", c1, c0, & + ns1, f_siforcecoriolx) + + call 
define_hist_field(n_siforcecorioly,"siforcecorioly","N m^-2",ustr2D, ucstr, & + "y component of Coriolis force", & + "none", c1, c0, & + ns1, f_siforcecorioly) + + call define_hist_field(n_siforceintstrx,"siforceintstrx","N m^-2",ustr2D, ucstr, & + "x component of internal ice stress force", & + "none", c1, c0, & + ns1, f_siforceintstrx) + + call define_hist_field(n_siforceintstry,"siforceintstry","N m^-2",ustr2D, ucstr, & + "y component of internal ice stress force", & + "none", c1, c0, & + ns1, f_siforceintstry) + + call define_hist_field(n_sicompstren,"sicompstren","N/m",ustr2D, ucstr, & + "compressive sea ice strength", & + "none", c1, c0, & + ns1, f_sicompstren) + + call define_hist_field(n_sidivvel,"sidivvel","1/s",ustr2D, ucstr, & + "divergence of the sea ice velocity field (ice area weighted)", & + "none", c1, c0, & + ns1, f_sidivvel) + + call define_hist_field(n_sispeed,"sispeed","m/s",ustr2D, ucstr, & + "ice speed", & + "none", c1, c0, & + ns1, f_sispeed) + + call define_hist_field(n_sialb,"sialb","1",tstr2D, tcstr, & + "sea ice albedo", & + "none", c1, c0, & + ns1, f_sialb) + + call define_hist_field(n_sihc,"sihc","J m^-2",tstr2D, tcstr, & + "sea ice heat content", & + "none", c1, c0, & + ns1, f_sihc) + + call define_hist_field(n_sisnhc,"sisnhc","J m^-2",tstr2D, tcstr, & + "snow heat content", & + "none", c1, c0, & + ns1, f_sisnhc) + + + + call define_hist_field(n_sidconcth,"sidconcth","1/s",tstr2D, tcstr, & + "sea ice area change from thermodynamics", & + "none", c1, c0, & + ns1, f_sidconcth) + call define_hist_field(n_sidconcdyn,"sidconcdyn","1/s",tstr2D, tcstr, & + "sea ice area change from dynamics", & + "none", c1, c0, & + ns1, f_sidconcdyn) + + call define_hist_field(n_sidmassth,"sidmassth","kg m^-2 s^-1",tstr2D, tcstr, & + "sea ice mass change from thermodynamics", & + "none", c1, c0, & + ns1, f_sidmassth) + + call define_hist_field(n_sidmassdyn,"sidmassdyn","kg m^-2 s^-1",tstr2D, tcstr, & + "sea ice mass change from dynamics", & + "none", c1, c0, 
& + ns1, f_sidmassdyn) + + call define_hist_field(n_sidmassgrowthwat,"sidmassgrowthwat","kg m^-2 s^-1",tstr2D, tcstr, & + "sea ice mass change from frazil growth", & + "none", c1, c0, & + ns1, f_sidmassgrowthwat) + + call define_hist_field(n_sidmassgrowthbot,"sidmassgrowthbot","kg m^-2 s^-1",tstr2D, tcstr, & + "sea ice mass change from bottom growth", & + "none", c1, c0, & + ns1, f_sidmassgrowthbot) + + call define_hist_field(n_sidmasssi,"sidmasssi","kg m^-2 s^-1",tstr2D, tcstr, & + "sea ice mass change from snow ice conversion", & + "none", c1, c0, & + ns1, f_sidmasssi) + + call define_hist_field(n_sidmassevapsubl,"sidmassevapsubl","kg m^-2 s^-1",tstr2D, tcstr, & + "sea ice mass change from evaporation and sublimation", & + "none", c1, c0, & + ns1, f_sidmassevapsubl) + + call define_hist_field(n_sidmassmelttop,"sidmassmelttop","kg m^-2 s^-1",tstr2D, tcstr, & + "sea ice mass change from top ice melt", & + "none", c1, c0, & + ns1, f_sidmassmelttop) + + call define_hist_field(n_sidmassmeltbot,"sidmassmeltbot","kg m^-2 s^-1",tstr2D, tcstr, & + "sea ice mass change from bottom ice melt", & + "none", c1, c0, & + ns1, f_sidmassmeltbot) + + call define_hist_field(n_sidmasslat,"sidmasslat","kg m^-2 s^-1",tstr2D, tcstr, & + "sea ice mass change from lateral ice melt", & + "none", c1, c0, & + ns1, f_sidmasslat) + + call define_hist_field(n_sndmasssnf,"sndmasssnf","kg m^-2 s^-1",tstr2D, tcstr, & + "snow mass change from snow fall", & + "none", c1, c0, & + ns1, f_sndmasssnf) + + call define_hist_field(n_sndmassmelt,"sndmassmelt","kg m^-2 s^-1",tstr2D, tcstr, & + "snow mass change from melt", & + "none", c1, c0, & + ns1, f_sndmassmelt) + + call define_hist_field(n_siflswdtop,"siflswdtop","W/m^2",tstr2D, tcstr, & + "down shortwave flux over sea ice", & + "positive downward", c1, c0, & + ns1, f_siflswdtop) + + call define_hist_field(n_siflswutop,"siflswutop","W/m^2",tstr2D, tcstr, & + "upward shortwave flux over sea ice", & + "positive downward", c1, c0, & + ns1, f_siflswutop) + 
+ call define_hist_field(n_siflswdbot,"siflswdbot","W/m^2",tstr2D, tcstr, & + "down shortwave flux at bottom of ice", & + "positive downward", c1, c0, & + ns1, f_siflswdbot) + + call define_hist_field(n_sifllwdtop,"sifllwdtop","W/m^2",tstr2D, tcstr, & + "down longwave flux over sea ice", & + "positive downward", c1, c0, & + ns1, f_sifllwdtop) + + call define_hist_field(n_sifllwutop,"sifllwutop","W/m^2",tstr2D, tcstr, & + "upward longwave flux over sea ice", & + "positive downward", c1, c0, & + ns1, f_sifllwutop) + + call define_hist_field(n_siflsenstop,"siflsenstop","W/m^2",tstr2D, tcstr, & + "sensible heat flux over sea ice", & + "positive downward", c1, c0, & + ns1, f_siflsenstop) + + call define_hist_field(n_siflsensupbot,"siflsensupbot","W/m^2",tstr2D, tcstr, & + "sensible heat flux at bottom of sea ice", & + "positive downward", c1, c0, & + ns1, f_siflsensupbot) + + + call define_hist_field(n_sifllatstop,"sifllatstop","W/m^2",tstr2D, tcstr, & + "latent heat flux over sea ice", & + "positive downward", c1, c0, & + ns1, f_sifllatstop) + + call define_hist_field(n_siflcondtop,"siflcondtop","W/m^2",tstr2D, tcstr, & + "conductive heat flux at top of sea ice", & + "positive downward", c1, c0, & + ns1, f_siflcondtop) + + call define_hist_field(n_siflcondbot,"siflcondbot","W/m^2",tstr2D, tcstr, & + "conductive heat flux at bottom of sea ice", & + "positive downward", c1, c0, & + ns1, f_siflcondbot) + + call define_hist_field(n_sipr,"sipr","kg m^-2 s^-1",tstr2D, tcstr, & + "rainfall over sea ice", & + "none", c1, c0, & + ns1, f_sipr) + + + call define_hist_field(n_siflsaltbot,"siflsaltbot","kg m^-2 s^-1",tstr2D, tcstr, & + "salt flux from sea ice", & + "positive downward", c1, c0, & + ns1, f_siflsaltbot) + + call define_hist_field(n_siflfwbot,"siflfwbot","kg m^-2 s^-1",tstr2D, tcstr, & + "fresh water flux from sea ice", & + "positive downward", c1, c0, & + ns1, f_siflfwbot) + + + call define_hist_field(n_sisaltmass,"sisaltmass","kg m^-2",tstr2D,& + tcstr, "mass of salt 
in sea ice (for ocean fluxes)",& + + "none", c1, c0, & + ns1, f_sisaltmass) endif ! if (histfreq(ns1) /= 'x') then enddo ! ns1 ! other 2D history variables + + + + ! mechanical redistribution call init_hist_mechred_2D @@ -949,6 +1322,10 @@ subroutine init_hist (dt) "sensible heat flux, category","weighted by ice area", c1, c0, & ns1, f_fsensn_ai) + call define_hist_field(n_Tn_top,"Tn_top","K",tstr3Dc, tcstr, & + "temperature of the top layer (snow or ice), categories","multilayer scheme", c1, c0, & + ns1, f_Tn_top) + call define_hist_field(n_keffn_top,"keffn_top","W/m^2/K",tstr3Dc, tcstr, & "effective thermal conductivity of the top ice layer, categories", & "multilayer scheme", c1, c0, & @@ -1149,11 +1526,13 @@ end subroutine init_hist subroutine accum_hist (dt) use ice_blocks, only: block, get_block, nx_block, ny_block + use ice_domain_size, only: nilyr, nslyr use ice_fileunits, only: nu_diag use ice_constants, only: c0, c1, p25, puny, secday, depressT, & - awtvdr, awtidr, awtvdf, awtidf, Lfresh, rhos, cp_ice, spval + awtvdr, awtidr, awtvdf, awtidf, Lfresh, rhoi, rhos, cp_ice, & + spval_dbl, Tffresh, ice_ref_salinity, c1000 use ice_domain, only: blocks_ice, nblocks - use ice_grid, only: tmask, lmask_n, lmask_s + use ice_grid, only: tmask, lmask_n, lmask_s, tarea, HTE, HTN #ifdef AusCOM use ice_grid, only: umask !ars599: 27032014 @@ -1169,7 +1548,7 @@ subroutine accum_hist (dt) new_month use ice_dyn_eap, only: a11, a12, e11, e12, e22, s11, s12, s22, & yieldstress11, yieldstress12, yieldstress22 - use ice_dyn_shared, only: kdyn, principal_stress + use ice_dyn_shared, only: kdyn, principal_stress,a_min use ice_flux, only: fsw, flw, fsnow, frain, sst, sss, uocn, vocn, & frzmlt_init, fswfac, fswabs, fswthru, alvdr, alvdf, alidr, alidf, & albice, albsno, albpnd, coszen, flat, fsens, flwout, evap, & @@ -1177,14 +1556,17 @@ subroutine accum_hist (dt) melts, meltb, meltt, meltl, fresh, fsalt, fresh_ai, fsalt_ai, & fhocn, 
fhocn_ai, uatm, vatm, & fswthru_ai, strairx, strairy, strtltx, strtlty, strintx, strinty, & - strocnx, strocny, fm, daidtt, dvidtt, daidtd, dvidtd, fsurf, & - fcondtop, fsurfn, fcondtopn, flatn, fsensn, albcnt, prs_sig, & + strocnx, strocny, fm, daidtt, dvidtt, dvsdtt, daidtd, dvidtd, dvsdtd, fsurf, & + fcondtop, fsurfn, fcondtopn, & + fcondbot, fcondbotn, ice_freeboard, & + flatn, fsensn, albcnt, prs_sig, & stressp_1, stressm_1, stress12_1, & stressp_2, stressm_2, stress12_2, & stressp_3, stressm_3, stress12_3, & stressp_4, stressm_4, stress12_4, sig1, sig2, & - mlt_onset, frz_onset, dagedtt, dagedtd, fswint_ai, keffn_top, & - snowfrac, alvdr_ai, alvdf_ai, alidr_ai, alidf_ai + mlt_onset, frz_onset, dagedtt, dagedtd, fswint_ai, Tn_top, & + keffn_top, snowfrac, snowfracn, alvdr_ai, alvdf_ai, alidr_ai, & + alidf_ai, evap_snow, evap_ice use ice_atmo, only: formdrag use ice_history_shared ! almost everything use ice_history_write, only: ice_write_hist @@ -1192,9 +1574,11 @@ subroutine accum_hist (dt) use ice_history_mechred, only: accum_hist_mechred use ice_history_pond, only: accum_hist_pond use ice_history_drag, only: accum_hist_drag + use ice_itd, only: hs_min, aicenmin + use ice_meltpond_cesm, only: hs0 use ice_state ! almost everything - use ice_shortwave, only: snowfracn - use ice_therm_shared, only: calculate_Tin_from_qin, Tmlt, ktherm + use ice_therm_shared, only: calculate_Tin_from_qin, Tmlt, ktherm, & + Ti_bot, Tsnic use ice_therm_mushy, only: temperature_mush, temperature_snow use ice_timers, only: ice_timer_start, ice_timer_stop, timer_readwrite use ice_zbgc_shared, only: skl_bgc @@ -1219,8 +1603,16 @@ subroutine accum_hist (dt) hs , & ! temporary variable for snow depth Tmlts ! temporary variable for melting temperature + real (kind=dbl_kind) :: & + area_threshold ! min time mean ice area allowed for dividing + ! (maximum of a_min and aicenmin - + ! dynamic + ! 
and thermodynamic ice areas) + + + real (kind=dbl_kind), dimension (nx_block,ny_block) :: & - worka, workb + worka, workb, ravgip type (block) :: & this_block ! block information for current block @@ -1303,8 +1695,10 @@ subroutine accum_hist (dt) call accum_hist_field(n_hi, iblk, vice(:,:,iblk), a2D) if (f_hs (1:1) /= 'x') & call accum_hist_field(n_hs, iblk, vsno(:,:,iblk), a2D) - if (f_snowfrac(1:1) /= 'x') & - call accum_hist_field(n_snowfrac, iblk, snowfrac(:,:,iblk), a2D) + if (f_sifb (1:1) /= 'x') & + call accum_hist_field(n_sifb, iblk, ice_freeboard(:,:,iblk), a2D) + if (f_snowfrac(1:1) /= 'x') & + call accum_hist_field(n_snowfrac, iblk, snowfrac(:,:,iblk), a2D) if (f_Tsfc (1:1) /= 'x') & call accum_hist_field(n_Tsfc, iblk, trcr(:,:,nt_Tsfc,iblk), a2D) if (f_aice (1:1) /= 'x') & @@ -1420,6 +1814,10 @@ subroutine accum_hist (dt) call accum_hist_field(n_evap, iblk, evap(:,:,iblk), a2D) if (f_evap_ai(1:1) /= 'x') & call accum_hist_field(n_evap_ai,iblk, evap(:,:,iblk)*workb(:,:), a2D) + if (f_evap_ice_ai(1:1) /= 'x') & + call accum_hist_field(n_evap_ice_ai,iblk, evap_ice(:,:,iblk)*workb(:,:), a2D) + if (f_evap_snow_ai(1:1) /= 'x') & + call accum_hist_field(n_evap_snow_ai,iblk, evap_snow(:,:,iblk)*workb(:,:), a2D) if (f_Tair (1:1) /= 'x') & call accum_hist_field(n_Tair, iblk, Tair(:,:,iblk), a2D) @@ -1502,8 +1900,12 @@ subroutine accum_hist (dt) if (f_dvidtt (1:1) /= 'x') & call accum_hist_field(n_dvidtt, iblk, dvidtt(:,:,iblk), a2D) + if (f_dvsdtt (1:1) /= 'x') & + call accum_hist_field(n_dvsdtt, iblk, dvsdtt(:,:,iblk), a2D) if (f_dvidtd (1:1) /= 'x') & call accum_hist_field(n_dvidtd, iblk, dvidtd(:,:,iblk), a2D) + if (f_dvsdtd (1:1) /= 'x') & + call accum_hist_field(n_dvsdtd, iblk, dvsdtd(:,:,iblk), a2D) if (f_daidtt (1:1) /= 'x') & call accum_hist_field(n_daidtt, iblk, daidtt(:,:,iblk), a2D) if (f_daidtd (1:1) /= 'x') & @@ -1521,207 +1923,889 @@ subroutine accum_hist (dt) if (f_icepresent(1:1) /= 'x') then worka(:,:) = c0 +#ifdef ACCESS + area_threshold = 
max(a_min,aicenmin) +#else + area_threshold = puny +#endif do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) worka(i,j) = c1 + if (aice(i,j,iblk) > area_threshold) worka(i,j) = c1 enddo enddo call accum_hist_field(n_icepresent, iblk, worka(:,:), a2D) endif - ! 3D category fields - if (f_aicen (1:1) /= 'x') & - call accum_hist_field(n_aicen-n2D, iblk, ncat_hist, & - aicen(:,:,1:ncat_hist,iblk), a3Dc) - if (f_vicen (1:1) /= 'x') & - call accum_hist_field(n_vicen-n2D, iblk, ncat_hist, & - vicen(:,:,1:ncat_hist,iblk), a3Dc) - if (f_vsnon (1:1) /= 'x') & - call accum_hist_field(n_vsnon-n2D, iblk, ncat_hist, & - vsnon(:,:,1:ncat_hist,iblk), a3Dc) - if (f_snowfracn(1:1) /= 'x') & - call accum_hist_field(n_snowfracn-n2D, iblk, ncat_hist, & - snowfracn(:,:,1:ncat_hist,iblk), a3Dc) - if (f_keffn_top (1:1) /= 'x') & - call accum_hist_field(n_keffn_top-n2D, iblk, ncat_hist, & - keffn_top(:,:,1:ncat_hist,iblk), a3Dc) - if (f_fsurfn_ai (1:1) /= 'x') & - call accum_hist_field(n_fsurfn_ai-n2D, iblk, ncat_hist, & - fsurfn(:,:,1:ncat_hist,iblk)*aicen_init(:,:,1:ncat_hist,iblk), a3Dc) - if (f_fcondtopn_ai (1:1) /= 'x') & - call accum_hist_field(n_fcondtopn_ai-n2D, iblk, ncat_hist, & - fcondtopn(:,:,1:ncat_hist,iblk)*aicen_init(:,:,1:ncat_hist,iblk), a3Dc) - if (f_flatn_ai (1:1) /= 'x') & - call accum_hist_field(n_flatn_ai-n2D, iblk, ncat_hist, & - flatn(:,:,1:ncat_hist,iblk)*aicen_init(:,:,1:ncat_hist,iblk), a3Dc) - if (f_fsensn_ai (1:1) /= 'x') & - call accum_hist_field(n_fsensn_ai-n2D, iblk, ncat_hist, & - fsensn(:,:,1:ncat_hist,iblk)*aicen_init(:,:,1:ncat_hist,iblk), a3Dc) - ! Calculate surface heat flux that causes melt (calculated by the - ! atmos in HadGEM3 so needed for checking purposes) - if (f_fmelttn_ai (1:1) /= 'x') & - call accum_hist_field(n_fmelttn_ai-n2D, iblk, ncat_hist, & - max(fsurfn(:,:,1:ncat_hist,iblk) - fcondtopn(:,:,1:ncat_hist,iblk),c0) & - *aicen_init(:,:,1:ncat_hist,iblk), a3Dc) - -! example for 3D field (x,y,z) -! 
if (f_field3dz (1:1) /= 'x') & -! call accum_hist_field(n_field3dz-n3Dccum, iblk, nzilyr, & -! field3dz(:,:,1:nzilyr,iblk), a3Dz) + !2D CMIP6 fields - ! 4D category fields - if (f_Tinz (1:1) /= 'x') then - Tinz4d(:,:,:,:) = c0 - if (ktherm == 2) then - do n = 1, ncat_hist - do j = jlo, jhi - do i = ilo, ihi - do k = 1, nzilyr - Tinz4d(i,j,k,n) = temperature_mush( & - trcrn(i,j,nt_qice+k-1,n,iblk), trcrn(i,j,nt_sice+k-1,n,iblk)) - enddo - enddo - enddo - enddo - else - do n = 1, ncat_hist - do j = jlo, jhi - do i = ilo, ihi - do k = 1, nzilyr - qn = trcrn(i,j,nt_qice+k-1,n,iblk) -! Tinz4d(i,j,k,n) = calculate_Tin_from_qin(qn,Tmlt(k)) - Tmlts = -trcrn(i,j,nt_sice+k-1,n,iblk)*depressT - Tinz4d(i,j,k,n) = calculate_Tin_from_qin(qn,Tmlts) - enddo - enddo - enddo - enddo - endif - call accum_hist_field(n_Tinz-n3Dbcum, iblk, nzilyr, ncat_hist, & - Tinz4d(:,:,1:nzilyr,1:ncat_hist), a4Di) + if (f_sithick(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) worka(i,j) = vice(i,j,iblk) + enddo + enddo + call accum_hist_field(n_sithick, iblk, worka(:,:), a2D) endif - if (f_Sinz (1:1) /= 'x') then - Sinz4d(:,:,:,:) = c0 - do n = 1, ncat_hist - do j = jlo, jhi - do i = ilo, ihi - if (vicen(i,j,n,iblk) > puny) then - Sinz4d(i,j,1:nzilyr,n) = trcrn(i,j,nt_sice:nt_sice+nzilyr-1,n,iblk) - endif - enddo - enddo - enddo - call accum_hist_field(n_Sinz-n3Dbcum, iblk, nzilyr, ncat_hist, & - Sinz4d(:,:,1:nzilyr,1:ncat_hist), a4Di) + + if (f_siage(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) worka(i,j) = aice(i,j,iblk)*trcr(i,j,nt_iage,iblk) + enddo + enddo + call accum_hist_field(n_siage, iblk, worka(:,:), a2D) endif - - if (f_Tsnz (1:1) /= 'x') then - Tsnz4d(:,:,:,:) = c0 - if (ktherm == 2) then - do n = 1, ncat_hist - do j = jlo, jhi - do i = ilo, ihi - do k = 1, nzslyr - qn = trcrn(i,j,nt_qsno+k-1,n,iblk) - Tsnz4d(i,j,k,n) = temperature_snow(qn) - enddo - enddo - enddo - enddo - else 
- do n = 1, ncat_hist - do j = jlo, jhi - do i = ilo, ihi - do k = 1, nzslyr - qn = trcrn(i,j,nt_qsno+k-1,n,iblk) - Tsnz4d(i,j,k,n) = (Lfresh + qn/rhos)/cp_ice - enddo - enddo - enddo - enddo - endif - call accum_hist_field(n_Tsnz-n4Dicum, iblk, nzslyr, ncat_hist, & - Tsnz4d(:,:,1:nzslyr,1:ncat_hist), a4Ds) + + + if (f_sisnconc(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) worka(i,j) = snowfrac(i,j,iblk) + enddo + enddo + call accum_hist_field(n_sisnconc, iblk, worka(:,:), a2D) endif - - ! Calculate aggregate surface melt flux by summing category values - if (f_fmeltt_ai(1:1) /= 'x') then - do ns = 1, nstreams - if (n_fmeltt_ai(ns) /= 0) then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (tmask(i,j,iblk)) then - do n=1,ncat_hist - worka(i,j) = worka(i,j) + a3Dc(i,j,n,n_fmelttn_ai(ns)-n2D,iblk) - enddo ! n - endif ! tmask - enddo ! i - enddo ! j - a2D(:,:,n_fmeltt_ai(ns),iblk) = worka(:,:) + + if (f_sisnthick(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny .and. snowfrac(i,j,iblk) > puny) & + worka(i,j) = vsno(i,j,iblk) + enddo + enddo + call accum_hist_field(n_sisnthick, iblk, worka(:,:), a2D) endif - enddo - endif - !--------------------------------------------------------------- - ! accumulate other history output - !--------------------------------------------------------------- - ! mechanical redistribution - call accum_hist_mechred (iblk) + if (f_sitemptop(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + worka(i,j) = aice(i,j,iblk)*trcr(i,j,nt_Tsfc,iblk) + enddo + enddo + call accum_hist_field(n_sitemptop, iblk, worka(:,:), a2D) + endif - ! melt ponds - if (tr_pond) call accum_hist_pond (iblk) - ! biogeochemistry - if (tr_aero .or. tr_brine .or. skl_bgc) call accum_hist_bgc (iblk) + if (f_sitempsnic(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (vsno(i,j,iblk) > puny .and. 
aice_init(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*Tsnic(i,j,iblk)/aice_init(i,j,iblk) + else + worka(i,j) = aice(i,j,iblk)*trcr(i,j,nt_Tsfc,iblk) + endif + enddo + enddo + call accum_hist_field(n_sitempsnic, iblk, worka(:,:), a2D) + endif - ! form drag - if (formdrag) call accum_hist_drag (iblk) + if (f_sitempbot(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice_init(i,j,iblk) > puny) & + worka(i,j) = aice(i,j,iblk)*(Ti_bot(i,j,iblk)+Tffresh) + enddo + enddo + call accum_hist_field(n_sitempbot, iblk, worka(:,:), a2D) + endif - enddo ! iblk - !$OMP END PARALLEL DO + if (f_siu(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) worka(i,j) = aice(i,j,iblk)*uvel(i,j,iblk) + enddo + enddo + call accum_hist_field(n_siu, iblk, worka(:,:), a2D) + endif - !--------------------------------------------------------------- - ! Write output files at prescribed intervals - !--------------------------------------------------------------- + if (f_siv(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) worka(i,j) = aice(i,j,iblk)*vvel(i,j,iblk) + enddo + enddo + call accum_hist_field(n_siv, iblk, worka(:,:), a2D) + endif - nstrm = nstreams - if (write_ic) nstrm = 1 - do ns = 1, nstrm - if (write_history(ns) .or. write_ic) then + if (f_sispeed(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) worka(i,j) = aice(i,j,iblk) & + * sqrt(uvel(i,j,iblk)*uvel(i,j,iblk)+vvel(i,j,iblk)*vvel(i,j,iblk)) + enddo + enddo + call accum_hist_field(n_sispeed, iblk, worka(:,:), a2D) + endif - !--------------------------------------------------------------- - ! 
Mask out land points and convert units - !--------------------------------------------------------------- - ravgct = c1/avgct(ns) - !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block, & - !$OMP n,nn,ravgctz) - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi + if (f_sidmasstranx(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) & + worka(i,j) = (rhoi*0.5*(vice(i+1,j,iblk)+vice(i,j,iblk))*HTE(i,j,iblk) & + + rhos*0.5*(vsno(i+1,j,iblk)+vsno(i,j,iblk))*HTE(i,j,iblk)) & + * 0.5*(uvel(i,j-1,iblk)+uvel(i,j,iblk)) + enddo + enddo + call accum_hist_field(n_sidmasstranx, iblk, worka(:,:), a2D) + endif - do n = 1, num_avail_hist_fields_2D - if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + if (f_sidmasstrany(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) & + worka(i,j) = (rhoi*0.5*(vice(i,j+1,iblk)+vice(i,j,iblk))*HTN(i,j,iblk) & + + rhos*0.5*(vsno(i,j+1,iblk)+vsno(i,j,iblk))*HTN(i,j,iblk)) & + * 0.5*(vvel(i-1,j,iblk)+vvel(i,j,iblk)) + enddo + enddo + call accum_hist_field(n_sidmasstrany, iblk, worka(:,:), a2D) + endif - do j = jlo, jhi - do i = ilo, ihi -#ifdef AusCOM - if (n_uocn(ns)==n.or.n_vocn(ns)==n) then - if (.not. umask(i,j,iblk)) then ! mask out land points - a2D(i,j,n,iblk) = spval - else ! 
convert units - a2D(i,j,n,iblk) = avail_hist_fields(n)%cona*a2D(i,j,n,iblk) & + if (f_sistrxdtop(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) & + worka(i,j) = aice(i,j,iblk)*strairx(i,j,iblk) + enddo + enddo + call accum_hist_field(n_sistrxdtop, iblk, worka(:,:), a2D) + endif + + if (f_sistrydtop(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) & + worka(i,j) = aice(i,j,iblk)*strairy(i,j,iblk) + enddo + enddo + call accum_hist_field(n_sistrydtop, iblk, worka(:,:), a2D) + endif + + if (f_sistrxubot(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) & + worka(i,j) = aice(i,j,iblk)*strocnx(i,j,iblk) + enddo + enddo + call accum_hist_field(n_sistrxubot, iblk, worka(:,:), a2D) + endif + + if (f_sistryubot(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) & + worka(i,j) = aice(i,j,iblk)*strocny(i,j,iblk) + enddo + enddo + call accum_hist_field(n_sistryubot, iblk, worka(:,:), a2D) + endif + + if (f_siforcetiltx(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) & + worka(i,j) = aice(i,j,iblk)*strtltx(i,j,iblk) + enddo + enddo + call accum_hist_field(n_siforcetiltx, iblk, worka(:,:), a2D) + endif + + if (f_siforcetilty(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) & + worka(i,j) = aice(i,j,iblk)*strtlty(i,j,iblk) + enddo + enddo + call accum_hist_field(n_siforcetilty, iblk, worka(:,:), a2D) + endif + + if (f_siforcecoriolx(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) & + worka(i,j) = aice(i,j,iblk)*fm(i,j,iblk)*vvel(i,j,iblk) + enddo + enddo + call accum_hist_field(n_siforcecoriolx, iblk, worka(:,:), a2D) + endif + + if (f_siforcecorioly(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + 
if (aice(i,j,iblk) > puny) & + worka(i,j) = -aice(i,j,iblk)*fm(i,j,iblk)*uvel(i,j,iblk) + enddo + enddo + call accum_hist_field(n_siforcecorioly, iblk, worka(:,:), a2D) + endif + + if (f_siforceintstrx(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) & + worka(i,j) = aice(i,j,iblk)*strintx(i,j,iblk) + enddo + enddo + call accum_hist_field(n_siforceintstrx, iblk, worka(:,:), a2D) + endif + + if (f_siforceintstry(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) & + worka(i,j) = aice(i,j,iblk)*strinty(i,j,iblk) + enddo + enddo + call accum_hist_field(n_siforceintstry, iblk, worka(:,:), a2D) + endif + + if (f_sicompstren(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) & + worka(i,j) = aice(i,j,iblk)*strength(i,j,iblk) + enddo + enddo + call accum_hist_field(n_sicompstren, iblk, worka(:,:), a2D) + endif + + if (f_sidivvel(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) & + worka(i,j) = aice(i,j,iblk)*divu(i,j,iblk) + enddo + enddo + call accum_hist_field(n_sidivvel, iblk, worka(:,:), a2D) + endif + + if (f_sialb(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (fsw(i,j,iblk) > puny .and. 
aice_init(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*(fsw(i,j,iblk)-fswabs(i,j,iblk) & + * aice(i,j,iblk)/aice_init(i,j,iblk)) & + / fsw(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_sialb, iblk, worka(:,:), a2D) + endif + + if (f_sihc(1:1) /= 'x') then + worka(:,:) = c0 + do k = 1,nilyr + do j = jlo, jhi + do i = ilo, ihi + worka(i,j) = worka(i,j) + trcr(i,j,nt_qice+k-1,iblk)*vice(i,j,iblk)/real(nilyr,kind=dbl_kind) + enddo + enddo + enddo + call accum_hist_field(n_sihc, iblk, worka(:,:), a2D) + endif + + if (f_sisnhc(1:1) /= 'x') then + worka(:,:) = c0 + do k = 1,nslyr + do j = jlo, jhi + do i = ilo, ihi + worka(i,j) = worka(i,j) + trcr(i,j,nt_qsno+k-1,iblk)*vsno(i,j,iblk)/real(nslyr,kind=dbl_kind) + enddo + enddo + enddo + call accum_hist_field(n_sisnhc, iblk, worka(:,:), a2D) + endif + + if (f_sidconcth(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = daidtt(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_sidconcth, iblk, worka(:,:), a2D) + endif + + if (f_sidconcdyn(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = daidtd(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_sidconcdyn, iblk, worka(:,:), a2D) + endif + + if (f_sidmassth(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = dvidtt(i,j,iblk) * rhoi + endif + enddo + enddo + call accum_hist_field(n_sidmassth, iblk, worka(:,:), a2D) + endif + + if (f_sidmassdyn(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = dvidtd(i,j,iblk) * rhoi + endif + enddo + enddo + call accum_hist_field(n_sidmassdyn, iblk, worka(:,:), a2D) + endif + + if (f_sidmassgrowthwat(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice_init(i,j,iblk) > puny) then + worka(i,j) = 
aice(i,j,iblk)*frazil(i,j,iblk)*rhoi / aice_init(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_sidmassgrowthwat, iblk, worka(:,:), a2D) + endif + + if (f_sidmassgrowthbot(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice_init(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*congel(i,j,iblk)*rhoi / aice_init(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_sidmassgrowthbot, iblk, worka(:,:), a2D) + endif + + if (f_sidmasssi(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice_init(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*snoice(i,j,iblk)*rhoi / aice_init(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_sidmasssi, iblk, worka(:,:), a2D) + endif + + if (f_sidmassevapsubl(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*evap(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_sidmassevapsubl, iblk, worka(:,:), a2D) + endif + + if (f_sidmassmelttop(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice_init(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*meltt(i,j,iblk)*rhoi / aice_init(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_sidmassmelttop, iblk, worka(:,:), a2D) + endif + + if (f_sidmassmeltbot(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice_init(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*meltb(i,j,iblk)*rhoi / aice_init(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_sidmassmeltbot, iblk, worka(:,:), a2D) + endif + + if (f_sidmasslat(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice_init(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*meltl(i,j,iblk)*rhoi / aice_init(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_sidmasslat, iblk, worka(:,:), a2D) + endif + + if (f_sndmasssnf(1:1) /= 'x') then + 
worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk) * fsnow(i,j,iblk) * dt + endif + enddo + enddo + call accum_hist_field(n_sndmasssnf, iblk, worka(:,:), a2D) + endif + + if (f_sndmassmelt(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice_init(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*melts(i,j,iblk)*rhos / aice_init(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_sndmassmelt, iblk, worka(:,:), a2D) + endif + + if (f_siflswdtop(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (fsw(i,j,iblk) > puny .and. aice(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*fsw(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_siflswdtop, iblk, worka(:,:), a2D) + endif + + if (f_siflswutop(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (fsw(i,j,iblk) > puny .and. aice_init(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*(fsw(i,j,iblk)-fswabs(i,j,iblk) & + * aice(i,j,iblk)/aice_init(i,j,iblk)) + endif + enddo + enddo + call accum_hist_field(n_siflswutop, iblk, worka(:,:), a2D) + endif + + if (f_siflswdbot(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*fswthru(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_siflswdbot, iblk, worka(:,:), a2D) + endif + + if (f_sifllwdtop(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*flw(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_sifllwdtop, iblk, worka(:,:), a2D) + endif + + if (f_sifllwutop(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*flwout(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_sifllwutop, iblk, worka(:,:), a2D) + endif + + if 
(f_siflsenstop(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*fsens(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_siflsenstop, iblk, worka(:,:), a2D) + endif + + if (f_siflsensupbot(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*fhocn(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_siflsensupbot, iblk, worka(:,:), a2D) + endif + + if (f_sifllatstop(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*flat(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_sifllatstop, iblk, worka(:,:), a2D) + endif + + if (f_siflcondtop(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*fcondtop(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_siflcondtop, iblk, worka(:,:), a2D) + endif + + if (f_siflcondbot(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice_init(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*fcondbot(i,j,iblk)/aice_init(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_siflcondbot, iblk, worka(:,:), a2D) + endif + + if (f_sipr(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*frain(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_sipr, iblk, worka(:,:), a2D) + endif + + if (f_siflsaltbot(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*fsalt(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_siflsaltbot, iblk, worka(:,:), a2D) + endif + + if (f_sisaltmass(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if 
(aice(i,j,iblk) > puny) then + worka(i,j) = ice_ref_salinity * rhoi * vice(i,j,iblk) / c1000 + endif + enddo + enddo + call accum_hist_field(n_sisaltmass, iblk, worka(:,:), a2D) + endif + + if (f_siflfwbot(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = aice(i,j,iblk)*fresh(i,j,iblk) + endif + enddo + enddo + call accum_hist_field(n_siflfwbot, iblk, worka(:,:), a2D) + endif + + + + + + + + + + + + + + +!3D category fields + + if (f_aicen (1:1) /= 'x') & + call accum_hist_field(n_aicen-n2D, iblk, ncat_hist, & + aicen(:,:,1:ncat_hist,iblk), a3Dc) + if (f_vicen (1:1) /= 'x') & + call accum_hist_field(n_vicen-n2D, iblk, ncat_hist, & + vicen(:,:,1:ncat_hist,iblk), a3Dc) + if (f_vsnon (1:1) /= 'x') & + call accum_hist_field(n_vsnon-n2D, iblk, ncat_hist, & + vsnon(:,:,1:ncat_hist,iblk), a3Dc) + if (f_snowfracn(1:1) /= 'x') & + call accum_hist_field(n_snowfracn-n2D, iblk, ncat_hist, & +#ifdef ACCESS + snowfracn(:,:,1:ncat_hist,iblk)*aicen(:,:,:,iblk), a3Dc) +#else + snowfracn(:,:,1:ncat_hist,iblk), a3Dc) +#endif + if (f_Tn_top (1:1) /= 'x') & + call accum_hist_field(n_Tn_top-n2D, iblk, ncat_hist, & + Tn_top(:,:,1:ncat_hist,iblk), a3Dc) + if (f_keffn_top (1:1) /= 'x') & + call accum_hist_field(n_keffn_top-n2D, iblk, ncat_hist, & + keffn_top(:,:,1:ncat_hist,iblk), a3Dc) + if (f_fsurfn_ai (1:1) /= 'x') & + call accum_hist_field(n_fsurfn_ai-n2D, iblk, ncat_hist, & + fsurfn(:,:,1:ncat_hist,iblk)*aicen_init(:,:,1:ncat_hist,iblk), a3Dc) + if (f_fcondtopn_ai (1:1) /= 'x') & + call accum_hist_field(n_fcondtopn_ai-n2D, iblk, ncat_hist, & + fcondtopn(:,:,1:ncat_hist,iblk)*aicen_init(:,:,1:ncat_hist,iblk), a3Dc) + if (f_flatn_ai (1:1) /= 'x') & + call 
accum_hist_field(n_flatn_ai-n2D, iblk, ncat_hist, & + flatn(:,:,1:ncat_hist,iblk)*aicen_init(:,:,1:ncat_hist,iblk), a3Dc) + if (f_fsensn_ai (1:1) /= 'x') & + call accum_hist_field(n_fsensn_ai-n2D, iblk, ncat_hist, & + fsensn(:,:,1:ncat_hist,iblk)*aicen_init(:,:,1:ncat_hist,iblk), a3Dc) + ! Calculate surface heat flux that causes melt (calculated by the + ! atmos in HadGEM3 so needed for checking purposes) + if (f_fmelttn_ai (1:1) /= 'x') & + call accum_hist_field(n_fmelttn_ai-n2D, iblk, ncat_hist, & + max(fsurfn(:,:,1:ncat_hist,iblk) - fcondtopn(:,:,1:ncat_hist,iblk),c0) & + *aicen_init(:,:,1:ncat_hist,iblk), a3Dc) + +! example for 3D field (x,y,z) +! if (f_field3dz (1:1) /= 'x') & +! call accum_hist_field(n_field3dz-n3Dccum, iblk, nzilyr, & +! field3dz(:,:,1:nzilyr,iblk), a3Dz) + + ! 4D category fields + if (f_Tinz (1:1) /= 'x') then + Tinz4d(:,:,:,:) = c0 + if (ktherm == 2) then + do n = 1, ncat_hist + do j = jlo, jhi + do i = ilo, ihi + do k = 1, nzilyr + Tinz4d(i,j,k,n) = temperature_mush( & + trcrn(i,j,nt_qice+k-1,n,iblk), trcrn(i,j,nt_sice+k-1,n,iblk)) + enddo + enddo + enddo + enddo + else + do n = 1, ncat_hist + do j = jlo, jhi + do i = ilo, ihi + do k = 1, nzilyr + qn = trcrn(i,j,nt_qice+k-1,n,iblk) +! 
Tinz4d(i,j,k,n) = calculate_Tin_from_qin(qn,Tmlt(k)) + Tmlts = -trcrn(i,j,nt_sice+k-1,n,iblk)*depressT + Tinz4d(i,j,k,n) = calculate_Tin_from_qin(qn,Tmlts) + enddo + enddo + enddo + enddo + endif + call accum_hist_field(n_Tinz-n3Dbcum, iblk, nzilyr, ncat_hist, & + Tinz4d(:,:,1:nzilyr,1:ncat_hist), a4Di) + endif + if (f_Sinz (1:1) /= 'x') then + Sinz4d(:,:,:,:) = c0 + do n = 1, ncat_hist + do j = jlo, jhi + do i = ilo, ihi + if (vicen(i,j,n,iblk) > puny) then + Sinz4d(i,j,1:nzilyr,n) = trcrn(i,j,nt_sice:nt_sice+nzilyr-1,n,iblk) + endif + enddo + enddo + enddo + call accum_hist_field(n_Sinz-n3Dbcum, iblk, nzilyr, ncat_hist, & + Sinz4d(:,:,1:nzilyr,1:ncat_hist), a4Di) + endif + + if (f_Tsnz (1:1) /= 'x') then + Tsnz4d(:,:,:,:) = c0 + if (ktherm == 2) then + do n = 1, ncat_hist + do j = jlo, jhi + do i = ilo, ihi + do k = 1, nzslyr + qn = trcrn(i,j,nt_qsno+k-1,n,iblk) + Tsnz4d(i,j,k,n) = temperature_snow(qn) + enddo + enddo + enddo + enddo + else + do n = 1, ncat_hist + do j = jlo, jhi + do i = ilo, ihi + do k = 1, nzslyr + qn = trcrn(i,j,nt_qsno+k-1,n,iblk) + Tsnz4d(i,j,k,n) = (Lfresh + qn/rhos)/cp_ice + enddo + enddo + enddo + enddo + endif + call accum_hist_field(n_Tsnz-n4Dicum, iblk, nzslyr, ncat_hist, & + Tsnz4d(:,:,1:nzslyr,1:ncat_hist), a4Ds) + endif + + ! Calculate aggregate surface melt flux by summing category values + if (f_fmeltt_ai(1:1) /= 'x') then + do ns = 1, nstreams + if (n_fmeltt_ai(ns) /= 0) then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + do n=1,ncat_hist + worka(i,j) = worka(i,j) + a3Dc(i,j,n,n_fmelttn_ai(ns)-n2D,iblk) + enddo ! n + endif ! tmask + enddo ! i + enddo ! j + a2D(:,:,n_fmeltt_ai(ns),iblk) = worka(:,:) + endif + enddo + endif + + !--------------------------------------------------------------- + ! accumulate other history output + !--------------------------------------------------------------- + + ! mechanical redistribution + call accum_hist_mechred (iblk) + + ! 
melt ponds + if (tr_pond) call accum_hist_pond (iblk) + + ! biogeochemistry + if (tr_aero .or. tr_brine .or. skl_bgc) call accum_hist_bgc (iblk) + + ! form drag + if (formdrag) call accum_hist_drag (iblk) + + enddo ! iblk + !$OMP END PARALLEL DO + + !--------------------------------------------------------------- + ! Write output files at prescribed intervals + !--------------------------------------------------------------- + + nstrm = nstreams + if (write_ic) nstrm = 1 + + do ns = 1, nstrm + if (write_history(ns) .or. write_ic) then + + !--------------------------------------------------------------- + ! Mask out land points and convert units + !--------------------------------------------------------------- + + ravgct = c1/avgct(ns) + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block, & + !$OMP n,nn,ravgctz,ravgip) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi +#ifdef ACCESS + ! Alex West - enforce time mean ice area threshold based + ! on + ! the maximum of aicenmin (thermodynamic min ice fraction) + ! and a_min (dynamic min ice fraction) so that intensive + ! variables are reported only where both dynamics and + ! thermodynamics have been active + area_threshold = max(aicenmin,a_min) +#else + area_threshold = puny +#endif + if (a2D(i,j,n_aice(ns),iblk)*ravgct > area_threshold) then + ravgip(i,j) = c1/(a2D(i,j,n_aice(ns),iblk)) + else + ravgip(i,j) = c0 + endif + enddo ! i + enddo ! j + + do n = 1, num_avail_hist_fields_2D + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + + do j = jlo, jhi + do i = ilo, ihi +#ifdef AusCOM + if (n_uocn(ns)==n.or.n_vocn(ns)==n) then + if (.not. umask(i,j,iblk)) then ! mask out land points + a2D(i,j,n,iblk) = spval_dbl + else ! 
convert units + a2D(i,j,n,iblk) = avail_hist_fields(n)%cona*a2D(i,j,n,iblk) & * ravgct + avail_hist_fields(n)%conb endif else if (.not. tmask(i,j,iblk)) then ! mask out land points - a2D(i,j,n,iblk) = spval + a2D(i,j,n,iblk) = spval_dbl else ! convert units a2D(i,j,n,iblk) = avail_hist_fields(n)%cona*a2D(i,j,n,iblk) & * ravgct + avail_hist_fields(n)%conb @@ -1729,7 +2813,7 @@ subroutine accum_hist (dt) endif #else if (.not. tmask(i,j,iblk)) then ! mask out land points - a2D(i,j,n,iblk) = spval + a2D(i,j,n,iblk) = spval_dbl else ! convert units a2D(i,j,n,iblk) = avail_hist_fields(n)%cona*a2D(i,j,n,iblk) & * ravgct + avail_hist_fields(n)%conb @@ -1738,7 +2822,658 @@ subroutine accum_hist (dt) enddo ! i enddo ! j - ! back out albedo/zenith angle dependence + ! Only average for timesteps when ice present + if (index(avail_hist_fields(n)%vname,'sithick') /= 0) then + if (f_sithick(1:1) /= 'x' .and. n_sithick(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sithick(ns),iblk) = & + a2D(i,j,n_sithick(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sithick(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sifb') /= 0) then + if (f_sifb(1:1) /= 'x' .and. n_sifb(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sifb(ns),iblk) = & + a2D(i,j,n_sifb(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sifb(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siage') /= 0) then + if (f_siage(1:1) /= 'x' .and. n_siage(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siage(ns),iblk) = & + a2D(i,j,n_siage(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siage(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! 
j + endif + endif + + + if (index(avail_hist_fields(n)%vname,'sisnconc') /= 0) then + if (f_sisnconc(1:1) /= 'x' .and. n_sisnconc(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sisnconc(ns),iblk) = & + a2D(i,j,n_sisnconc(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sisnconc(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sisnthick') /= 0) then + if (f_sisnthick(1:1) /= 'x' .and. n_sisnthick(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sisnthick(ns),iblk) = & + a2D(i,j,n_sisnthick(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sisnthick(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sitemptop') /= 0) then + if (f_sitemptop(1:1) /= 'x' .and. n_sitemptop(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sitemptop(ns),iblk) = & + a2D(i,j,n_sitemptop(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sitemptop(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sitempsnic') /= 0) then + if (f_sitempsnic(1:1) /= 'x' .and. n_sitempsnic(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sitempsnic(ns),iblk) = & + a2D(i,j,n_sitempsnic(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sitempsnic(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sitempbot') /= 0) then + if (f_sitempbot(1:1) /= 'x' .and. n_sitempbot(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sitempbot(ns),iblk) = & + a2D(i,j,n_sitempbot(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sitempbot(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! 
j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siu') /= 0) then + if (f_siu(1:1) /= 'x' .and. n_siu(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siu(ns),iblk) = & + a2D(i,j,n_siu(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siu(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siv') /= 0) then + if (f_siv(1:1) /= 'x' .and. n_siv(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siv(ns),iblk) = & + a2D(i,j,n_siv(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siv(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sistrxdtop') /= 0) then + if (f_sistrxdtop(1:1) /= 'x' .and. n_sistrxdtop(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sistrxdtop(ns),iblk) = & + a2D(i,j,n_sistrxdtop(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sistrxdtop(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sistrydtop') /= 0) then + if (f_sistrydtop(1:1) /= 'x' .and. n_sistrydtop(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sistrydtop(ns),iblk) = & + a2D(i,j,n_sistrydtop(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sistrydtop(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sistrxubot') /= 0) then + if (f_sistrxubot(1:1) /= 'x' .and. n_sistrxubot(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sistrxubot(ns),iblk) = & + a2D(i,j,n_sistrxubot(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sistrxubot(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! 
j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sistryubot') /= 0) then + if (f_sistryubot(1:1) /= 'x' .and. n_sistryubot(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sistryubot(ns),iblk) = & + a2D(i,j,n_sistryubot(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sistryubot(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siforcetiltx') /= 0) then + if (f_siforcetiltx(1:1) /= 'x' .and. n_siforcetiltx(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siforcetiltx(ns),iblk) = & + a2D(i,j,n_siforcetiltx(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siforcetiltx(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siforcetilty') /= 0) then + if (f_siforcetilty(1:1) /= 'x' .and. n_siforcetilty(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siforcetilty(ns),iblk) = & + a2D(i,j,n_siforcetilty(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siforcetilty(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siforcecoriolx') /= 0) then + if (f_siforcecoriolx(1:1) /= 'x' .and. n_siforcecoriolx(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siforcecoriolx(ns),iblk) = & + a2D(i,j,n_siforcecoriolx(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siforcecoriolx(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siforcecorioly') /= 0) then + if (f_siforcecorioly(1:1) /= 'x' .and. 
n_siforcecorioly(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siforcecorioly(ns),iblk) = & + a2D(i,j,n_siforcecorioly(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siforcecorioly(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siforceintstrx') /= 0) then + if (f_siforceintstrx(1:1) /= 'x' .and. n_siforceintstrx(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siforceintstrx(ns),iblk) = & + a2D(i,j,n_siforceintstrx(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siforceintstrx(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siforceintstry') /= 0) then + if (f_siforceintstry(1:1) /= 'x' .and. n_siforceintstry(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siforceintstry(ns),iblk) = & + a2D(i,j,n_siforceintstry(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siforceintstry(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sicompstren') /= 0) then + if (f_sicompstren(1:1) /= 'x' .and. n_sicompstren(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sicompstren(ns),iblk) = & + a2D(i,j,n_sicompstren(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sicompstren(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sidivvel') /= 0) then + if (f_sidivvel(1:1) /= 'x' .and. n_sidivvel(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sidivvel(ns),iblk) = & + a2D(i,j,n_sidivvel(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sidivvel(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! 
j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sispeed') /= 0) then + if (f_sispeed(1:1) /= 'x' .and. n_sispeed(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sispeed(ns),iblk) = & + a2D(i,j,n_sispeed(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sispeed(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sialb') /= 0) then + if (f_sialb(1:1) /= 'x' .and. n_sialb(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sialb(ns),iblk) = & + a2D(i,j,n_sialb(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sialb(ns),iblk) = spval_dbl + if (albcnt(i,j,iblk,ns) <= puny) a2D(i,j,n_sialb(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sidmassgrowthwat') /= 0) then + if (f_sidmassgrowthwat(1:1) /= 'x' .and. n_sidmassgrowthwat(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sidmassgrowthwat(ns),iblk) = & + a2D(i,j,n_sidmassgrowthwat(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sidmassgrowthwat(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sidmassgrowthbot') /= 0) then + if (f_sidmassgrowthbot(1:1) /= 'x' .and. n_sidmassgrowthbot(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sidmassgrowthbot(ns),iblk) = & + a2D(i,j,n_sidmassgrowthbot(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sidmassgrowthbot(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sidmasssi') /= 0) then + if (f_sidmasssi(1:1) /= 'x' .and. 
n_sidmasssi(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sidmasssi(ns),iblk) = & + a2D(i,j,n_sidmasssi(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sidmasssi(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sidmassevapsubl') /= 0) then + if (f_sidmassevapsubl(1:1) /= 'x' .and. n_sidmassevapsubl(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sidmassevapsubl(ns),iblk) = & + a2D(i,j,n_sidmassevapsubl(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sidmassevapsubl(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sidmassmelttop') /= 0) then + if (f_sidmassmelttop(1:1) /= 'x' .and. n_sidmassmelttop(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sidmassmelttop(ns),iblk) = & + a2D(i,j,n_sidmassmelttop(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sidmassmelttop(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sidmassmeltbot') /= 0) then + if (f_sidmassmeltbot(1:1) /= 'x' .and. n_sidmassmeltbot(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sidmassmeltbot(ns),iblk) = & + a2D(i,j,n_sidmassmeltbot(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sidmassmeltbot(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sidmasslat') /= 0) then + if (f_sidmasslat(1:1) /= 'x' .and. n_sidmasslat(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sidmasslat(ns),iblk) = & + a2D(i,j,n_sidmasslat(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sidmasslat(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! 
j + endif + endif + + + + + if (index(avail_hist_fields(n)%vname,'sndmasssnf') /= 0) then + if (f_sndmasssnf(1:1) /= 'x' .and. n_sndmasssnf(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sndmasssnf(ns),iblk) = & + a2D(i,j,n_sndmasssnf(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sndmasssnf(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sndmassmelt') /= 0) then + if (f_sndmassmelt(1:1) /= 'x' .and. n_sndmassmelt(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sndmassmelt(ns),iblk) = & + a2D(i,j,n_sndmassmelt(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sndmassmelt(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siflswdtop') /= 0) then + if (f_siflswdtop(1:1) /= 'x' .and. n_siflswdtop(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siflswdtop(ns),iblk) = & + a2D(i,j,n_siflswdtop(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siflswdtop(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siflswutop') /= 0) then + if (f_siflswutop(1:1) /= 'x' .and. n_siflswutop(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siflswutop(ns),iblk) = & + a2D(i,j,n_siflswutop(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siflswutop(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if(index(avail_hist_fields(n)%vname,'siflswdbot') /= 0) then + if (f_siflswdbot(1:1) /= 'x' .and. 
n_siflswdbot(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siflswdbot(ns),iblk) = & + a2D(i,j,n_siflswdbot(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siflswdbot(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sifllwdtop') /= 0) then + if (f_sifllwdtop(1:1) /= 'x' .and. n_sifllwdtop(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sifllwdtop(ns),iblk) = & + a2D(i,j,n_sifllwdtop(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sifllwdtop(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sifllwutop') /= 0) then + if (f_sifllwutop(1:1) /= 'x' .and. n_sifllwutop(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sifllwutop(ns),iblk) = & + a2D(i,j,n_sifllwutop(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sifllwutop(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siflsenstop') /= 0) then + if (f_siflsenstop(1:1) /= 'x' .and. n_siflsenstop(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siflsenstop(ns),iblk) = & + a2D(i,j,n_siflsenstop(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siflsenstop(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siflsensupbot') /= 0) then + if (f_siflsensupbot(1:1) /= 'x' .and. n_siflsensupbot(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siflsensupbot(ns),iblk) = & + a2D(i,j,n_siflsensupbot(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siflsensupbot(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! 
j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sifllatstop') /= 0) then + if (f_sifllatstop(1:1) /= 'x' .and. n_sifllatstop(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sifllatstop(ns),iblk) = & + a2D(i,j,n_sifllatstop(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sifllatstop(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'sipr') /= 0) then + if (f_sipr(1:1) /= 'x' .and. n_sipr(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_sipr(ns),iblk) = & + a2D(i,j,n_sipr(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_sipr(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siflcondtop') /= 0) then + if (f_siflcondtop(1:1) /= 'x' .and. n_siflcondtop(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siflcondtop(ns),iblk) = & + a2D(i,j,n_siflcondtop(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siflcondtop(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siflcondbot') /= 0) then + if (f_siflcondbot(1:1) /= 'x' .and. n_siflcondbot(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siflcondbot(ns),iblk) = & + a2D(i,j,n_siflcondbot(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siflcondbot(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siflsaltbot') /= 0) then + if (f_siflsaltbot(1:1) /= 'x' .and. n_siflsaltbot(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siflsaltbot(ns),iblk) = & + a2D(i,j,n_siflsaltbot(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siflsaltbot(ns),iblk) = spval_dbl + endif + enddo ! 
i + enddo ! j + endif + endif + + if (index(avail_hist_fields(n)%vname,'siflfwbot') /= 0) then + if (f_siflfwbot(1:1) /= 'x' .and. n_siflfwbot(ns) /= 0) then + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j,iblk)) then + a2D(i,j,n_siflfwbot(ns),iblk) = & + a2D(i,j,n_siflfwbot(ns),iblk)*avgct(ns)*ravgip(i,j) + if (ravgip(i,j) == c0) a2D(i,j,n_siflfwbot(ns),iblk) = spval_dbl + endif + enddo ! i + enddo ! j + endif + endif + + + !back out albedo/zenith angle dependence if (avail_hist_fields(n)%vname(1:6) == 'albice') then do j = jlo, jhi do i = ilo, ihi @@ -1773,10 +3508,11 @@ subroutine accum_hist (dt) enddo ! i enddo ! j endif + if (avail_hist_fields(n)%vname(1:8) == 'alvdr_ai') then do j = jlo, jhi do i = ilo, ihi - if (tmask(i,j,iblk)) then + if (tmask(i,j,iblk)) then ravgctz = c0 if (albcnt(i,j,iblk,ns) > puny) & ravgctz = c1/albcnt(i,j,iblk,ns) @@ -1807,7 +3543,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! mask out land points - a3Dc(i,j,k,n,iblk) = spval + a3Dc(i,j,k,n,iblk) = spval_dbl else ! convert units a3Dc(i,j,k,n,iblk) = avail_hist_fields(nn)%cona*a3Dc(i,j,k,n,iblk) & * ravgct + avail_hist_fields(nn)%conb @@ -1825,7 +3561,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! mask out land points - a3Dz(i,j,k,n,iblk) = spval + a3Dz(i,j,k,n,iblk) = spval_dbl else ! convert units a3Dz(i,j,k,n,iblk) = avail_hist_fields(nn)%cona*a3Dz(i,j,k,n,iblk) & * ravgct + avail_hist_fields(nn)%conb @@ -1842,7 +3578,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! mask out land points - a3Db(i,j,k,n,iblk) = spval + a3Db(i,j,k,n,iblk) = spval_dbl else ! convert units a3Db(i,j,k,n,iblk) = avail_hist_fields(nn)%cona*a3Db(i,j,k,n,iblk) & * ravgct + avail_hist_fields(nn)%conb @@ -1861,7 +3597,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! 
mask out land points - a4Di(i,j,k,ic,n,iblk) = spval + a4Di(i,j,k,ic,n,iblk) = spval_dbl else ! convert units a4Di(i,j,k,ic,n,iblk) = avail_hist_fields(nn)%cona*a4Di(i,j,k,ic,n,iblk) & * ravgct + avail_hist_fields(nn)%conb @@ -1881,7 +3617,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! mask out land points - a4Ds(i,j,k,ic,n,iblk) = spval + a4Ds(i,j,k,ic,n,iblk) = spval_dbl else ! convert units a4Ds(i,j,k,ic,n,iblk) = avail_hist_fields(nn)%cona*a4Ds(i,j,k,ic,n,iblk) & * ravgct + avail_hist_fields(nn)%conb @@ -1900,7 +3636,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! mask out land points - a4Db(i,j,k,ic,n,iblk) = spval + a4Db(i,j,k,ic,n,iblk) = spval_dbl else ! convert units a4Db(i,j,k,ic,n,iblk) = avail_hist_fields(nn)%cona*a4Db(i,j,k,ic,n,iblk) & * ravgct + avail_hist_fields(nn)%conb @@ -1929,29 +3665,31 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! 
mask out land points - if (n_divu (ns) /= 0) a2D(i,j,n_divu(ns), iblk) = spval - if (n_shear (ns) /= 0) a2D(i,j,n_shear(ns), iblk) = spval - if (n_sig1 (ns) /= 0) a2D(i,j,n_sig1(ns), iblk) = spval - if (n_sig2 (ns) /= 0) a2D(i,j,n_sig2(ns), iblk) = spval - if (n_mlt_onset(ns) /= 0) a2D(i,j,n_mlt_onset(ns),iblk) = spval - if (n_frz_onset(ns) /= 0) a2D(i,j,n_frz_onset(ns),iblk) = spval - if (n_hisnap (ns) /= 0) a2D(i,j,n_hisnap(ns), iblk) = spval - if (n_aisnap (ns) /= 0) a2D(i,j,n_aisnap(ns), iblk) = spval - if (n_trsig (ns) /= 0) a2D(i,j,n_trsig(ns), iblk) = spval - if (n_iage (ns) /= 0) a2D(i,j,n_iage(ns), iblk) = spval - if (n_FY (ns) /= 0) a2D(i,j,n_FY(ns), iblk) = spval - - if (n_a11 (ns) /= 0) a2D(i,j,n_a11(ns), iblk) = spval - if (n_a12 (ns) /= 0) a2D(i,j,n_a12(ns), iblk) = spval - if (n_e11 (ns) /= 0) a2D(i,j,n_e11(ns), iblk) = spval - if (n_e12 (ns) /= 0) a2D(i,j,n_e12(ns), iblk) = spval - if (n_e22 (ns) /= 0) a2D(i,j,n_e22(ns), iblk) = spval - if (n_s11 (ns) /= 0) a2D(i,j,n_s11(ns), iblk) = spval - if (n_s12 (ns) /= 0) a2D(i,j,n_s12(ns), iblk) = spval - if (n_s22 (ns) /= 0) a2D(i,j,n_s22(ns), iblk) = spval - if (n_yieldstress11 (ns) /= 0) a2D(i,j,n_yieldstress11(ns),iblk) = spval - if (n_yieldstress12 (ns) /= 0) a2D(i,j,n_yieldstress12(ns),iblk) = spval - if (n_yieldstress22 (ns) /= 0) a2D(i,j,n_yieldstress22(ns),iblk) = spval + if (n_divu (ns) /= 0) a2D(i,j,n_divu(ns), iblk) = spval_dbl + if (n_shear (ns) /= 0) a2D(i,j,n_shear(ns), iblk) = spval_dbl + if (n_sig1 (ns) /= 0) a2D(i,j,n_sig1(ns), iblk) = spval_dbl + if (n_sig2 (ns) /= 0) a2D(i,j,n_sig2(ns), iblk) = spval_dbl + if (n_mlt_onset(ns) /= 0) a2D(i,j,n_mlt_onset(ns),iblk) = spval_dbl + if (n_frz_onset(ns) /= 0) a2D(i,j,n_frz_onset(ns),iblk) = spval_dbl + if (n_hisnap (ns) /= 0) a2D(i,j,n_hisnap(ns), iblk) = spval_dbl + if (n_aisnap (ns) /= 0) a2D(i,j,n_aisnap(ns), iblk) = spval_dbl + if (n_trsig (ns) /= 0) a2D(i,j,n_trsig(ns), iblk) = spval_dbl + if (n_iage (ns) /= 0) a2D(i,j,n_iage(ns), iblk) = 
spval_dbl + if (n_FY (ns) /= 0) a2D(i,j,n_FY(ns), iblk) = spval_dbl + if (n_Tn_top (ns) /= 0) a3Dc(i,j,:,n_Tn_top(ns)-n2D,iblk) = spval_dbl + if (n_keffn_top (ns) /= 0) a3Dc(i,j,:,n_keffn_top(ns)-n2D,iblk) = spval_dbl + + if (n_a11 (ns) /= 0) a2D(i,j,n_a11(ns), iblk) = spval_dbl + if (n_a12 (ns) /= 0) a2D(i,j,n_a12(ns), iblk) = spval_dbl + if (n_e11 (ns) /= 0) a2D(i,j,n_e11(ns), iblk) = spval_dbl + if (n_e12 (ns) /= 0) a2D(i,j,n_e12(ns), iblk) = spval_dbl + if (n_e22 (ns) /= 0) a2D(i,j,n_e22(ns), iblk) = spval_dbl + if (n_s11 (ns) /= 0) a2D(i,j,n_s11(ns), iblk) = spval_dbl + if (n_s12 (ns) /= 0) a2D(i,j,n_s12(ns), iblk) = spval_dbl + if (n_s22 (ns) /= 0) a2D(i,j,n_s22(ns), iblk) = spval_dbl + if (n_yieldstress11 (ns) /= 0) a2D(i,j,n_yieldstress11(ns),iblk) = spval_dbl + if (n_yieldstress12 (ns) /= 0) a2D(i,j,n_yieldstress12(ns),iblk) = spval_dbl + if (n_yieldstress22 (ns) /= 0) a2D(i,j,n_yieldstress22(ns),iblk) = spval_dbl else if (n_divu (ns) /= 0) a2D(i,j,n_divu(ns),iblk) = & divu (i,j,iblk)*avail_hist_fields(n_divu(ns))%cona @@ -1986,6 +3724,11 @@ subroutine accum_hist (dt) if (n_FY (ns) /= 0) a2D(i,j,n_FY(ns),iblk) = & trcr(i,j,nt_FY,iblk)*avail_hist_fields(n_FY(ns))%cona + if (n_Tn_top (ns) /= 0) a3Dc(i,j,:,n_Tn_top(ns)-n2D,iblk) = & + Tn_top(i,j,:,iblk)*avail_hist_fields(n_Tn_top(ns))%cona + if (n_keffn_top(ns) /= 0) a3Dc(i,j,:,n_keffn_top(ns)-n2D,iblk)= & + keffn_top(i,j,:,iblk)*avail_hist_fields(n_keffn_top(ns))%cona + if (n_a11 (ns) /= 0) a2D(i,j,n_a11(ns),iblk) = & a11 (i,j,iblk)*avail_hist_fields(n_a11(ns))%cona if (n_a12 (ns) /= 0) a2D(i,j,n_a12(ns),iblk) = & diff --git a/source/ice_history_shared.F90 b/source/ice_history_shared.F90 index d89f696b..1d1a2ad8 100644 --- a/source/ice_history_shared.F90 +++ b/source/ice_history_shared.F90 @@ -33,7 +33,7 @@ module ice_history_shared private public :: define_hist_field, accum_hist_field, icefields_nml, construct_filename - + logical (kind=log_kind), public :: & hist_avg ! 
if true, write averaged data instead of snapshots @@ -221,6 +221,7 @@ module ice_history_shared f_fsens = 'm', f_fsens_ai = 'm', & f_flwup = 'm', f_flwup_ai = 'm', & f_evap = 'm', f_evap_ai = 'm', & + f_evap_ice_ai = 'm', f_evap_snow_ai = 'm', & f_Tair = 'm', & f_Tref = 'm', f_Qref = 'm', & f_congel = 'm', f_frazil = 'm', & @@ -240,21 +241,63 @@ module ice_history_shared f_divu = 'm', f_shear = 'm', & f_sig1 = 'm', f_sig2 = 'm', & f_dvidtt = 'm', f_dvidtd = 'm', & + f_dvsdtt = 'm', f_dvsdtd = 'm', & f_daidtt = 'm', f_daidtd = 'm', & f_dagedtt = 'm', f_dagedtd = 'm', & f_mlt_onset = 'm', f_frz_onset = 'm', & f_iage = 'm', f_FY = 'm', & f_hisnap = 'm', f_aisnap = 'm', & + f_sithick = 'x', f_sisnthick = 'x', & + f_sisnconc = 'x', f_siage = 'x', & + f_sitemptop = 'x', f_sitempsnic = 'x', & + f_sitempbot = 'x', f_sispeed = 'x', & + f_siu = 'x', f_siv = 'x', & + f_sidmasstranx = 'x', f_sidmasstrany = 'x', & + f_sistrxdtop = 'x', f_sistrydtop = 'x', & + f_sistrxubot = 'x', f_sistryubot = 'x', & + f_siforcetiltx = 'x', f_siforcetilty = 'x', & + f_siforcecoriolx = 'x', f_siforcecorioly = 'x', & + f_siforceintstrx = 'x', f_siforceintstry = 'x', & + f_sicompstren = 'x', & + f_sialb = 'x', & + f_sihc = 'x', f_sisnhc = 'x', & + f_sidconcth = 'x', f_sidconcdyn = 'x', & + f_sifb = 'x', & + f_sidmassth = 'x', f_sidmassdyn = 'x', & + f_sidmassgrowthwat = 'x', & + f_sidmassgrowthbot = 'x', & + f_sidmasssi = 'x', & + f_sidmassevapsubl = 'x', & + f_sidmassmelttop = 'x', & + f_sidmassmeltbot = 'x', & + f_sidmasslat = 'x', & + f_sndmasssnf = 'x', & + f_sndmassmelt = 'x', & + f_sidivvel = 'x', & + f_siflswdtop = 'x', & + f_siflswutop = 'x', & + f_siflswdbot = 'x', & + f_sifllwdtop = 'x', & + f_sifllwutop = 'x', & + f_siflsenstop = 'x', & + f_siflsensupbot = 'x', & + f_sifllatstop = 'x', & + f_siflcondtop = 'x', & + f_siflcondbot = 'x', & + f_sipr = 'x', & + f_siflsaltbot = 'x', & + f_siflfwbot = 'x', & + f_sisaltmass = 'x', & f_aicen = 'x', f_vicen = 'x', & - f_vsnon = 'x', & + f_vsnon = 
'x', & f_trsig = 'm', f_icepresent = 'm', & f_fsurf_ai = 'm', f_fcondtop_ai= 'm', & f_fmeltt_ai = 'm', & f_fsurfn_ai = 'x' ,f_fcondtopn_ai='x', & f_fmelttn_ai= 'x', f_flatn_ai = 'x', & - f_fsensn_ai = 'x', & -! f_field3dz = 'x', & - f_keffn_top = 'x', & + f_fsensn_ai = 'x', & +! f_field3dz = 'x', & + f_Tn_top = 'm', f_keffn_top = 'm', & f_Tinz = 'x', f_Sinz = 'x', & f_Tsnz = 'x', & f_a11 = 'x', f_a12 = 'x', & @@ -306,7 +349,8 @@ module ice_history_shared f_fsens, f_fsens_ai , & f_flwup, f_flwup_ai , & f_evap, f_evap_ai , & - f_Tair, & + f_evap_ice_ai, f_evap_snow_ai, & + f_Tair , & f_Tref, f_Qref , & f_congel, f_frazil , & f_snoice, f_dsnow , & @@ -325,30 +369,72 @@ module ice_history_shared f_divu, f_shear , & f_sig1, f_sig2 , & f_dvidtt, f_dvidtd , & + f_dvsdtt, f_dvsdtd , & f_daidtt, f_daidtd , & f_dagedtt, f_dagedtd , & f_mlt_onset, f_frz_onset, & f_iage, f_FY , & f_hisnap, f_aisnap , & + f_sithick, f_sisnthick, & + f_sisnconc, f_siage, & + f_sifb, & + f_sitemptop, f_sitempsnic,& + f_sitempbot, f_sispeed, & + f_siu, f_siv, & + f_sidmasstranx, f_sidmasstrany, & + f_sistrxdtop, f_sistrydtop, & + f_sistrxubot, f_sistryubot, & + f_siforcetiltx, f_siforcetilty, & + f_siforcecoriolx, f_siforcecorioly, & + f_siforceintstrx, f_siforceintstry, & + f_sicompstren, & + f_sialb, & + f_sidivvel, & + f_sihc, f_sisnhc, & + f_sidconcth, f_sidconcdyn,& + f_sidmassth, f_sidmassdyn,& + f_sidmassgrowthwat, & + f_sidmassgrowthbot, & + f_sidmasssi, & + f_sidmassevapsubl, & + f_sidmassmelttop, & + f_sidmassmeltbot, & + f_sidmasslat, & + f_sndmasssnf, & + f_sndmassmelt, & + f_siflswdtop, & + f_siflswutop, & + f_siflswdbot, & + f_sifllwdtop, & + f_sifllwutop, & + f_siflsenstop, & + f_siflsensupbot, & + f_sifllatstop, & + f_siflcondtop, & + f_siflcondbot, & + f_sipr, & + f_siflsaltbot, & + f_siflfwbot, & + f_sisaltmass, & f_aicen, f_vicen , & - f_vsnon, & + f_vsnon, & f_trsig, f_icepresent,& f_fsurf_ai, f_fcondtop_ai,& f_fmeltt_ai, & f_fsurfn_ai,f_fcondtopn_ai,& f_fmelttn_ai,f_flatn_ai, 
& - f_fsensn_ai, & + f_fsensn_ai, & ! f_field3dz, & - f_keffn_top, & + f_Tn_top, f_keffn_top, & f_Tinz, f_Sinz, & - f_Tsnz, & - f_a11, f_a12, & - f_e11, f_e12, & - f_e22, & - f_s11, f_s12, & - f_s22, & - f_yieldstress11, & - f_yieldstress12, & + f_Tsnz, & + f_a11, f_a12 , & + f_e11, f_e12 , & + f_e22 , & + f_s11, f_s12 , & + f_s22 , & + f_yieldstress11 , & + f_yieldstress12 , & f_yieldstress22 !--------------------------------------------------------------- @@ -407,6 +493,7 @@ module ice_history_shared n_fsens , n_fsens_ai , & n_flwup , n_flwup_ai , & n_evap , n_evap_ai , & + n_evap_ice_ai, n_evap_snow_ai , & n_Tair , & n_Tref , n_Qref , & n_congel , n_frazil , & @@ -415,7 +502,48 @@ module ice_history_shared n_meltb , n_meltl , & n_fresh , n_fresh_ai , & n_fsalt , n_fsalt_ai , & - n_vsnon , & + n_sidivvel, & + n_sithick , n_sisnthick , & + n_sisnconc, n_siage, & + n_sifb, & + n_sitemptop , n_sitempsnic , & + n_sitempbot , n_sispeed, & + n_siu, n_siv, & + n_sidmasstranx, n_sidmasstrany, & + n_sistrxdtop, n_sistrydtop, & + n_sistrxubot, n_sistryubot, & + n_siforcetiltx, n_siforcetilty, & + n_siforcecoriolx, n_siforcecorioly, & + n_siforceintstrx, n_siforceintstry, & + n_sicompstren, & + n_sialb, & + n_sihc , n_sisnhc, & + n_sidconcth , n_sidconcdyn, & + n_sidmassth , n_sidmassdyn, & + n_sidmassgrowthwat, & + n_sidmassgrowthbot, & + n_sidmasssi, & + n_sidmassevapsubl, & + n_sidmassmelttop, & + n_sidmassmeltbot, & + n_sidmasslat, & + n_sndmasssnf, & + n_sndmassmelt, & + n_siflswdtop, & + n_siflswutop, & + n_siflswdbot, & + n_sifllwdtop, & + n_sifllwutop, & + n_siflsenstop, & + n_siflsensupbot, & + n_sifllatstop, & + n_siflcondtop, & + n_siflcondbot, & + n_sipr, & + n_siflsaltbot, & + n_siflfwbot, & + n_sisaltmass, & + n_vsnon, & n_fhocn , n_fhocn_ai , & n_fswthru , n_fswthru_ai , & n_strairx , n_strairy , & @@ -427,6 +555,7 @@ module ice_history_shared n_divu , n_shear , & n_sig1 , n_sig2 , & n_dvidtt , n_dvidtd , & + n_dvsdtt , n_dvsdtd , & n_daidtt , n_daidtd , & 
n_dagedtt , n_dagedtd , & n_mlt_onset , n_frz_onset , & @@ -442,15 +571,16 @@ module ice_history_shared n_flatn_ai , & n_fsensn_ai , & ! n_field3dz , & + n_Tn_top , & n_keffn_top , & n_Tinz , n_Sinz , & - n_Tsnz , & - n_a11 , n_a12 , & - n_e11 , n_e12 , & - n_e22 , & - n_s11 , n_s12 , & - n_s22 , & - n_yieldstress11, n_yieldstress12, & + n_Tsnz, & + n_a11 , n_a12 , & + n_e11 , n_e12 , & + n_e22 , & + n_s11 , n_s12 , & + n_s22 , & + n_yieldstress11, n_yieldstress12, & n_yieldstress22 interface accum_hist_field ! generic interface @@ -465,20 +595,25 @@ module ice_history_shared !======================================================================= - subroutine construct_filename(ncfile,suffix,ns) + subroutine construct_filename(ncfile,suffix,ns,time_string) + + ! construct filenames for history output + ! we follow cosima convention: + ! e.g. ice-1daily-mean_0001-01.nc for daily data in january 0001 use ice_calendar, only: time, sec, nyr, month, daymo, & - mday, write_ic, histfreq, histfreq_n, & + mday, write_ic, histfreq, hist_file_freq, histfreq_n, & year_init, new_year, new_month, new_day, & dt use ice_restart_shared, only: lenstr - character (len=*), intent(inout) :: ncfile + character (char_len_long), intent(inout) :: ncfile + character (char_len), intent(out), optional :: time_string + character (char_len) :: ldate_string character (len=2), intent(in) :: suffix integer (kind=int_kind), intent(in) :: ns integer (kind=int_kind) :: iyear, imonth, iday, isec - character (len=1) :: cstream iyear = nyr + year_init - 1 ! set year_init=1 in ice_in to get iyear=nyr imonth = month @@ -490,9 +625,9 @@ subroutine construct_filename(ncfile,suffix,ns) #endif ! 
construct filename if (write_ic) then - write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & - incond_file(1:lenstr(incond_file)),'.',iyear,'-', & - imonth,'-',iday,'-',isec,'.',suffix + ncfile=incond_file(1:lenstr(incond_file)) + write(ldate_string,'(i4.4,a,i2.2,a,i2.2,a,i5.5)') & + iyear,'-',imonth,'-',iday,'-',sec else if (hist_avg .and. histfreq(ns) /= '1') then @@ -510,41 +645,56 @@ subroutine construct_filename(ncfile,suffix,ns) endif endif - cstream = '' -!echmod ! this was implemented for CESM but it breaks post-processing software -!echmod ! of other groups (including RASM which uses CCSMCOUPLED) -!echmod if (ns > 1) write(cstream,'(i1.1)') ns-1 - - if (histfreq(ns) == '1') then ! instantaneous, write every dt - write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'_inst.', & - iyear,'-',imonth,'-',iday,'-',sec,'.',suffix - - elseif (hist_avg) then ! write averaged data - - if (histfreq(ns) == 'd'.or.histfreq(ns) == 'D') then ! daily - write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream), & - '.',iyear,'-',imonth,'-',iday,'.',suffix - elseif (histfreq(ns) == 'h'.or.histfreq(ns) == 'H') then ! hourly - write(ncfile,'(a,a,i2.2,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'_', & - histfreq_n(ns),'h.',iyear,'-',imonth,'-',iday,'-',sec,'.',suffix - elseif (histfreq(ns) == 'm'.or.histfreq(ns) == 'M') then ! monthly - write(ncfile,'(a,a,i4.4,a,i2.2,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'.', & - iyear,'-',imonth,'.',suffix - elseif (histfreq(ns) == 'y'.or.histfreq(ns) == 'Y') then ! yearly - write(ncfile,'(a,a,i4.4,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'.', & - iyear,'.',suffix - endif + ncfile=history_file(1:lenstr(history_file)) + + ! 
frequency of history output (typically 1) + if (histfreq_n(ns)>9) then + write(ldate_string,'(i2)') histfreq_n(ns) + else + write(ldate_string,'(i1)') histfreq_n(ns) + endif + + ncfile=ncfile(1:lenstr(ncfile))//'-'//trim(ldate_string) + + ! name file based on history frequency (e.g. "daily-mean") + if (histfreq(ns) == 'd'.or.histfreq(ns) == 'D') then ! daily + ncfile=ncfile(1:lenstr(ncfile))//'daily' + elseif (histfreq(ns) == 'h'.or.histfreq(ns) == 'H') then ! hourly + ncfile=ncfile(1:lenstr(ncfile))//'hourly' + elseif (histfreq(ns) == 'm'.or.histfreq(ns) == 'M') then ! monthly + ncfile=ncfile(1:lenstr(ncfile))//'monthly' + elseif (histfreq(ns) == 'y'.or.histfreq(ns) == 'Y') then ! yearly + ncfile=ncfile(1:lenstr(ncfile))//'yearly' + endif + + if (hist_avg) then + ncfile=ncfile(1:lenstr(ncfile))//'-mean' + else + ncfile=ncfile(1:lenstr(ncfile))//'-snap' + endif - else ! instantaneous with histfreq > dt - write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & - history_file(1:lenstr(history_file)),'_inst.', & - iyear,'-',imonth,'-',iday,'-',sec,'.',suffix + ! date in filename is based on history file output frequency (e.g. "0001-01" for one file per month) + if (hist_file_freq(ns) == 'd'.or.hist_file_freq(ns) == 'D') then ! daily + write(ldate_string,'(i4.4,a,i2.2,a,i2.2)') iyear,'-',imonth,'-',iday + elseif (hist_file_freq(ns) == 'h'.or.hist_file_freq(ns) == 'H') then ! hourly + write(ldate_string,'(i4.4,a,i2.2,a,i2.2,a,i5.5)') iyear,'-',imonth,'-',iday,'-',sec + elseif (hist_file_freq(ns) == 'm'.or.hist_file_freq(ns) == 'M') then ! monthly + write(ldate_string,'(i4.4,a,i2.2)') iyear,'-',imonth + elseif (hist_file_freq(ns) == 'y'.or.hist_file_freq(ns) == 'Y') then ! yearly + write(ldate_string,'(i4.4)') iyear + else !instantaneous + write(ldate_string,'(i4.4,a,i2.2,a,i2.2,a,i5.5)') iyear,'-',imonth,'-',iday,'-',sec endif + + endif + + ! 
join the pieces, typically iceh-1daily-mean_0001-01.nc + ncfile=ncfile(1:lenstr(ncfile))//'_'//ldate_string(1:lenstr(ldate_string))//'.'//suffix + + ! create a string of current time for debugging + if ( present(time_string) ) then + write(time_string,'(i4.4,a,i2.2,a,i2.2,a,i5.5)') & + iyear,'-',imonth,'-',iday,'-',sec endif end subroutine construct_filename @@ -630,16 +780,15 @@ subroutine define_hist_field(id, vname, vunit, vcoord, vcellmeas, & num_avail_hist_fields_3Db + & num_avail_hist_fields_4Di + & num_avail_hist_fields_4Ds + & - num_avail_hist_fields_4Db) then - + num_avail_hist_fields_4Db) & call abort_ice("num_avail_hist_fields error") - endif id(ns) = num_avail_hist_fields_tot stmp = vname - if (ns > 1) & - write(stmp,'(a,a1,a1)') trim(stmp),'_',vhistfreq(ns1:ns1) +! if (ns > 1) & +! write(stmp,'(a,a1,a1)') trim(stmp),'_',vhistfreq(ns1:ns1) +! This was disabled in Met office version by ABK. avail_hist_fields(id(ns))%vname = trim(stmp) avail_hist_fields(id(ns))%vunit = trim(vunit) @@ -783,7 +932,7 @@ subroutine accum_hist_field_3D(id, iblk, ndim, field_accum, field) do k = 1, ndim do j = jlo, jhi do i = ilo, ihi - if (tmask(i,j,iblk)) then + if (tmask(i,j,iblk) .and. abs(field_accum(i,j,k)) < 1.0e+10_dbl_kind) then field(i,j,k,idns,iblk) = field(i,j,k,idns,iblk) + field_accum(i,j,k) endif enddo @@ -849,9 +998,9 @@ subroutine accum_hist_field_4D(id, iblk, ndim3, ndim4, field_accum, field) do n = 1, ndim3 do j = jlo, jhi do i = ilo, ihi - if (tmask(i,j,iblk)) then + if (tmask(i,j,iblk) .and. abs(field_accum(i,j,n,k)) < 1.0e+10_dbl_kind) then field(i,j,n,k,idns,iblk) = field(i,j,n,k,idns,iblk) + field_accum(i,j,n,k) endif enddo enddo enddo diff --git a/source/ice_init.F90 b/source/ice_init.F90 index 951b4d7d..ec1beb95 100644 --- a/source/ice_init.F90 +++ b/source/ice_init.F90 @@ -36,15 +36,19 @@ module ice_init ! ! author Elizabeth C. 
Hunke, LANL +#if defined(AusCOM) && !defined(ACCESS) subroutine input_data(forcing_start_date, cur_exp_date, & seconds_since_start_year, & total_runtime_in_seconds, timestep, calendar_type) +#else + subroutine input_data +#endif use ice_age, only: restart_age use ice_broadcast, only: broadcast_scalar, broadcast_array !ars599: 24042015 for the namelist variables use ice_constants, only: c0, c1, puny, dragio, & - awtvdr, awtidr, awtvdf, awtidf, Tocnfrz + awtvdr, awtidr, awtvdf, awtidf, Tocnfrz, ice_ref_salinity, ksno use ice_diagnostics, only: diag_file, print_global, print_points, latpnt, lonpnt use ice_domain_size, only: max_nstrm, nilyr, nslyr, max_ntrcr, ncat, n_aero use ice_fileunits, only: nu_nml, nu_diag, nml_filename, diag_type, & @@ -53,16 +57,16 @@ subroutine input_data(forcing_start_date, cur_exp_date, & use ice_calendar, only: year_init, istep0, histfreq, histfreq_n, & dumpfreq, dumpfreq_n, diagfreq, nstreams, & npt, dt, ndtd, days_per_year, use_leap_years, & - write_ic, dump_last + write_ic, dump_last, hist_file_freq use ice_restart_shared, only: & - restart, restart_ext, input_dir, input_dir, restart_dir, restart_file, & - pointer_file, runid, runtype, use_restart_time, restart_format + restart, restart_ext, restart_dir, restart_file, pointer_file, & + runid, runtype, use_restart_time, restart_format use ice_history_shared, only: hist_avg, history_dir, history_file, & history_deflate_level, history_parallel_io, & history_chunksize_x, history_chunksize_y, & incond_dir, incond_file use ice_exit, only: abort_ice - use ice_itd, only: kitd, kcatbound + use ice_itd, only: kitd, kcatbound, aicenmin use ice_ocean, only: oceanmixed_ice, tfrz_option use ice_firstyear, only: restart_FY use ice_flux, only: update_ocn_f, l_mpond_fresh @@ -100,8 +104,8 @@ subroutine input_data(forcing_start_date, cur_exp_date, & use ice_meltpond_lvl, only: restart_pond_lvl, dpscale, frzpnd, & rfracmin, rfracmax, pndaspect, hs1 use ice_aerosol, only: restart_aero - use ice_therm_shared, 
only: ktherm, calc_Tsfc, conduct - use ice_therm_vertical, only: ustar_min, fbot_xfer_type + use ice_therm_shared, only: ktherm, calc_Tsfc, conduct, cap_fluxes + use ice_therm_vertical, only: ustar_min, fbot_xfer_type, saltmax use ice_therm_mushy, only: a_rapid_mode, Rac_rapid_mode, aspect_rapid_mode, & dSdt_slow_mode, phi_c_slow_mode, & phi_i_mushy @@ -109,11 +113,13 @@ subroutine input_data(forcing_start_date, cur_exp_date, & #ifdef CCSMCOUPLED use shr_file_mod, only: shr_file_setIO #endif +#if defined(AusCOM) && !defined(ACCESS) integer, dimension(6), optional, intent(in) :: forcing_start_date integer, dimension(6), optional, intent(in) :: cur_exp_date integer, optional, intent(in) :: seconds_since_start_year integer, optional, intent(in) :: total_runtime_in_seconds, timestep character(len=9), optional, intent(in) :: calendar_type +#endif ! local variables @@ -142,6 +148,7 @@ subroutine input_data(forcing_start_date, cur_exp_date, & diagfreq, diag_type, diag_file, & print_global, print_points, latpnt, lonpnt, & dbug, histfreq, histfreq_n, hist_avg, & + hist_file_freq, & history_dir, history_file, history_deflate_level, & history_parallel_io, history_chunksize_x, history_chunksize_y, & write_ic, incond_dir, incond_file @@ -153,11 +160,10 @@ subroutine input_data(forcing_start_date, cur_exp_date, & namelist /thermo_nml/ & kitd, ktherm, conduct, & a_rapid_mode, Rac_rapid_mode, aspect_rapid_mode, & -!ars599: 24092014 (CODE: petteri) #ifdef AusCOM - chio, & + chio, ice_ref_salinity, ksno, aicenmin, & #endif - dSdt_slow_mode, phi_c_slow_mode, phi_i_mushy + saltmax, dSdt_slow_mode, phi_c_slow_mode, phi_i_mushy namelist /dynamics_nml/ & kdyn, ndte, revised_evp, yield_curve, & @@ -194,7 +200,7 @@ subroutine input_data(forcing_start_date, cur_exp_date, & oceanmixed_ice, ocn_data_format, sss_data_type, sst_data_type, & ocn_data_dir, oceanmixed_file, restore_sst, trestore, & restore_ice, formdrag, highfreq, natmiter, & - tfrz_option + tfrz_option, cap_fluxes namelist /tracer_nml/ 
& tr_iage, restart_age, & @@ -230,6 +236,7 @@ subroutine input_data(forcing_start_date, cur_exp_date, & histfreq(4) = 'm' ! output frequency option for different streams histfreq(5) = 'y' ! output frequency option for different streams histfreq_n(:) = 1 ! output frequency + hist_file_freq(:) = 'x' ! default to histfreq (below) hist_avg = .true. ! if true, write time-averages (not snapshots) history_dir = './' ! write to executable dir for default history_file = 'iceh' ! history file name prefix @@ -255,7 +262,7 @@ subroutine input_data(forcing_start_date, cur_exp_date, & restart_ext = .false. ! if true, read/write ghost cells use_restart_time = .true. ! if true, use time info written in file pointer_file = 'ice.restart_file' - restart_format = 'pio' ! file format ('bin'=binary or 'nc'=netcdf or 'pio') + restart_format = 'nc' ! file format ('bin'=binary or 'nc'=netcdf or 'pio') ice_ic = 'default' ! latitude and sst-dependent grid_format = 'bin' ! file format ('bin'=binary or 'nc'=netcdf) grid_type = 'rectangular' ! define rectangular grid internally @@ -279,8 +286,14 @@ subroutine input_data(forcing_start_date, cur_exp_date, & shortwave = 'default' ! 'default' or 'dEdd' (delta-Eddington) albedo_type = 'default'! or 'constant' ktherm = 1 ! 0 = 0-layer, 1 = BL99, 2 = mushy thermo + saltmax = 3.2_dbl_kind ! maximum salinity at ice base (Weeks & Ackley 1986) conduct = 'bubbly' ! 'MU71' or 'bubbly' (Pringle et al 2007) calc_Tsfc = .true. ! calculate surface temperature + cap_fluxes = .false. ! Check top conductive flux before sending it to the + ! thermo solver, and send some of the energy straight + ! to the bottom of the ice if it's likely to crash + ! the solver. + ! Only relevant if calc_Tsfc = .false. update_ocn_f = .false. ! include fresh water and salt fluxes for frazil ustar_min = 0.005 ! minimum friction velocity for ocean heat flux (m/s) l_mpond_fresh = .false. ! 
logical switch for including meltpond freshwater @@ -330,6 +343,12 @@ subroutine input_data(forcing_start_date, cur_exp_date, & ! used as Tsfcn for open water chio = 0.006_dbl_kind ! unitless param for basal heat flx ala McPhee and Maykut iceruf = 0.0005_dbl_kind ! ice surface roughness (m) + ice_ref_salinity = 5._dbl_kind ! (ppt) + ksno = 0.30_dbl_kind ! thermal conductivity of snow (W/m/deg) + ! (use 0.2 for cm2) + aicenmin = 99 ! maximum ice concentration to zap + ! we set a sensible default after namelist read + #endif atmbndy = 'default' ! or 'constant' @@ -401,7 +420,7 @@ subroutine input_data(forcing_start_date, cur_exp_date, & call get_fileunit(nu_nml) if (my_task == master_task) then - open (nu_nml, file=nml_filename, status='old',iostat=nml_error) + open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then nml_error = -1 else @@ -442,8 +461,8 @@ subroutine input_data(forcing_start_date, cur_exp_date, & end do if (nml_error == 0) close(nu_nml) +#if defined(AusCOM) && !defined(ACCESS) ! Overwrite some run details passed in as arguments - if (use_restart_time) then ! the initial year is set by the forcing start, the current ! experiment date is calculated using this and values in the @@ -476,13 +495,23 @@ subroutine input_data(forcing_start_date, cur_exp_date, & use_leap_years = .true. endif endif - endif +#endif + endif ! my_task == master_task call broadcast_scalar(nml_error, master_task) if (nml_error /= 0) then call abort_ice('ice: error reading namelist') endif call release_fileunit(nu_nml) +#ifdef ACCESS + if (ktherm == 1 .and. aicenmin == 99) then + !Set a higher value + ! of aicenmin if we're using multilayers with UM-style coupling for stability. + aicenmin = 0.00001_dbl_kind + endif +#endif + if (aicenmin == 99) aicenmin = puny + !----------------------------------------------------------------- ! 
set up diagnostics output and resolve conflicts !----------------------------------------------------------------- @@ -682,6 +711,25 @@ subroutine input_data(forcing_start_date, cur_exp_date, & calc_Tsfc = .true. endif + if (cap_fluxes .and. calc_Tsfc) then + if (my_task == master_task) then + write (nu_diag,*) 'WARNING: cap_fluxes = T and calc_Tsfc = T' + write (nu_diag,*) 'WARNING: cap_fluxes only valid when using UM-style coupling, i.e. calc_Tsfc=F' + write (nu_diag,*) 'WARNING: Setting cap_fluxes = F' + endif + cap_fluxes = .false. + endif + +!20250214: add ktherm == 0 case: + if (ktherm == 0 .and. trim(tfrz_option) /= 'linear_salt') then + if (my_task == master_task) then + write (nu_diag,*) & + 'WARNING: ktherm = 0 and tfrz_option = ',trim(tfrz_option) + write (nu_diag,*) & + 'WARNING: For consistency, set tfrz_option = linear_salt' + endif + endif + if (ktherm == 1 .and. trim(tfrz_option) /= 'linear_salt') then if (my_task == master_task) then write (nu_diag,*) & @@ -750,6 +798,21 @@ subroutine input_data(forcing_start_date, cur_exp_date, & fbot_xfer_type = 'constant' endif +#ifdef ACCESS + if (trim(runtype) == 'continue' .and. .not. use_restart_time) then + if (my_task == master_task) then + write (nu_diag,*) 'ERROR: ACCESS ESM continue runs require use_restart_time=.true.' + call abort_ice('ice_init: "use_restart_time" must be .true. when "runtype = continue"') + endif + endif +#endif + + !if hist_file_freq not set, default to histfreq + if (my_task == master_task) then + do n = 1, max_nstrm + if (hist_file_freq(n)=='x' .or. 
hist_file_freq(n) == 'X') hist_file_freq(n) = histfreq(n) + enddo + endif call broadcast_scalar(days_per_year, master_task) call broadcast_scalar(use_leap_years, master_task) @@ -765,7 +828,10 @@ subroutine input_data(forcing_start_date, cur_exp_date, & call broadcast_scalar(diag_file, master_task) do n = 1, max_nstrm call broadcast_scalar(histfreq(n), master_task) - enddo + enddo + do n = 1, max_nstrm + call broadcast_scalar(hist_file_freq(n), master_task) + enddo call broadcast_array(histfreq_n, master_task) call broadcast_scalar(hist_avg, master_task) call broadcast_scalar(history_dir, master_task) @@ -782,7 +848,6 @@ subroutine input_data(forcing_start_date, cur_exp_date, & call broadcast_scalar(dump_last, master_task) call broadcast_scalar(restart_file, master_task) call broadcast_scalar(restart, master_task) - call broadcast_scalar(input_dir, master_task) call broadcast_scalar(restart_dir, master_task) call broadcast_scalar(restart_ext, master_task) call broadcast_scalar(use_restart_time, master_task) @@ -809,6 +874,7 @@ subroutine input_data(forcing_start_date, cur_exp_date, & call broadcast_scalar(advection, master_task) call broadcast_scalar(shortwave, master_task) call broadcast_scalar(albedo_type, master_task) + call broadcast_scalar(saltmax, master_task) call broadcast_scalar(ktherm, master_task) call broadcast_scalar(conduct, master_task) call broadcast_scalar(R_ice, master_task) @@ -844,6 +910,9 @@ subroutine input_data(forcing_start_date, cur_exp_date, & call broadcast_scalar(sinw, master_task) call broadcast_scalar(dragio, master_task) call broadcast_scalar(chio, master_task) + call broadcast_scalar(ice_ref_salinity, master_task) + call broadcast_scalar(ksno, master_task) + call broadcast_scalar(aicenmin, master_task) call broadcast_scalar(Tocnfrz, master_task) call broadcast_scalar(iceruf, master_task) #endif @@ -855,6 +924,7 @@ subroutine input_data(forcing_start_date, cur_exp_date, & call broadcast_scalar(atm_data_dir, master_task) call 
broadcast_scalar(calc_strair, master_task) call broadcast_scalar(calc_Tsfc, master_task) + call broadcast_scalar(cap_fluxes, master_task) call broadcast_scalar(formdrag, master_task) call broadcast_scalar(highfreq, master_task) call broadcast_scalar(natmiter, master_task) @@ -934,6 +1004,7 @@ subroutine input_data(forcing_start_date, cur_exp_date, & write(nu_diag,1010) ' print_points = ', print_points write(nu_diag,1010) ' bfbflag = ', bfbflag write(nu_diag,1050) ' histfreq = ', histfreq(:) + write(nu_diag,1050) ' hist_file_freq = ', hist_file_freq(:) write(nu_diag,1040) ' histfreq_n = ', histfreq_n(:) write(nu_diag,1010) ' hist_avg = ', hist_avg if (.not. hist_avg) write (nu_diag,*) 'History data will be snapshots' @@ -941,8 +1012,10 @@ subroutine input_data(forcing_start_date, cur_exp_date, & trim(history_dir) write(nu_diag,*) ' history_file = ', & trim(history_file) - write(nu_diag,*) ' history_deflate_level = ', & + write(nu_diag,1020) ' history_deflate_level = ', & history_deflate_level + write(nu_diag,1010) ' history_parallel_io = ', & + history_parallel_io if (write_ic) then write (nu_diag,*) 'Initial condition will be written in ', & trim(incond_dir) @@ -952,8 +1025,6 @@ subroutine input_data(forcing_start_date, cur_exp_date, & write(nu_diag,1020) ' dumpfreq_n = ', dumpfreq_n write(nu_diag,1010) ' dump_last = ', dump_last write(nu_diag,1010) ' restart = ', restart - write(nu_diag,*) ' input_dir = ', & - trim(input_dir) write(nu_diag,*) ' restart_dir = ', & trim(restart_dir) write(nu_diag,*) ' restart_ext = ', restart_ext @@ -1044,6 +1115,7 @@ subroutine input_data(forcing_start_date, cur_exp_date, & write(nu_diag,1000) ' pndaspect = ', pndaspect write(nu_diag,1020) ' ktherm = ', ktherm + write(nu_diag,1005) ' saltmax = ', saltmax if (ktherm == 1) & write(nu_diag,1030) ' conduct = ', conduct if (ktherm == 2) then @@ -1062,6 +1134,7 @@ subroutine input_data(forcing_start_date, cur_exp_date, & write(nu_diag,1020) ' natmiter = ', natmiter write(nu_diag,1010) ' 
calc_strair = ', calc_strair write(nu_diag,1010) ' calc_Tsfc = ', calc_Tsfc + write(nu_diag,1010) ' cap_fluxes = ', cap_fluxes write(nu_diag,1020) ' fyear_init = ', & fyear_init @@ -1085,6 +1158,9 @@ subroutine input_data(forcing_start_date, cur_exp_date, & write(nu_diag,1005) ' sinw = ', sinw write(nu_diag,1005) ' dragio = ', dragio write(nu_diag,1005) ' chio = ', chio + write(nu_diag,1005) ' ice_ref_salinity = ', ice_ref_salinity + write(nu_diag,1005) ' ksno = ', ksno + write(nu_diag,1006) ' aicenmin = ', aicenmin #endif write(nu_diag,1005) ' ustar_min = ', ustar_min write(nu_diag, *) ' fbot_xfer_type = ', & @@ -1223,6 +1299,7 @@ subroutine input_data(forcing_start_date, cur_exp_date, & 1000 format (a30,2x,f9.2) ! a30 to align formatted, unformatted statements 1005 format (a30,2x,f9.6) ! float + 1006 format (a30,2x,f16.12)! double 1010 format (a30,2x,l6) ! logical 1020 format (a30,2x,i6) ! integer 1030 format (a30, a8) ! character diff --git a/source/ice_itd.F90 b/source/ice_itd.F90 index c0da0c7e..3864b8ca 100644 --- a/source/ice_itd.F90 +++ b/source/ice_itd.F90 @@ -47,7 +47,14 @@ module ice_itd ! 2 = WMO standard real (kind=dbl_kind), public :: & - hi_min ! minimum ice thickness allowed (m) + hi_min , & ! minimum ice thickness allowed (m) + hs_min ! minimum snow thickness allowed (m) + + real (kind=dbl_kind), public :: & + aicenmin ! AEW: This variable will replace puny as the min ice conc to + ! allow when zap_small_areas is called. Set equal to puny + ! if zerolayers or standard coupling, set equal to 1e-5 + ! if multilayers AND UM-style coupling real (kind=dbl_kind), public :: & hin_max(0:ncat) ! category limits (m) @@ -80,7 +87,15 @@ module ice_itd ! authors: William H. Lipscomb and Elizabeth C. Hunke, LANL ! C. M. Bitz, UW - subroutine init_itd + subroutine init_itd (calc_Tsfc, heat_capacity) ! Alex West: added these two arguments + ! as per Alison's changes. + ! Needed to control setting of + ! aicenmin for use in + ! 
zap_small_areas + + logical (kind=log_kind), intent(in) :: & + calc_Tsfc, & ! If T, calculate surface temp + heat_capacity ! If T, ice had nonzero heat capacity integer (kind=int_kind) :: & n ! thickness category index @@ -151,9 +166,7 @@ subroutine init_itd hin_max(0) = c0 ! minimum ice thickness, m else ! delta function itd category limits -#ifndef CCSMCOUPLED hi_min = p1 ! minimum ice thickness allowed (m) for thermo -#endif cc1 = max(1.1_dbl_kind/rncat,c1*hi_min) cc2 = c25*cc1 cc3 = 2.25_dbl_kind @@ -216,6 +229,18 @@ subroutine init_itd endif ! kcatbound + ! AEW: (based on Alison McLaren's vn4 modifications) Set a higher value + ! of aicenmin in ice_init if we're using multilayers with UM-style coupling. + ! Also allow higher values of hi_min, hs_min to be set (this is a + ! bit ad-hoc). + !----------------------------------------------------------------- + + if (heat_capacity) then + ! Set higher values to help with stability + hi_min = p2 ! 0.2m + hs_min = p1 ! 0.1m + endif + if (my_task == master_task) then write (nu_diag,*) ' ' write (nu_diag,*) 'hin_max(n-1) < Cat n < hin_max(n)' @@ -447,7 +472,7 @@ subroutine aggregate (nx_block, ny_block, & call compute_tracers (nx_block, ny_block, & icells, indxi, indxj, & ntrcr, trcr_depend, & - atrcr, aice(:,:), & + atrcr(:,:), aice(:,:), & vice (:,:), vsno(:,:), & trcr(:,:,:)) @@ -1817,7 +1842,7 @@ subroutine zap_small_areas (nx_block, ny_block, & trcrn ! ice tracers real (kind=dbl_kind), dimension (nx_block,ny_block), & - intent(out) :: & + intent(inout) :: & dfpond , & ! zapped pond water flux (kg/m^2/s) dfresh , & ! zapped fresh water flux (kg/m^2/s) dfsalt , & ! zapped salt flux (kg/m^2/s) @@ -1887,7 +1912,7 @@ subroutine zap_small_areas (nx_block, ny_block, & jstop = j return elseif (abs(aicen(i,j,n)) /= c0 .and. 
& - abs(aicen(i,j,n)) <= puny) then + abs(aicen(i,j,n)) <= aicenmin) then icells = icells + 1 indxi(icells) = i indxj(icells) = j diff --git a/source/ice_read_write.F90 b/source/ice_read_write.F90 index 021b075c..768aabc2 100644 --- a/source/ice_read_write.F90 +++ b/source/ice_read_write.F90 @@ -1222,7 +1222,7 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & nrec ! record number #ifndef AusCOM - character (char_len) :: & + character (len=*), intent(in) :: & #else character*(*), intent(in) :: & #endif @@ -1929,7 +1929,6 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) #endif end subroutine ice_read_global_nc -#ifdef AusCOM !======================================================================= !BOP ! @@ -2035,8 +2034,6 @@ subroutine ice_read_global_nc_3D (fid, nrec, varname, work_g, diag) end subroutine ice_read_global_nc_3D -#endif - !======================================================================= ! Closes a netCDF file diff --git a/source/ice_restart_driver.F90 b/source/ice_restart_driver.F90 index 2d11e371..10ccc239 100644 --- a/source/ice_restart_driver.F90 +++ b/source/ice_restart_driver.F90 @@ -20,9 +20,12 @@ module ice_restart_driver use ice_kinds_mod use ice_restart_shared, only: & - restart, restart_ext, input_dir, restart_dir, restart_file, pointer_file, & + restart, restart_ext, restart_dir, restart_file, pointer_file, & runid, runtype, use_restart_time, restart_format, lenstr use ice_restart +#ifdef ACCESS + use cpl_parameters, only: runtime0 +#endif implicit none private @@ -359,8 +362,7 @@ subroutine restartfile (ice_ic) call read_restart_field(nu_restart,0,stress12_1,'ruf8', & 'stress12_1',1,diag,field_loc_center,field_type_scalar) ! stress12_1 call read_restart_field(nu_restart,0,stress12_3,'ruf8', & - 'stress12_3',1,diag,field_loc_center,field_type_scalar) ! stress12_1 - + 'stress12_3',1,diag,field_loc_center,field_type_scalar) ! 
stress12_3 call read_restart_field(nu_restart,0,stress12_2,'ruf8', & 'stress12_2',1,diag,field_loc_center,field_type_scalar) ! stress12_2 call read_restart_field(nu_restart,0,stress12_4,'ruf8', & @@ -594,8 +596,11 @@ subroutine restartfile_v4 (ice_ic) read (nu_restart) istep0,time,time_forc write(nu_diag,*) 'Restart read at istep=',istep0,time,time_forc endif - call calendar(time) - +#if !defined(AusCOM) || defined(ACCESS) + call calendar(time) +#else + call calendar(time-runtime0) +#endif call broadcast_scalar(istep0,master_task) istep1 = istep0 call broadcast_scalar(time,master_task) diff --git a/source/ice_shortwave.F90 b/source/ice_shortwave.F90 index 6972f067..f80a42d7 100644 --- a/source/ice_shortwave.F90 +++ b/source/ice_shortwave.F90 @@ -176,6 +176,7 @@ subroutine init_shortwave use ice_grid, only: tmask, tlat, tlon use ice_meltpond_lvl, only: dhsn, ffracn use ice_restart_shared, only: restart, runtype + use ice_therm_shared, only: calc_Tsfc integer (kind=int_kind) :: & icells ! number of cells with aicen > puny @@ -231,6 +232,17 @@ subroutine init_shortwave enddo ! iblk !$OMP END PARALLEL DO + ! Alex West, March 2017: Because we do not model SW radiation penetrating + ! into ice in the coupled model yet, further SW calculations in the + ! initialisation after setting everything to 0 are unnecessary, and + ! may be introducing spurious values. Hence everything from here + ! onwards will be enclosed in a 'if calc_Tsfc' statement (which only + ! evaluates to .true. in the forced model). + ! + ! In the case that penetrating SW radiation is implemented in the coupled + ! model, this control structure may need to be removed. + + if (calc_Tsfc) then if (trim(shortwave) == 'dEdd') then ! delta Eddington #ifndef CCSMCOUPLED @@ -416,6 +428,8 @@ subroutine init_shortwave enddo ! nblocks !$OMP END PARALLEL DO + endif ! 
calc_Tsfc + end subroutine init_shortwave !======================================================================= @@ -939,7 +953,7 @@ subroutine constant_albedos (nx_block, ny_block, & do i = 1, nx_block !ars599: 21032014 (2D_code) #ifndef AusCOM - alvdrn(i,j) = albocn + alvdrn(i,j) = albocn alidrn(i,j) = albocn alvdfn(i,j) = albocn alidfn(i,j) = albocn @@ -1268,6 +1282,7 @@ subroutine run_dEdd(ilo,ihi,jlo,jhi, & initonly ) use ice_calendar, only: dt + use ice_itd, only: hs_min use ice_meltpond_cesm, only: hs0 use ice_meltpond_topo, only: hp1 use ice_meltpond_lvl, only: hs1, pndaspect @@ -1430,6 +1445,7 @@ subroutine run_dEdd(ilo,ihi,jlo,jhi, & ! set pond properties if (tr_pond_cesm) then + apeffn(:,:,n) = c0 ! for history do ij = 1, icells i = indxi(ij) j = indxj(ij) @@ -1448,6 +1464,7 @@ subroutine run_dEdd(ilo,ihi,jlo,jhi, & enddo elseif (tr_pond_lvl) then + apeffn(:,:,n) = c0 ! for history do ij = 1, icells i = indxi(ij) j = indxj(ij) @@ -1514,6 +1531,7 @@ subroutine run_dEdd(ilo,ihi,jlo,jhi, & enddo ! ij elseif (tr_pond_topo) then + apeffn(:,:,n) = c0 ! for history do ij = 1, icells i = indxi(ij) j = indxj(ij) @@ -3786,6 +3804,7 @@ subroutine shortwave_dEdd_set_snow(nx_block, ny_block, & Tsfc, fs, hs, & rhosnw, rsnw) + use ice_itd, only: hs_min use ice_meltpond_cesm, only: hs0 integer (kind=int_kind), & diff --git a/source/ice_step_mod.F90 b/source/ice_step_mod.F90 index 8ff7bcbc..fa7e43ec 100644 --- a/source/ice_step_mod.F90 +++ b/source/ice_step_mod.F90 @@ -43,6 +43,8 @@ subroutine prep_radiation (dt, iblk) Sswabsn, Iswabsn use ice_state, only: aice, aicen use ice_timers, only: ice_timer_start, ice_timer_stop, timer_sw + use ice_grid, only: tmask + use ice_calendar, only: istep1 real (kind=dbl_kind), intent(in) :: & dt ! 
time step @@ -164,18 +166,25 @@ subroutine step_therm1 (dt, iblk) use ice_blocks, only: block, get_block, nx_block, ny_block use ice_calendar, only: yday, istep1 use ice_communicate, only: my_task +#ifdef ACCESS + use ice_coupling, only: set_sfcflux +#else + use ice_flux, only: set_sfcflux +#endif use ice_domain, only: blocks_ice use ice_domain_size, only: ncat, nilyr use ice_exit, only: abort_ice use ice_fileunits, only: nu_diag use ice_flux, only: frzmlt, sst, Tf, strocnxT, strocnyT, rside, & meltsn, melttn, meltbn, congeln, snoicen, dsnown, uatm, vatm, & - wind, rhoa, potT, Qa, zlvl, strax, stray, flatn, fsensn, fsurfn, fcondtopn, & + wind, rhoa, potT, Qa, zlvl, strax, stray, flatn, fsensn, fsurfn, & + fcondtopn, fcondbotn, fcondbot, & + ice_freeboardn, ice_freeboard, snowfracn, & flw, fsnow, fpond, sss, mlt_onset, frz_onset, faero_atm, faero_ocn, & frain, Tair, coszen, strairxT, strairyT, fsurf, fcondtop, fsens, & flat, fswabs, flwout, evap, Tref, Qref, Uref, fresh, fsalt, fhocn, & fswthru, meltt, melts, meltb, meltl, congel, snoice, & - set_sfcflux, merge_fluxes + merge_fluxes, evap_ice, evap_snow use ice_firstyear, only: update_FYarea use ice_grid, only: lmask_n, lmask_s, TLAT, TLON use ice_itd, only: hi_min @@ -218,6 +227,8 @@ subroutine step_therm1 (dt, iblk) fswabsn , & ! shortwave absorbed by ice (W/m^2) flwoutn , & ! upward LW at surface (W/m^2) evapn , & ! flux of vapor, atmos to ice (kg m-2 s-1) + evapn_ice , & ! flux of vapor, atmos to ice (kg m-2 s-1) + evapn_snow , & ! flux of vapor, atmos to snow (kg m-2 s-1) freshn , & ! flux of water, ice to ocean (kg/m^2/s) fsaltn , & ! flux of salt, ice to ocean (kg/m^2/s) fhocnn , & ! fbot corrected for leftover energy (W/m^2) @@ -445,8 +456,8 @@ subroutine step_therm1 (dt, iblk) endif ! calc_Tsfc or calc_strair if (.not.(calc_strair)) then -!#ifndef CICE_IN_NEMO -#ifndef AusCOM + +#if !defined(AusCOM) || defined(ACCESS) ! Here we follow the CICE_in_NEMO treatment for wind stress: ! 
Do not do the following here as wind stress is supplied on T grid ! (but u grid in NEMO) grid and multipied by ice concentration and @@ -526,10 +537,13 @@ subroutine step_therm1 (dt, iblk) Sswabsn(:,:,:,n,iblk), & Iswabsn(:,:,:,n,iblk), & fsurfn(:,:,n,iblk), & - fcondtopn(:,:,n,iblk), & + fcondtopn(:,:,n,iblk), fcondbotn(:,:,n,iblk), & fsensn(:,:,n,iblk), flatn(:,:,n,iblk), & flwoutn, & - evapn, freshn, & + ice_freeboardn, & + evapn, & + evapn_ice, evapn_snow, & + freshn, & fsaltn, fhocnn, & melttn(:,:,n,iblk), meltsn(:,:,n,iblk), & meltbn(:,:,n,iblk), & @@ -683,18 +697,24 @@ subroutine step_therm1 (dt, iblk) strairxn, strairyn, & Cdn_atm_ratio_n, & fsurfn(:,:,n,iblk), fcondtopn(:,:,n,iblk),& + fcondbotn(:,:,n,iblk), & fsensn(:,:,n,iblk), flatn(:,:,n,iblk), & fswabsn, flwoutn, & evapn, & + evapn_ice, evapn_snow, & + ice_freeboardn(:,:,n,iblk), & Trefn, Qrefn, & freshn, fsaltn, & fhocnn, fswthrun(:,:,n,iblk), & strairxT(:,:,iblk), strairyT (:,:,iblk), & Cdn_atm_ratio(:,:,iblk), & fsurf (:,:,iblk), fcondtop (:,:,iblk), & + fcondbot(:,:,iblk), & fsens (:,:,iblk), flat (:,:,iblk), & fswabs (:,:,iblk), flwout (:,:,iblk), & evap (:,:,iblk), & + evap_ice(:,:,iblk), evap_snow (:,:,iblk), & + ice_freeboard(:,:,iblk), & Tref (:,:,iblk), Qref (:,:,iblk), & fresh (:,:,iblk), fsalt (:,:,iblk), & fhocn (:,:,iblk), fswthru (:,:,iblk), & @@ -708,6 +728,9 @@ subroutine step_therm1 (dt, iblk) enddo ! ncat + Ti_bot(:,:,iblk) = Tbot(:,:) * aice(:,:,iblk) + Tsnic(:,:,iblk) = c0 + !----------------------------------------------------------------- ! 
Calculate ponds from the topographic scheme !----------------------------------------------------------------- @@ -715,7 +738,7 @@ subroutine step_therm1 (dt, iblk) if (tr_pond_topo) then call compute_ponds_topo(nx_block, ny_block, & ilo, ihi, jlo, jhi, & - dt, & + dt,snowfracn(:,:,:,iblk), & aice (:,:,iblk), aicen(:,:,:,iblk), & vice (:,:,iblk), vicen(:,:,:,iblk), & vsno (:,:,iblk), vsnon(:,:,:,iblk), & @@ -1004,7 +1027,7 @@ subroutine post_thermo (dt) use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks - use ice_flux, only: daidtt, dvidtt, dagedtt + use ice_flux, only: daidtt, dvidtt, dvsdtt, dagedtt use ice_grid, only: tmask use ice_itd, only: aggregate use ice_state, only: aicen, trcrn, vicen, vsnon, ntrcr, & @@ -1053,6 +1076,7 @@ subroutine post_thermo (dt) do i = 1, nx_block daidtt(i,j,iblk) = (aice(i,j,iblk) - daidtt(i,j,iblk)) / dt dvidtt(i,j,iblk) = (vice(i,j,iblk) - dvidtt(i,j,iblk)) / dt + dvsdtt(i,j,iblk) = (vsno(i,j,iblk) - dvsdtt(i,j,iblk)) / dt if (tr_iage) then if (trcr(i,j,nt_iage,iblk) > c0) & dagedtt(i,j,iblk)= (trcr(i,j,nt_iage,iblk)-dagedtt(i,j,iblk)-dt)/dt @@ -1085,7 +1109,7 @@ subroutine step_dynamics (dt, ndtd) use ice_dyn_evp, only: evp use ice_dyn_eap, only: eap use ice_dyn_shared, only: kdyn - use ice_flux, only: daidtd, dvidtd, init_history_dyn, dagedtd + use ice_flux, only: daidtd, dvidtd, dvsdtd, init_history_dyn, dagedtd use ice_grid, only: tmask use ice_itd, only: aggregate use ice_state, only: nt_qsno, trcrn, vsnon, aicen, vicen, ntrcr, & @@ -1183,6 +1207,7 @@ subroutine step_dynamics (dt, ndtd) do j = jlo,jhi do i = ilo,ihi dvidtd(i,j,iblk) = (vice(i,j,iblk) - dvidtd(i,j,iblk)) /dt + dvsdtd(i,j,iblk) = (vsno(i,j,iblk) - dvsdtd(i,j,iblk)) /dt daidtd(i,j,iblk) = (aice(i,j,iblk) - daidtd(i,j,iblk)) /dt if (tr_iage) & dagedtd(i,j,iblk)= (trcr(i,j,nt_iage,iblk)-dagedtd(i,j,iblk))/dt @@ -1374,13 +1399,11 @@ subroutine step_radiation (dt, iblk) Sswabsn, Iswabsn, shortwave, & albicen, albsnon, albpndn, & alvdrn, alidrn, 
alvdfn, alidfn, & - run_dedd, shortwave_ccsm3, apeffn, & - snowfracn - + run_dedd, shortwave_ccsm3, apeffn +!ars599: 27032014: 22042015 need ifdef #ifdef AusCOM use ice_shortwave, only : ocn_albedo2D #endif - use ice_state, only: aicen, vicen, vsnon, trcrn, nt_Tsfc, & nt_apnd, nt_ipnd, nt_hpnd, tr_pond_topo use ice_timers, only: ice_timer_start, ice_timer_stop, timer_sw diff --git a/source/ice_therm_bl99.F90 b/source/ice_therm_bl99.F90 index 19132761..4319477f 100644 --- a/source/ice_therm_bl99.F90 +++ b/source/ice_therm_bl99.F90 @@ -16,7 +16,9 @@ module ice_therm_bl99 use ice_domain_size, only: nilyr, nslyr, max_ntrcr, n_aero, ncat use ice_constants use ice_fileunits, only: nu_diag - use ice_therm_shared, only: conduct, calc_Tsfc, ferrmax, l_brine, hfrazilmin + use ice_therm_shared, only: calculate_ki_from_Tin, & + conduct, calc_Tsfc, ferrmax, l_brine, hfrazilmin + implicit none save @@ -66,8 +68,10 @@ subroutine temperature_changes (nx_block, ny_block, & flwoutn, fsurfn, & fcondtopn,fcondbot, & einit, l_stop, & - istop, jstop) + istop, jstop, & + enum) + use ice_itd, only: hs_min use ice_therm_shared, only: surface_heat_flux, dsurface_heat_flux_dTsf integer (kind=int_kind), intent(in) :: & @@ -145,7 +149,11 @@ subroutine temperature_changes (nx_block, ny_block, & ! local variables integer (kind=int_kind), parameter :: & - nitermax = 500, & ! max number of iterations in temperature solver +#ifdef ACCESS + nitermax = 100, & ! max number of iterations in temperature solver +#else + nitermax = 500, & +#endif nmat = nslyr + nilyr + 1 ! matrix dimension real (kind=dbl_kind), parameter :: & @@ -179,6 +187,12 @@ subroutine temperature_changes (nx_block, ny_block, & avg_Tsi , & ! = 1. if new snow/ice temps avg'd w/starting temps enew ! new energy of melting after temp change (J m-2) + real (kind=dbl_kind), dimension (icells), intent(out) :: & + enum ! Energy that, for numerical reasons, we don't want to use. 
+ + real (kind=dbl_kind), dimension (icells) :: & + enew_icells ! debugging! + real (kind=dbl_kind), dimension (icells) :: & dTsf_prev , & ! dTsf from previous iteration dTi1_prev , & ! dTi1 from previous iteration @@ -197,6 +211,7 @@ subroutine temperature_changes (nx_block, ny_block, & Tmlts ! melting temp, -depressT * salinity real (kind=dbl_kind), dimension (icells,nslyr) :: & + dqmat_sn , & ! snow enthalpy difference before & after limiting Tsn_init , & ! zTsn at beginning of time step Tsn_start , & ! zTsn at start of iteration etas ! dt / (rho * cp * h) for snow layers @@ -230,6 +245,19 @@ subroutine temperature_changes (nx_block, ny_block, & logical (kind=log_kind) , dimension (icells,nilyr) :: & reduce_kh ! reduce conductivity when T exceeds Tmlt + real (kind=dbl_kind), dimension (nx_block,ny_block) :: & + fcondtopn_reduction, & ! desired decrease in cond. forcing if + ! top layer temp is being forced above + ! melting. Extra energy goes into ocean + fcondtopn_force ! Resulting value of fcondtopn passed to + ! tridiag matrix solver + + logical (kind=log_kind) , dimension (icells) :: & + Top_T_was_reset_last_time ! keep track of whether top layer temp was reset + ! in the previous iteration. For use in limiting + + real (kind=dbl_kind), dimension(icells) :: enew_save + !----------------------------------------------------------------- ! Initialize !----------------------------------------------------------------- @@ -237,6 +265,13 @@ subroutine temperature_changes (nx_block, ny_block, & all_converged = .false. do ij = 1, icells + ! Set variables involved in tracking limiting of top layer temp + i = indxi(ij) + j = indxj(ij) + fcondtopn_reduction(i,j) = c0 + fcondtopn_force(i,j) = fcondtopn(i,j) + enum(ij) = c0 + Top_T_was_reset_last_time(ij) = .false. converged (ij) = .false. l_snow (ij) = .false. @@ -251,6 +286,7 @@ subroutine temperature_changes (nx_block, ny_block, & dt_rhoi_hlyr(ij) = dt / (rhoi*hilyr(ij)) ! 
hilyr > 0 if (hslyr(ij) > hs_min/real(nslyr,kind=dbl_kind)) & l_snow(ij) = .true. + enew_icells(ij) = c0 enddo ! ij do k = 1, nslyr @@ -297,43 +333,44 @@ subroutine temperature_changes (nx_block, ny_block, & ! NOTE: This option is not available if the atmosphere model ! has already computed fsurf. (Unless we adjust fsurf here) !----------------------------------------------------------------- -!mclaren: Should there be an if calc_Tsfc statement here then?? + if (calc_Tsfc) then #ifdef CCSMCOUPLED - frac = c1 - dTemp = p01 -#else - frac = 0.9_dbl_kind - dTemp = 0.02_dbl_kind -#endif - do k = 1, nilyr - do ij = 1, icells - i = indxi(ij) - j = indxj(ij) - - Iswabs_tmp = c0 ! all Iswabs is moved into fswsfc + frac = c1 + dTemp = p01 + #else + frac = 0.9_dbl_kind + dTemp = 0.02_dbl_kind + #endif + do k = 1, nilyr + do ij = 1, icells + i = indxi(ij) + j = indxj(ij) - if (Tin_init(ij,k) <= Tmlts(ij,k) - dTemp) then - if (l_brine) then - ci = cp_ice - Lfresh * Tmlts(ij,k) / (Tin_init(ij,k)**2) - Iswabs_tmp = min(Iswabs(i,j,k), & - frac*(Tmlts(ij,k)-Tin_init(ij,k))*ci/dt_rhoi_hlyr(ij)) - else - ci = cp_ice - Iswabs_tmp = min(Iswabs(i,j,k), & - frac*(-Tin_init(ij,k))*ci/dt_rhoi_hlyr(ij)) + Iswabs_tmp = c0 ! 
all Iswabs is moved into fswsfc + + if (Tin_init(ij,k) <= Tmlts(ij,k) - dTemp) then + if (l_brine) then + ci = cp_ice - Lfresh * Tmlts(ij,k) / (Tin_init(ij,k)**2) + Iswabs_tmp = min(Iswabs(i,j,k), & + frac*(Tmlts(ij,k)-Tin_init(ij,k))*ci/dt_rhoi_hlyr(ij)) + else + ci = cp_ice + Iswabs_tmp = min(Iswabs(i,j,k), & + frac*(-Tin_init(ij,k))*ci/dt_rhoi_hlyr(ij)) + endif endif - endif - if (Iswabs_tmp < puny) Iswabs_tmp = c0 + if (Iswabs_tmp < puny) Iswabs_tmp = c0 - dswabs = min(Iswabs(i,j,k) - Iswabs_tmp, fswint(i,j)) + dswabs = min(Iswabs(i,j,k) - Iswabs_tmp, fswint(i,j)) - fswsfc(i,j) = fswsfc(i,j) + dswabs - fswint(i,j) = fswint(i,j) - dswabs - Iswabs(i,j,k) = Iswabs_tmp + fswsfc(i,j) = fswsfc(i,j) + dswabs + fswint(i,j) = fswint(i,j) - dswabs + Iswabs(i,j,k) = Iswabs_tmp + enddo enddo - enddo + endif #ifdef CCSMCOUPLED frac = 0.9_dbl_kind @@ -520,6 +557,8 @@ subroutine temperature_changes (nx_block, ny_block, & spdiag, rhs) else + ! See if we need to reduce fcondtopn anywhere + fcondtopn_force = fcondtopn - fcondtopn_reduction call get_matrix_elements_know_Tsfc & (nx_block, ny_block, & isolve, icells, & @@ -531,7 +570,11 @@ subroutine temperature_changes (nx_block, ny_block, & etai, etas, & sbdiag, diag, & spdiag, rhs, & +#ifdef ACCESS + fcondtopn_force) +#else fcondtopn) +#endif endif ! calc_Tsfc !----------------------------------------------------------------- @@ -650,12 +693,15 @@ subroutine temperature_changes (nx_block, ny_block, & endif ! calc_Tsfc + dqmat_sn(:,:) = c0 do k = 1, nslyr !DIR$ CONCURRENT !Cray !cdir nodep !NEC !ocl novrec !Fujitsu do ij = 1, isolve m = indxij(ij) + i = indxii(ij) + j = indxjj(ij) !----------------------------------------------------------------- ! Reload zTsn from matrix solution @@ -666,7 +712,35 @@ subroutine temperature_changes (nx_block, ny_block, & else zTsn(m,k) = c0 endif +#ifdef ACCESS + if ((l_brine) .and. zTsn(m,k)>c0) then + +! 
Alex West: return this energy to the ocean + + dqmat_sn(m,k) = (zTsn(m,k)*cp_ice - Lfresh)*rhos - zqsn(m,k) + + ! Alex West: If this is the second time in succession that Tsn(1) has been + ! reset, tell the solver to reduce the forcing at the top, and + ! pass the difference to the array enum where it will eventually + ! go into the ocean + ! This is done to avoid an 'infinite loop' whereby temp continually evolves + ! to the same point above zero, is reset, ad infinitum + if (l_snow(m) .AND. k == 1) then + if (Top_T_was_reset_last_time(m)) then + fcondtopn_reduction(i,j) = fcondtopn_reduction(i,j) + dqmat_sn(m,k)*hslyr(m) / dt + Top_T_was_reset_last_time(m) = .false. + enum(m) = enum(m) + hslyr(m) * dqmat_sn(m,k) + else + Top_T_was_reset_last_time(m) = .true. + endif + endif + + zTsn(m,k) = min(zTsn(m,k), c0) + + endif +#else if (l_brine) zTsn(m,k) = min(zTsn(m,k), c0) +#endif !----------------------------------------------------------------- ! If condition 1 or 2 failed, average new snow layer @@ -695,6 +769,8 @@ subroutine temperature_changes (nx_block, ny_block, & !ocl novrec !Fujitsu do ij = 1, isolve m = indxij(ij) + i = indxii(ij) + j = indxjj(ij) !----------------------------------------------------------------- ! Reload zTin from matrix solution @@ -706,6 +782,23 @@ subroutine temperature_changes (nx_block, ny_block, & dTmat(m,k) = zTin(m,k) - Tmlts(m,k) dqmat(m,k) = rhoi * dTmat(m,k) & * (cp_ice - Lfresh * Tmlts(m,k)/zTin(m,k)**2) +#ifdef ACCESS + ! Alex West: If this is the second time in succession that Tin(1) has been + ! reset, tell the solver to reduce the forcing at the top, and + ! pass the difference to the array enum where it will eventually + ! go into the ocean + ! This is done to avoid an 'infinite loop' whereby temp continually evolves + ! to the same point above zero, is reset, ad infinitum + if ((.NOT. (l_snow(m))) .AND. 
(k == 1)) then + if (Top_T_was_reset_last_time(m)) then + fcondtopn_reduction(i,j) = fcondtopn_reduction(i,j) + dqmat(m,k)*hilyr(m) / dt + Top_T_was_reset_last_time(m) = .false. + enum(m) = enum(m) + hilyr(m) * dqmat(m,k) + else + Top_T_was_reset_last_time(m) = .true. + endif + endif +#endif ! use this for the case that Tmlt changes by an amount dTmlt=Tmltnew-Tmlt(k) ! + rhoi * dTmlt & ! * (cp_ocn - cp_ice + Lfresh/zTin(m,k)) @@ -758,7 +851,6 @@ subroutine temperature_changes (nx_block, ny_block, & enddo ! ij enddo ! nilyr - if (calc_Tsfc) then !DIR$ CONCURRENT !Cray @@ -813,10 +905,20 @@ subroutine temperature_changes (nx_block, ny_block, & (zTin(m,nilyr) - Tbot(i,j)) ! Flux extra energy out of the ice - fcondbot(m) = fcondbot(m) + einex(m)/dt +#ifdef ACCESS + ! Alex West. Commenting this out for now - it's essentially what I'm doing with enum, so possibility of double-counting. + ! fcondbot(m) = fcondbot(m) + einex(m)/dt + + ! Alex West. Now including enum, the 'numeric energy' from limiting Tin1 and Tsn, + ! in this conservation check + ferr(m) = abs( (enew(ij) - einit(m) + enum(m))/dt & + - (fcondtopn(i,j) - fcondbot(m) + fswint(i,j)) ) +#else + fcondbot(m) = fcondbot(m) + einex(m)/dt ferr(m) = abs( (enew(ij)-einit(m))/dt & - (fcondtopn(i,j) - fcondbot(m) + fswint(i,j)) ) +#endif ! factor of 0.9 allows for roundoff errors later if (ferr(m) > 0.9_dbl_kind*ferrmax) then ! condition (5) @@ -825,18 +927,27 @@ subroutine temperature_changes (nx_block, ny_block, & all_converged = .false. ! reduce conductivity for next iteration + ! Alex West: I think this maybe shouldn't be done for the top layer + ! if the forcing is the top conductive flux? do k = 1, nilyr if (reduce_kh(m,k) .and. dqmat(m,k) > c0) then frac = max(0.5*(c1-ferr(m)/abs(fcondtopn(i,j)-fcondbot(m))),p1) -! frac = p1 kh(m,k+nslyr+1) = kh(m,k+nslyr+1) * frac kh(m,k+nslyr) = kh(m,k+nslyr+1) endif enddo endif ! ferr +#ifdef ACCESS + if (converged(m)) then + enew_icells(m) = enew(ij) + endif enddo ! 
ij + enew_save(1:isolve) = enew +#else + enddo +#endif deallocate(sbdiag) deallocate(diag) deallocate(spdiag) @@ -858,7 +969,16 @@ subroutine temperature_changes (nx_block, ny_block, & i = indxi(ij) j = indxj(ij) - !----------------------------------------------------------------- + + do m = 1,isolve + if (indxij(m)==ij) then + ij_solve = m + else + ij_solve = 1 + endif + enddo + + !----------------------------------------------------------------- ! Check for convergence failures. !----------------------------------------------------------------- if (.not.converged(ij)) then @@ -873,6 +993,13 @@ subroutine temperature_changes (nx_block, ny_block, & write(nu_diag,*) 'fsurf:', fsurfn(i,j) write(nu_diag,*) 'fcondtop, fcondbot, fswint', & fcondtopn(i,j), fcondbot(ij), fswint(i,j) +#ifdef ACCESS + write(nu_diag,*) '(enew_save - einit)/dt, enum/dt, (enew_save - einit + enum)/dt = ', & + (enew_save(ij_solve) - einit(ij))/dt, enum(ij)/dt, (enew_save(ij_solve) - einit(ij) + enum(ij))/dt + write(nu_diag,*) 'enew_save, einit = ', enew_save(ij_solve), einit(ij) + write(nu_diag,*) 'size(enew_save), size(einit) = ', size(enew_save), size(einit) + write(nu_diag,*) 'ij, m = ', ij, m +#endif write(nu_diag,*) 'fswsfc', fswsfc(i,j) write(nu_diag,*) 'Iswabs',(Iswabs(i,j,k),k=1,nilyr) write(nu_diag,*) 'Flux conservation error =', ferr(ij) @@ -992,6 +1119,16 @@ subroutine conductivity (nx_block, ny_block, & enddo ! nslyr ! interior ice layers +#ifdef ACCESS + do k = 1, nilyr +!DIR$ CONCURRENT !Cray +!cdir nodep !NEC +!ocl novrec !Fujitsu + do ij = 1, icells + kilyr(ij,k) = calculate_ki_from_Tin(zTin(ij,k),zSin(ij,k)) + enddo + enddo ! nilyr +#else if (conduct == 'MU71') then ! Maykut and Untersteiner 1971 form (with Wettlaufer 1991 constants) do k = 1, nilyr @@ -1017,6 +1154,7 @@ subroutine conductivity (nx_block, ny_block, & enddo enddo ! nilyr endif ! conductivity +#endif ! 
top snow interface, top and bottom ice interfaces do ij = 1, icells diff --git a/source/ice_therm_mushy.F90 b/source/ice_therm_mushy.F90 index ad66d918..d5641d70 100644 --- a/source/ice_therm_mushy.F90 +++ b/source/ice_therm_mushy.F90 @@ -7,6 +7,7 @@ module ice_therm_mushy use ice_kinds_mod use ice_constants use ice_domain_size, only: nilyr, nslyr + use ice_itd, only: hs_min use ice_therm_shared, only: ferrmax implicit none diff --git a/source/ice_therm_shared.F90 b/source/ice_therm_shared.F90 index eea8e11b..5605cfc0 100644 --- a/source/ice_therm_shared.F90 +++ b/source/ice_therm_shared.F90 @@ -4,17 +4,21 @@ ! Shared thermo variables, subroutines ! ! authors: Elizabeth C. Hunke, LANL +! AEW (Feb 2014): Added extra function calculate_ki_from_Tin, after Alison +! McLaren module ice_therm_shared use ice_kinds_mod - use ice_domain_size, only: ncat, nilyr, nslyr, max_ntrcr + use ice_domain_size, only: ncat, nilyr, nslyr, max_ntrcr, max_blocks + use ice_blocks, only: nx_block, ny_block implicit none save private public :: calculate_Tin_from_qin, & + calculate_ki_from_Tin, & surface_heat_flux, dsurface_heat_flux_dTsf integer (kind=int_kind), public :: & @@ -37,19 +41,29 @@ module ice_therm_shared character (char_len), public :: & conduct ! 'MU71' or 'bubbly' + real (kind=dbl_kind), & + dimension(nx_block,ny_block,max_blocks), & + public :: & + Tsnic, Ti_bot + logical (kind=log_kind), public :: & l_brine ! if true, treat brine pocket effects logical (kind=log_kind), public :: & heat_capacity, &! if true, ice has nonzero heat capacity ! if false, use zero-layer thermodynamics - calc_Tsfc ! if true, calculate surface temperature + calc_Tsfc , &! if true, calculate surface temperature ! if false, Tsfc is computed elsewhere and ! atmos-ice fluxes are provided to CICE + cap_fluxes ! AEW: Logical for capping conductive flux real (kind=dbl_kind), parameter, public :: & hfrazilmin = 0.05_dbl_kind ! 
min thickness of new frazil ice (m) + real (kind=dbl_kind), parameter, public :: & + betak = 0.13_dbl_kind, & ! constant in formula for k (W m-1 ppt-1) + kimin = 0.10_dbl_kind ! min conductivity of saline ice (W m-1 deg-1) + !======================================================================= contains @@ -156,6 +170,54 @@ subroutine surface_heat_flux(Tsf, fswsfc, & end subroutine surface_heat_flux +!======================================================================= +!BOP +! +! !ROUTINE: calculate_ki_from_Tin - calculate ice thermal conductivity +! +! !DESCRIPTION: +! +! Compute the ice thermal conductivity +! +! !REVISION HISTORY: +! +! !INTERFACE: +! + function calculate_ki_from_Tin (Tink, salink) & + result(ki) + + use ice_constants +! +! !USES: +! +! !INPUT PARAMETERS: +! + real (kind=dbl_kind), intent(in) :: & + Tink , & ! ice layer temperature + salink ! salinity at one level +! +! !OUTPUT PARAMETERS +! + real (kind=dbl_kind) :: & + ki ! ice conductivity + +! +!EOP +! + if (conduct == 'MU71') then + ! Maykut and Untersteiner 1971 form (with Wettlaufer 1991 constants) + ki = kice + betak*salink/min(-puny,Tink) + else + ! 
Pringle et al JGR 2007 'bubbly brine' + ki = (2.11_dbl_kind - 0.011_dbl_kind*Tink & + + 0.09_dbl_kind*salink/min(-puny,Tink)) & + * rhoi / 917._dbl_kind + endif + + ki = max (ki, kimin) + + end function calculate_ki_from_Tin + !======================================================================= subroutine dsurface_heat_flux_dTsf(Tsf, fswsfc, & diff --git a/source/ice_therm_vertical.F90 b/source/ice_therm_vertical.F90 index ad9b8517..c975738d 100644 --- a/source/ice_therm_vertical.F90 +++ b/source/ice_therm_vertical.F90 @@ -29,7 +29,8 @@ module ice_therm_vertical nt_Tsfc, nt_iage, nt_sice, nt_qice, nt_qsno, & nt_apnd, nt_hpnd use ice_therm_shared, only: ktherm, ferrmax, heat_capacity, l_brine, & - calc_Tsfc, calculate_tin_from_qin, Tmin + calc_Tsfc, calculate_tin_from_qin, Tmin, & + cap_fluxes use ice_therm_bl99, only: temperature_changes use ice_therm_0layer, only: zerolayer_temperature use ice_flux, only: Tf @@ -41,8 +42,11 @@ module ice_therm_vertical private public :: init_thermo_vertical, frzmlt_bottom_lateral, thermo_vertical + real (kind=dbl_kind), public :: & + saltmax ! max salinity at ice base for BL99 (ppt) + ! Now set in namelist + real (kind=dbl_kind), parameter, public :: & - saltmax = 3.2_dbl_kind, & ! max salinity at ice base for BL99 (ppt) ! phi_init and dSin0_frazil are used for mushy thermo, ktherm=2 phi_init = 0.75_dbl_kind, & ! initial liquid fraction of frazil dSin0_frazil = c3 ! bulk salinity reduction of newly formed frazil @@ -84,8 +88,12 @@ subroutine thermo_vertical (nx_block, ny_block, & fswsfc, fswint, & Sswabs, Iswabs, & fsurfn, fcondtopn, & + fcondbotn, & fsensn, flatn, & - flwoutn, evapn, & + flwoutn, & + ice_freeboardn, & + evapn, & + evapn_ice, evapn_snow,& freshn, fsaltn, & fhocnn, meltt, & melts, meltb, & @@ -157,14 +165,17 @@ subroutine thermo_vertical (nx_block, ny_block, & ! coupler fluxes to atmosphere real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out):: & flwoutn , & ! 
outgoing longwave radiation (W/m^2) - evapn ! evaporative water flux (kg/m^2/s) + evapn , & ! evaporative water flux (kg/m^2/s) + evapn_ice, &! evaporative water flux over ice (kg/m^2/s) + evapn_snow ! evaporative water flux over snow(kg/m^2/s) ! Note: these are intent out if calc_Tsfc = T, otherwise intent in real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout):: & fsensn , & ! sensible heat flux (W/m^2) flatn , & ! latent heat flux (W/m^2) fsurfn , & ! net flux to top surface, excluding fcondtopn - fcondtopn ! downward cond flux at top surface (W m-2) + fcondtopn, & ! downward cond flux at top surface (W m-2) + fcondbotn ! downward cond flux at bottom surface (W m-2) ! coupler fluxes to ocean real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out):: & @@ -182,7 +193,10 @@ subroutine thermo_vertical (nx_block, ny_block, & snoice , & ! snow-ice formation (m/step-->cm/day) dsnow , & ! change in snow thickness (m/step-->cm/day) mlt_onset, & ! day of year that sfc melting begins - frz_onset ! day of year that freezing begins (congel or frazil) + frz_onset, & ! day of year that freezing begins (congel or frazil) + ice_freeboardn ! height of ice surface (i.e. not snow surface) + ! above sea level in m + real (kind=dbl_kind), intent(in) :: & yday ! day of year @@ -240,6 +254,14 @@ subroutine thermo_vertical (nx_block, ny_block, & real (kind=dbl_kind), dimension (nx_block,ny_block) :: & fadvocn ! advective heat flux to ocean + real (kind=dbl_kind), dimension (icells) :: & + enum ! energy not used by the temperature solver (due to + ! limiting) that should be returned to the ocean. + + real (kind=dbl_kind) :: & + fcondtopn_extra(nx_block,ny_block), & + fcondtopn_solve(nx_block,ny_block) + !----------------------------------------------------------------- ! 
Initialize !----------------------------------------------------------------- @@ -248,8 +270,14 @@ subroutine thermo_vertical (nx_block, ny_block, & istop = 0 jstop = 0 + enum = c0 + do j=1, ny_block do i=1, nx_block + + fcondtopn_solve(i,j) = c0 + fcondtopn_extra(i,j) = c0 + flwoutn(i,j) = c0 evapn (i,j) = c0 @@ -257,7 +285,8 @@ subroutine thermo_vertical (nx_block, ny_block, & fsaltn (i,j) = c0 fhocnn (i,j) = c0 fadvocn(i,j) = c0 - + fcondbotn(i,j) = c0 + ice_freeboardn(i,j) = c0 meltt (i,j) = c0 meltb (i,j) = c0 melts (i,j) = c0 @@ -340,6 +369,28 @@ subroutine thermo_vertical (nx_block, ny_block, & else ! ktherm + !------------------ Flux capping code ------------------------------- + ! To be used with the UM-style coupling formulation (calc_Tsfc=.false.), + ! in which high fluxes can occasionally cause the thermo solver to crash. + ! Reduce fluxes either when ice is too thin, or when ice is getting too + ! cold. + + if (cap_fluxes) then + call cap_conductive_flux(nx_block,ny_block,my_task,icells,indxi,indxj,& + fcondtopn,fcondtopn_solve,fcondtopn_extra,hin,zTsn,zTin,hslyr) + + else + do i = 1,nx_block + do j = 1,ny_block + fcondtopn_solve(i,j) = fcondtopn(i,j) + fcondtopn_extra(i,j) = c0 + enddo + enddo + endif + + + !------------------ End of new code------------------------------- + call temperature_changes(nx_block, ny_block, & my_task, istep1, & dt, icells, & @@ -356,12 +407,23 @@ subroutine thermo_vertical (nx_block, ny_block, & Tsf, Tbot, & fsensn, flatn, & flwoutn, fsurfn, & - fcondtopn, fcondbot, & + fcondtopn_solve,fcondbot, & einit, l_stop, & - istop, jstop) + istop, jstop, & + enum) + + if (calc_Tsfc) then + ! Need to read fcondtopn_solve BACK INTO fcondtopn + ! during forced runs or we'll get nonsensical top melt... + do i = 1,nx_block + do j = 1,ny_block + fcondtopn(i,j) = fcondtopn_solve(i,j) + end do + end do + end if endif ! ktherm - + else if (calc_Tsfc) then @@ -399,6 +461,15 @@ subroutine thermo_vertical (nx_block, ny_block, & endif ! 
heat_capacity + ! Alex West: Read 1D bottom conductive flux array into 2D array + ! for diagnostics (SIMIP) + do ij = 1, icells + i = indxi(ij) + j = indxj(ij) + fcondbotn(i,j) = fcondbot(ij) + enddo + + ! intermediate energy for error check do ij = 1, icells einter(ij) = c0 @@ -429,14 +500,17 @@ subroutine thermo_vertical (nx_block, ny_block, & fbot, Tbot, & flatn, fsurfn, & fcondtopn, fcondbot, & + ice_freeboardn, & fsnow, hsn_new, & fhocnn, evapn, & + evapn_ice, evapn_snow,& meltt, melts, & meltb, iage, & congel, snoice, & mlt_onset, frz_onset,& zSin, sss, & - dsnow) + dsnow, enum, & + fcondtopn_extra) !----------------------------------------------------------------- ! Check for energy conservation by comparing the change in energy @@ -454,8 +528,9 @@ subroutine thermo_vertical (nx_block, ny_block, & fcondtopn,fcondbot, & fadvocn, & fbot, l_stop, & - istop, jstop) - + istop, jstop, & + fcondtopn_solve,fcondtopn_extra, & + enum) if (l_stop) return !----------------------------------------------------------------- @@ -687,7 +762,6 @@ subroutine frzmlt_bottom_lateral (nx_block, ny_block, & m2 = 1.36_dbl_kind ! constant from Maykut & Perovich ! (unitless) -!#if defined(AusCOM) || defined(ACCICE) #ifdef AusCOM cpchr = -cp_ocn*rhow*chio ! chio defaults to 0.006 ala McPhee and Maykut #else @@ -857,6 +931,7 @@ subroutine init_vertical_profile(nx_block, ny_block, & Tbot, l_stop, & istop, jstop) + use ice_itd, only: hs_min use ice_therm_mushy, only: temperature_mush, & liquidus_temperature_mush, & enthalpy_of_melting @@ -1042,7 +1117,16 @@ subroutine init_vertical_profile(nx_block, ny_block, & write(nu_diag,*) 'istep1, my_task, i, j:', & istep1, my_task, i, j write(nu_diag,*) 'zqsn',zqsn(ij,k),-Lfresh*rhos,zqsn(ij,k)+Lfresh*rhos +#ifdef ACCESS + write(nu_diag,*) 'XX=>zTsn=',zTsn(ij,k),hslyr(ij),hin(ij),aicen(i,j) +!BX: drag zTsn back ------ + zTsn(ij,k) = Tmax +! +!BX l_stop = .true. + l_stop = .false. +#else l_stop = .true. 
+#endif istop = i jstop = j return @@ -1069,7 +1153,15 @@ subroutine init_vertical_profile(nx_block, ny_block, & write(nu_diag,*) hin(ij) write(nu_diag,*) hsn(ij) write(nu_diag,*) 0, Tsf(ij) +#ifdef ACCESS +!BX: drag zTsn back ------ + zTsn(ij,k) = Tmin +! +!BX: l_stop = .true. + l_stop = .false. +#else l_stop = .true. +#endif istop = i jstop = j return @@ -1272,6 +1364,73 @@ subroutine init_vertical_profile(nx_block, ny_block, & end subroutine init_vertical_profile + +!=============================================== +! +! This routine is only called if UM-style coupling is being used, with top conductive flux as forcing. +! Check top conductive flux, ice thickness and top layer temperature. +! If the ratio of flux to thickness is too high, remove some of the flux and put it into fcondtopn_extra. +! If the top layer temperature is getting too low, and the flux is negative, also put some into fcondtopn_extra. +! The remainder, fcondtopn_solve, goes to the thermodynamic solver. fcondtopn_extra is added to the energy balance +! at the bottom of the ice in thickness_changes, and is thus used to grow / melt ice at the bottom. +! 
author Alex West, MOHC + + subroutine cap_conductive_flux(nx_block,ny_block,my_task,icells,indxi,indxj,fcondtopn,fcondtopn_solve,fcondtopn_extra,hin,zTsn,zTin,hslyr) + + use ice_itd, only: hs_min + + integer (kind=int_kind), intent(in) :: nx_block, ny_block, my_task + integer (kind=int_kind), intent(in) :: icells + integer (kind=int_kind), intent(in) :: indxi(nx_block*ny_block), indxj(nx_block*ny_block) + real (kind=dbl_kind), intent(in) :: fcondtopn(nx_block,ny_block) + real (kind=dbl_kind) :: fcondtopn_solve(nx_block,ny_block), fcondtopn_extra(nx_block,ny_block) + real (kind=dbl_kind), intent(in) :: hin(icells) + real (kind=dbl_kind), intent(in) :: zTin(icells,nilyr) + real (kind=dbl_kind), intent(in) :: zTsn(icells,nslyr) + real (kind=dbl_kind), intent(in) :: hslyr(icells) + + real (kind=dbl_kind), parameter :: ratio_Wm2_m = c1000, cold_temp_flag = c0 - c60 + + ! AEW: New variables for cold-ice flux capping + real (kind=dbl_kind) :: top_layer_temp, & + reduce_ratio, & + reduce_amount + + integer (kind=int_kind) :: i, j, ij + + + do ij = 1,icells + i = indxi(ij) + j = indxj(ij) + if (abs(fcondtopn(i,j)) > ratio_Wm2_m * hin(ij)) then + fcondtopn_solve(i,j) = sign(ratio_Wm2_m * hin(ij),fcondtopn(i,j)) + fcondtopn_extra(i,j) = fcondtopn(i,j) - fcondtopn_solve(i,j) + + else + fcondtopn_solve(i,j) = fcondtopn(i,j) + fcondtopn_extra(i,j) = c0 + endif + + if (hslyr(ij)>hs_min) then + top_layer_temp = zTsn(ij,1) + else + top_layer_temp = zTin(ij,1) + endif + + if ((top_layer_temp < cold_temp_flag) .and. (fcondtopn_solve(i,j) < c0)) then + reduce_ratio = (cold_temp_flag - top_layer_temp) / (c100 + cold_temp_flag) + reduce_amount = reduce_ratio * fcondtopn_solve(i,j) + fcondtopn_solve(i,j) = fcondtopn_solve(i,j) - reduce_amount + fcondtopn_extra(i,j) = fcondtopn_extra(i,j) + reduce_amount + + endif + + + enddo + + end subroutine cap_conductive_flux + !======================================================================= ! ! 
Compute growth and/or melting at the top and bottom surfaces. @@ -1291,14 +1450,17 @@ subroutine thickness_changes (nx_block, ny_block, & fbot, Tbot, & flatn, fsurfn, & fcondtopn, fcondbot, & + ice_freeboardn, & fsnow, hsn_new, & fhocnn, evapn, & + evapn_ice, evapn_snow,& meltt, melts, & meltb, iage, & congel, snoice, & mlt_onset, frz_onset,& zSin, sss, & - dsnow) + dsnow, enum, & + fcondtopn_extra) use ice_therm_mushy, only: enthalpy_mush, enthalpy_of_melting, & phi_i_mushy, temperature_mush, & @@ -1350,7 +1512,10 @@ subroutine thickness_changes (nx_block, ny_block, & dsnow , & ! snow formation (m/step-->cm/day) iage , & ! ice age (s) mlt_onset , & ! day of year that sfc melting begins - frz_onset ! day of year that freezing begins (congel or frazil) + frz_onset , & ! day of year that freezing begins (congel or frazil) + ice_freeboardn ! height of ice surface (i.e. not snow surface) + ! above sea level in m + real (kind=dbl_kind), dimension (icells), & intent(inout) :: & @@ -1362,8 +1527,9 @@ subroutine thickness_changes (nx_block, ny_block, & real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out):: & fhocnn , & ! fbot, corrected for any surplus energy (W m-2) - evapn ! ice/snow mass sublimated/condensed (kg m-2 s-1) - + evapn , & ! ice/snow mass sublimated/condensed (kg m-2 s-1) + evapn_ice , & ! ice mass sublimated/condensed (kg m-2 s-1) + evapn_snow ! snow mass sublimated/condensed (kg m-2 s-1) real (kind=dbl_kind), dimension (icells), intent(out):: & hsn_new ! thickness of new snow (m) @@ -1432,6 +1598,14 @@ subroutine thickness_changes (nx_block, ny_block, & qbotp , & qbot0 +! Alex West: Extra conductive flux, that didn't go into the thermo solver. + real (kind=dbl_kind), intent(in), dimension (nx_block,ny_block) :: & + fcondtopn_extra + + real (kind=dbl_kind), dimension (icells), intent(in) :: & + enum ! energy not used by the temperature solver (due to + ! limiting) that should be returned to the ocean. 
+ !----------------------------------------------------------------- ! Initialize !----------------------------------------------------------------- @@ -1516,8 +1690,14 @@ subroutine thickness_changes (nx_block, ny_block, & wk1 = (fsurfn(i,j) - fcondtopn(i,j)) * dt etop_mlt(ij) = max(wk1, c0) ! etop_mlt > 0 - + +#ifdef ACCESS + ! AEW: Add negative energy, thrown away, to the energy available for bottom growth + wk1 = (fcondbot(ij) - fbot(i,j) + fcondtopn_extra(i,j)) * dt +#else wk1 = (fcondbot(ij) - fbot(i,j)) * dt +#endif + ebot_mlt(ij) = max(wk1, c0) ! ebot_mlt > 0 ebot_gro(ij) = min(wk1, c0) ! ebot_gro < 0 @@ -1529,15 +1709,20 @@ subroutine thickness_changes (nx_block, ny_block, & !-------------------------------------------------------------- evapn (i,j) = c0 ! initialize + evapn_ice(i,j) = c0 + evapn_snow(i,j) = c0 if (hsn(ij) > puny) then ! add snow with enthalpy zqsn(ij,1) dhs = econ(ij) / (zqsn(ij,1) - rhos*Lvap) ! econ < 0, dhs > 0 dzs(ij,1) = dzs(ij,1) + dhs evapn(i,j) = evapn(i,j) + dhs*rhos + evapn_snow(i,j) = evapn_snow(i,j) + dhs*rhos else ! add ice with enthalpy zqin(ij,1) dhi = econ(ij) / (qm(ij,1) - rhoi*Lvap) ! econ < 0, dhi > 0 dzi(ij,1) = dzi(ij,1) + dhi evapn(i,j) = evapn(i,j) + dhi*rhoi + evapn_ice(i,j) = evapn_ice(i,j) + dhi*rhoi + ! enthalpy of melt water emlt_atm(ij) = emlt_atm(ij) - qmlt(ij,1) * dhi endif @@ -1639,6 +1824,8 @@ subroutine thickness_changes (nx_block, ny_block, & esub(ij) = esub(ij) - dhs*qsub esub(ij) = max(esub(ij), c0) ! in case of roundoff error evapn(i,j) = evapn(i,j) + dhs*rhos + evapn_snow(i,j) = evapn_snow(i,j) + dhs*rhos + !-------------------------------------------------------------- ! 
Melt snow (top) @@ -1675,6 +1862,8 @@ subroutine thickness_changes (nx_block, ny_block, & esub(ij) = esub(ij) - dhi*qsub esub(ij) = max(esub(ij), c0) evapn(i,j) = evapn(i,j) + dhi*rhoi + evapn_ice(i,j) = evapn_ice(i,j) + dhi*rhoi + emlt_ocn(ij) = emlt_ocn(ij) - qmlt(ij,k) * dhi !-------------------------------------------------------------- @@ -1757,7 +1946,11 @@ subroutine thickness_changes (nx_block, ny_block, & i = indxi(ij) j = indxj(ij) fhocnn(i,j) = fbot(i,j) & +#ifdef ACCESS + + (esub(ij) + etop_mlt(ij) + ebot_mlt(ij) + enum(ij))/dt +#else + (esub(ij) + etop_mlt(ij) + ebot_mlt(ij))/dt +#endif enddo !---!----------------------------------------------------------------- @@ -1836,7 +2029,7 @@ subroutine thickness_changes (nx_block, ny_block, & hin, hsn, & zqin, zqsn, & dzi, dzs, & - dsnow) + dsnow, ice_freeboardn) !---!------------------------------------------------------------------- !---! Repartition the ice and snow into equal-thickness layers, @@ -1982,6 +2175,8 @@ subroutine thickness_changes (nx_block, ny_block, & j = indxj(ij) efinal(ij) = -evapn(i,j)*Lvap evapn(i,j) = evapn(i,j)/dt + evapn_ice(i,j) = evapn_ice(i,j)/dt + evapn_snow(i,j) = evapn_snow(i,j)/dt enddo do k = 1, nslyr @@ -2037,7 +2232,7 @@ subroutine freeboard (nx_block, ny_block, & hin, hsn, & zqin, zqsn, & dzi, dzs, & - dsnow) + dsnow, ice_freeboardn) integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions @@ -2056,6 +2251,12 @@ subroutine freeboard (nx_block, ny_block, & dsnow , & ! change in snow thickness after snow-ice formation (m) iage ! snow thickness (m) + real (kind=dbl_kind), dimension(nx_block,ny_block), & + intent(inout) :: & + ice_freeboardn ! height of ice surface (i.e. not snow surface) + ! above sea level in m + + real (kind=dbl_kind), dimension (icells), & intent(inout) :: & hin , & ! ice thickness (m) @@ -2164,6 +2365,16 @@ subroutine freeboard (nx_block, ny_block, & endif ! dhin > puny enddo ! ij + ! 
Calculate diagnostic sea ice freeboard after adjustments (SIMIP) + do ij = 1, icells + i = indxi(ij) + j = indxj(ij) + + ice_freeboardn(i,j) = & + hin(ij) * (1 - rhoi / rhow) - hsn(ij) * (rhos / rhow) + enddo + + end subroutine freeboard !======================================================================= @@ -2292,7 +2503,9 @@ subroutine conservation_check_vthermo(nx_block, ny_block, & fcondtopn,fcondbot, & fadvocn, & fbot, l_stop, & - istop, jstop) + istop, jstop, & + fcondtopn_solve,fcondtopn_extra, & + enum) integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions @@ -2338,6 +2551,14 @@ subroutine conservation_check_vthermo(nx_block, ny_block, & real (kind=dbl_kind) :: & einp , & ! energy input during timestep (J m-2) ferr ! energy conservation error (W m-2) + + real (kind=dbl_kind), intent(in) :: & + fcondtopn_extra(nx_block,ny_block), & + fcondtopn_solve(nx_block,ny_block) + + real (kind=dbl_kind), dimension (icells), intent(in) :: & + enum ! energy not used by the temperature solver (due to + ! limiting) that should be returned to the ocean. !---------------------------------------------------------------- ! If energy is not conserved, print diagnostics and exit. @@ -2379,7 +2600,9 @@ subroutine conservation_check_vthermo(nx_block, ny_block, & write(nu_diag,*) 'Input energy =', einp write(nu_diag,*) 'fbot(i,j),fcondbot(ij):' write(nu_diag,*) fbot(i,j),fcondbot(ij) - + write(nu_diag,*) 'fcondtop_solve(i,j), fcondtopn_extra(i,j):' + write(nu_diag,*) fcondtopn_solve(i,j), fcondtopn_extra(i,j) + write(nu_diag,*) 'enum(ij):', enum(ij) ! if (ktherm == 2) then write(nu_diag,*) 'Intermediate energy =', einter(ij) write(nu_diag,*) 'efinal - einter =', &