From abf2035728dc65512428f703dcc4abf4ed00178f Mon Sep 17 00:00:00 2001 From: Chan-Hoo Jeon Date: Thu, 16 May 2024 08:42:22 -0500 Subject: [PATCH 01/18] add cycle capability to rocoto yaml --- jobs/JLANDDA_ANALYSIS | 8 ++--- jobs/JLANDDA_FORECAST | 8 ++--- jobs/JLANDDA_PREP_BMAT | 8 ++--- jobs/JLANDDA_PREP_EXP | 8 ++--- jobs/JLANDDA_PREP_OBS | 8 ++--- parm/conda_environment.yml | 2 +- parm/land_analysis_era5_hera.yaml | 8 ++++- parm/land_analysis_era5_orion.yaml | 8 ++++- parm/land_analysis_gswp3_hera.yaml | 10 ++++-- parm/land_analysis_gswp3_orion.yaml | 51 +++++++++++++++++++++-------- parm/run_without_rocoto.sh | 4 ++- scripts/exlandda_prep_obs.sh | 14 +++++--- 12 files changed, 93 insertions(+), 44 deletions(-) diff --git a/jobs/JLANDDA_ANALYSIS b/jobs/JLANDDA_ANALYSIS index 9afa8043..910b6262 100755 --- a/jobs/JLANDDA_ANALYSIS +++ b/jobs/JLANDDA_ANALYSIS @@ -58,11 +58,11 @@ export RUN="${RUN:-landda}" [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT if [ "${MACHINE}" = "WCOSS2" ]; then - export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" - export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" else - export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" - export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" fi mkdir -p ${COMOUT} diff --git a/jobs/JLANDDA_FORECAST b/jobs/JLANDDA_FORECAST index 13f42b5a..e04c5786 100755 --- a/jobs/JLANDDA_FORECAST +++ b/jobs/JLANDDA_FORECAST @@ -58,11 +58,11 @@ export RUN="${RUN:-landda}" [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT if [ "${MACHINE}" = "WCOSS2" ]; then - export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" - export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" else - export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" - export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" fi mkdir -p ${COMOUT} diff --git a/jobs/JLANDDA_PREP_BMAT b/jobs/JLANDDA_PREP_BMAT index b8c35834..88c27555 100755 --- a/jobs/JLANDDA_PREP_BMAT +++ b/jobs/JLANDDA_PREP_BMAT @@ -58,11 +58,11 @@ export RUN="${RUN:-landda}" [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT if [ "${MACHINE}" = "WCOSS2" ]; then - export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" - export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + export COMOUT="${COMOUT:-$(compath.py -o 
${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" else - export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" - export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" fi mkdir -p ${COMOUT} diff --git a/jobs/JLANDDA_PREP_EXP b/jobs/JLANDDA_PREP_EXP index 5748908f..cc03b593 100755 --- a/jobs/JLANDDA_PREP_EXP +++ b/jobs/JLANDDA_PREP_EXP @@ -58,11 +58,11 @@ export RUN="${RUN:-landda}" [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT if [ "${MACHINE}" = "WCOSS2" ]; then - export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" - export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" else - export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" - export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" fi mkdir -p ${COMOUT} diff --git a/jobs/JLANDDA_PREP_OBS b/jobs/JLANDDA_PREP_OBS index e75c71a4..dd2422fc 100755 --- a/jobs/JLANDDA_PREP_OBS +++ b/jobs/JLANDDA_PREP_OBS @@ -58,11 +58,11 @@ export RUN="${RUN:-landda}" [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT if [ "${MACHINE}" = "WCOSS2" ]; then - export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" - export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" else - export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" - export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" fi mkdir -p ${COMOUT} diff --git a/parm/conda_environment.yml b/parm/conda_environment.yml index b1ea918f..53cb4899 100644 --- a/parm/conda_environment.yml +++ b/parm/conda_environment.yml @@ -5,4 +5,4 @@ channels: dependencies: - pylint=2.17* - pytest=7.2* - - uwtools=2.1.1 + - uwtools=2.2.0 diff --git a/parm/land_analysis_era5_hera.yaml b/parm/land_analysis_era5_hera.yaml index d4eff5ac..fee5f3de 100644 --- a/parm/land_analysis_era5_hera.yaml +++ b/parm/land_analysis_era5_hera.yaml @@ -2,9 +2,11 @@ workflow: attrs: realtime: false scheduler: slurm + cyclethrottle: 24 + taskthrottle: 24 cycledef: - attrs: - group: epic + group: cycled spec: 201912210000 201912210000 24:00:00 entities: MACHINE: "hera" @@ -18,6 +20,8 @@ workflow: FCSTHR: "24" NPROCS_ANALYSIS: "6" NPROCS_FORECAST: "6" + OBSDIR: "" + OBSDIR_SUBDIR: "" OBS_TYPES: "GHCN" DAtype: "letkfoi_snow" SNOWDEPTHVAR: "snwdph" @@ -68,6 +72,8 @@ workflow: join: "&LOGDIR;/prep_exp.log" task_prep_obs: envars: + OBSDIR: 
"&OBSDIR;" + OBSDIR_SUBDIR: "&OBSDIR_SUBDIR;" OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" SCHED: "&SCHED;" diff --git a/parm/land_analysis_era5_orion.yaml b/parm/land_analysis_era5_orion.yaml index 08859eba..ee12151d 100644 --- a/parm/land_analysis_era5_orion.yaml +++ b/parm/land_analysis_era5_orion.yaml @@ -2,9 +2,11 @@ workflow: attrs: realtime: false scheduler: slurm + cyclethrottle: 24 + taskthrottle: 24 cycledef: - attrs: - group: epic + group: cycled spec: 201912210000 201912210000 24:00:00 entities: MACHINE: "orion" @@ -18,6 +20,8 @@ workflow: FCSTHR: "24" NPROCS_ANALYSIS: "6" NPROCS_FORECAST: "6" + OBSDIR: "" + OBSDIR_SUBDIR: "" OBS_TYPES: "GHCN" DAtype: "letkfoi_snow" SNOWDEPTHVAR: "snwdph" @@ -68,6 +72,8 @@ workflow: join: "&LOGDIR;/prep_exp.log" task_prep_obs: envars: + OBSDIR: "&OBSDIR;" + OBSDIR_SUBDIR: "&OBSDIR_SUBDIR;" OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" SCHED: "&SCHED;" diff --git a/parm/land_analysis_gswp3_hera.yaml b/parm/land_analysis_gswp3_hera.yaml index 8f58c142..798af689 100644 --- a/parm/land_analysis_gswp3_hera.yaml +++ b/parm/land_analysis_gswp3_hera.yaml @@ -2,10 +2,12 @@ workflow: attrs: realtime: false scheduler: slurm + cyclethrottle: 24 + taskthrottle: 24 cycledef: - attrs: - group: epic - spec: 200001030000 200001030000 24:00:00 + group: cycled + spec: 200001030000 200001040000 24:00:00 entities: MACHINE: "hera" SCHED: "slurm" @@ -18,6 +20,8 @@ workflow: FCSTHR: "24" NPROCS_ANALYSIS: "6" NPROCS_FORECAST: "6" + OBSDIR: "" + OBSDIR_SUBDIR: "" OBS_TYPES: "GHCN" DAtype: "letkfoi_snow" SNOWDEPTHVAR: "snwdph" @@ -68,6 +72,8 @@ workflow: join: "&LOGDIR;/prep_exp.log" task_prep_obs: envars: + OBSDIR: "&OBSDIR;" + OBSDIR_SUBDIR: "&OBSDIR_SUBDIR;" OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" SCHED: "&SCHED;" diff --git a/parm/land_analysis_gswp3_orion.yaml b/parm/land_analysis_gswp3_orion.yaml index d6251707..a7c533da 100644 --- a/parm/land_analysis_gswp3_orion.yaml +++ b/parm/land_analysis_gswp3_orion.yaml @@ -2,10 +2,12 @@ workflow: attrs: realtime: false scheduler: slurm + cyclethrottle: 24 + taskthrottle: 24 cycledef: - attrs: - group: epic - spec: 200001030000 200001030000 24:00:00 + group: cycled + spec: 200001030000 200001040000 24:00:00 entities: MACHINE: "orion" SCHED: "slurm" @@ -18,6 +20,8 @@ workflow: FCSTHR: "24" NPROCS_ANALYSIS: "6" NPROCS_FORECAST: "6" + OBSDIR: "" + OBSDIR_SUBDIR: "" OBS_TYPES: "GHCN" DAtype: "letkfoi_snow" SNOWDEPTHVAR: "snwdph" @@ -25,6 +29,7 @@ workflow: NET: "landda" envir: "test" model_ver: "v1.2.1" + RUN: "landda" HOMElandda: "&EXP_BASEDIR;/land-DA_workflow" PTMP: "&EXP_BASEDIR;/ptmp" COMROOT: "&PTMP;/&envir;/com" @@ -32,15 +37,20 @@ workflow: KEEPDATA: "YES" WORKDIR: "&EXP_BASEDIR;/workdir/run_&FORCING;" LOGDIR: "&COMROOT;/output/logs/run_&FORCING;" + LOGFN_SUFFIX: "_@Y@m@d.log" PATHRT: "&EXP_BASEDIR;" PDY: "@Y@m@d" cyc: "@H" - SLASH_ENSMEM_SUBDIR: "" PTIME: "@Y@m@d@H" NTIME: "@Y@m@d@H" + WARMSTART_DIR: "" + DATADEP_FILE1: "&WARMSTART_DIR;/ufs_land_restart.@Y-@m-@d_@H-00-00.tile1.nc" + DATADEP_FILE2: "&COMROOT;/&NET;/&model_ver;/&RUN;.@Y@m@d/@H/RESTART/ufs_land_restart.@Y-@m-@d_@H-00-00.tile1.nc" log: "&LOGDIR;/workflow.log" tasks: task_prep_exp: + attrs: + cycledefs: cycled envars: MACHINE: "&MACHINE;" SCHED: "&SCHED;" @@ -57,7 +67,6 @@ workflow: KEEPDATA: "&KEEPDATA;" PDY: "&PDY;" cyc: "&cyc;" - SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" account: "&ACCOUNT;" command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_exp" "&HOMElandda;" "&MACHINE;"' @@ -65,9 +74,23 @@ workflow: cores: 1 
walltime: 00:02:00 queue: batch - join: "&LOGDIR;/prep_exp.log" + join: "&LOGDIR;/prep_exp&LOGFN_SUFFIX;" + dependency: + or: + datadep_file1: + attrs: + age: 5 + value: "&DATADEP_FILE1;" + datadep_file2: + attrs: + age: 5 + value: "&DATADEP_FILE2;" task_prep_obs: + attrs: + cycledefs: cycled envars: + OBSDIR: "&OBSDIR;" + OBSDIR_SUBDIR: "&OBSDIR_SUBDIR;" OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" SCHED: "&SCHED;" @@ -82,7 +105,6 @@ workflow: KEEPDATA: "&KEEPDATA;" PDY: "&PDY;" cyc: "&cyc;" - SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" account: "&ACCOUNT;" command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_obs" "&HOMElandda;" "&MACHINE;"' @@ -90,12 +112,14 @@ workflow: cores: 1 walltime: 00:02:00 queue: batch - join: "&LOGDIR;/prep_obs.log" + join: "&LOGDIR;/prep_obs&LOGFN_SUFFIX;" dependency: taskdep: attrs: task: prep_exp task_prep_bmat: + attrs: + cycledefs: cycled envars: MACHINE: "&MACHINE;" SCHED: "&SCHED;" @@ -110,7 +134,6 @@ workflow: KEEPDATA: "&KEEPDATA;" PDY: "&PDY;" cyc: "&cyc;" - SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" DAtype: "&DAtype;" SNOWDEPTHVAR: "&SNOWDEPTHVAR;" @@ -120,12 +143,14 @@ workflow: cores: 1 walltime: 00:02:00 queue: batch - join: "&LOGDIR;/prep_bmat.log" + join: "&LOGDIR;/prep_bmat&LOGFN_SUFFIX;" dependency: taskdep: attrs: task: prep_obs task_analysis: + attrs: + cycledefs: cycled envars: OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" @@ -143,7 +168,6 @@ workflow: KEEPDATA: "&KEEPDATA;" PDY: "&PDY;" cyc: "&cyc;" - SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" NTIME: "&NTIME;" DAtype: "&DAtype;" @@ -156,12 +180,14 @@ workflow: nodes: "1:ppn=&NPROCS_ANALYSIS;" walltime: 00:15:00 queue: batch - join: "&LOGDIR;/analysis.log" + join: "&LOGDIR;/analysis&LOGFN_SUFFIX;" dependency: taskdep: attrs: task: prep_bmat task_forecast: + attrs: + cycledefs: cycled envars: OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" @@ -180,7 +206,6 @@ workflow: LOGDIR: "&LOGDIR;" PDY: "&PDY;" cyc: "&cyc;" - SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" NTIME: "&NTIME;" DAtype: "&DAtype;" @@ -193,7 +218,7 @@ workflow: nodes: "1:ppn=&NPROCS_FORECAST;" walltime: 01:00:00 queue: batch - join: "&LOGDIR;/forecast.log" + join: "&LOGDIR;/forecast&LOGFN_SUFFIX;" dependency: taskdep: attrs: diff --git a/parm/run_without_rocoto.sh b/parm/run_without_rocoto.sh index 87087e81..4514a6fb 100755 --- a/parm/run_without_rocoto.sh +++ b/parm/run_without_rocoto.sh @@ -19,7 +19,7 @@ if [ "${MACHINE}" = "hera" ]; then export EXP_BASEDIR="/scratch2/NAGAPE/epic/{USER}/landda_test" export JEDI_INSTALL="/scratch2/NAGAPE/epic/UFS_Land-DA/jedi_skylabv7.0" elif [ "${MACHINE}" = "orion" ]; then - export EXP_BASEDIR="/work/noaa/epic/chjeon/landda_test" + export EXP_BASEDIR="/work/noaa/epic/{USER}/landda_test" export JEDI_INSTALL="/work/noaa/epic/UFS_Land-DA/jedi_skylabv7.0" fi @@ -27,6 +27,8 @@ export RES="96" export FCSTHR="24" export NPROCS_ANALYSIS="6" export NPROCS_FORECAST="6" +export OBSDIR="" +export OBSDIR_SUBDIR="" export OBS_TYPES="GHCN" export DAtype="letkfoi_snow" export SNOWDEPTHVAR="snwdph" diff --git a/scripts/exlandda_prep_obs.sh b/scripts/exlandda_prep_obs.sh index 2de0c8e0..cb865530 100755 --- a/scripts/exlandda_prep_obs.sh +++ b/scripts/exlandda_prep_obs.sh @@ -27,22 +27,26 @@ JEDIWORKDIR=${WORKDIR}/mem000/jedi cd $JEDIWORKDIR -OBSDIR=${FIXlandda}/DA ################################################ # 2. 
PREPARE OBS FILES ################################################ +OBSDIR="${OBSDIR:-${FIXlandda}/DA}" for obs in "${OBS_TYPES[@]}"; do # get the obs file name if [ ${obs} == "GTS" ]; then - obsfile=$OBSDIR/snow_depth/GTS/data_proc/${YYYY}${MM}/adpsfc_snow_${YYYY}${MM}${DD}${HH}.nc4 + OBSDIR_SUBDIR="${OBSDIR_SUBDIR:-snow_depth/GTS/data_proc}" + obsfile="${OBSDIR}/${OBSDIR_SUBDIR}/${YYYY}${MM}/adpsfc_snow_${YYYY}${MM}${DD}${HH}.nc4" # GHCN are time-stamped at 18. If assimilating at 00, need to use previous day's obs, so that # obs are within DA window. elif [ $ATMOS_FORC == "era5" ] && [ ${obs} == "GHCN" ]; then - obsfile=$OBSDIR/snow_depth/GHCN/data_proc/v3/${YYYY}/ghcn_snwd_ioda_${YYYP}${MP}${DP}_jediv7.nc + OBSDIR_SUBDIR="${OBSDIR_SUBDIR:-snow_depth/GHCN/data_proc/v3}" + obsfile="${OBSDIR}/${OBSDIR_SUBDIR}/${YYYY}/ghcn_snwd_ioda_${YYYP}${MP}${DP}.nc" elif [ $ATMOS_FORC == "gswp3" ] && [ ${obs} == "GHCN" ]; then - obsfile=$OBSDIR/snow_depth/GHCN/data_proc/v3/${YYYY}/fake_ghcn_snwd_ioda_${YYYP}${MP}${DP}_jediv7.nc + OBSDIR_SUBDIR="${OBSDIR_SUBDIR:-snow_depth/GHCN/data_proc/v3}" + obsfile="${OBSDIR}/${OBSDIR_SUBDIR}/${YYYY}/ghcn_snwd_ioda_${YYYP}${MP}${DP}.nc" elif [ ${obs} == "SYNTH" ]; then - obsfile=$OBSDIR/synthetic_noahmp/IODA.synthetic_gswp_obs.${YYYY}${MM}${DD}${HH}.nc + OBSDIR_SUBDIR="${OBSDIR_SUBDIR:-synthetic_noahmp}" + obsfile="${OBSDIR}/${OBSDIR_SUBDIR}/IODA.synthetic_gswp_obs.${YYYY}${MM}${DD}${HH}.nc" else echo "do_landDA: Unknown obs type requested ${obs}, exiting" exit 1 From f86ad0620a4eb3fc4520139702bff07fd1b1b0c1 Mon Sep 17 00:00:00 2001 From: Chan-Hoo Jeon Date: Thu, 16 May 2024 10:29:26 -0500 Subject: [PATCH 02/18] remove task prep_bmat --- jobs/JLANDDA_PREP_BMAT | 104 --------------------- modulefiles/tasks/hera/task.prep_bmat.lua | 8 -- modulefiles/tasks/orion/task.prep_bmat.lua | 8 -- parm/land_analysis_era5_orion.yaml | 32 +------ parm/land_analysis_gswp3_orion.yaml | 41 +------- parm/run_without_rocoto.sh | 10 -- scripts/exlandda_analysis.sh | 100 ++++++++++++++------ scripts/exlandda_forecast.sh | 41 +++----- scripts/exlandda_prep_bmat.sh | 78 ---------------- scripts/exlandda_prep_exp.sh | 54 +++-------- scripts/exlandda_prep_obs.sh | 8 +- 11 files changed, 97 insertions(+), 387 deletions(-) delete mode 100755 jobs/JLANDDA_PREP_BMAT delete mode 100644 modulefiles/tasks/hera/task.prep_bmat.lua delete mode 100644 modulefiles/tasks/orion/task.prep_bmat.lua delete mode 100755 scripts/exlandda_prep_bmat.sh diff --git a/jobs/JLANDDA_PREP_BMAT b/jobs/JLANDDA_PREP_BMAT deleted file mode 100755 index 88c27555..00000000 --- a/jobs/JLANDDA_PREP_BMAT +++ /dev/null @@ -1,104 +0,0 @@ -#!/bin/bash - -date -export PS4='+ $SECONDS + ' -set -xue -# -#----------------------------------------------------------------------- -# -# Set the NCO standard environment variables (Table 1, pp.4) -# -#----------------------------------------------------------------------- -# -export USHlandda="${HOMElandda}/ush" -export EXEClandda="${HOMElandda}/exec" -export PARMlandda="${HOMElandda}/parm" -export FIXlandda="${HOMElandda}/fix" -export SCRIPTSlandda="${HOMElandda}/scripts" -# -#----------------------------------------------------------------------- -# -# Define job and jobid by default for rocoto -# -#----------------------------------------------------------------------- -# -WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}" -if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then - if [ "${SCHED}" = "slurm" ]; then - job=${SLURM_JOB_NAME} - pid=${SLURM_JOB_ID} - elif [ "${SCHED}" = "pbspro" ]; then - 
job=${PBS_JOBNAME} - pid=${PBS_JOBID} - else - job="task" - pid=$$ - fi - jobid="${job}.${PDY}${cyc}.${pid}" -fi -# -#----------------------------------------------------------------------- -# -# Create a temp working directory (DATA) and cd into it. -# -#----------------------------------------------------------------------- -# -export DATA="${DATA:-${DATAROOT}/${jobid}}" -mkdir -p $DATA -cd $DATA -# -#----------------------------------------------------------------------- -# -# Define NCO environment variables and set COM type definitions. -# -#----------------------------------------------------------------------- -# -export NET="${NET:-landda}" -export RUN="${RUN:-landda}" - -[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT -if [ "${MACHINE}" = "WCOSS2" ]; then - export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" - export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" -else - export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" - export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" -fi - -mkdir -p ${COMOUT} - -# Create a teomporary share directory -export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" -mkdir -p ${DATA_SHARE} - -# Run setpdy to initialize PDYm and PDYp variables -export cycle="${cycle:-t${cyc}z}" -setpdy.sh -. ./PDY -# -#----------------------------------------------------------------------- -# -# Call the ex-script for this J-job. -# -#----------------------------------------------------------------------- -# -export pgmout="${DATA}/OUTPUT.$$" -env - -${SCRIPTSlandda}/exlandda_prep_bmat.sh -export err=$?; err_chk - -if [ -e "$pgmout" ]; then - cat $pgmout -fi -# -#----------------------------------------------------------------------- -# -# Whether or not working directory DATA should be kept. 
-# -#----------------------------------------------------------------------- -# -if [ "${KEEPDATA}" = "NO" ]; then - rm -rf ${DATA} -fi -date diff --git a/modulefiles/tasks/hera/task.prep_bmat.lua b/modulefiles/tasks/hera/task.prep_bmat.lua deleted file mode 100644 index 8ad021a3..00000000 --- a/modulefiles/tasks/hera/task.prep_bmat.lua +++ /dev/null @@ -1,8 +0,0 @@ -prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) -prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) - -load(pathJoin("stack-intel", stack_intel_ver)) -load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) -load(pathJoin("stack-python", stack_python_ver)) - -load(pathJoin("prod_util", prod_util_ver)) diff --git a/modulefiles/tasks/orion/task.prep_bmat.lua b/modulefiles/tasks/orion/task.prep_bmat.lua deleted file mode 100644 index 8ad021a3..00000000 --- a/modulefiles/tasks/orion/task.prep_bmat.lua +++ /dev/null @@ -1,8 +0,0 @@ -prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) -prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) - -load(pathJoin("stack-intel", stack_intel_ver)) -load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) -load(pathJoin("stack-python", stack_python_ver)) - -load(pathJoin("prod_util", prod_util_ver)) diff --git a/parm/land_analysis_era5_orion.yaml b/parm/land_analysis_era5_orion.yaml index ee12151d..846c883a 100644 --- a/parm/land_analysis_era5_orion.yaml +++ b/parm/land_analysis_era5_orion.yaml @@ -101,36 +101,6 @@ workflow: taskdep: attrs: task: prep_exp - task_prep_bmat: - envars: - MACHINE: "&MACHINE;" - SCHED: "&SCHED;" - ACCOUNT: "&ACCOUNT;" - EXP_NAME: "&EXP_NAME;" - ATMOS_FORC: "&FORCING;" - WORKDIR: "&WORKDIR;" - model_ver: "&model_ver;" - HOMElandda: "&HOMElandda;" - COMROOT: "&COMROOT;" - DATAROOT: "&DATAROOT;" - KEEPDATA: "&KEEPDATA;" - PDY: "&PDY;" - cyc: "&cyc;" - SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" - PTIME: "&PTIME;" - DAtype: "&DAtype;" - SNOWDEPTHVAR: "&SNOWDEPTHVAR;" - account: "&ACCOUNT;" - command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_bmat" "&HOMElandda;" "&MACHINE;"' - jobname: prep_bmat - cores: 1 - walltime: 00:02:00 - queue: batch - join: "&LOGDIR;/prep_bmat.log" - dependency: - taskdep: - attrs: - task: prep_obs task_analysis: envars: OBS_TYPES: "&OBS_TYPES;" @@ -166,7 +136,7 @@ workflow: dependency: taskdep: attrs: - task: prep_bmat + task: prep_obs task_forecast: envars: OBS_TYPES: "&OBS_TYPES;" diff --git a/parm/land_analysis_gswp3_orion.yaml b/parm/land_analysis_gswp3_orion.yaml index a7c533da..51c958f6 100644 --- a/parm/land_analysis_gswp3_orion.yaml +++ b/parm/land_analysis_gswp3_orion.yaml @@ -15,6 +15,7 @@ workflow: EXP_NAME: "LETKF" EXP_BASEDIR: "/work/noaa/epic/{USER}/landda_test" JEDI_INSTALL: "/work/noaa/epic/UFS_Land-DA/jedi_skylabv7.0" + WARMSTART_DIR: "" FORCING: "gswp3" RES: "96" FCSTHR: "24" @@ -35,7 +36,6 @@ workflow: COMROOT: "&PTMP;/&envir;/com" DATAROOT: "&PTMP;/&envir;/tmp" KEEPDATA: "YES" - WORKDIR: "&EXP_BASEDIR;/workdir/run_&FORCING;" LOGDIR: "&COMROOT;/output/logs/run_&FORCING;" LOGFN_SUFFIX: "_@Y@m@d.log" PATHRT: "&EXP_BASEDIR;" @@ -43,7 +43,6 @@ workflow: cyc: "@H" PTIME: "@Y@m@d@H" NTIME: "@Y@m@d@H" - WARMSTART_DIR: "" DATADEP_FILE1: "&WARMSTART_DIR;/ufs_land_restart.@Y-@m-@d_@H-00-00.tile1.nc" DATADEP_FILE2: "&COMROOT;/&NET;/&model_ver;/&RUN;.@Y@m@d/@H/RESTART/ufs_land_restart.@Y-@m-@d_@H-00-00.tile1.nc" log: "&LOGDIR;/workflow.log" @@ -59,8 +58,8 @@ workflow: ATMOS_FORC: "&FORCING;" RES: "&RES;" TSTUB: "&TSTUB;" - WORKDIR: "&WORKDIR;" model_ver: 
"&model_ver;" + RUN: "&RUN;" HOMElandda: "&HOMElandda;" COMROOT: "&COMROOT;" DATAROOT: "&DATAROOT;" @@ -97,7 +96,6 @@ workflow: ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" ATMOS_FORC: "&FORCING;" - WORKDIR: "&WORKDIR;" model_ver: "&model_ver;" HOMElandda: "&HOMElandda;" COMROOT: "&COMROOT;" @@ -117,37 +115,6 @@ workflow: taskdep: attrs: task: prep_exp - task_prep_bmat: - attrs: - cycledefs: cycled - envars: - MACHINE: "&MACHINE;" - SCHED: "&SCHED;" - ACCOUNT: "&ACCOUNT;" - EXP_NAME: "&EXP_NAME;" - ATMOS_FORC: "&FORCING;" - WORKDIR: "&WORKDIR;" - model_ver: "&model_ver;" - HOMElandda: "&HOMElandda;" - COMROOT: "&COMROOT;" - DATAROOT: "&DATAROOT;" - KEEPDATA: "&KEEPDATA;" - PDY: "&PDY;" - cyc: "&cyc;" - PTIME: "&PTIME;" - DAtype: "&DAtype;" - SNOWDEPTHVAR: "&SNOWDEPTHVAR;" - account: "&ACCOUNT;" - command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_bmat" "&HOMElandda;" "&MACHINE;"' - jobname: prep_bmat - cores: 1 - walltime: 00:02:00 - queue: batch - join: "&LOGDIR;/prep_bmat&LOGFN_SUFFIX;" - dependency: - taskdep: - attrs: - task: prep_obs task_analysis: attrs: cycledefs: cycled @@ -160,7 +127,6 @@ workflow: ATMOS_FORC: "&FORCING;" RES: "&RES;" TSTUB: "&TSTUB;" - WORKDIR: "&WORKDIR;" model_ver: "&model_ver;" HOMElandda: "&HOMElandda;" COMROOT: "&COMROOT;" @@ -184,7 +150,7 @@ workflow: dependency: taskdep: attrs: - task: prep_bmat + task: prep_obs task_forecast: attrs: cycledefs: cycled @@ -197,7 +163,6 @@ workflow: ATMOS_FORC: "&FORCING;" RES: "&RES;" TSTUB: "&TSTUB;" - WORKDIR: "&WORKDIR;" model_ver: "&model_ver;" HOMElandda: "&HOMElandda;" COMROOT: "&COMROOT;" diff --git a/parm/run_without_rocoto.sh b/parm/run_without_rocoto.sh index 4514a6fb..76ff4a37 100755 --- a/parm/run_without_rocoto.sh +++ b/parm/run_without_rocoto.sh @@ -82,16 +82,6 @@ else exit 2 fi -echo " ... PREP_BMAT running ... " -${HOMElandda}/parm/task_load_modules_run_jjob.sh "prep_bmat" "${HOMElandda}" "${MACHINE}" -export err=$? -if [ $err = 0 ]; then - echo " === PREP_BMAT completed successfully === " -else - echo " ERROR: PREP_BMAT failed !!! " - exit 3 -fi - echo " ... ANALYSIS running ... " ${HOMElandda}/parm/task_load_modules_run_jjob.sh "analysis" "${HOMElandda}" "${MACHINE}" export err=$? 
diff --git a/scripts/exlandda_analysis.sh b/scripts/exlandda_analysis.sh index 1407e242..10fc483c 100755 --- a/scripts/exlandda_analysis.sh +++ b/scripts/exlandda_analysis.sh @@ -22,17 +22,12 @@ MP=${PTIME:4:2} DP=${PTIME:6:2} HP=${PTIME:8:2} -mem_ens="mem000" - -MEM_WORKDIR=${WORKDIR}/${mem_ens} -JEDIWORKDIR=${WORKDIR}/mem000/jedi FILEDATE=${YYYY}${MM}${DD}.${HH}0000 + JEDI_STATICDIR=${JEDI_INSTALL}/jedi-bundle/fv3-jedi/test/Data JEDI_EXECDIR=${JEDI_INSTALL}/build/bin -SAVE_INCR="YES" -KEEPJEDIDIR="YES" -cd $MEM_WORKDIR +SAVE_INCR="YES" # load modulefiles BUILD_VERSION_FILE="${HOMElandda}/versions/build.ver_${MACHINE}" @@ -46,15 +41,64 @@ MPIEXEC=`which mpiexec` YAML_DA=construct GFSv17="NO" B=30 # back ground error std for LETKFOI - -cd $JEDIWORKDIR -mkdir -p output/DA/hofx +if [[ $do_jedi == "YES" ]]; then + cres_file=${DATA}/${FILEDATE}.coupler.res + cp ${PARMlandda}/templates/template.coupler.res $cres_file + + sed -i -e "s/XXYYYY/${YYYY}/g" $cres_file + sed -i -e "s/XXMM/${MM}/g" $cres_file + sed -i -e "s/XXDD/${DD}/g" $cres_file + sed -i -e "s/XXHH/${HH}/g" $cres_file + sed -i -e "s/XXYYYP/${YYYP}/g" $cres_file + sed -i -e "s/XXMP/${MP}/g" $cres_file + sed -i -e "s/XXDP/${DP}/g" $cres_file + sed -i -e "s/XXHP/${HP}/g" $cres_file +fi + +################################################ +# CREATE BACKGROUND ENSEMBLE (LETKFOI) +################################################ + +if [[ ${DAtype} == "letkfoi_snow" ]]; then + + if [ $GFSv17 == "YES" ]; then + SNOWDEPTHVAR="snodl" + else + SNOWDEPTHVAR="snwdph" + # replace field overwrite file + cp ${PARMlandda}/jedi/gfs-land.yaml ${DATA}/gfs-land.yaml + fi + # FOR LETKFOI, CREATE THE PSEUDO-ENSEMBLE + for ens in pos neg + do + if [ -e $DATA/mem_${ens} ]; then + rm -r $DATA/mem_${ens} + fi + mkdir -p $DATA/mem_${ens} + for tile in 1 2 3 4 5 6 + do + cp ${COMIN}/${FILEDATE}.sfc_data.tile${tile}.nc ${DATA}/mem_${ens}/${FILEDATE}.sfc_data.tile${tile}.nc + done + cp ${DATA}/${FILEDATE}.coupler.res ${DATA}/mem_${ens}/${FILEDATE}.coupler.res + done + + echo 'do_landDA: calling create ensemble' + + # using ioda mods to get a python version with netCDF4 + ${USHlandda}/letkf_create_ens.py $FILEDATE $SNOWDEPTHVAR $B + if [[ $? != 0 ]]; then + echo "letkf create failed" + exit 10 + fi +fi ################################################ -# 3. 
DETERMINE REQUESTED JEDI TYPE, CONSTRUCT YAMLS +# DETERMINE REQUESTED JEDI TYPE, CONSTRUCT YAMLS ################################################ +mkdir -p output/DA/hofx + do_DA="YES" do_HOFX="NO" @@ -67,14 +111,14 @@ fi if [[ $do_DA == "YES" ]]; then if [[ $YAML_DA == "construct" ]];then # construct the yaml - cp ${PARMlandda}/jedi/${DAtype}.yaml ${JEDIWORKDIR}/letkf_land.yaml + cp ${PARMlandda}/jedi/${DAtype}.yaml ${DATA}/letkf_land.yaml for obs in "${OBS_TYPES[@]}"; do cat ${PARMlandda}/jedi/${obs}.yaml >> letkf_land.yaml done else # use specified yaml echo "Using user specified YAML: ${YAML_DA}" - cp ${PARMlandda}/jedi/${YAML_DA} ${JEDIWORKDIR}/letkf_land.yaml + cp ${PARMlandda}/jedi/${YAML_DA} ${DATA}/letkf_land.yaml fi sed -i -e "s/XXYYYY/${YYYY}/g" letkf_land.yaml @@ -96,14 +140,14 @@ fi if [[ $do_HOFX == "YES" ]]; then if [[ $YAML_HOFX == "construct" ]];then # construct the yaml - cp ${PARMlandda}/jedi/${DAtype}.yaml ${JEDIWORKDIR}/hofx_land.yaml + cp ${PARMlandda}/jedi/${DAtype}.yaml ${DATA}/hofx_land.yaml for obs in "${OBS_TYPES[@]}"; do cat ${PARMlandda}/jedi/${obs}.yaml >> hofx_land.yaml done else # use specified yaml echo "Using user specified YAML: ${YAML_HOFX}" - cp ${PARMlandda}/jedi/${YAML_HOFX} ${JEDIWORKDIR}/hofx_land.yaml + cp ${PARMlandda}/jedi/${YAML_HOFX} ${DATA}/hofx_land.yaml fi sed -i -e "s/XXYYYY/${YYYY}/g" hofx_land.yaml @@ -124,16 +168,13 @@ if [[ $do_HOFX == "YES" ]]; then fi if [[ "$GFSv17" == "NO" ]]; then - cp ${PARMlandda}/jedi/gfs-land.yaml ${JEDIWORKDIR}/gfs-land.yaml + cp ${PARMlandda}/jedi/gfs-land.yaml ${DATA}/gfs-land.yaml else - cp ${JEDI_INSTALL}/jedi-bundle/fv3-jedi/test/Data/fieldmetadata/gfs_v17-land.yaml ${JEDIWORKDIR}/gfs-land.yaml + cp ${JEDI_INSTALL}/jedi-bundle/fv3-jedi/test/Data/fieldmetadata/gfs_v17-land.yaml ${DATA}/gfs-land.yaml fi ################################################ -# 4. CREATE BACKGROUND ENSEMBLE (LETKFOI) -################################################ -################################################ -# 5. RUN JEDI +# RUN JEDI ################################################ if [[ ! -e Data ]]; then @@ -156,7 +197,7 @@ fi if [[ $do_HOFX == "YES" ]]; then export pgm="fv3jedi_letkf.x" . prep_step - ${MPIEXEC} -n $NPROC_JEDI ${JEDI_EXEC} hofx_land.yaml >>$pgmout 2>errfile + ${MPIEXEC} -n $NPROC_JEDI ${JEDI_EXECDIR}/$pgm hofx_land.yaml >>$pgmout 2>errfile export err=$?; err_chk cp errfile errfile_jedi_hofx if [[ $err != 0 ]]; then @@ -166,7 +207,7 @@ if [[ $do_HOFX == "YES" ]]; then fi ################################################ -# 6. APPLY INCREMENT TO UFS RESTARTS +# APPLY INCREMENT TO UFS RESTARTS ################################################ if [[ $do_DA == "YES" ]]; then @@ -201,20 +242,17 @@ EOF fi ################################################ -# 7. 
CLEAN UP +# CLEAN UP ################################################ if [[ -d output/DA/hofx ]]; then - cp -r output/DA/hofx ${COMOUT}/${mem_ens} + mkdir -p ${COMOUT}/hofx + cp -r output/DA/hofx ${COMOUT}/hofx fi # keep increments if [ $SAVE_INCR == "YES" ] && [ $do_DA == "YES" ]; then - mkdir -p ${COMOUT}/${mem_ens}/jedi_incr - cp ${JEDIWORKDIR}/${FILEDATE}.xainc.sfc_data.tile*.nc ${COMOUT}/${mem_ens}/jedi_incr + mkdir -p ${COMOUT}/jedi_incr + cp -p ${DATA}/${FILEDATE}.xainc.sfc_data.tile*.nc ${COMOUT}/jedi_incr fi -# clean up -if [[ $KEEPJEDIDIR == "NO" ]]; then - rm -rf ${JEDIWORKDIR} -fi diff --git a/scripts/exlandda_forecast.sh b/scripts/exlandda_forecast.sh index a635362e..d0be0709 100755 --- a/scripts/exlandda_forecast.sh +++ b/scripts/exlandda_forecast.sh @@ -26,15 +26,11 @@ nYYYY=${NTIME:0:4} nMM=${NTIME:4:2} nDD=${NTIME:6:2} nHH=${NTIME:8:2} -mem_ens="mem000" -MEM_WORKDIR=${WORKDIR}/${mem_ens} FREQ=$((${FCSTHR}*3600)) RDD=$((${FCSTHR}/24)) RHH=$((${FCSTHR}%24)) -cd $MEM_WORKDIR - # load modulefiles BUILD_VERSION_FILE="${HOMElandda}/versions/build.ver_${MACHINE}" if [ -e ${BUILD_VERSION_FILE} ]; then @@ -44,10 +40,6 @@ module use modulefiles; module load modules.landda MPIEXEC=`which mpiexec` MPIRUN=${MPIRUN:-`which mpiexec`} -#SNOWDEPTHVAR=snwdph - -cd $MEM_WORKDIR - # convert back to vector, run model (all members) convert back to vector, run model (all members) if [[ ${do_jedi} == "YES" ]]; then @@ -55,7 +47,7 @@ if [[ ${do_jedi} == "YES" ]]; then echo '************************************************' echo 'calling tile2vector' - cp ${PARMlandda}/templates/template.tile2vector tile2vector.namelist + cp ${PARMlandda}/templates/template.tile2vector tile2vector.namelist sed -i "s|FIXlandda|${FIXlandda}|g" tile2vector.namelist sed -i -e "s/XXYYYY/${YYYY}/g" tile2vector.namelist @@ -78,14 +70,14 @@ if [[ ${do_jedi} == "YES" ]]; then fi # save analysis restart - mkdir -p ${COMOUT}/${mem_ens}/restarts/vector - cp ${MEM_WORKDIR}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc ${COMOUT}/${mem_ens}/restarts/vector/ufs_land_restart_anal.${YYYY}-${MM}-${DD}_${HH}-00-00.nc + mkdir -p ${COMOUT}/RESTART/vector + cp -p ${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc ${COMOUT}/RESTART/vector/ufs_land_restart_anal.${YYYY}-${MM}-${DD}_${HH}-00-00.nc echo '************************************************' echo 'running the forecast model' # update model namelist - cp ${PARMlandda}/templates/template.ufs-noahMP.namelist.${ATMOS_FORC} ufs-land.namelist + cp ${PARMlandda}/templates/template.ufs-noahMP.namelist.${ATMOS_FORC} ufs-land.namelist sed -i "s|FIXlandda|${FIXlandda}|g" ufs-land.namelist sed -i -e "s/XXYYYY/${YYYY}/g" ufs-land.namelist @@ -96,9 +88,6 @@ if [[ ${do_jedi} == "YES" ]]; then sed -i -e "s/XXRDD/${RDD}/g" ufs-land.namelist sed -i -e "s/XXRHH/${RHH}/g" ufs-land.namelist - # submit model - echo $MEM_WORKDIR - nt=$SLURM_NTASKS export pgm="ufsLand.exe" @@ -139,11 +128,11 @@ if [[ ${do_jedi} == "YES" ]]; then fi # save analysis restart - mkdir -p ${COMOUT}/${mem_ens}/restarts/tile + mkdir -p ${COMOUT}/RESTART/tile for tile in 1 2 3 4 5 6 do - cp ${MEM_WORKDIR}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc ${COMOUT}/${mem_ens}/restarts/tile/ufs_land_restart_anal.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc - cp ${MEM_WORKDIR}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc ${COMOUT}/${mem_ens}/restarts/tile/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${tile}.nc + cp -p 
${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc ${COMOUT}/RESTART/tile/ufs_land_restart_anal.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc + cp -p ${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc ${COMOUT}/RESTART/tile/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${tile}.nc done echo '************************************************' @@ -173,14 +162,6 @@ if [[ ${do_jedi} == "YES" ]]; then exit 1 fi - # create run folder - RUNDIR=${DATA}/noahmp/${TEST_NAME_RST} - [[ -d ${RUNDIR} ]] && echo "Warning: remove old run folder!" && rm -rf ${RUNDIR} - mkdir -p ${RUNDIR} - cd ${RUNDIR} - - echo "NoahMP run dir= $RUNDIR" - # modify some env variables - reduce core usage export ATM_compute_tasks=0 export ATM_io_tasks=1 @@ -224,7 +205,7 @@ if [[ ${do_jedi} == "YES" ]]; then # restart if [ $WARM_START = .true. ]; then # NoahMP restart files - cp ${COMOUT}/${mem_ens}/restarts/tile/ufs.cpld.lnd.out.${RESTART_FILE_SUFFIX_SECS}.tile*.nc RESTART/. + cp ${COMOUT}/RESTART/tile/ufs.cpld.lnd.out.${RESTART_FILE_SUFFIX_SECS}.tile*.nc RESTART/. # CMEPS restart and pointer files RFILE1=ufs.cpld.cpl.r.${RESTART_FILE_SUFFIX_SECS}.nc @@ -286,12 +267,12 @@ fi ############################ # check model ouput (all members) if [[ ${ATMOS_FORC} == "era5" ]]; then - if [[ -e ${MEM_WORKDIR}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc ]]; then - cp ${MEM_WORKDIR}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc ${COMOUT}/${mem_ens}/restarts/vector/ufs_land_restart_back.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc + if [[ -e ${DATA}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc ]]; then + cp -p ${DATA}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc ${COMOUT}/RESTART/vector/ufs_land_restart_back.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc fi elif [[ ${ATMOS_FORC} == "gswp3" ]]; then for tile in 1 2 3 4 5 6 do - cp ${RUNDIR}/ufs.cpld.lnd.out.${nYYYY}-${nMM}-${nDD}-00000.tile${tile}.nc ${COMOUT}/${mem_ens}/restarts/tile/ufs_land_restart_back.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.tile${tile}.nc + cp -p ${DATA}/ufs.cpld.lnd.out.${nYYYY}-${nMM}-${nDD}-00000.tile${tile}.nc ${COMOUT}/RESTART/tile/ufs_land_restart_back.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.tile${tile}.nc done fi diff --git a/scripts/exlandda_prep_bmat.sh b/scripts/exlandda_prep_bmat.sh deleted file mode 100755 index 83781500..00000000 --- a/scripts/exlandda_prep_bmat.sh +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/sh - -set -ex - -############################ -# copy restarts to workdir, convert to UFS tile for DA (all members) - -if [[ ${EXP_NAME} == "openloop" ]]; then - do_jedi="NO" -else - do_jedi="YES" - SAVE_TILE="YES" -fi - -TPATH=${FIXlandda}/forcing/${ATMOS_FORC}/orog_files/ -YYYY=${PDY:0:4} -MM=${PDY:4:2} -DD=${PDY:6:2} -HH=${cyc} - -mem_ens="mem000" - -MEM_WORKDIR=${WORKDIR}/${mem_ens} -JEDIWORKDIR=${WORKDIR}/mem000/jedi -FILEDATE=${YYYY}${MM}${DD}.${HH}0000 - -cd $MEM_WORKDIR - -# load modulefiles -BUILD_VERSION_FILE="${HOMElandda}/versions/build.ver_${MACHINE}" -if [ -e ${BUILD_VERSION_FILE} ]; then - . ${BUILD_VERSION_FILE} -fi -module use modulefiles; module load modules.landda - -#SNOWDEPTHVAR=snwdph -YAML_DA=construct -GFSv17="NO" -B=30 # back ground error std for LETKFOI -cd $JEDIWORKDIR - -################################################ -# 4. 
CREATE BACKGROUND ENSEMBLE (LETKFOI) -################################################ - -if [[ ${DAtype} == "letkfoi_snow" ]]; then - - if [ $GFSv17 == "YES" ]; then - SNOWDEPTHVAR="snodl" - else - SNOWDEPTHVAR="snwdph" - # replace field overwrite file - cp ${PARMlandda}/jedi/gfs-land.yaml ${JEDIWORKDIR}/gfs-land.yaml - fi - # FOR LETKFOI, CREATE THE PSEUDO-ENSEMBLE - for ens in pos neg - do - if [ -e $JEDIWORKDIR/mem_${ens} ]; then - rm -r $JEDIWORKDIR/mem_${ens} - fi - mkdir -p $JEDIWORKDIR/mem_${ens} - for tile in 1 2 3 4 5 6 - do - cp ${JEDIWORKDIR}/${FILEDATE}.sfc_data.tile${tile}.nc ${JEDIWORKDIR}/mem_${ens}/${FILEDATE}.sfc_data.tile${tile}.nc - done - cp ${JEDIWORKDIR}/${FILEDATE}.coupler.res ${JEDIWORKDIR}/mem_${ens}/${FILEDATE}.coupler.res - done - - echo 'do_landDA: calling create ensemble' - - # using ioda mods to get a python version with netCDF4 - ${USHlandda}/letkf_create_ens.py $FILEDATE $SNOWDEPTHVAR $B - if [[ $? != 0 ]]; then - echo "letkf create failed" - exit 10 - fi - -fi diff --git a/scripts/exlandda_prep_exp.sh b/scripts/exlandda_prep_exp.sh index 25787e74..77346763 100755 --- a/scripts/exlandda_prep_exp.sh +++ b/scripts/exlandda_prep_exp.sh @@ -6,13 +6,12 @@ set -ex # copy restarts to workdir, convert to UFS tile for DA (all members) if [[ ${EXP_NAME} == "openloop" ]]; then - do_jedi="NO" + do_jedi="NO" else - do_jedi="YES" - SAVE_TILE="YES" + do_jedi="YES" + SAVE_TILE="YES" fi -echo ${FIXlandda}, ${ATMOS_FORC} TPATH=${FIXlandda}/forcing/${ATMOS_FORC}/orog_files/ YYYY=${PDY:0:4} MM=${PDY:4:2} @@ -22,23 +21,17 @@ YYYP=${PTIME:0:4} MP=${PTIME:4:2} DP=${PTIME:6:2} HP=${PTIME:8:2} -mem_ens="mem000" -MEM_WORKDIR=${WORKDIR}/${mem_ens} -JEDIWORKDIR=${WORKDIR}/mem000/jedi FILEDATE=${YYYY}${MM}${DD}.${HH}0000 -mkdir -p ${MEM_WORKDIR} -mkdir -p $MEM_WORKDIR/modulefiles -cp ${HOMElandda}/modulefiles/build_${MACHINE}_intel.lua $MEM_WORKDIR/modulefiles/modules.landda.lua -cd $MEM_WORKDIR +mkdir -p modulefiles +cp ${HOMElandda}/modulefiles/build_${MACHINE}_intel.lua $DATA/modulefiles/modules.landda.lua # load modulefiles BUILD_VERSION_FILE="${HOMElandda}/versions/build.ver_${MACHINE}" if [ -e ${BUILD_VERSION_FILE} ]; then . ${BUILD_VERSION_FILE} fi - module use modulefiles; module load modules.landda if [[ $do_jedi == "YES" ]]; then @@ -46,18 +39,16 @@ if [[ $do_jedi == "YES" ]]; then if [[ $ATMOS_FORC == "era5" ]]; then # vector2tile for DA # copy restarts into work directory - rst_in=${COMOUT}/${mem_ens}/restarts/vector/ufs_land_restart_back.${YYYY}-${MM}-${DD}_${HH}-00-00.nc + rst_in=${COMIN}/RESTART/vector/ufs_land_restart_back.${YYYY}-${MM}-${DD}_${HH}-00-00.nc if [[ ! 
-e ${rst_in} ]]; then rst_in=${FIXlandda}/restarts/${ATMOS_FORC}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc fi - rst_out=${MEM_WORKDIR}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc + rst_out=${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc cp ${rst_in} ${rst_out} echo '************************************************' echo 'calling vector2tile' - export MEM_WORKDIR - # update vec2tile and tile2vec namelists cp ${PARMlandda}/templates/template.vector2tile vector2tile.namelist @@ -89,17 +80,15 @@ if [[ $do_jedi == "YES" ]]; then # tile2tile for DA echo '************************************************' echo 'calling tile2tile' - - export MEM_WORKDIR # copy restarts into work directory for tile in 1 2 3 4 5 6 do - rst_in=${COMOUT}/${mem_ens}/restarts/tile/ufs_land_restart_back.${YYYY}-${MM}-${DD}_${HH}-00-00.nc + rst_in=${COMIN}/RESTART/tile/ufs_land_restart_back.${YYYY}-${MM}-${DD}_${HH}-00-00.nc if [[ ! -e ${rst_in} ]]; then rst_in=${FIXlandda}/restarts/${ATMOS_FORC}/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${tile}.nc fi - rst_out=${MEM_WORKDIR}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc + rst_out=${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc cp ${rst_in} ${rst_out} done @@ -129,39 +118,18 @@ if [[ $do_jedi == "YES" ]]; then fi fi - if [[ ! -e $JEDIWORKDIR ]]; then - mkdir -p $JEDIWORKDIR - fi - cd $JEDIWORKDIR - if [[ $SAVE_TILE == "YES" ]]; then for tile in 1 2 3 4 5 6 do - cp ${MEM_WORKDIR}/${FILEDATE}.sfc_data.tile${tile}.nc ${MEM_WORKDIR}/${FILEDATE}.sfc_data_back.tile${tile}.nc + cp ${DATA}/${FILEDATE}.sfc_data.tile${tile}.nc ${COMOUT}/${FILEDATE}.sfc_data_back.tile${tile}.nc done fi #stage restarts for applying JEDI update (files will get directly updated) for tile in 1 2 3 4 5 6 do - ln -fs ${MEM_WORKDIR}/${FILEDATE}.sfc_data.tile${tile}.nc ${JEDIWORKDIR}/${FILEDATE}.sfc_data.tile${tile}.nc + cp -p ${DATA}/${FILEDATE}.sfc_data.tile${tile}.nc ${COMOUT}/${FILEDATE}.sfc_data.tile${tile}.nc done - cres_file=${JEDIWORKDIR}/${FILEDATE}.coupler.res - - if [[ -e ${MEM_WORKDIR}/${FILEDATE}.coupler.res ]]; then - ln -sf ${MEM_WORKDIR}/${FILEDATE}.coupler.res $cres_file - else # if not present, need to create coupler.res for JEDI - cp ${PARMlandda}/templates/template.coupler.res $cres_file - - sed -i -e "s/XXYYYY/${YYYY}/g" $cres_file - sed -i -e "s/XXMM/${MM}/g" $cres_file - sed -i -e "s/XXDD/${DD}/g" $cres_file - sed -i -e "s/XXHH/${HH}/g" $cres_file - sed -i -e "s/XXYYYP/${YYYP}/g" $cres_file - sed -i -e "s/XXMP/${MP}/g" $cres_file - sed -i -e "s/XXDP/${DP}/g" $cres_file - sed -i -e "s/XXHP/${HP}/g" $cres_file - fi fi # do_jedi setup diff --git a/scripts/exlandda_prep_obs.sh b/scripts/exlandda_prep_obs.sh index cb865530..6b8fff0a 100755 --- a/scripts/exlandda_prep_obs.sh +++ b/scripts/exlandda_prep_obs.sh @@ -21,11 +21,7 @@ MP=${PTIME:4:2} DP=${PTIME:6:2} HP=${PTIME:8:2} -mem_ens="mem000" - -JEDIWORKDIR=${WORKDIR}/mem000/jedi - -cd $JEDIWORKDIR +mkdir -p "${COMOUT}/OBS" ################################################ # 2. 
PREPARE OBS FILES @@ -55,7 +51,7 @@ for obs in "${OBS_TYPES[@]}"; do # check obs are available if [[ -e $obsfile ]]; then echo "do_landDA: $i observations found: $obsfile" - ln -fs $obsfile ${obs}_${YYYY}${MM}${DD}${HH}.nc + cp -p $obsfile ${COMOUT}/OBS/${obs}_${YYYY}${MM}${DD}${HH}.nc else echo "${obs} observations not found: $obsfile" fi From df178dbc345e5f7c5b3ebff4d8346c8b01831f01 Mon Sep 17 00:00:00 2001 From: Chan-Hoo Jeon Date: Fri, 17 May 2024 05:31:04 -0500 Subject: [PATCH 03/18] update ex-scripts --- parm/land_analysis_era5_orion.yaml | 2 +- parm/land_analysis_gswp3_orion.yaml | 4 ++-- scripts/exlandda_analysis.sh | 13 +++++++++---- scripts/exlandda_forecast.sh | 2 +- scripts/exlandda_prep_exp.sh | 2 +- scripts/exlandda_prep_obs.sh | 4 ++-- 6 files changed, 16 insertions(+), 11 deletions(-) diff --git a/parm/land_analysis_era5_orion.yaml b/parm/land_analysis_era5_orion.yaml index 846c883a..a3c1b45f 100644 --- a/parm/land_analysis_era5_orion.yaml +++ b/parm/land_analysis_era5_orion.yaml @@ -14,7 +14,7 @@ workflow: ACCOUNT: "epic" EXP_NAME: "LETKF" EXP_BASEDIR: "/work/noaa/epic/{USER}/landda_test" - JEDI_INSTALL: "/work/noaa/epic/UFS_Land-DA/jedi_skylabv7.0" + JEDI_INSTALL: "/work/noaa/epic/UFS_Land-DA_Dev/jedi_v7" FORCING: "era5" RES: "96" FCSTHR: "24" diff --git a/parm/land_analysis_gswp3_orion.yaml b/parm/land_analysis_gswp3_orion.yaml index 51c958f6..05cfc843 100644 --- a/parm/land_analysis_gswp3_orion.yaml +++ b/parm/land_analysis_gswp3_orion.yaml @@ -14,7 +14,7 @@ workflow: ACCOUNT: "epic" EXP_NAME: "LETKF" EXP_BASEDIR: "/work/noaa/epic/{USER}/landda_test" - JEDI_INSTALL: "/work/noaa/epic/UFS_Land-DA/jedi_skylabv7.0" + JEDI_INSTALL: "/work/noaa/epic/UFS_Land-DA_Dev/jedi_v7" WARMSTART_DIR: "" FORCING: "gswp3" RES: "96" @@ -37,7 +37,7 @@ workflow: DATAROOT: "&PTMP;/&envir;/tmp" KEEPDATA: "YES" LOGDIR: "&COMROOT;/output/logs/run_&FORCING;" - LOGFN_SUFFIX: "_@Y@m@d.log" + LOGFN_SUFFIX: "_@Y@m@d@H.log" PATHRT: "&EXP_BASEDIR;" PDY: "@Y@m@d" cyc: "@H" diff --git a/scripts/exlandda_analysis.sh b/scripts/exlandda_analysis.sh index 10fc483c..59762cc8 100755 --- a/scripts/exlandda_analysis.sh +++ b/scripts/exlandda_analysis.sh @@ -1,6 +1,6 @@ #!/bin/sh -set -ex +set -xue ############################ # copy restarts to workdir, convert to UFS tile for DA (all members) @@ -34,6 +34,8 @@ BUILD_VERSION_FILE="${HOMElandda}/versions/build.ver_${MACHINE}" if [ -e ${BUILD_VERSION_FILE} ]; then . ${BUILD_VERSION_FILE} fi +mkdir -p modulefiles +cp ${HOMElandda}/modulefiles/build_${MACHINE}_intel.lua $DATA/modulefiles/modules.landda.lua module use modulefiles; module load modules.landda MPIEXEC=`which mpiexec` @@ -43,6 +45,10 @@ GFSv17="NO" B=30 # back ground error std for LETKFOI if [[ $do_jedi == "YES" ]]; then + + ln -nsf ${COMIN}/${FILEDATE}.sfc_data.tile*.nc . + ln -nsf ${COMIN}/OBS/*_${YYYY}${MM}${DD}${HH}.nc . + cres_file=${DATA}/${FILEDATE}.coupler.res cp ${PARMlandda}/templates/template.coupler.res $cres_file @@ -97,8 +103,6 @@ fi # DETERMINE REQUESTED JEDI TYPE, CONSTRUCT YAMLS ################################################ -mkdir -p output/DA/hofx - do_DA="YES" do_HOFX="NO" @@ -107,6 +111,7 @@ if [[ $do_DA == "NO" && $do_HOFX == "NO" ]]; then exit 0 fi +mkdir -p output/DA/hofx # if yaml is specified by user, use that. 
Otherwise, build the yaml if [[ $do_DA == "YES" ]]; then @@ -247,7 +252,7 @@ fi if [[ -d output/DA/hofx ]]; then mkdir -p ${COMOUT}/hofx - cp -r output/DA/hofx ${COMOUT}/hofx + cp -rp output/DA/hofx ${COMOUT} fi # keep increments diff --git a/scripts/exlandda_forecast.sh b/scripts/exlandda_forecast.sh index d0be0709..7e7962f5 100755 --- a/scripts/exlandda_forecast.sh +++ b/scripts/exlandda_forecast.sh @@ -1,6 +1,6 @@ #!/bin/sh -set -ex +set -xue ############################ # copy restarts to workdir, convert to UFS tile for DA (all members) diff --git a/scripts/exlandda_prep_exp.sh b/scripts/exlandda_prep_exp.sh index 77346763..201aecfd 100755 --- a/scripts/exlandda_prep_exp.sh +++ b/scripts/exlandda_prep_exp.sh @@ -1,6 +1,6 @@ #!/bin/sh -set -ex +set -xue ############################ # copy restarts to workdir, convert to UFS tile for DA (all members) diff --git a/scripts/exlandda_prep_obs.sh b/scripts/exlandda_prep_obs.sh index 6b8fff0a..fd36c5e8 100755 --- a/scripts/exlandda_prep_obs.sh +++ b/scripts/exlandda_prep_obs.sh @@ -1,6 +1,6 @@ #!/bin/sh -set -ex +set -xue ############################ # copy restarts to workdir, convert to UFS tile for DA (all members) @@ -50,7 +50,7 @@ for obs in "${OBS_TYPES[@]}"; do # check obs are available if [[ -e $obsfile ]]; then - echo "do_landDA: $i observations found: $obsfile" + echo "do_landDA: $obs observations found: $obsfile" cp -p $obsfile ${COMOUT}/OBS/${obs}_${YYYY}${MM}${DD}${HH}.nc else echo "${obs} observations not found: $obsfile" From 1f624fd7b8c1cf4d36da049c0dfa375255dac847 Mon Sep 17 00:00:00 2001 From: Chan-Hoo Jeon Date: Fri, 17 May 2024 09:00:17 -0500 Subject: [PATCH 04/18] add pre_anal and post_anal --- jobs/JLANDDA_POST_ANAL | 104 +++++++ jobs/{JLANDDA_PREP_EXP => JLANDDA_PRE_ANAL} | 2 +- parm/land_analysis_gswp3_orion.yaml | 81 +++-- parm/run_without_rocoto.sh | 26 +- scripts/exlandda_analysis.sh | 52 +--- scripts/exlandda_forecast.sh | 328 +++++++------------- scripts/exlandda_post_anal.sh | 130 ++++++++ scripts/exlandda_pre_anal.sh | 117 +++++++ scripts/exlandda_prep_exp.sh | 135 -------- scripts/exlandda_prep_obs.sh | 7 - 10 files changed, 556 insertions(+), 426 deletions(-) create mode 100755 jobs/JLANDDA_POST_ANAL rename jobs/{JLANDDA_PREP_EXP => JLANDDA_PRE_ANAL} (98%) create mode 100755 scripts/exlandda_post_anal.sh create mode 100755 scripts/exlandda_pre_anal.sh delete mode 100755 scripts/exlandda_prep_exp.sh diff --git a/jobs/JLANDDA_POST_ANAL b/jobs/JLANDDA_POST_ANAL new file mode 100755 index 00000000..dbdb8861 --- /dev/null +++ b/jobs/JLANDDA_POST_ANAL @@ -0,0 +1,104 @@ +#!/bin/bash + +date +export PS4='+ $SECONDS + ' +set -xue +# +#----------------------------------------------------------------------- +# +# Set the NCO standard environment variables (Table 1, pp.4) +# +#----------------------------------------------------------------------- +# +export USHlandda="${HOMElandda}/ush" +export EXEClandda="${HOMElandda}/exec" +export PARMlandda="${HOMElandda}/parm" +export FIXlandda="${HOMElandda}/fix" +export SCRIPTSlandda="${HOMElandda}/scripts" +# +#----------------------------------------------------------------------- +# +# Define job and jobid by default for rocoto +# +#----------------------------------------------------------------------- +# +WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}" +if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + if [ "${SCHED}" = "slurm" ]; then + job=${SLURM_JOB_NAME} + pid=${SLURM_JOB_ID} + elif [ "${SCHED}" = "pbspro" ]; then + job=${PBS_JOBNAME} + pid=${PBS_JOBID} + else + job="task" 
+ pid=$$ + fi + jobid="${job}.${PDY}${cyc}.${pid}" +fi +# +#----------------------------------------------------------------------- +# +# Create a temp working directory (DATA) and cd into it. +# +#----------------------------------------------------------------------- +# +export DATA="${DATA:-${DATAROOT}/${jobid}}" +mkdir -p $DATA +cd $DATA +# +#----------------------------------------------------------------------- +# +# Define NCO environment variables and set COM type definitions. +# +#----------------------------------------------------------------------- +# +export NET="${NET:-landda}" +export RUN="${RUN:-landda}" + +[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT +if [ "${MACHINE}" = "WCOSS2" ]; then + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" +else + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" +fi + +mkdir -p ${COMOUT} + +# Create a teomporary share directory +export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" +mkdir -p ${DATA_SHARE} + +# Run setpdy to initialize PDYm and PDYp variables +export cycle="${cycle:-t${cyc}z}" +setpdy.sh +. ./PDY +# +#----------------------------------------------------------------------- +# +# Call the ex-script for this J-job. +# +#----------------------------------------------------------------------- +# +export pgmout="${DATA}/OUTPUT.$$" +env + +${SCRIPTSlandda}/exlandda_post_anal.sh +export err=$?; err_chk + +if [ -e "$pgmout" ]; then + cat $pgmout +fi +# +#----------------------------------------------------------------------- +# +# Whether or not working directory DATA should be kept. 
+# +#----------------------------------------------------------------------- +# +if [ "${KEEPDATA}" = "NO" ]; then + rm -rf ${DATA} +fi +date diff --git a/jobs/JLANDDA_PREP_EXP b/jobs/JLANDDA_PRE_ANAL similarity index 98% rename from jobs/JLANDDA_PREP_EXP rename to jobs/JLANDDA_PRE_ANAL index cc03b593..30750dd2 100755 --- a/jobs/JLANDDA_PREP_EXP +++ b/jobs/JLANDDA_PRE_ANAL @@ -85,7 +85,7 @@ setpdy.sh export pgmout="${DATA}/OUTPUT.$$" env -${SCRIPTSlandda}/exlandda_prep_exp.sh +${SCRIPTSlandda}/exlandda_pre_anal.sh export err=$?; err_chk if [ -e "$pgmout" ]; then diff --git a/parm/land_analysis_gswp3_orion.yaml b/parm/land_analysis_gswp3_orion.yaml index 05cfc843..b121a0d8 100644 --- a/parm/land_analysis_gswp3_orion.yaml +++ b/parm/land_analysis_gswp3_orion.yaml @@ -47,7 +47,34 @@ workflow: DATADEP_FILE2: "&COMROOT;/&NET;/&model_ver;/&RUN;.@Y@m@d/@H/RESTART/ufs_land_restart.@Y-@m-@d_@H-00-00.tile1.nc" log: "&LOGDIR;/workflow.log" tasks: - task_prep_exp: + task_prep_obs: + attrs: + cycledefs: cycled + envars: + OBSDIR: "&OBSDIR;" + OBSDIR_SUBDIR: "&OBSDIR_SUBDIR;" + OBS_TYPES: "&OBS_TYPES;" + MACHINE: "&MACHINE;" + SCHED: "&SCHED;" + ACCOUNT: "&ACCOUNT;" + EXP_NAME: "&EXP_NAME;" + ATMOS_FORC: "&FORCING;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + PTIME: "&PTIME;" + account: "&ACCOUNT;" + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_obs" "&HOMElandda;" "&MACHINE;"' + jobname: prep_obs + cores: 1 + walltime: 00:02:00 + queue: batch + join: "&LOGDIR;/prep_obs&LOGFN_SUFFIX;" + task_pre_anal: attrs: cycledefs: cycled envars: @@ -68,12 +95,12 @@ workflow: cyc: "&cyc;" PTIME: "&PTIME;" account: "&ACCOUNT;" - command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_exp" "&HOMElandda;" "&MACHINE;"' - jobname: prep_exp + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "pre_anal" "&HOMElandda;" "&MACHINE;"' + jobname: pre_anal cores: 1 - walltime: 00:02:00 + walltime: 00:05:00 queue: batch - join: "&LOGDIR;/prep_exp&LOGFN_SUFFIX;" + join: "&LOGDIR;/pre_anal&LOGFN_SUFFIX;" dependency: or: datadep_file1: @@ -84,18 +111,18 @@ workflow: attrs: age: 5 value: "&DATADEP_FILE2;" - task_prep_obs: + task_analysis: attrs: cycledefs: cycled envars: - OBSDIR: "&OBSDIR;" - OBSDIR_SUBDIR: "&OBSDIR_SUBDIR;" OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" ATMOS_FORC: "&FORCING;" + RES: "&RES;" + TSTUB: "&TSTUB;" model_ver: "&model_ver;" HOMElandda: "&HOMElandda;" COMROOT: "&COMROOT;" @@ -104,22 +131,26 @@ workflow: PDY: "&PDY;" cyc: "&cyc;" PTIME: "&PTIME;" + NTIME: "&NTIME;" + DAtype: "&DAtype;" + SNOWDEPTHVAR: "&SNOWDEPTHVAR;" + NPROC_JEDI: "&NPROCS_ANALYSIS;" + JEDI_INSTALL: "&JEDI_INSTALL;" account: "&ACCOUNT;" - command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_obs" "&HOMElandda;" "&MACHINE;"' - jobname: prep_obs - cores: 1 - walltime: 00:02:00 + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "analysis" "&HOMElandda;" "&MACHINE;"' + jobname: analysis + nodes: "1:ppn=&NPROCS_ANALYSIS;" + walltime: 00:15:00 queue: batch - join: "&LOGDIR;/prep_obs&LOGFN_SUFFIX;" + join: "&LOGDIR;/analysis&LOGFN_SUFFIX;" dependency: taskdep: attrs: - task: prep_exp - task_analysis: + task: pre_anal + task_post_anal: attrs: cycledefs: cycled envars: - OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" @@ -128,6 +159,7 @@ workflow: RES: "&RES;" TSTUB: "&TSTUB;" 
model_ver: "&model_ver;" + RUN: "&RUN;" HOMElandda: "&HOMElandda;" COMROOT: "&COMROOT;" DATAROOT: "&DATAROOT;" @@ -135,22 +167,17 @@ workflow: PDY: "&PDY;" cyc: "&cyc;" PTIME: "&PTIME;" - NTIME: "&NTIME;" - DAtype: "&DAtype;" - SNOWDEPTHVAR: "&SNOWDEPTHVAR;" - NPROC_JEDI: "&NPROCS_ANALYSIS;" - JEDI_INSTALL: "&JEDI_INSTALL;" account: "&ACCOUNT;" - command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "analysis" "&HOMElandda;" "&MACHINE;"' - jobname: analysis - nodes: "1:ppn=&NPROCS_ANALYSIS;" - walltime: 00:15:00 + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "post_anal" "&HOMElandda;" "&MACHINE;"' + jobname: post_anal + cores: 1 + walltime: 00:05:00 queue: batch - join: "&LOGDIR;/analysis&LOGFN_SUFFIX;" + join: "&LOGDIR;/post_anal&LOGFN_SUFFIX;" dependency: taskdep: attrs: - task: prep_obs + task: pre_anal task_forecast: attrs: cycledefs: cycled diff --git a/parm/run_without_rocoto.sh b/parm/run_without_rocoto.sh index 17c6d33d..971f7618 100755 --- a/parm/run_without_rocoto.sh +++ b/parm/run_without_rocoto.sh @@ -62,23 +62,23 @@ fi # Call J-job scripts # -echo " ... PREP_EXP running ... " -${HOMElandda}/parm/task_load_modules_run_jjob.sh "prep_exp" "${HOMElandda}" "${MACHINE}" +echo " ... PREP_OBS running ... " +${HOMElandda}/parm/task_load_modules_run_jjob.sh "prep_obs" "${HOMElandda}" "${MACHINE}" export err=$? if [ $err = 0 ]; then - echo " === PREP_EXP completed successfully === " + echo " === PREP_OBS completed successfully === " else - echo " ERROR: PREP_EXP failed !!! " + echo " ERROR: PREP_OBS failed !!! " exit 1 fi -echo " ... PREP_OBS running ... " -${HOMElandda}/parm/task_load_modules_run_jjob.sh "prep_obs" "${HOMElandda}" "${MACHINE}" +echo " ... PRE_ANAL running ... " +${HOMElandda}/parm/task_load_modules_run_jjob.sh "pre_anal" "${HOMElandda}" "${MACHINE}" export err=$? if [ $err = 0 ]; then - echo " === PREP_OBS completed successfully === " + echo " === PRE_ANAL completed successfully === " else - echo " ERROR: PREP_OBS failed !!! " + echo " ERROR: PRE_ANAL failed !!! " exit 2 fi @@ -89,6 +89,16 @@ if [ $err = 0 ]; then echo " === Task ANALYSIS completed successfully === " else echo " ERROR: ANALYSIS failed !!! " + exit 3 +fi + +echo " ... POST_ANAL running ... " +${HOMElandda}/parm/task_load_modules_run_jjob.sh "post_anal" "${HOMElandda}" "${MACHINE}" +export err=$? +if [ $err = 0 ]; then + echo " === POST_ANAL completed successfully === " +else + echo " ERROR: POST_ANAL failed !!! " exit 4 fi diff --git a/scripts/exlandda_analysis.sh b/scripts/exlandda_analysis.sh index 59762cc8..d30926c6 100755 --- a/scripts/exlandda_analysis.sh +++ b/scripts/exlandda_analysis.sh @@ -5,13 +5,6 @@ set -xue ############################ # copy restarts to workdir, convert to UFS tile for DA (all members) -if [[ ${EXP_NAME} == "openloop" ]]; then - do_jedi="NO" -else - do_jedi="YES" - SAVE_TILE="YES" -fi - TPATH=${FIXlandda}/forcing/${ATMOS_FORC}/orog_files/ YYYY=${PDY:0:4} MM=${PDY:4:2} @@ -27,8 +20,6 @@ FILEDATE=${YYYY}${MM}${DD}.${HH}0000 JEDI_STATICDIR=${JEDI_INSTALL}/jedi-bundle/fv3-jedi/test/Data JEDI_EXECDIR=${JEDI_INSTALL}/build/bin -SAVE_INCR="YES" - # load modulefiles BUILD_VERSION_FILE="${HOMElandda}/versions/build.ver_${MACHINE}" if [ -e ${BUILD_VERSION_FILE} ]; then @@ -44,23 +35,23 @@ YAML_DA=construct GFSv17="NO" B=30 # back ground error std for LETKFOI -if [[ $do_jedi == "YES" ]]; then - - ln -nsf ${COMIN}/${FILEDATE}.sfc_data.tile*.nc . - ln -nsf ${COMIN}/OBS/*_${YYYY}${MM}${DD}${HH}.nc . 
+for itile in {1..6} +do + cp ${COMIN}/${FILEDATE}.sfc_data.ini.tile${itile}.nc ${FILEDATE}.sfc_data.tile${itile}.nc +done +ln -nsf ${COMIN}/OBS/*_${YYYY}${MM}${DD}${HH}.nc . - cres_file=${DATA}/${FILEDATE}.coupler.res - cp ${PARMlandda}/templates/template.coupler.res $cres_file +cres_file=${DATA}/${FILEDATE}.coupler.res +cp ${PARMlandda}/templates/template.coupler.res $cres_file - sed -i -e "s/XXYYYY/${YYYY}/g" $cres_file - sed -i -e "s/XXMM/${MM}/g" $cres_file - sed -i -e "s/XXDD/${DD}/g" $cres_file - sed -i -e "s/XXHH/${HH}/g" $cres_file - sed -i -e "s/XXYYYP/${YYYP}/g" $cres_file - sed -i -e "s/XXMP/${MP}/g" $cres_file - sed -i -e "s/XXDP/${DP}/g" $cres_file - sed -i -e "s/XXHP/${HP}/g" $cres_file -fi +sed -i -e "s/XXYYYY/${YYYY}/g" $cres_file +sed -i -e "s/XXMM/${MM}/g" $cres_file +sed -i -e "s/XXDD/${DD}/g" $cres_file +sed -i -e "s/XXHH/${HH}/g" $cres_file +sed -i -e "s/XXYYYP/${YYYP}/g" $cres_file +sed -i -e "s/XXMP/${MP}/g" $cres_file +sed -i -e "s/XXDP/${DP}/g" $cres_file +sed -i -e "s/XXHP/${HP}/g" $cres_file ################################################ # CREATE BACKGROUND ENSEMBLE (LETKFOI) @@ -82,10 +73,7 @@ if [[ ${DAtype} == "letkfoi_snow" ]]; then rm -r $DATA/mem_${ens} fi mkdir -p $DATA/mem_${ens} - for tile in 1 2 3 4 5 6 - do - cp ${COMIN}/${FILEDATE}.sfc_data.tile${tile}.nc ${DATA}/mem_${ens}/${FILEDATE}.sfc_data.tile${tile}.nc - done + cp ${FILEDATE}.sfc_data.tile*.nc ${DATA}/mem_${ens} cp ${DATA}/${FILEDATE}.coupler.res ${DATA}/mem_${ens}/${FILEDATE}.coupler.res done @@ -246,17 +234,11 @@ EOF fi -################################################ -# CLEAN UP -################################################ - if [[ -d output/DA/hofx ]]; then - mkdir -p ${COMOUT}/hofx cp -rp output/DA/hofx ${COMOUT} fi -# keep increments -if [ $SAVE_INCR == "YES" ] && [ $do_DA == "YES" ]; then +if [[ $do_DA == "YES" ]]; then mkdir -p ${COMOUT}/jedi_incr cp -p ${DATA}/${FILEDATE}.xainc.sfc_data.tile*.nc ${COMOUT}/jedi_incr fi diff --git a/scripts/exlandda_forecast.sh b/scripts/exlandda_forecast.sh index 7e7962f5..ab9385eb 100755 --- a/scripts/exlandda_forecast.sh +++ b/scripts/exlandda_forecast.sh @@ -5,13 +5,6 @@ set -xue ############################ # copy restarts to workdir, convert to UFS tile for DA (all members) -if [[ ${EXP_NAME} == "openloop" ]]; then - do_jedi="NO" -else - do_jedi="YES" - SAVE_TILE="YES" -fi - MACHINE_ID=${MACHINE} TPATH=${FIXlandda}/forcing/${ATMOS_FORC}/orog_files/ YYYY=${PDY:0:4} @@ -36,231 +29,140 @@ BUILD_VERSION_FILE="${HOMElandda}/versions/build.ver_${MACHINE}" if [ -e ${BUILD_VERSION_FILE} ]; then . 
${BUILD_VERSION_FILE} fi +mkdir -p modulefiles +cp ${HOMElandda}/modulefiles/build_${MACHINE}_intel.lua $DATA/modulefiles/modules.landda.lua module use modulefiles; module load modules.landda -MPIEXEC=`which mpiexec` -MPIRUN=${MPIRUN:-`which mpiexec`} - -# convert back to vector, run model (all members) convert back to vector, run model (all members) -if [[ ${do_jedi} == "YES" ]]; then - - if [[ ${ATMOS_FORC} == "era5" ]]; then - echo '************************************************' - echo 'calling tile2vector' - - cp ${PARMlandda}/templates/template.tile2vector tile2vector.namelist - - sed -i "s|FIXlandda|${FIXlandda}|g" tile2vector.namelist - sed -i -e "s/XXYYYY/${YYYY}/g" tile2vector.namelist - sed -i -e "s/XXMM/${MM}/g" tile2vector.namelist - sed -i -e "s/XXDD/${DD}/g" tile2vector.namelist - sed -i -e "s/XXHH/${HH}/g" tile2vector.namelist - sed -i -e "s/MODEL_FORCING/${ATMOS_FORC}/g" vector2tile.namelist - sed -i -e "s/XXRES/${RES}/g" tile2vector.namelist - sed -i -e "s/XXTSTUB/${TSTUB}/g" tile2vector.namelist - sed -i -e "s#XXTPATH#${TPATH}#g" tile2vector.namelist - - export pgm="vector2tile_converter.exe" - . prep_step - ${EXEClandda}/$pgm tile2vector.namelist >>$pgmout 2>errfile - export err=$?; err_chk - cp errfile errfile_tile2vector - if [[ $err != 0 ]]; then - echo "tile2vector failed" - exit 10 - fi - - # save analysis restart - mkdir -p ${COMOUT}/RESTART/vector - cp -p ${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc ${COMOUT}/RESTART/vector/ufs_land_restart_anal.${YYYY}-${MM}-${DD}_${HH}-00-00.nc - - echo '************************************************' - echo 'running the forecast model' - - # update model namelist - cp ${PARMlandda}/templates/template.ufs-noahMP.namelist.${ATMOS_FORC} ufs-land.namelist - - sed -i "s|FIXlandda|${FIXlandda}|g" ufs-land.namelist - sed -i -e "s/XXYYYY/${YYYY}/g" ufs-land.namelist - sed -i -e "s/XXMM/${MM}/g" ufs-land.namelist - sed -i -e "s/XXDD/${DD}/g" ufs-land.namelist - sed -i -e "s/XXHH/${HH}/g" ufs-land.namelist - sed -i -e "s/XXFREQ/${FREQ}/g" ufs-land.namelist - sed -i -e "s/XXRDD/${RDD}/g" ufs-land.namelist - sed -i -e "s/XXRHH/${RHH}/g" ufs-land.namelist - - nt=$SLURM_NTASKS - - export pgm="ufsLand.exe" - . prep_step - ${MPIEXEC} -n 1 ${EXEClandda}/$pgm >>$pgmout 2>errfile - export err=$?; err_chk - cp errfile errfile_ufsLand - if [[ $err != 0 ]]; then - echo "ufsLand failed" - exit 10 - fi - - # convert back to UFS tile, run model (all members) - elif [[ ${ATMOS_FORC} == "gswp3" ]]; then - echo '************************************************' - echo 'calling tile2tile' - - cp ${PARMlandda}/templates/template.jedi2ufs jedi2ufs.namelist - - sed -i "s|FIXlandda|${FIXlandda}|g" jedi2ufs.namelist - sed -i -e "s/XXYYYY/${YYYY}/g" jedi2ufs.namelist - sed -i -e "s/XXMM/${MM}/g" jedi2ufs.namelist - sed -i -e "s/XXDD/${DD}/g" jedi2ufs.namelist - sed -i -e "s/XXHH/${HH}/g" jedi2ufs.namelist - sed -i -e "s/MODEL_FORCING/${ATMOS_FORC}/g" jedi2ufs.namelist - sed -i -e "s/XXRES/${RES}/g" jedi2ufs.namelist - sed -i -e "s/XXTSTUB/${TSTUB}/g" jedi2ufs.namelist - sed -i -e "s#XXTPATH#${TPATH}#g" jedi2ufs.namelist - - export pgm="tile2tile_converter.exe" - . 
prep_step - ${EXEClandda}/$pgm jedi2ufs.namelist >>$pgmout 2>errfile - export err=$?; err_chk - cp errfile errfile_tile2tile - if [[ $err != 0 ]]; then - echo "tile2tile failed" - exit 10 - fi - - # save analysis restart - mkdir -p ${COMOUT}/RESTART/tile - for tile in 1 2 3 4 5 6 - do - cp -p ${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc ${COMOUT}/RESTART/tile/ufs_land_restart_anal.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc - cp -p ${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc ${COMOUT}/RESTART/tile/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${tile}.nc - done - echo '************************************************' - echo 'running the forecast model' - - TEST_NAME=datm_cdeps_lnd_gswp3 - TEST_NAME_RST=datm_cdeps_lnd_gswp3_rst - PATHRT=${HOMElandda}/sorc/ufs_model.fd/tests - RT_COMPILER=${RT_COMPILER:-intel} - ATOL="1e-7" +MPIEXEC=`which mpiexec` - cp $PARMlandda/$TEST_NAME_RST ${PATHRT}/tests/$TEST_NAME_RST - source ${PATHRT}/rt_utils.sh - source ${PATHRT}/default_vars.sh - source ${PATHRT}/tests/$TEST_NAME_RST - source ${PATHRT}/atparse.bash +# convert back to UFS tile, run model (all members) +if [[ ${ATMOS_FORC} == "gswp3" ]]; then - BL_DATE=20230816 - RTPWD=${RTPWD:-${FIXlandda}/NEMSfv3gfs/develop-${BL_DATE}/INTEL/${TEST_NAME}} - INPUTDATA_ROOT=${INPUTDATA_ROOT:-${FIXlandda}/NEMSfv3gfs/input-data-20221101} + echo '************************************************' + echo 'running the forecast model' - echo "RTPWD= $RTPWD" - echo "INPUTDATA_ROOT= $INPUTDATA_ROOT" + TEST_NAME=datm_cdeps_lnd_gswp3 + TEST_NAME_RST=datm_cdeps_lnd_gswp3_rst + PATHRT=${HOMElandda}/sorc/ufs_model.fd/tests + RT_COMPILER=${RT_COMPILER:-intel} + ATOL="1e-7" - if [[ ! -d ${INPUTDATA_ROOT} ]] || [[ ! -d ${RTPWD} ]]; then - echo "Error: cannot find either folder for INPUTDATA_ROOT or RTPWD, please check!" - exit 1 - fi + cp $PARMlandda/$TEST_NAME_RST ${PATHRT}/tests/$TEST_NAME_RST + source ${PATHRT}/rt_utils.sh + source ${PATHRT}/default_vars.sh + source ${PATHRT}/tests/$TEST_NAME_RST + source ${PATHRT}/atparse.bash - # modify some env variables - reduce core usage - export ATM_compute_tasks=0 - export ATM_io_tasks=1 - export LND_tasks=6 - export layout_x=1 - export layout_y=1 - - # FV3 executable: - if [[ $DATM_CDEPS = 'true' ]] || [[ $FV3 = 'true' ]] || [[ $S2S = 'true' ]]; then - if [[ $HAFS = 'false' ]] || [[ $FV3 = 'true' && $HAFS = 'true' ]]; then - atparse < ${PATHRT}/parm/${INPUT_NML:-input.nml.IN} > input.nml - fi - fi + BL_DATE=20230816 + RTPWD=${RTPWD:-${FIXlandda}/NEMSfv3gfs/develop-${BL_DATE}/INTEL/${TEST_NAME}} + INPUTDATA_ROOT=${INPUTDATA_ROOT:-${FIXlandda}/NEMSfv3gfs/input-data-20221101} - atparse < ${PATHRT}/parm/${MODEL_CONFIGURE:-model_configure.IN} > model_configure + echo "RTPWD= $RTPWD" + echo "INPUTDATA_ROOT= $INPUTDATA_ROOT" - compute_petbounds_and_tasks - - atparse < ${PATHRT}/parm/${UFS_CONFIGURE:-ufs.configure} > ufs.configure - - # diag table - if [[ "Q${DIAG_TABLE:-}" != Q ]] ; then - atparse < ${PATHRT}/parm/diag_table/${DIAG_TABLE} > diag_table - fi + if [[ ! -d ${INPUTDATA_ROOT} ]] || [[ ! -d ${RTPWD} ]]; then + echo "Error: cannot find either folder for INPUTDATA_ROOT or RTPWD, please check!" 
+ exit 1 + fi - # Field table - if [[ "Q${FIELD_TABLE:-}" != Q ]] ; then - cp ${PATHRT}/parm/field_table/${FIELD_TABLE} field_table + # modify some env variables - reduce core usage + export ATM_compute_tasks=0 + export ATM_io_tasks=1 + export LND_tasks=6 + export layout_x=1 + export layout_y=1 + + # FV3 executable: + if [[ $DATM_CDEPS = 'true' ]] || [[ $FV3 = 'true' ]] || [[ $S2S = 'true' ]]; then + if [[ $HAFS = 'false' ]] || [[ $FV3 = 'true' && $HAFS = 'true' ]]; then + atparse < ${PATHRT}/parm/${INPUT_NML:-input.nml.IN} > input.nml fi + fi - # Field Dictionary - cp ${PATHRT}/parm/fd_ufs.yaml fd_ufs.yaml - - # Set up the run directory - mkdir -p RESTART INPUT - cd INPUT - ln -nsf ${FIXlandda}/UFS_WM/DATM_GSWP3_input_data/* . - cd - + atparse < ${PATHRT}/parm/${MODEL_CONFIGURE:-model_configure.IN} > model_configure - SUFFIX=${RT_SUFFIX} - # restart - if [ $WARM_START = .true. ]; then - # NoahMP restart files - cp ${COMOUT}/RESTART/tile/ufs.cpld.lnd.out.${RESTART_FILE_SUFFIX_SECS}.tile*.nc RESTART/. + compute_petbounds_and_tasks - # CMEPS restart and pointer files - RFILE1=ufs.cpld.cpl.r.${RESTART_FILE_SUFFIX_SECS}.nc - cp ${FIXlandda}/restarts/gswp3/${RFILE1} RESTART/. - ls -1 "RESTART/${RFILE1}">rpointer.cpl + atparse < ${PATHRT}/parm/${UFS_CONFIGURE:-ufs.configure} > ufs.configure - # CDEPS restart and pointer files - RFILE2=ufs.cpld.datm.r.${RESTART_FILE_SUFFIX_SECS}.nc - cp ${FIXlandda}/restarts/gswp3/${RFILE2} RESTART/. - ls -1 "RESTART/${RFILE2}">rpointer.atm - fi + # diag table + if [[ "Q${DIAG_TABLE:-}" != Q ]] ; then + atparse < ${PATHRT}/parm/diag_table/${DIAG_TABLE} > diag_table + fi - cd INPUT - ln -nsf ${FIXlandda}/UFS_WM/NOAHMP_IC/ufs-land_C96_init_fields.tile1.nc C96.initial.tile1.nc - ln -nsf ${FIXlandda}/UFS_WM/NOAHMP_IC/ufs-land_C96_init_fields.tile2.nc C96.initial.tile2.nc - ln -nsf ${FIXlandda}/UFS_WM/NOAHMP_IC/ufs-land_C96_init_fields.tile3.nc C96.initial.tile3.nc - ln -nsf ${FIXlandda}/UFS_WM/NOAHMP_IC/ufs-land_C96_init_fields.tile4.nc C96.initial.tile4.nc - ln -nsf ${FIXlandda}/UFS_WM/NOAHMP_IC/ufs-land_C96_init_fields.tile5.nc C96.initial.tile5.nc - ln -nsf ${FIXlandda}/UFS_WM/NOAHMP_IC/ufs-land_C96_init_fields.tile6.nc C96.initial.tile6.nc - ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/C96.maximum_snow_albedo.tile*.nc . - ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/C96.slope_type.tile*.nc . - ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/C96.soil_type.tile*.nc . - ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/C96.soil_color.tile*.nc . - ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/C96.substrate_temperature.tile*.nc . - ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/C96.vegetation_greenness.tile*.nc . - ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/C96.vegetation_type.tile*.nc . - ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/oro_C96.mx100.tile1.nc oro_data.tile1.nc - ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/oro_C96.mx100.tile2.nc oro_data.tile2.nc - ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/oro_C96.mx100.tile3.nc oro_data.tile3.nc - ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/oro_C96.mx100.tile4.nc oro_data.tile4.nc - ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/oro_C96.mx100.tile5.nc oro_data.tile5.nc - ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/oro_C96.mx100.tile6.nc oro_data.tile6.nc - ln -nsf ${FIXlandda}/UFS_WM/FV3_input_data/INPUT/C96_grid.tile*.nc . 
- ln -nsf ${FIXlandda}/UFS_WM/FV3_input_data/INPUT/grid_spec.nc C96_mosaic.nc - cd - + # Field table + if [[ "Q${FIELD_TABLE:-}" != Q ]] ; then + cp ${PATHRT}/parm/field_table/${FIELD_TABLE} field_table + fi - if [[ $DATM_CDEPS = 'true' ]]; then - atparse < ${PATHRT}/parm/${DATM_IN_CONFIGURE:-datm_in.IN} > datm_in - atparse < ${PATHRT}/parm/${DATM_STREAM_CONFIGURE:-datm.streams.IN} > datm.streams - fi + # Field Dictionary + cp ${PATHRT}/parm/fd_ufs.yaml fd_ufs.yaml + + # Set up the run directory + mkdir -p RESTART INPUT + cd INPUT + ln -nsf ${FIXlandda}/UFS_WM/DATM_GSWP3_input_data/* . + cd - + + SUFFIX=${RT_SUFFIX} + # restart + if [ $WARM_START = .true. ]; then + # NoahMP restart files + cp ${COMOUT}/RESTART/tile/ufs.cpld.lnd.out.${RESTART_FILE_SUFFIX_SECS}.tile*.nc RESTART/. + + # CMEPS restart and pointer files + RFILE1=ufs.cpld.cpl.r.${RESTART_FILE_SUFFIX_SECS}.nc + cp ${FIXlandda}/restarts/gswp3/${RFILE1} RESTART/. + ls -1 "RESTART/${RFILE1}">rpointer.cpl + + # CDEPS restart and pointer files + RFILE2=ufs.cpld.datm.r.${RESTART_FILE_SUFFIX_SECS}.nc + cp ${FIXlandda}/restarts/gswp3/${RFILE2} RESTART/. + ls -1 "RESTART/${RFILE2}">rpointer.atm + fi - # NoahMP table file - cp ${PATHRT}/parm/noahmptable.tbl noahmptable.tbl + cd INPUT + ln -nsf ${FIXlandda}/UFS_WM/NOAHMP_IC/ufs-land_C96_init_fields.tile1.nc C96.initial.tile1.nc + ln -nsf ${FIXlandda}/UFS_WM/NOAHMP_IC/ufs-land_C96_init_fields.tile2.nc C96.initial.tile2.nc + ln -nsf ${FIXlandda}/UFS_WM/NOAHMP_IC/ufs-land_C96_init_fields.tile3.nc C96.initial.tile3.nc + ln -nsf ${FIXlandda}/UFS_WM/NOAHMP_IC/ufs-land_C96_init_fields.tile4.nc C96.initial.tile4.nc + ln -nsf ${FIXlandda}/UFS_WM/NOAHMP_IC/ufs-land_C96_init_fields.tile5.nc C96.initial.tile5.nc + ln -nsf ${FIXlandda}/UFS_WM/NOAHMP_IC/ufs-land_C96_init_fields.tile6.nc C96.initial.tile6.nc + ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/C96.maximum_snow_albedo.tile*.nc . + ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/C96.slope_type.tile*.nc . + ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/C96.soil_type.tile*.nc . + ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/C96.soil_color.tile*.nc . + ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/C96.substrate_temperature.tile*.nc . + ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/C96.vegetation_greenness.tile*.nc . + ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/C96.vegetation_type.tile*.nc . + ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/oro_C96.mx100.tile1.nc oro_data.tile1.nc + ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/oro_C96.mx100.tile2.nc oro_data.tile2.nc + ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/oro_C96.mx100.tile3.nc oro_data.tile3.nc + ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/oro_C96.mx100.tile4.nc oro_data.tile4.nc + ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/oro_C96.mx100.tile5.nc oro_data.tile5.nc + ln -nsf ${FIXlandda}/UFS_WM/FV3_fix_tiled/C96/oro_C96.mx100.tile6.nc oro_data.tile6.nc + ln -nsf ${FIXlandda}/UFS_WM/FV3_input_data/INPUT/C96_grid.tile*.nc . + ln -nsf ${FIXlandda}/UFS_WM/FV3_input_data/INPUT/grid_spec.nc C96_mosaic.nc + cd - + + if [[ $DATM_CDEPS = 'true' ]]; then + atparse < ${PATHRT}/parm/${DATM_IN_CONFIGURE:-datm_in.IN} > datm_in + atparse < ${PATHRT}/parm/${DATM_STREAM_CONFIGURE:-datm.streams.IN} > datm.streams + fi - # start runs - echo "Start ufs-cdeps-land model run with TASKS: ${TASKS}" - export pgm="ufs_model" - . 
prep_step
-    ${MPIRUN} -n ${TASKS} ${EXEClandda}/$pgm >>$pgmout 2>errfile
-    export err=$?; err_chk
-    cp errfile errfile_ufs_model
-    if [[ $err != 0 ]]; then
-      echo "ufs_model failed"
-      exit 10
-    fi
+  # NoahMP table file
+  cp ${PATHRT}/parm/noahmptable.tbl noahmptable.tbl
+
+  # start runs
+  echo "Start ufs-cdeps-land model run with TASKS: ${TASKS}"
+  export pgm="ufs_model"
+  . prep_step
+  ${MPIEXEC} -n ${TASKS} ${EXEClandda}/$pgm >>$pgmout 2>errfile
+  export err=$?; err_chk
+  cp errfile errfile_ufs_model
+  if [[ $err != 0 ]]; then
+    echo "ufs_model failed"
+    exit 10
   fi
 fi
diff --git a/scripts/exlandda_post_anal.sh b/scripts/exlandda_post_anal.sh
new file mode 100755
index 00000000..074d6dce
--- /dev/null
+++ b/scripts/exlandda_post_anal.sh
@@ -0,0 +1,130 @@
+#!/bin/sh
+
+set -xue
+
+############################
+# copy restarts to workdir, convert to UFS tile for DA (all members)
+
+MACHINE_ID=${MACHINE}
+TPATH=${FIXlandda}/forcing/${ATMOS_FORC}/orog_files/
+YYYY=${PDY:0:4}
+MM=${PDY:4:2}
+DD=${PDY:6:2}
+HH=${cyc}
+YYYP=${PTIME:0:4}
+MP=${PTIME:4:2}
+DP=${PTIME:6:2}
+HP=${PTIME:8:2}
+nYYYY=${NTIME:0:4}
+nMM=${NTIME:4:2}
+nDD=${NTIME:6:2}
+nHH=${NTIME:8:2}
+
+FREQ=$((${FCSTHR}*3600))
+RDD=$((${FCSTHR}/24))
+RHH=$((${FCSTHR}%24))
+
+# load modulefiles
+BUILD_VERSION_FILE="${HOMElandda}/versions/build.ver_${MACHINE}"
+if [ -e ${BUILD_VERSION_FILE} ]; then
+  . ${BUILD_VERSION_FILE}
+fi
+mkdir -p modulefiles
+cp ${HOMElandda}/modulefiles/build_${MACHINE}_intel.lua $DATA/modulefiles/modules.landda.lua
+module use modulefiles; module load modules.landda
+
+MPIEXEC=`which mpiexec`
+
+# convert back to vector, run model (all members) convert back to vector, run model (all members)
+if [[ ${ATMOS_FORC} == "era5" ]]; then
+  echo '************************************************'
+  echo 'calling tile2vector'
+
+  cp ${PARMlandda}/templates/template.tile2vector tile2vector.namelist
+
+  sed -i "s|FIXlandda|${FIXlandda}|g" tile2vector.namelist
+  sed -i -e "s/XXYYYY/${YYYY}/g" tile2vector.namelist
+  sed -i -e "s/XXMM/${MM}/g" tile2vector.namelist
+  sed -i -e "s/XXDD/${DD}/g" tile2vector.namelist
+  sed -i -e "s/XXHH/${HH}/g" tile2vector.namelist
+  sed -i -e "s/MODEL_FORCING/${ATMOS_FORC}/g" tile2vector.namelist
+  sed -i -e "s/XXRES/${RES}/g" tile2vector.namelist
+  sed -i -e "s/XXTSTUB/${TSTUB}/g" tile2vector.namelist
+  sed -i -e "s#XXTPATH#${TPATH}#g" tile2vector.namelist
+
+  export pgm="vector2tile_converter.exe"
+  . prep_step
+  ${EXEClandda}/$pgm tile2vector.namelist >>$pgmout 2>errfile
+  export err=$?; err_chk
+  cp errfile errfile_tile2vector
+  if [[ $err != 0 ]]; then
+    echo "tile2vector failed"
+    exit 10
+  fi
+
+  # save analysis restart
+  mkdir -p ${COMOUT}/RESTART/vector
+  cp -p ${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc ${COMOUT}/RESTART/vector/ufs_land_restart_anal.${YYYY}-${MM}-${DD}_${HH}-00-00.nc
+
+  echo '************************************************'
+  echo 'running the forecast model'
+
+  # update model namelist
+  cp ${PARMlandda}/templates/template.ufs-noahMP.namelist.${ATMOS_FORC} ufs-land.namelist
+
+  sed -i "s|FIXlandda|${FIXlandda}|g" ufs-land.namelist
+  sed -i -e "s/XXYYYY/${YYYY}/g" ufs-land.namelist
+  sed -i -e "s/XXMM/${MM}/g" ufs-land.namelist
+  sed -i -e "s/XXDD/${DD}/g" ufs-land.namelist
+  sed -i -e "s/XXHH/${HH}/g" ufs-land.namelist
+  sed -i -e "s/XXFREQ/${FREQ}/g" ufs-land.namelist
+  sed -i -e "s/XXRDD/${RDD}/g" ufs-land.namelist
+  sed -i -e "s/XXRHH/${RHH}/g" ufs-land.namelist
+
+  nt=$SLURM_NTASKS
+
+  export pgm="ufsLand.exe"
+  . 
prep_step + ${MPIEXEC} -n 1 ${EXEClandda}/$pgm >>$pgmout 2>errfile + export err=$?; err_chk + cp errfile errfile_ufsLand + if [[ $err != 0 ]]; then + echo "ufsLand failed" + exit 10 + fi + +# convert back to UFS tile, run model (all members) +elif [[ ${ATMOS_FORC} == "gswp3" ]]; then + echo '************************************************' + echo 'calling tile2tile' + + cp ${PARMlandda}/templates/template.jedi2ufs jedi2ufs.namelist + + sed -i "s|FIXlandda|${FIXlandda}|g" jedi2ufs.namelist + sed -i -e "s/XXYYYY/${YYYY}/g" jedi2ufs.namelist + sed -i -e "s/XXMM/${MM}/g" jedi2ufs.namelist + sed -i -e "s/XXDD/${DD}/g" jedi2ufs.namelist + sed -i -e "s/XXHH/${HH}/g" jedi2ufs.namelist + sed -i -e "s/MODEL_FORCING/${ATMOS_FORC}/g" jedi2ufs.namelist + sed -i -e "s/XXRES/${RES}/g" jedi2ufs.namelist + sed -i -e "s/XXTSTUB/${TSTUB}/g" jedi2ufs.namelist + sed -i -e "s#XXTPATH#${TPATH}#g" jedi2ufs.namelist + + export pgm="tile2tile_converter.exe" + . prep_step + ${EXEClandda}/$pgm jedi2ufs.namelist >>$pgmout 2>errfile + export err=$?; err_chk + cp errfile errfile_tile2tile + if [[ $err != 0 ]]; then + echo "tile2tile failed" + exit 10 + fi + + # save analysis restart + mkdir -p ${COMOUT}/RESTART/tile + for tile in 1 2 3 4 5 6 + do + cp -p ${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc ${COMOUT}/RESTART/tile/ufs_land_restart_anal.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc + cp -p ${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc ${COMOUT}/RESTART/tile/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${tile}.nc + done +fi diff --git a/scripts/exlandda_pre_anal.sh b/scripts/exlandda_pre_anal.sh new file mode 100755 index 00000000..74b0394f --- /dev/null +++ b/scripts/exlandda_pre_anal.sh @@ -0,0 +1,117 @@ +#!/bin/sh + +set -xue + +############################ +# copy restarts to workdir, convert to UFS tile for DA (all members) + +TPATH=${FIXlandda}/forcing/${ATMOS_FORC}/orog_files/ +YYYY=${PDY:0:4} +MM=${PDY:4:2} +DD=${PDY:6:2} +HH=${cyc} +YYYP=${PTIME:0:4} +MP=${PTIME:4:2} +DP=${PTIME:6:2} +HP=${PTIME:8:2} + +FILEDATE=${YYYY}${MM}${DD}.${HH}0000 + +mkdir -p modulefiles +cp ${HOMElandda}/modulefiles/build_${MACHINE}_intel.lua $DATA/modulefiles/modules.landda.lua + +# load modulefiles +BUILD_VERSION_FILE="${HOMElandda}/versions/build.ver_${MACHINE}" +if [ -e ${BUILD_VERSION_FILE} ]; then + . ${BUILD_VERSION_FILE} +fi +module use modulefiles; module load modules.landda + +if [[ $ATMOS_FORC == "era5" ]]; then + # vector2tile for DA + # copy restarts into work directory + rst_in=${COMIN}/RESTART/vector/ufs_land_restart_back.${YYYY}-${MM}-${DD}_${HH}-00-00.nc + if [[ ! 
-e ${rst_in} ]]; then + rst_in=${FIXlandda}/restarts/${ATMOS_FORC}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc + fi + rst_out=${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc + cp ${rst_in} ${rst_out} + + echo '************************************************' + echo 'calling vector2tile' + + # update vec2tile and tile2vec namelists + cp ${PARMlandda}/templates/template.vector2tile vector2tile.namelist + + sed -i "s|FIXlandda|${FIXlandda}|g" vector2tile.namelist + sed -i -e "s/XXYYYY/${YYYY}/g" vector2tile.namelist + sed -i -e "s/XXMM/${MM}/g" vector2tile.namelist + sed -i -e "s/XXDD/${DD}/g" vector2tile.namelist + sed -i -e "s/XXHH/${HH}/g" vector2tile.namelist + sed -i -e "s/XXHH/${HH}/g" vector2tile.namelist + sed -i -e "s/MODEL_FORCING/${ATMOS_FORC}/g" vector2tile.namelist + sed -i -e "s/XXRES/${RES}/g" vector2tile.namelist + sed -i -e "s/XXTSTUB/${TSTUB}/g" vector2tile.namelist + sed -i -e "s#XXTPATH#${TPATH}#g" vector2tile.namelist + + # submit vec2tile + echo '************************************************' + echo 'calling vector2tile' + + export pgm="vector2tile_converter.exe" + . prep_step + ${EXEClandda}/$pgm vector2tile.namelist >>$pgmout 2>errfile + cp errfile errfile_vector2tile + export err=$?; err_chk + if [[ $err != 0 ]]; then + echo "vec2tile failed" + exit + fi +elif [[ $ATMOS_FORC == "gswp3" ]]; then + # tile2tile for DA + echo '************************************************' + echo 'calling tile2tile' + + # copy restarts into work directory + for itile in {1..6} + do + rst_in=${COMIN}/RESTART/tile/ufs_land_restart_back.${YYYY}-${MM}-${DD}_${HH}-00-00.nc + if [[ ! -e ${rst_in} ]]; then + rst_in=${FIXlandda}/restarts/${ATMOS_FORC}/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${itile}.nc + fi + rst_out=${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc + cp ${rst_in} ${rst_out} + done + + # update tile2tile namelist + cp ${PARMlandda}/templates/template.ufs2jedi ufs2jedi.namelist + + sed -i "s|FIXlandda|${FIXlandda}|g" ufs2jedi.namelist + sed -i -e "s/XXYYYY/${YYYY}/g" ufs2jedi.namelist + sed -i -e "s/XXMM/${MM}/g" ufs2jedi.namelist + sed -i -e "s/XXDD/${DD}/g" ufs2jedi.namelist + sed -i -e "s/XXHH/${HH}/g" ufs2jedi.namelist + sed -i -e "s/XXHH/${HH}/g" ufs2jedi.namelist + sed -i -e "s/MODEL_FORCING/${ATMOS_FORC}/g" ufs2jedi.namelist + sed -i -e "s/XXRES/${RES}/g" ufs2jedi.namelist + sed -i -e "s/XXTSTUB/${TSTUB}/g" ufs2jedi.namelist + sed -i -e "s#XXTPATH#${TPATH}#g" ufs2jedi.namelist + + # submit tile2tile + export pgm="tile2tile_converter.exe" + . 
prep_step + ${EXEClandda}/$pgm ufs2jedi.namelist >>$pgmout 2>errfile + cp errfile errfile_tile2tile + export err=$?; err_chk + if [[ $err != 0 ]]; then + echo "tile2tile failed" + exit + fi +fi + +#stage restarts for applying JEDI update to intermediate directory +for itile in {1..6} +do + cp -p ${DATA}/${FILEDATE}.sfc_data.tile${itile}.nc ${COMOUT}/${FILEDATE}.sfc_data.tile${itile}.nc +done + diff --git a/scripts/exlandda_prep_exp.sh b/scripts/exlandda_prep_exp.sh deleted file mode 100755 index 201aecfd..00000000 --- a/scripts/exlandda_prep_exp.sh +++ /dev/null @@ -1,135 +0,0 @@ -#!/bin/sh - -set -xue - -############################ -# copy restarts to workdir, convert to UFS tile for DA (all members) - -if [[ ${EXP_NAME} == "openloop" ]]; then - do_jedi="NO" -else - do_jedi="YES" - SAVE_TILE="YES" -fi - -TPATH=${FIXlandda}/forcing/${ATMOS_FORC}/orog_files/ -YYYY=${PDY:0:4} -MM=${PDY:4:2} -DD=${PDY:6:2} -HH=${cyc} -YYYP=${PTIME:0:4} -MP=${PTIME:4:2} -DP=${PTIME:6:2} -HP=${PTIME:8:2} - -FILEDATE=${YYYY}${MM}${DD}.${HH}0000 - -mkdir -p modulefiles -cp ${HOMElandda}/modulefiles/build_${MACHINE}_intel.lua $DATA/modulefiles/modules.landda.lua - -# load modulefiles -BUILD_VERSION_FILE="${HOMElandda}/versions/build.ver_${MACHINE}" -if [ -e ${BUILD_VERSION_FILE} ]; then - . ${BUILD_VERSION_FILE} -fi -module use modulefiles; module load modules.landda - -if [[ $do_jedi == "YES" ]]; then - - if [[ $ATMOS_FORC == "era5" ]]; then - # vector2tile for DA - # copy restarts into work directory - rst_in=${COMIN}/RESTART/vector/ufs_land_restart_back.${YYYY}-${MM}-${DD}_${HH}-00-00.nc - if [[ ! -e ${rst_in} ]]; then - rst_in=${FIXlandda}/restarts/${ATMOS_FORC}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc - fi - rst_out=${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc - cp ${rst_in} ${rst_out} - - echo '************************************************' - echo 'calling vector2tile' - - # update vec2tile and tile2vec namelists - cp ${PARMlandda}/templates/template.vector2tile vector2tile.namelist - - sed -i "s|FIXlandda|${FIXlandda}|g" vector2tile.namelist - sed -i -e "s/XXYYYY/${YYYY}/g" vector2tile.namelist - sed -i -e "s/XXMM/${MM}/g" vector2tile.namelist - sed -i -e "s/XXDD/${DD}/g" vector2tile.namelist - sed -i -e "s/XXHH/${HH}/g" vector2tile.namelist - sed -i -e "s/XXHH/${HH}/g" vector2tile.namelist - sed -i -e "s/MODEL_FORCING/${ATMOS_FORC}/g" vector2tile.namelist - sed -i -e "s/XXRES/${RES}/g" vector2tile.namelist - sed -i -e "s/XXTSTUB/${TSTUB}/g" vector2tile.namelist - sed -i -e "s#XXTPATH#${TPATH}#g" vector2tile.namelist - - # submit vec2tile - echo '************************************************' - echo 'calling vector2tile' - - export pgm="vector2tile_converter.exe" - . prep_step - ${EXEClandda}/$pgm vector2tile.namelist >>$pgmout 2>errfile - cp errfile errfile_vector2tile - export err=$?; err_chk - if [[ $err != 0 ]]; then - echo "vec2tile failed" - exit - fi - elif [[ $ATMOS_FORC == "gswp3" ]]; then - # tile2tile for DA - echo '************************************************' - echo 'calling tile2tile' - - # copy restarts into work directory - for tile in 1 2 3 4 5 6 - do - rst_in=${COMIN}/RESTART/tile/ufs_land_restart_back.${YYYY}-${MM}-${DD}_${HH}-00-00.nc - if [[ ! 
-e ${rst_in} ]]; then - rst_in=${FIXlandda}/restarts/${ATMOS_FORC}/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${tile}.nc - fi - rst_out=${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc - cp ${rst_in} ${rst_out} - done - - # update tile2tile namelist - cp ${PARMlandda}/templates/template.ufs2jedi ufs2jedi.namelist - - sed -i "s|FIXlandda|${FIXlandda}|g" ufs2jedi.namelist - sed -i -e "s/XXYYYY/${YYYY}/g" ufs2jedi.namelist - sed -i -e "s/XXMM/${MM}/g" ufs2jedi.namelist - sed -i -e "s/XXDD/${DD}/g" ufs2jedi.namelist - sed -i -e "s/XXHH/${HH}/g" ufs2jedi.namelist - sed -i -e "s/XXHH/${HH}/g" ufs2jedi.namelist - sed -i -e "s/MODEL_FORCING/${ATMOS_FORC}/g" ufs2jedi.namelist - sed -i -e "s/XXRES/${RES}/g" ufs2jedi.namelist - sed -i -e "s/XXTSTUB/${TSTUB}/g" ufs2jedi.namelist - sed -i -e "s#XXTPATH#${TPATH}#g" ufs2jedi.namelist - - # submit tile2tile - export pgm="tile2tile_converter.exe" - . prep_step - ${EXEClandda}/$pgm ufs2jedi.namelist >>$pgmout 2>errfile - cp errfile errfile_tile2tile - export err=$?; err_chk - if [[ $err != 0 ]]; then - echo "tile2tile failed" - exit - fi - fi - - if [[ $SAVE_TILE == "YES" ]]; then - for tile in 1 2 3 4 5 6 - do - cp ${DATA}/${FILEDATE}.sfc_data.tile${tile}.nc ${COMOUT}/${FILEDATE}.sfc_data_back.tile${tile}.nc - done - fi - - #stage restarts for applying JEDI update (files will get directly updated) - for tile in 1 2 3 4 5 6 - do - cp -p ${DATA}/${FILEDATE}.sfc_data.tile${tile}.nc ${COMOUT}/${FILEDATE}.sfc_data.tile${tile}.nc - done - -fi # do_jedi setup - diff --git a/scripts/exlandda_prep_obs.sh b/scripts/exlandda_prep_obs.sh index fd36c5e8..cb2c269d 100755 --- a/scripts/exlandda_prep_obs.sh +++ b/scripts/exlandda_prep_obs.sh @@ -5,13 +5,6 @@ set -xue ############################ # copy restarts to workdir, convert to UFS tile for DA (all members) -if [[ ${EXP_NAME} == "openloop" ]]; then - do_jedi="NO" -else - do_jedi="YES" - SAVE_TILE="YES" -fi - YYYY=${PDY:0:4} MM=${PDY:4:2} DD=${PDY:6:2} From 71942ca6e4acfb33b99b0c04a23cf906e5b3266b Mon Sep 17 00:00:00 2001 From: Chan-Hoo Jeon Date: Fri, 17 May 2024 09:18:16 -0500 Subject: [PATCH 05/18] update task modules --- modulefiles/tasks/hera/task.analysis.lua | 2 +- modulefiles/tasks/hera/task.forecast.lua | 2 +- .../tasks/hera/{task.prep_exp.lua => task.post_anal.lua} | 2 +- .../{orion/task.prep_exp.lua => hera/task.pre_anal.lua} | 2 +- modulefiles/tasks/hera/task.prep_obs.lua | 2 +- modulefiles/tasks/orion/task.analysis.lua | 2 +- modulefiles/tasks/orion/task.forecast.lua | 2 +- modulefiles/tasks/orion/task.post_anal.lua | 8 ++++++++ modulefiles/tasks/orion/task.pre_anal.lua | 8 ++++++++ modulefiles/tasks/orion/task.prep_obs.lua | 2 +- 10 files changed, 24 insertions(+), 8 deletions(-) rename modulefiles/tasks/hera/{task.prep_exp.lua => task.post_anal.lua} (79%) rename modulefiles/tasks/{orion/task.prep_exp.lua => hera/task.pre_anal.lua} (79%) create mode 100644 modulefiles/tasks/orion/task.post_anal.lua create mode 100644 modulefiles/tasks/orion/task.pre_anal.lua diff --git a/modulefiles/tasks/hera/task.analysis.lua b/modulefiles/tasks/hera/task.analysis.lua index 8ad021a3..d7439d6d 100644 --- a/modulefiles/tasks/hera/task.analysis.lua +++ b/modulefiles/tasks/hera/task.analysis.lua @@ -2,7 +2,7 @@ prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) load(pathJoin("stack-intel", stack_intel_ver)) -load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-intel-oneapi-mpi", 
stack_intel_oneapi_mpi_ver)) load(pathJoin("stack-python", stack_python_ver)) load(pathJoin("prod_util", prod_util_ver)) diff --git a/modulefiles/tasks/hera/task.forecast.lua b/modulefiles/tasks/hera/task.forecast.lua index 8ad021a3..d7439d6d 100644 --- a/modulefiles/tasks/hera/task.forecast.lua +++ b/modulefiles/tasks/hera/task.forecast.lua @@ -2,7 +2,7 @@ prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) load(pathJoin("stack-intel", stack_intel_ver)) -load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_intel_oneapi_mpi_ver)) load(pathJoin("stack-python", stack_python_ver)) load(pathJoin("prod_util", prod_util_ver)) diff --git a/modulefiles/tasks/hera/task.prep_exp.lua b/modulefiles/tasks/hera/task.post_anal.lua similarity index 79% rename from modulefiles/tasks/hera/task.prep_exp.lua rename to modulefiles/tasks/hera/task.post_anal.lua index 8ad021a3..d7439d6d 100644 --- a/modulefiles/tasks/hera/task.prep_exp.lua +++ b/modulefiles/tasks/hera/task.post_anal.lua @@ -2,7 +2,7 @@ prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) load(pathJoin("stack-intel", stack_intel_ver)) -load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_intel_oneapi_mpi_ver)) load(pathJoin("stack-python", stack_python_ver)) load(pathJoin("prod_util", prod_util_ver)) diff --git a/modulefiles/tasks/orion/task.prep_exp.lua b/modulefiles/tasks/hera/task.pre_anal.lua similarity index 79% rename from modulefiles/tasks/orion/task.prep_exp.lua rename to modulefiles/tasks/hera/task.pre_anal.lua index 8ad021a3..d7439d6d 100644 --- a/modulefiles/tasks/orion/task.prep_exp.lua +++ b/modulefiles/tasks/hera/task.pre_anal.lua @@ -2,7 +2,7 @@ prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) load(pathJoin("stack-intel", stack_intel_ver)) -load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_intel_oneapi_mpi_ver)) load(pathJoin("stack-python", stack_python_ver)) load(pathJoin("prod_util", prod_util_ver)) diff --git a/modulefiles/tasks/hera/task.prep_obs.lua b/modulefiles/tasks/hera/task.prep_obs.lua index 8ad021a3..d7439d6d 100644 --- a/modulefiles/tasks/hera/task.prep_obs.lua +++ b/modulefiles/tasks/hera/task.prep_obs.lua @@ -2,7 +2,7 @@ prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) load(pathJoin("stack-intel", stack_intel_ver)) -load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_intel_oneapi_mpi_ver)) load(pathJoin("stack-python", stack_python_ver)) load(pathJoin("prod_util", prod_util_ver)) diff --git a/modulefiles/tasks/orion/task.analysis.lua b/modulefiles/tasks/orion/task.analysis.lua index 8ad021a3..d7439d6d 100644 --- a/modulefiles/tasks/orion/task.analysis.lua +++ b/modulefiles/tasks/orion/task.analysis.lua @@ -2,7 +2,7 @@ prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) load(pathJoin("stack-intel", stack_intel_ver)) -load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_intel_oneapi_mpi_ver)) load(pathJoin("stack-python", stack_python_ver)) 
load(pathJoin("prod_util", prod_util_ver)) diff --git a/modulefiles/tasks/orion/task.forecast.lua b/modulefiles/tasks/orion/task.forecast.lua index 8ad021a3..d7439d6d 100644 --- a/modulefiles/tasks/orion/task.forecast.lua +++ b/modulefiles/tasks/orion/task.forecast.lua @@ -2,7 +2,7 @@ prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) load(pathJoin("stack-intel", stack_intel_ver)) -load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_intel_oneapi_mpi_ver)) load(pathJoin("stack-python", stack_python_ver)) load(pathJoin("prod_util", prod_util_ver)) diff --git a/modulefiles/tasks/orion/task.post_anal.lua b/modulefiles/tasks/orion/task.post_anal.lua new file mode 100644 index 00000000..d7439d6d --- /dev/null +++ b/modulefiles/tasks/orion/task.post_anal.lua @@ -0,0 +1,8 @@ +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_intel_oneapi_mpi_ver)) +load(pathJoin("stack-python", stack_python_ver)) + +load(pathJoin("prod_util", prod_util_ver)) diff --git a/modulefiles/tasks/orion/task.pre_anal.lua b/modulefiles/tasks/orion/task.pre_anal.lua new file mode 100644 index 00000000..d7439d6d --- /dev/null +++ b/modulefiles/tasks/orion/task.pre_anal.lua @@ -0,0 +1,8 @@ +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_intel_oneapi_mpi_ver)) +load(pathJoin("stack-python", stack_python_ver)) + +load(pathJoin("prod_util", prod_util_ver)) diff --git a/modulefiles/tasks/orion/task.prep_obs.lua b/modulefiles/tasks/orion/task.prep_obs.lua index 8ad021a3..d7439d6d 100644 --- a/modulefiles/tasks/orion/task.prep_obs.lua +++ b/modulefiles/tasks/orion/task.prep_obs.lua @@ -2,7 +2,7 @@ prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) load(pathJoin("stack-intel", stack_intel_ver)) -load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_intel_oneapi_mpi_ver)) load(pathJoin("stack-python", stack_python_ver)) load(pathJoin("prod_util", prod_util_ver)) From 676c021de9a5b8bca0763a2e4d1e95edc931ec4c Mon Sep 17 00:00:00 2001 From: Chan-Hoo Jeon Date: Fri, 17 May 2024 12:06:40 -0500 Subject: [PATCH 06/18] to meet nco standards --- parm/land_analysis_gswp3_orion.yaml | 11 +++-------- scripts/exlandda_analysis.sh | 21 +++++++++++++-------- scripts/exlandda_forecast.sh | 13 +++++-------- scripts/exlandda_post_anal.sh | 20 +++++++++----------- scripts/exlandda_pre_anal.sh | 5 ++++- scripts/exlandda_prep_obs.sh | 4 ++-- 6 files changed, 36 insertions(+), 38 deletions(-) diff --git a/parm/land_analysis_gswp3_orion.yaml b/parm/land_analysis_gswp3_orion.yaml index b121a0d8..90f8cf1f 100644 --- a/parm/land_analysis_gswp3_orion.yaml +++ b/parm/land_analysis_gswp3_orion.yaml @@ -167,6 +167,7 @@ workflow: PDY: "&PDY;" cyc: "&cyc;" PTIME: "&PTIME;" + FCSTHR: "&FCSTHR;" account: "&ACCOUNT;" command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "post_anal" "&HOMElandda;" "&MACHINE;"' jobname: post_anal @@ -177,7 +178,7 @@ workflow: dependency: taskdep: attrs: - task: pre_anal + task: analysis task_forecast: attrs: 
cycledefs: cycled @@ -188,8 +189,6 @@ workflow: ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" ATMOS_FORC: "&FORCING;" - RES: "&RES;" - TSTUB: "&TSTUB;" model_ver: "&model_ver;" HOMElandda: "&HOMElandda;" COMROOT: "&COMROOT;" @@ -198,12 +197,8 @@ workflow: LOGDIR: "&LOGDIR;" PDY: "&PDY;" cyc: "&cyc;" - PTIME: "&PTIME;" NTIME: "&NTIME;" DAtype: "&DAtype;" - SNOWDEPTHVAR: "&SNOWDEPTHVAR;" - JEDI_INSTALL: "&JEDI_INSTALL;" - FCSTHR: "&FCSTHR;" account: "&ACCOUNT;" command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "forecast" "&HOMElandda;" "&MACHINE;"' jobname: forecast @@ -214,4 +209,4 @@ workflow: dependency: taskdep: attrs: - task: analysis + task: post_anal diff --git a/scripts/exlandda_analysis.sh b/scripts/exlandda_analysis.sh index d30926c6..4cf3b803 100755 --- a/scripts/exlandda_analysis.sh +++ b/scripts/exlandda_analysis.sh @@ -37,9 +37,9 @@ B=30 # back ground error std for LETKFOI for itile in {1..6} do - cp ${COMIN}/${FILEDATE}.sfc_data.ini.tile${itile}.nc ${FILEDATE}.sfc_data.tile${itile}.nc + cp ${DATA_SHARE}/${FILEDATE}.sfc_data.tile${itile}.nc . done -ln -nsf ${COMIN}/OBS/*_${YYYY}${MM}${DD}${HH}.nc . +ln -nsf ${COMIN}/obs/*_${YYYY}${MM}${DD}${HH}.nc . cres_file=${DATA}/${FILEDATE}.coupler.res cp ${PARMlandda}/templates/template.coupler.res $cres_file @@ -200,7 +200,7 @@ if [[ $do_HOFX == "YES" ]]; then fi ################################################ -# APPLY INCREMENT TO UFS RESTARTS +# Create increment files ################################################ if [[ $do_DA == "YES" ]]; then @@ -232,14 +232,19 @@ EOF fi fi + for itile in {1..6} + do + cp -p ${DATA}/${FILEDATE}.xainc.sfc_data.tile${itile}.nc ${COMOUT} + done + fi +for itile in {1..6} +do + cp -p ${DATA}/${FILEDATE}.sfc_data.tile${itile}.nc ${COMOUT} +done + if [[ -d output/DA/hofx ]]; then cp -rp output/DA/hofx ${COMOUT} fi -if [[ $do_DA == "YES" ]]; then - mkdir -p ${COMOUT}/jedi_incr - cp -p ${DATA}/${FILEDATE}.xainc.sfc_data.tile*.nc ${COMOUT}/jedi_incr -fi - diff --git a/scripts/exlandda_forecast.sh b/scripts/exlandda_forecast.sh index ab9385eb..e921bc74 100755 --- a/scripts/exlandda_forecast.sh +++ b/scripts/exlandda_forecast.sh @@ -11,19 +11,11 @@ YYYY=${PDY:0:4} MM=${PDY:4:2} DD=${PDY:6:2} HH=${cyc} -YYYP=${PTIME:0:4} -MP=${PTIME:4:2} -DP=${PTIME:6:2} -HP=${PTIME:8:2} nYYYY=${NTIME:0:4} nMM=${NTIME:4:2} nDD=${NTIME:6:2} nHH=${NTIME:8:2} -FREQ=$((${FCSTHR}*3600)) -RDD=$((${FCSTHR}/24)) -RHH=$((${FCSTHR}%24)) - # load modulefiles BUILD_VERSION_FILE="${HOMElandda}/versions/build.ver_${MACHINE}" if [ -e ${BUILD_VERSION_FILE} ]; then @@ -35,6 +27,11 @@ module use modulefiles; module load modules.landda MPIEXEC=`which mpiexec` +for itile in {1..6} +do + cp ${COMIN}/RESTART/tile/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${itile}.nc . +done + # convert back to UFS tile, run model (all members) if [[ ${ATMOS_FORC} == "gswp3" ]]; then diff --git a/scripts/exlandda_post_anal.sh b/scripts/exlandda_post_anal.sh index 074d6dce..22b73488 100755 --- a/scripts/exlandda_post_anal.sh +++ b/scripts/exlandda_post_anal.sh @@ -11,14 +11,6 @@ YYYY=${PDY:0:4} MM=${PDY:4:2} DD=${PDY:6:2} HH=${cyc} -YYYP=${PTIME:0:4} -MP=${PTIME:4:2} -DP=${PTIME:6:2} -HP=${PTIME:8:2} -nYYYY=${NTIME:0:4} -nMM=${NTIME:4:2} -nDD=${NTIME:6:2} -nHH=${NTIME:8:2} FREQ=$((${FCSTHR}*3600)) RDD=$((${FCSTHR}/24)) @@ -35,6 +27,13 @@ module use modulefiles; module load modules.landda MPIEXEC=`which mpiexec` +FILEDATE=${YYYY}${MM}${DD}.${HH}0000 +for itile in {1..6} +do + cp ${DATA_SHARE}/${FILEDATE}.sfc_data.tile${itile}.nc . 
+ cp ${DATA_SHARE}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc . +done + # convert back to vector, run model (all members) convert back to vector, run model (all members) if [[ ${ATMOS_FORC} == "era5" ]]; then echo '************************************************' @@ -122,9 +121,8 @@ elif [[ ${ATMOS_FORC} == "gswp3" ]]; then # save analysis restart mkdir -p ${COMOUT}/RESTART/tile - for tile in 1 2 3 4 5 6 + for itile in {1..6} do - cp -p ${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc ${COMOUT}/RESTART/tile/ufs_land_restart_anal.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc - cp -p ${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc ${COMOUT}/RESTART/tile/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${tile}.nc + cp -p ${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc ${COMOUT}/RESTART/tile/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${itile}.nc done fi diff --git a/scripts/exlandda_pre_anal.sh b/scripts/exlandda_pre_anal.sh index 74b0394f..9ed911aa 100755 --- a/scripts/exlandda_pre_anal.sh +++ b/scripts/exlandda_pre_anal.sh @@ -36,6 +36,7 @@ if [[ $ATMOS_FORC == "era5" ]]; then fi rst_out=${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc cp ${rst_in} ${rst_out} + cp -p ${rst_out} ${DATA_SHARE} echo '************************************************' echo 'calling vector2tile' @@ -81,6 +82,8 @@ elif [[ $ATMOS_FORC == "gswp3" ]]; then fi rst_out=${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc cp ${rst_in} ${rst_out} + # copy restart to data share dir for post_anal + cp -p ${rst_out} ${DATA_SHARE} done # update tile2tile namelist @@ -112,6 +115,6 @@ fi #stage restarts for applying JEDI update to intermediate directory for itile in {1..6} do - cp -p ${DATA}/${FILEDATE}.sfc_data.tile${itile}.nc ${COMOUT}/${FILEDATE}.sfc_data.tile${itile}.nc + cp -p ${DATA}/${FILEDATE}.sfc_data.tile${itile}.nc ${DATA_SHARE}/${FILEDATE}.sfc_data.tile${itile}.nc done diff --git a/scripts/exlandda_prep_obs.sh b/scripts/exlandda_prep_obs.sh index cb2c269d..c3799e94 100755 --- a/scripts/exlandda_prep_obs.sh +++ b/scripts/exlandda_prep_obs.sh @@ -14,7 +14,7 @@ MP=${PTIME:4:2} DP=${PTIME:6:2} HP=${PTIME:8:2} -mkdir -p "${COMOUT}/OBS" +mkdir -p "${COMOUT}/obs" ################################################ # 2. 
PREPARE OBS FILES @@ -44,7 +44,7 @@ for obs in "${OBS_TYPES[@]}"; do # check obs are available if [[ -e $obsfile ]]; then echo "do_landDA: $obs observations found: $obsfile" - cp -p $obsfile ${COMOUT}/OBS/${obs}_${YYYY}${MM}${DD}${HH}.nc + cp -p $obsfile ${COMOUT}/obs/${obs}_${YYYY}${MM}${DD}${HH}.nc else echo "${obs} observations not found: $obsfile" fi From 18ce7a46738be59cc044248c1aa8febed3cc278d Mon Sep 17 00:00:00 2001 From: Chan-Hoo Jeon Date: Fri, 17 May 2024 15:10:15 -0500 Subject: [PATCH 07/18] introduce data_restart --- jobs/JLANDDA_FORECAST | 2 + jobs/JLANDDA_POST_ANAL | 2 + jobs/JLANDDA_PRE_ANAL | 2 + parm/land_analysis_era5_hera.yaml | 206 ---------------------------- parm/land_analysis_era5_orion.yaml | 176 ------------------------ parm/land_analysis_gswp3_hera.yaml | 1 + parm/land_analysis_gswp3_orion.yaml | 13 +- scripts/exlandda_analysis.sh | 5 +- scripts/exlandda_forecast.sh | 33 ++--- scripts/exlandda_post_anal.sh | 13 +- scripts/exlandda_pre_anal.sh | 51 +++---- 11 files changed, 72 insertions(+), 432 deletions(-) delete mode 100644 parm/land_analysis_era5_hera.yaml delete mode 100644 parm/land_analysis_era5_orion.yaml diff --git a/jobs/JLANDDA_FORECAST b/jobs/JLANDDA_FORECAST index e04c5786..337cd273 100755 --- a/jobs/JLANDDA_FORECAST +++ b/jobs/JLANDDA_FORECAST @@ -70,6 +70,8 @@ mkdir -p ${COMOUT} # Create a teomporary share directory export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" mkdir -p ${DATA_SHARE} +export DATA_RESTART="${DATA_RESTART:-${DATAROOT}/DATA_SHARE/RESTART}" +mkdir -p ${DATA_RESTART} # Run setpdy to initialize PDYm and PDYp variables export cycle="${cycle:-t${cyc}z}" diff --git a/jobs/JLANDDA_POST_ANAL b/jobs/JLANDDA_POST_ANAL index dbdb8861..1a896664 100755 --- a/jobs/JLANDDA_POST_ANAL +++ b/jobs/JLANDDA_POST_ANAL @@ -70,6 +70,8 @@ mkdir -p ${COMOUT} # Create a teomporary share directory export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" mkdir -p ${DATA_SHARE} +export DATA_RESTART="${DATA_RESTART:-${DATAROOT}/DATA_SHARE/RESTART}" +mkdir -p ${DATA_RESTART} # Run setpdy to initialize PDYm and PDYp variables export cycle="${cycle:-t${cyc}z}" diff --git a/jobs/JLANDDA_PRE_ANAL b/jobs/JLANDDA_PRE_ANAL index 30750dd2..f15e2323 100755 --- a/jobs/JLANDDA_PRE_ANAL +++ b/jobs/JLANDDA_PRE_ANAL @@ -70,6 +70,8 @@ mkdir -p ${COMOUT} # Create a teomporary share directory export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" mkdir -p ${DATA_SHARE} +export DATA_RESTART="${DATA_RESTART:-${DATAROOT}/DATA_SHARE/RESTART}" +mkdir -p ${DATA_RESTART} # Run setpdy to initialize PDYm and PDYp variables export cycle="${cycle:-t${cyc}z}" diff --git a/parm/land_analysis_era5_hera.yaml b/parm/land_analysis_era5_hera.yaml deleted file mode 100644 index fee5f3de..00000000 --- a/parm/land_analysis_era5_hera.yaml +++ /dev/null @@ -1,206 +0,0 @@ -workflow: - attrs: - realtime: false - scheduler: slurm - cyclethrottle: 24 - taskthrottle: 24 - cycledef: - - attrs: - group: cycled - spec: 201912210000 201912210000 24:00:00 - entities: - MACHINE: "hera" - SCHED: "slurm" - ACCOUNT: "nems" - EXP_NAME: "LETKF" - EXP_BASEDIR: "/scratch2/NAGAPE/epic/{USER}/landda_test" - JEDI_INSTALL: "/scratch2/NAGAPE/epic/UFS_Land-DA/jedi_skylabv7.0" - FORCING: "era5" - RES: "96" - FCSTHR: "24" - NPROCS_ANALYSIS: "6" - NPROCS_FORECAST: "6" - OBSDIR: "" - OBSDIR_SUBDIR: "" - OBS_TYPES: "GHCN" - DAtype: "letkfoi_snow" - SNOWDEPTHVAR: "snwdph" - TSTUB: "oro_C96.mx100" - NET: "landda" - envir: "test" - model_ver: "v1.2.1" - HOMElandda: 
"&EXP_BASEDIR;/land-DA_workflow" - PTMP: "&EXP_BASEDIR;/ptmp" - COMROOT: "&PTMP;/&envir;/com" - DATAROOT: "&PTMP;/&envir;/tmp" - KEEPDATA: "YES" - WORKDIR: "&EXP_BASEDIR;/workdir/run_&FORCING;" - LOGDIR: "&COMROOT;/output/logs/run_&FORCING;" - PATHRT: "&EXP_BASEDIR;" - PDY: "@Y@m@d" - cyc: "@H" - SLASH_ENSMEM_SUBDIR: "" - PTIME: "@Y@m@d@H" - NTIME: "@Y@m@d@H" - log: "&LOGDIR;/workflow.log" - tasks: - task_prep_exp: - envars: - MACHINE: "&MACHINE;" - SCHED: "&SCHED;" - ACCOUNT: "&ACCOUNT;" - EXP_NAME: "&EXP_NAME;" - ATMOS_FORC: "&FORCING;" - RES: "&RES;" - TSTUB: "&TSTUB;" - WORKDIR: "&WORKDIR;" - model_ver: "&model_ver;" - HOMElandda: "&HOMElandda;" - COMROOT: "&COMROOT;" - DATAROOT: "&DATAROOT;" - KEEPDATA: "&KEEPDATA;" - PDY: "&PDY;" - cyc: "&cyc;" - SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" - PTIME: "&PTIME;" - account: "&ACCOUNT;" - command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_exp" "&HOMElandda;" "&MACHINE;"' - jobname: prep_exp - cores: 1 - walltime: 00:02:00 - queue: batch - join: "&LOGDIR;/prep_exp.log" - task_prep_obs: - envars: - OBSDIR: "&OBSDIR;" - OBSDIR_SUBDIR: "&OBSDIR_SUBDIR;" - OBS_TYPES: "&OBS_TYPES;" - MACHINE: "&MACHINE;" - SCHED: "&SCHED;" - ACCOUNT: "&ACCOUNT;" - EXP_NAME: "&EXP_NAME;" - ATMOS_FORC: "&FORCING;" - WORKDIR: "&WORKDIR;" - model_ver: "&model_ver;" - HOMElandda: "&HOMElandda;" - COMROOT: "&COMROOT;" - DATAROOT: "&DATAROOT;" - KEEPDATA: "&KEEPDATA;" - PDY: "&PDY;" - cyc: "&cyc;" - SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" - PTIME: "&PTIME;" - account: "&ACCOUNT;" - command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_obs" "&HOMElandda;" "&MACHINE;"' - jobname: prep_obs - cores: 1 - walltime: 00:02:00 - queue: batch - join: "&LOGDIR;/prep_obs.log" - dependency: - taskdep: - attrs: - task: prep_exp - task_prep_bmat: - envars: - MACHINE: "&MACHINE;" - SCHED: "&SCHED;" - ACCOUNT: "&ACCOUNT;" - EXP_NAME: "&EXP_NAME;" - ATMOS_FORC: "&FORCING;" - WORKDIR: "&WORKDIR;" - model_ver: "&model_ver;" - HOMElandda: "&HOMElandda;" - COMROOT: "&COMROOT;" - DATAROOT: "&DATAROOT;" - KEEPDATA: "&KEEPDATA;" - PDY: "&PDY;" - cyc: "&cyc;" - SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" - PTIME: "&PTIME;" - DAtype: "&DAtype;" - SNOWDEPTHVAR: "&SNOWDEPTHVAR;" - account: "&ACCOUNT;" - command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_bmat" "&HOMElandda;" "&MACHINE;"' - jobname: prep_bmat - cores: 1 - walltime: 00:02:00 - queue: batch - join: "&LOGDIR;/prep_bmat.log" - dependency: - taskdep: - attrs: - task: prep_obs - task_analysis: - envars: - OBS_TYPES: "&OBS_TYPES;" - MACHINE: "&MACHINE;" - SCHED: "&SCHED;" - ACCOUNT: "&ACCOUNT;" - EXP_NAME: "&EXP_NAME;" - ATMOS_FORC: "&FORCING;" - RES: "&RES;" - TSTUB: "&TSTUB;" - WORKDIR: "&WORKDIR;" - model_ver: "&model_ver;" - HOMElandda: "&HOMElandda;" - COMROOT: "&COMROOT;" - DATAROOT: "&DATAROOT;" - KEEPDATA: "&KEEPDATA;" - PDY: "&PDY;" - cyc: "&cyc;" - SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" - PTIME: "&PTIME;" - NTIME: "&NTIME;" - DAtype: "&DAtype;" - SNOWDEPTHVAR: "&SNOWDEPTHVAR;" - NPROC_JEDI: "&NPROCS_ANALYSIS;" - JEDI_INSTALL: "&JEDI_INSTALL;" - account: "&ACCOUNT;" - command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "analysis" "&HOMElandda;" "&MACHINE;"' - jobname: analysis - nodes: "1:ppn=&NPROCS_ANALYSIS;" - walltime: 00:15:00 - queue: batch - join: "&LOGDIR;/analysis.log" - dependency: - taskdep: - attrs: - task: prep_bmat - task_forecast: - envars: - OBS_TYPES: "&OBS_TYPES;" - MACHINE: "&MACHINE;" - SCHED: "&SCHED;" - ACCOUNT: "&ACCOUNT;" - EXP_NAME: "&EXP_NAME;" - 
ATMOS_FORC: "&FORCING;"
-        RES: "&RES;"
-        TSTUB: "&TSTUB;"
-        WORKDIR: "&WORKDIR;"
-        model_ver: "&model_ver;"
-        HOMElandda: "&HOMElandda;"
-        COMROOT: "&COMROOT;"
-        DATAROOT: "&DATAROOT;"
-        KEEPDATA: "&KEEPDATA;"
-        LOGDIR: "&LOGDIR;"
-        PDY: "&PDY;"
-        cyc: "&cyc;"
-        SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;"
-        PTIME: "&PTIME;"
-        NTIME: "&NTIME;"
-        DAtype: "&DAtype;"
-        SNOWDEPTHVAR: "&SNOWDEPTHVAR;"
-        JEDI_INSTALL: "&JEDI_INSTALL;"
-        FCSTHR: "&FCSTHR;"
-        account: "&ACCOUNT;"
-      command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "forecast" "&HOMElandda;" "&MACHINE;"'
-      jobname: forecast
-      nodes: "1:ppn=&NPROCS_FORECAST;"
-      walltime: 00:30:00
-      queue: batch
-      join: "&LOGDIR;/forecast.log"
-      dependency:
-        taskdep:
-          attrs:
-            task: analysis
diff --git a/parm/land_analysis_era5_orion.yaml b/parm/land_analysis_era5_orion.yaml
deleted file mode 100644
index a3c1b45f..00000000
--- a/parm/land_analysis_era5_orion.yaml
+++ /dev/null
@@ -1,176 +0,0 @@
-workflow:
-  attrs:
-    realtime: false
-    scheduler: slurm
-    cyclethrottle: 24
-    taskthrottle: 24
-  cycledef:
-  - attrs:
-      group: cycled
-    spec: 201912210000 201912210000 24:00:00
-  entities:
-    MACHINE: "orion"
-    SCHED: "slurm"
-    ACCOUNT: "epic"
-    EXP_NAME: "LETKF"
-    EXP_BASEDIR: "/work/noaa/epic/{USER}/landda_test"
-    JEDI_INSTALL: "/work/noaa/epic/UFS_Land-DA_Dev/jedi_v7"
-    FORCING: "era5"
-    RES: "96"
-    FCSTHR: "24"
-    NPROCS_ANALYSIS: "6"
-    NPROCS_FORECAST: "6"
-    OBSDIR: ""
-    OBSDIR_SUBDIR: ""
-    OBS_TYPES: "GHCN"
-    DAtype: "letkfoi_snow"
-    SNOWDEPTHVAR: "snwdph"
-    TSTUB: "oro_C96.mx100"
-    NET: "landda"
-    envir: "test"
-    model_ver: "v1.2.1"
-    HOMElandda: "&EXP_BASEDIR;/land-DA_workflow"
-    PTMP: "&EXP_BASEDIR;/ptmp"
-    COMROOT: "&PTMP;/&envir;/com"
-    DATAROOT: "&PTMP;/&envir;/tmp"
-    KEEPDATA: "YES"
-    WORKDIR: "&EXP_BASEDIR;/workdir/run_&FORCING;"
-    LOGDIR: "&COMROOT;/output/logs/run_&FORCING;"
-    PATHRT: "&EXP_BASEDIR;"
-    PDY: "@Y@m@d"
-    cyc: "@H"
-    SLASH_ENSMEM_SUBDIR: ""
-    PTIME: "@Y@m@d@H"
-    NTIME: "@Y@m@d@H"
-  log: "&LOGDIR;/workflow.log"
-  tasks:
-    task_prep_exp:
-      envars:
-        MACHINE: "&MACHINE;"
-        SCHED: "&SCHED;"
-        ACCOUNT: "&ACCOUNT;"
-        EXP_NAME: "&EXP_NAME;"
-        ATMOS_FORC: "&FORCING;"
-        RES: "&RES;"
-        TSTUB: "&TSTUB;"
-        WORKDIR: "&WORKDIR;"
-        model_ver: "&model_ver;"
-        HOMElandda: "&HOMElandda;"
-        COMROOT: "&COMROOT;"
-        DATAROOT: "&DATAROOT;"
-        KEEPDATA: "&KEEPDATA;"
-        PDY: "&PDY;"
-        cyc: "&cyc;"
-        SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;"
-        PTIME: "&PTIME;"
-        account: "&ACCOUNT;"
-      command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_exp" "&HOMElandda;" "&MACHINE;"'
-      jobname: prep_exp
-      cores: 1
-      walltime: 00:02:00
-      queue: batch
-      join: "&LOGDIR;/prep_exp.log"
-    task_prep_obs:
-      envars:
-        OBSDIR: "&OBSDIR;"
-        OBSDIR_SUBDIR: "&OBSDIR_SUBDIR;"
-        OBS_TYPES: "&OBS_TYPES;"
-        MACHINE: "&MACHINE;"
-        SCHED: "&SCHED;"
-        ACCOUNT: "&ACCOUNT;"
-        EXP_NAME: "&EXP_NAME;"
-        ATMOS_FORC: "&FORCING;"
-        WORKDIR: "&WORKDIR;"
-        model_ver: "&model_ver;"
-        HOMElandda: "&HOMElandda;"
-        COMROOT: "&COMROOT;"
-        DATAROOT: "&DATAROOT;"
-        KEEPDATA: "&KEEPDATA;"
-        PDY: "&PDY;"
-        cyc: "&cyc;"
-        SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;"
-        PTIME: "&PTIME;"
-        account: "&ACCOUNT;"
-      command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_obs" "&HOMElandda;" "&MACHINE;"'
-      jobname: prep_obs
-      cores: 1
-      walltime: 00:02:00
-      queue: batch
-      join: "&LOGDIR;/prep_obs.log"
-      dependency:
-        taskdep:
-          attrs:
-            task: prep_exp
-    task_analysis:
-      envars:
-        OBS_TYPES: "&OBS_TYPES;"
-        MACHINE: "&MACHINE;"
-        SCHED: "&SCHED;"
-        ACCOUNT: "&ACCOUNT;"
-        EXP_NAME: "&EXP_NAME;"
-        ATMOS_FORC: "&FORCING;"
-        RES: "&RES;"
-        TSTUB: "&TSTUB;"
-        WORKDIR: "&WORKDIR;"
-        model_ver: "&model_ver;"
-        HOMElandda: "&HOMElandda;"
-        COMROOT: "&COMROOT;"
-        DATAROOT: "&DATAROOT;"
-        KEEPDATA: "&KEEPDATA;"
-        PDY: "&PDY;"
-        cyc: "&cyc;"
-        SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;"
-        PTIME: "&PTIME;"
-        NTIME: "&NTIME;"
-        DAtype: "&DAtype;"
-        SNOWDEPTHVAR: "&SNOWDEPTHVAR;"
-        NPROC_JEDI: "&NPROCS_ANALYSIS;"
-        JEDI_INSTALL: "&JEDI_INSTALL;"
-        account: "&ACCOUNT;"
-      command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "analysis" "&HOMElandda;" "&MACHINE;"'
-      jobname: analysis
-      nodes: "1:ppn=&NPROCS_ANALYSIS;"
-      walltime: 00:15:00
-      queue: batch
-      join: "&LOGDIR;/analysis.log"
-      dependency:
-        taskdep:
-          attrs:
-            task: prep_obs
-    task_forecast:
-      envars:
-        OBS_TYPES: "&OBS_TYPES;"
-        MACHINE: "&MACHINE;"
-        SCHED: "&SCHED;"
-        ACCOUNT: "&ACCOUNT;"
-        EXP_NAME: "&EXP_NAME;"
-        ATMOS_FORC: "&FORCING;"
-        RES: "&RES;"
-        TSTUB: "&TSTUB;"
-        WORKDIR: "&WORKDIR;"
-        model_ver: "&model_ver;"
-        HOMElandda: "&HOMElandda;"
-        COMROOT: "&COMROOT;"
-        DATAROOT: "&DATAROOT;"
-        KEEPDATA: "&KEEPDATA;"
-        LOGDIR: "&LOGDIR;"
-        PDY: "&PDY;"
-        cyc: "&cyc;"
-        SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;"
-        PTIME: "&PTIME;"
-        NTIME: "&NTIME;"
-        DAtype: "&DAtype;"
-        SNOWDEPTHVAR: "&SNOWDEPTHVAR;"
-        JEDI_INSTALL: "&JEDI_INSTALL;"
-        FCSTHR: "&FCSTHR;"
-        account: "&ACCOUNT;"
-      command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "forecast" "&HOMElandda;" "&MACHINE;"'
-      jobname: forecast
-      nodes: "1:ppn=&NPROCS_FORECAST;"
-      walltime: 00:30:00
-      queue: batch
-      join: "&LOGDIR;/forecast.log"
-      dependency:
-        taskdep:
-          attrs:
-            task: analysis
diff --git a/parm/land_analysis_gswp3_hera.yaml b/parm/land_analysis_gswp3_hera.yaml
index 798af689..1cba9dad 100644
--- a/parm/land_analysis_gswp3_hera.yaml
+++ b/parm/land_analysis_gswp3_hera.yaml
@@ -8,6 +8,7 @@ workflow:
   - attrs:
       group: cycled
     spec: 200001030000 200001040000 24:00:00
+# for era5: spec: 201912210000 201912210000 24:00:00
   entities:
     MACHINE: "hera"
     SCHED: "slurm"
diff --git a/parm/land_analysis_gswp3_orion.yaml b/parm/land_analysis_gswp3_orion.yaml
index 90f8cf1f..84b904f5 100644
--- a/parm/land_analysis_gswp3_orion.yaml
+++ b/parm/land_analysis_gswp3_orion.yaml
@@ -8,6 +8,7 @@ workflow:
   - attrs:
       group: cycled
    spec: 200001030000 200001040000 24:00:00
+# for era5: spec: 201912210000 201912210000 24:00:00
   entities:
     MACHINE: "orion"
     SCHED: "slurm"
@@ -44,7 +45,9 @@ workflow:
     PTIME: "@Y@m@d@H"
     NTIME: "@Y@m@d@H"
     DATADEP_FILE1: "&WARMSTART_DIR;/ufs_land_restart.@Y-@m-@d_@H-00-00.tile1.nc"
-    DATADEP_FILE2: "&COMROOT;/&NET;/&model_ver;/&RUN;.@Y@m@d/@H/RESTART/ufs_land_restart.@Y-@m-@d_@H-00-00.tile1.nc"
+    DATADEP_FILE2: "&WARMSTART_DIR;/ufs_land_restart.@Y-@m-@d_@H-00-00.nc"
+    DATADEP_FILE3: "&DATAROOT;/DATA_SHARE/RESTART/ufs_land_restart.@Y-@m-@d_@H-00-00.tile1.nc"
+    DATADEP_FILE4: "&DATAROOT;/DATA_SHARE/RESTART/ufs_land_restart.@Y-@m-@d_@H-00-00.nc"
   log: "&LOGDIR;/workflow.log"
   tasks:
     task_prep_obs:
@@ -111,6 +114,14 @@ workflow:
             attrs:
               age: 5
            value: "&DATADEP_FILE2;"
+          datadep_file3:
+            attrs:
+              age: 5
+            value: "&DATADEP_FILE3;"
+          datadep_file4:
+            attrs:
+              age: 5
+            value: "&DATADEP_FILE4;"
     task_analysis:
       attrs:
         cycledefs: cycled
diff --git a/scripts/exlandda_analysis.sh b/scripts/exlandda_analysis.sh
index 4cf3b803..03cbfc83 100755
--- a/scripts/exlandda_analysis.sh
+++ b/scripts/exlandda_analysis.sh
@@ -2,9 +2,6 @@
 
 set -xue
 
-############################
-# copy restarts to workdir, convert to UFS tile for DA (all members)
-
TPATH=${FIXlandda}/forcing/${ATMOS_FORC}/orog_files/ YYYY=${PDY:0:4} MM=${PDY:4:2} @@ -200,7 +197,7 @@ if [[ $do_HOFX == "YES" ]]; then fi ################################################ -# Create increment files +# Apply Increment to UFS sfc_data files ################################################ if [[ $do_DA == "YES" ]]; then diff --git a/scripts/exlandda_forecast.sh b/scripts/exlandda_forecast.sh index e921bc74..3fd651ac 100755 --- a/scripts/exlandda_forecast.sh +++ b/scripts/exlandda_forecast.sh @@ -2,9 +2,6 @@ set -xue -############################ -# copy restarts to workdir, convert to UFS tile for DA (all members) - MACHINE_ID=${MACHINE} TPATH=${FIXlandda}/forcing/${ATMOS_FORC}/orog_files/ YYYY=${PDY:0:4} @@ -27,10 +24,6 @@ module use modulefiles; module load modules.landda MPIEXEC=`which mpiexec` -for itile in {1..6} -do - cp ${COMIN}/RESTART/tile/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${itile}.nc . -done # convert back to UFS tile, run model (all members) if [[ ${ATMOS_FORC} == "gswp3" ]]; then @@ -38,6 +31,11 @@ if [[ ${ATMOS_FORC} == "gswp3" ]]; then echo '************************************************' echo 'running the forecast model' + for itile in {1..6} + do + cp ${COMIN}/RESTART/ufs_land_restart.anal.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${itile}.nc + done + TEST_NAME=datm_cdeps_lnd_gswp3 TEST_NAME_RST=datm_cdeps_lnd_gswp3_rst PATHRT=${HOMElandda}/sorc/ufs_model.fd/tests @@ -105,7 +103,7 @@ if [[ ${ATMOS_FORC} == "gswp3" ]]; then # restart if [ $WARM_START = .true. ]; then # NoahMP restart files - cp ${COMOUT}/RESTART/tile/ufs.cpld.lnd.out.${RESTART_FILE_SUFFIX_SECS}.tile*.nc RESTART/. + cp ${COMOUT}/RESTART/ufs.cpld.lnd.out.${RESTART_FILE_SUFFIX_SECS}.tile*.nc RESTART/. 
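# A minimal sketch, not taken from the actual scripts, of a guard that could confirm the
# warm-start staging above produced all six NoahMP tile restarts before the model is run;
# it reuses only the RESTART_FILE_SUFFIX_SECS name already used in the copy above:
for itile in {1..6}; do
  rst="RESTART/ufs.cpld.lnd.out.${RESTART_FILE_SUFFIX_SECS}.tile${itile}.nc"
  if [[ ! -s "${rst}" ]]; then
    echo "FATAL ERROR: warm-start restart ${rst} is missing or empty"
    exit 1
  fi
done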
# CMEPS restart and pointer files RFILE1=ufs.cpld.cpl.r.${RESTART_FILE_SUFFIX_SECS}.nc @@ -161,17 +159,16 @@ if [[ ${ATMOS_FORC} == "gswp3" ]]; then echo "ufs_model failed" exit 10 fi -fi -############################ -# check model ouput (all members) -if [[ ${ATMOS_FORC} == "era5" ]]; then - if [[ -e ${DATA}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc ]]; then - cp -p ${DATA}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc ${COMOUT}/RESTART/vector/ufs_land_restart_back.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc - fi -elif [[ ${ATMOS_FORC} == "gswp3" ]]; then - for tile in 1 2 3 4 5 6 + # copy model ouput to COM + for itile in {1..6} + do + cp -p ${DATA}/ufs.cpld.lnd.out.${nYYYY}-${nMM}-${nDD}-00000.tile${itile}.nc ${COMOUT}/RESTART/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.tile${itile}.nc + done + + # link restart for next cycle + for itile in {1..6} do - cp -p ${DATA}/ufs.cpld.lnd.out.${nYYYY}-${nMM}-${nDD}-00000.tile${tile}.nc ${COMOUT}/RESTART/tile/ufs_land_restart_back.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.tile${tile}.nc + ln -nsf ${COMOUT}/RESTART/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.tile${itile}.nc ${DATA_RESTART} done fi diff --git a/scripts/exlandda_post_anal.sh b/scripts/exlandda_post_anal.sh index 22b73488..b2fe1690 100755 --- a/scripts/exlandda_post_anal.sh +++ b/scripts/exlandda_post_anal.sh @@ -62,8 +62,7 @@ if [[ ${ATMOS_FORC} == "era5" ]]; then fi # save analysis restart - mkdir -p ${COMOUT}/RESTART/vector - cp -p ${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc ${COMOUT}/RESTART/vector/ufs_land_restart_anal.${YYYY}-${MM}-${DD}_${HH}-00-00.nc + cp -p ${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc ${COMOUT}/ufs_land_restart.anal.${YYYY}-${MM}-${DD}_${HH}-00-00.nc echo '************************************************' echo 'running the forecast model' @@ -92,6 +91,12 @@ if [[ ${ATMOS_FORC} == "era5" ]]; then exit 10 fi + mkdir -p ${COMOUT}/RESTART + cp -p ${DATA}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc ${COMOUT}/RESTART/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc + + # link restart for next cycle + ln -nsf ${COMOUT}/RESTART/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc ${DATA_RESTART} + # convert back to UFS tile, run model (all members) elif [[ ${ATMOS_FORC} == "gswp3" ]]; then echo '************************************************' @@ -120,9 +125,9 @@ elif [[ ${ATMOS_FORC} == "gswp3" ]]; then fi # save analysis restart - mkdir -p ${COMOUT}/RESTART/tile + mkdir -p ${COMOUT}/RESTART for itile in {1..6} do - cp -p ${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc ${COMOUT}/RESTART/tile/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${itile}.nc + cp -p ${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc ${COMOUT}/ufs_land_restart.anal.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc done fi diff --git a/scripts/exlandda_pre_anal.sh b/scripts/exlandda_pre_anal.sh index 9ed911aa..790808fd 100755 --- a/scripts/exlandda_pre_anal.sh +++ b/scripts/exlandda_pre_anal.sh @@ -2,9 +2,6 @@ set -xue -############################ -# copy restarts to workdir, convert to UFS tile for DA (all members) - TPATH=${FIXlandda}/forcing/${ATMOS_FORC}/orog_files/ YYYY=${PDY:0:4} MM=${PDY:4:2} @@ -30,13 +27,16 @@ module use modulefiles; module load modules.landda if [[ $ATMOS_FORC == "era5" ]]; then # vector2tile for DA # copy restarts into work directory - rst_in=${COMIN}/RESTART/vector/ufs_land_restart_back.${YYYY}-${MM}-${DD}_${HH}-00-00.nc - if [[ ! 
-e ${rst_in} ]]; then - rst_in=${FIXlandda}/restarts/${ATMOS_FORC}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc + rst_fn="ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc" + if [[ -e ${DATA_RESTART}/${rst_fn} ]]; then + cp ${DATA_RESTART}/${rst_fn} . + elif [[ -e ${WARMSTART_DIR}/${rst_fn} ]]; then + cp ${WARMSTART_DIR}/${rst_fn} . + else + echo "Initial restart file does not exist" + exit 11 fi - rst_out=${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc - cp ${rst_in} ${rst_out} - cp -p ${rst_out} ${DATA_SHARE} + cp -p ${rst_fn} ${DATA_SHARE} echo '************************************************' echo 'calling vector2tile' @@ -66,8 +66,10 @@ if [[ $ATMOS_FORC == "era5" ]]; then export err=$?; err_chk if [[ $err != 0 ]]; then echo "vec2tile failed" - exit + exit 12 fi + + elif [[ $ATMOS_FORC == "gswp3" ]]; then # tile2tile for DA echo '************************************************' @@ -76,14 +78,17 @@ elif [[ $ATMOS_FORC == "gswp3" ]]; then # copy restarts into work directory for itile in {1..6} do - rst_in=${COMIN}/RESTART/tile/ufs_land_restart_back.${YYYY}-${MM}-${DD}_${HH}-00-00.nc - if [[ ! -e ${rst_in} ]]; then - rst_in=${FIXlandda}/restarts/${ATMOS_FORC}/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${itile}.nc + rst_fn="ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc" + if [[ -e ${DATA_RESTART}/${rst_fn} ]]; then + cp ${DATA_RESTART}/${rst_fn} . + elif [[ -e ${WARMSTART_DIR}/${rst_fn} ]]; then + cp ${WARMSTART_DIR}/${rst_fn} . + else + echo "Initial restart files do not exist" + exit 21 fi - rst_out=${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc - cp ${rst_in} ${rst_out} # copy restart to data share dir for post_anal - cp -p ${rst_out} ${DATA_SHARE} + cp -p ${rst_fn} ${DATA_SHARE} done # update tile2tile namelist @@ -108,13 +113,13 @@ elif [[ $ATMOS_FORC == "gswp3" ]]; then export err=$?; err_chk if [[ $err != 0 ]]; then echo "tile2tile failed" - exit + exit 22 fi -fi -#stage restarts for applying JEDI update to intermediate directory -for itile in {1..6} -do - cp -p ${DATA}/${FILEDATE}.sfc_data.tile${itile}.nc ${DATA_SHARE}/${FILEDATE}.sfc_data.tile${itile}.nc -done + #stage restarts for applying JEDI update to intermediate directory + for itile in {1..6} + do + cp -p ${DATA}/${FILEDATE}.sfc_data.tile${itile}.nc ${DATA_SHARE}/${FILEDATE}.sfc_data.tile${itile}.nc + done +fi From 822decdc238c2bb1434bfab5a832309182046b9f Mon Sep 17 00:00:00 2001 From: Chan-Hoo Jeon Date: Fri, 17 May 2024 16:24:07 -0500 Subject: [PATCH 08/18] fix typo in xml yaml --- parm/land_analysis_gswp3_orion.yaml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/parm/land_analysis_gswp3_orion.yaml b/parm/land_analysis_gswp3_orion.yaml index 84b904f5..5c29abf1 100644 --- a/parm/land_analysis_gswp3_orion.yaml +++ b/parm/land_analysis_gswp3_orion.yaml @@ -16,7 +16,7 @@ workflow: EXP_NAME: "LETKF" EXP_BASEDIR: "/work/noaa/epic/{USER}/landda_test" JEDI_INSTALL: "/work/noaa/epic/UFS_Land-DA_Dev/jedi_v7" - WARMSTART_DIR: "" + WARMSTART_DIR: "/work/noaa/epic/UFS_Land-DA_Dev/DATA_RESTART" FORCING: "gswp3" RES: "96" FCSTHR: "24" @@ -46,8 +46,8 @@ workflow: NTIME: "@Y@m@d@H" DATADEP_FILE1: "&WARMSTART_DIR;/ufs_land_restart.@Y-@m-@d_@H-00-00.tile1.nc" DATADEP_FILE2: "&WARMSTART_DIR;/ufs_land_restart.@Y-@m-@d_@H-00-00.nc" - DATADEP_FILE3: "&DATAROOT/DATA_SHARE/RESTART/ufs_land_restart.@Y-@m-@d_@H-00-00.tile1.nc" - DATADEP_FILE3: "&DATAROOT/DATA_SHARE/RESTART/ufs_land_restart.@Y-@m-@d_@H-00-00.nc" + DATADEP_FILE3: 
"&DATAROOT;/DATA_SHARE/RESTART/ufs_land_restart.@Y-@m-@d_@H-00-00.tile1.nc" + DATADEP_FILE4: "&DATAROOT;/DATA_SHARE/RESTART/ufs_land_restart.@Y-@m-@d_@H-00-00.nc" log: "&LOGDIR;/workflow.log" tasks: task_prep_obs: @@ -88,6 +88,7 @@ workflow: ATMOS_FORC: "&FORCING;" RES: "&RES;" TSTUB: "&TSTUB;" + WARMSTART_DIR: "&WARMSTART_DIR;" model_ver: "&model_ver;" RUN: "&RUN;" HOMElandda: "&HOMElandda;" From 07de1b0a85645bd4f7244fe8706d2c8c67ed6549 Mon Sep 17 00:00:00 2001 From: Chan-Hoo Jeon Date: Fri, 17 May 2024 17:01:11 -0500 Subject: [PATCH 09/18] fix typo in forecast script --- scripts/exlandda_forecast.sh | 11 +++++++---- scripts/exlandda_post_anal.sh | 6 ++---- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/scripts/exlandda_forecast.sh b/scripts/exlandda_forecast.sh index 3fd651ac..3cbf1f51 100755 --- a/scripts/exlandda_forecast.sh +++ b/scripts/exlandda_forecast.sh @@ -33,7 +33,7 @@ if [[ ${ATMOS_FORC} == "gswp3" ]]; then for itile in {1..6} do - cp ${COMIN}/RESTART/ufs_land_restart.anal.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${itile}.nc + cp ${COMIN}/ufs_land_restart.anal.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${itile}.nc done TEST_NAME=datm_cdeps_lnd_gswp3 @@ -103,7 +103,10 @@ if [[ ${ATMOS_FORC} == "gswp3" ]]; then # restart if [ $WARM_START = .true. ]; then # NoahMP restart files - cp ${COMOUT}/RESTART/ufs.cpld.lnd.out.${RESTART_FILE_SUFFIX_SECS}.tile*.nc RESTART/. + for itile in {1..6} + do + ln -nsf ${COMIN}/ufs_land_restart.anal.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc RESTART/ufs.cpld.lnd.out.${RESTART_FILE_SUFFIX_SECS}.tile${itile}.nc + done # CMEPS restart and pointer files RFILE1=ufs.cpld.cpl.r.${RESTART_FILE_SUFFIX_SECS}.nc @@ -163,12 +166,12 @@ if [[ ${ATMOS_FORC} == "gswp3" ]]; then # copy model ouput to COM for itile in {1..6} do - cp -p ${DATA}/ufs.cpld.lnd.out.${nYYYY}-${nMM}-${nDD}-00000.tile${itile}.nc ${COMOUT}/RESTART/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.tile${itile}.nc + cp -p ${DATA}/ufs.cpld.lnd.out.${nYYYY}-${nMM}-${nDD}-00000.tile${itile}.nc ${COMOUT}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.tile${itile}.nc done # link restart for next cycle for itile in {1..6} do - ln -nsf ${COMOUT}/RESTART/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.tile${itile}.nc ${DATA_RESTART} + ln -nsf ${COMOUT}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.tile${itile}.nc ${DATA_RESTART} done fi diff --git a/scripts/exlandda_post_anal.sh b/scripts/exlandda_post_anal.sh index b2fe1690..40ba2c46 100755 --- a/scripts/exlandda_post_anal.sh +++ b/scripts/exlandda_post_anal.sh @@ -91,11 +91,10 @@ if [[ ${ATMOS_FORC} == "era5" ]]; then exit 10 fi - mkdir -p ${COMOUT}/RESTART - cp -p ${DATA}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc ${COMOUT}/RESTART/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc + cp -p ${DATA}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc ${COMOUT}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc # link restart for next cycle - ln -nsf ${COMOUT}/RESTART/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc ${DATA_RESTART} + ln -nsf ${COMOUT}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc ${DATA_RESTART} # convert back to UFS tile, run model (all members) elif [[ ${ATMOS_FORC} == "gswp3" ]]; then @@ -125,7 +124,6 @@ elif [[ ${ATMOS_FORC} == "gswp3" ]]; then fi # save analysis restart - mkdir -p ${COMOUT}/RESTART for itile in {1..6} do cp -p 
${DATA}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc ${COMOUT}/ufs_land_restart.anal.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc From 4fb2a3b745eb7c6447a47c627e3daa338572d203 Mon Sep 17 00:00:00 2001 From: Chan-Hoo Jeon Date: Sat, 18 May 2024 18:56:26 -0500 Subject: [PATCH 10/18] clean up scripts --- parm/run_without_rocoto.sh | 2 ++ scripts/exlandda_prep_obs.sh | 2 -- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/parm/run_without_rocoto.sh b/parm/run_without_rocoto.sh index 971f7618..7b9f8e5c 100755 --- a/parm/run_without_rocoto.sh +++ b/parm/run_without_rocoto.sh @@ -18,9 +18,11 @@ export FORCING="era5" if [ "${MACHINE}" = "hera" ]; then export EXP_BASEDIR="/scratch2/NAGAPE/epic/{USER}/landda_test" export JEDI_INSTALL="/scratch2/NAGAPE/epic/UFS_Land-DA/jedi_skylabv7.0" + export WARMSTART_DIR="" elif [ "${MACHINE}" = "orion" ]; then export EXP_BASEDIR="/work/noaa/epic/{USER}/landda_test" export JEDI_INSTALL="/work/noaa/epic/UFS_Land-DA_Dev/jedi_v7" + export WARMSTART_DIR="/work/noaa/epic/UFS_Land-DA_Dev/DATA_RESTART" fi export RES="96" diff --git a/scripts/exlandda_prep_obs.sh b/scripts/exlandda_prep_obs.sh index c3799e94..1ec3b88a 100755 --- a/scripts/exlandda_prep_obs.sh +++ b/scripts/exlandda_prep_obs.sh @@ -2,8 +2,6 @@ set -xue -############################ -# copy restarts to workdir, convert to UFS tile for DA (all members) YYYY=${PDY:0:4} MM=${PDY:4:2} From b7d8e54517c8ac12f8bbe8eebe3cf4a8777d7875 Mon Sep 17 00:00:00 2001 From: Chan-Hoo Jeon Date: Sun, 19 May 2024 09:41:07 -0500 Subject: [PATCH 11/18] update scripts for era5 --- parm/land_analysis_gswp3_hera.yaml | 144 ++++++++++++++++------------ parm/land_analysis_gswp3_orion.yaml | 7 +- scripts/exlandda_analysis.sh | 3 +- scripts/exlandda_post_anal.sh | 16 +++- scripts/exlandda_pre_anal.sh | 12 +-- scripts/exlandda_prep_obs.sh | 2 +- 6 files changed, 107 insertions(+), 77 deletions(-) diff --git a/parm/land_analysis_gswp3_hera.yaml b/parm/land_analysis_gswp3_hera.yaml index 1cba9dad..63930aea 100644 --- a/parm/land_analysis_gswp3_hera.yaml +++ b/parm/land_analysis_gswp3_hera.yaml @@ -7,16 +7,17 @@ workflow: cycledef: - attrs: group: cycled - spec: 200001030000 200001040000 24:00:00 -# for era5: spec: 201912210000 201912210000 24:00:00 + spec: 200001030000 200001040000 24:00:00 # for gswp3 +# spec: 201912210000 201912220000 24:00:00 # for era5 entities: - MACHINE: "hera" + MACHINE: "orion" SCHED: "slurm" - ACCOUNT: "nems" + ACCOUNT: "epic" EXP_NAME: "LETKF" EXP_BASEDIR: "/scratch2/NAGAPE/epic/{USER}/landda_test" - JEDI_INSTALL: "/scratch2/NAGAPE/epic/UFS_Land-DA/jedi_skylabv7.0" - FORCING: "gswp3" + JEDI_INSTALL: "/scratch2/NAGAPE/epic/UFS_Land-DA_Dev/jedi_v7" + WARMSTART_DIR: "/scratch2/NAGAPE/epic/UFS_Land-DA_Dev/DATA_RESTART" + FORCING: "gswp3" # "gswp3" or "era5" RES: "96" FCSTHR: "24" NPROCS_ANALYSIS: "6" @@ -30,31 +31,37 @@ workflow: NET: "landda" envir: "test" model_ver: "v1.2.1" + RUN: "landda" HOMElandda: "&EXP_BASEDIR;/land-DA_workflow" PTMP: "&EXP_BASEDIR;/ptmp" COMROOT: "&PTMP;/&envir;/com" DATAROOT: "&PTMP;/&envir;/tmp" KEEPDATA: "YES" - WORKDIR: "&EXP_BASEDIR;/workdir/run_&FORCING;" LOGDIR: "&COMROOT;/output/logs/run_&FORCING;" + LOGFN_SUFFIX: "_@Y@m@d@H.log" PATHRT: "&EXP_BASEDIR;" PDY: "@Y@m@d" cyc: "@H" - SLASH_ENSMEM_SUBDIR: "" PTIME: "@Y@m@d@H" NTIME: "@Y@m@d@H" + DATADEP_FILE1: "&WARMSTART_DIR;/ufs_land_restart.@Y-@m-@d_@H-00-00.tile1.nc" + DATADEP_FILE2: "&WARMSTART_DIR;/ufs_land_restart.@Y-@m-@d_@H-00-00.nc" + DATADEP_FILE3: 
"&DATAROOT;/DATA_SHARE/RESTART/ufs_land_restart.@Y-@m-@d_@H-00-00.tile1.nc" + DATADEP_FILE4: "&DATAROOT;/DATA_SHARE/RESTART/ufs_land_restart.@Y-@m-@d_@H-00-00.nc" log: "&LOGDIR;/workflow.log" tasks: - task_prep_exp: + task_prep_obs: + attrs: + cycledefs: cycled envars: + OBSDIR: "&OBSDIR;" + OBSDIR_SUBDIR: "&OBSDIR_SUBDIR;" + OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" ATMOS_FORC: "&FORCING;" - RES: "&RES;" - TSTUB: "&TSTUB;" - WORKDIR: "&WORKDIR;" model_ver: "&model_ver;" HOMElandda: "&HOMElandda;" COMROOT: "&COMROOT;" @@ -62,54 +69,72 @@ workflow: KEEPDATA: "&KEEPDATA;" PDY: "&PDY;" cyc: "&cyc;" - SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" account: "&ACCOUNT;" - command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_exp" "&HOMElandda;" "&MACHINE;"' - jobname: prep_exp + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_obs" "&HOMElandda;" "&MACHINE;"' + jobname: prep_obs cores: 1 walltime: 00:02:00 queue: batch - join: "&LOGDIR;/prep_exp.log" - task_prep_obs: + join: "&LOGDIR;/prep_obs&LOGFN_SUFFIX;" + task_pre_anal: + attrs: + cycledefs: cycled envars: - OBSDIR: "&OBSDIR;" - OBSDIR_SUBDIR: "&OBSDIR_SUBDIR;" - OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" ATMOS_FORC: "&FORCING;" - WORKDIR: "&WORKDIR;" + RES: "&RES;" + TSTUB: "&TSTUB;" + WARMSTART_DIR: "&WARMSTART_DIR;" model_ver: "&model_ver;" + RUN: "&RUN;" HOMElandda: "&HOMElandda;" COMROOT: "&COMROOT;" DATAROOT: "&DATAROOT;" KEEPDATA: "&KEEPDATA;" PDY: "&PDY;" cyc: "&cyc;" - SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" account: "&ACCOUNT;" - command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_obs" "&HOMElandda;" "&MACHINE;"' - jobname: prep_obs + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "pre_anal" "&HOMElandda;" "&MACHINE;"' + jobname: pre_anal cores: 1 - walltime: 00:02:00 + walltime: 00:05:00 queue: batch - join: "&LOGDIR;/prep_obs.log" + join: "&LOGDIR;/pre_anal&LOGFN_SUFFIX;" dependency: - taskdep: - attrs: - task: prep_exp - task_prep_bmat: + or: + datadep_file1: + attrs: + age: 5 + value: "&DATADEP_FILE1;" + datadep_file2: + attrs: + age: 5 + value: "&DATADEP_FILE2;" + datadep_file3: + attrs: + age: 5 + value: "&DATADEP_FILE3;" + datadep_file4: + attrs: + age: 5 + value: "&DATADEP_FILE4;" + task_analysis: + attrs: + cycledefs: cycled envars: + OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" ATMOS_FORC: "&FORCING;" - WORKDIR: "&WORKDIR;" + RES: "&RES;" + TSTUB: "&TSTUB;" model_ver: "&model_ver;" HOMElandda: "&HOMElandda;" COMROOT: "&COMROOT;" @@ -117,24 +142,27 @@ workflow: KEEPDATA: "&KEEPDATA;" PDY: "&PDY;" cyc: "&cyc;" - SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" + NTIME: "&NTIME;" DAtype: "&DAtype;" SNOWDEPTHVAR: "&SNOWDEPTHVAR;" + NPROC_JEDI: "&NPROCS_ANALYSIS;" + JEDI_INSTALL: "&JEDI_INSTALL;" account: "&ACCOUNT;" - command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_bmat" "&HOMElandda;" "&MACHINE;"' - jobname: prep_bmat - cores: 1 - walltime: 00:02:00 + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "analysis" "&HOMElandda;" "&MACHINE;"' + jobname: analysis + nodes: "1:ppn=&NPROCS_ANALYSIS;" + walltime: 00:15:00 queue: batch - join: "&LOGDIR;/prep_bmat.log" + join: "&LOGDIR;/analysis&LOGFN_SUFFIX;" dependency: taskdep: attrs: - task: prep_obs - task_analysis: + task: pre_anal + task_post_anal: + attrs: + cycledefs: 
cycled envars: - OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" @@ -142,33 +170,31 @@ workflow: ATMOS_FORC: "&FORCING;" RES: "&RES;" TSTUB: "&TSTUB;" - WORKDIR: "&WORKDIR;" model_ver: "&model_ver;" + RUN: "&RUN;" HOMElandda: "&HOMElandda;" COMROOT: "&COMROOT;" DATAROOT: "&DATAROOT;" KEEPDATA: "&KEEPDATA;" PDY: "&PDY;" cyc: "&cyc;" - SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" NTIME: "&NTIME;" - DAtype: "&DAtype;" - SNOWDEPTHVAR: "&SNOWDEPTHVAR;" - NPROC_JEDI: "&NPROCS_ANALYSIS;" - JEDI_INSTALL: "&JEDI_INSTALL;" + FCSTHR: "&FCSTHR;" account: "&ACCOUNT;" - command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "analysis" "&HOMElandda;" "&MACHINE;"' - jobname: analysis - nodes: "1:ppn=&NPROCS_ANALYSIS;" - walltime: 00:15:00 + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "post_anal" "&HOMElandda;" "&MACHINE;"' + jobname: post_anal + cores: 1 + walltime: 00:05:00 queue: batch - join: "&LOGDIR;/analysis.log" + join: "&LOGDIR;/post_anal&LOGFN_SUFFIX;" dependency: taskdep: attrs: - task: prep_bmat + task: analysis task_forecast: + attrs: + cycledefs: cycled envars: OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" @@ -176,9 +202,6 @@ workflow: ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" ATMOS_FORC: "&FORCING;" - RES: "&RES;" - TSTUB: "&TSTUB;" - WORKDIR: "&WORKDIR;" model_ver: "&model_ver;" HOMElandda: "&HOMElandda;" COMROOT: "&COMROOT;" @@ -187,21 +210,16 @@ workflow: LOGDIR: "&LOGDIR;" PDY: "&PDY;" cyc: "&cyc;" - SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" - PTIME: "&PTIME;" NTIME: "&NTIME;" DAtype: "&DAtype;" - SNOWDEPTHVAR: "&SNOWDEPTHVAR;" - JEDI_INSTALL: "&JEDI_INSTALL;" - FCSTHR: "&FCSTHR;" account: "&ACCOUNT;" command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "forecast" "&HOMElandda;" "&MACHINE;"' jobname: forecast nodes: "1:ppn=&NPROCS_FORECAST;" - walltime: 00:45:00 + walltime: 01:00:00 queue: batch - join: "&LOGDIR;/forecast.log" + join: "&LOGDIR;/forecast&LOGFN_SUFFIX;" dependency: taskdep: attrs: - task: analysis + task: post_anal diff --git a/parm/land_analysis_gswp3_orion.yaml b/parm/land_analysis_gswp3_orion.yaml index 5c29abf1..76941b00 100644 --- a/parm/land_analysis_gswp3_orion.yaml +++ b/parm/land_analysis_gswp3_orion.yaml @@ -7,8 +7,8 @@ workflow: cycledef: - attrs: group: cycled - spec: 200001030000 200001040000 24:00:00 -# for era5: spec: 201912210000 201912210000 24:00:00 + spec: 200001030000 200001040000 24:00:00 # for gswp3 +# spec: 201912210000 201912220000 24:00:00 # for era5 entities: MACHINE: "orion" SCHED: "slurm" @@ -17,7 +17,7 @@ workflow: EXP_BASEDIR: "/work/noaa/epic/{USER}/landda_test" JEDI_INSTALL: "/work/noaa/epic/UFS_Land-DA_Dev/jedi_v7" WARMSTART_DIR: "/work/noaa/epic/UFS_Land-DA_Dev/DATA_RESTART" - FORCING: "gswp3" + FORCING: "gswp3" # "gswp3" or "era5" RES: "96" FCSTHR: "24" NPROCS_ANALYSIS: "6" @@ -179,6 +179,7 @@ workflow: PDY: "&PDY;" cyc: "&cyc;" PTIME: "&PTIME;" + NTIME: "&NTIME;" FCSTHR: "&FCSTHR;" account: "&ACCOUNT;" command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "post_anal" "&HOMElandda;" "&MACHINE;"' diff --git a/scripts/exlandda_analysis.sh b/scripts/exlandda_analysis.sh index 03cbfc83..fdf82312 100755 --- a/scripts/exlandda_analysis.sh +++ b/scripts/exlandda_analysis.sh @@ -32,6 +32,7 @@ YAML_DA=construct GFSv17="NO" B=30 # back ground error std for LETKFOI +# Import input files for itile in {1..6} do cp ${DATA_SHARE}/${FILEDATE}.sfc_data.tile${itile}.nc . @@ -168,7 +169,7 @@ fi ################################################ if [[ ! 
-e Data ]]; then - ln -s $JEDI_STATICDIR Data + ln -nsf $JEDI_STATICDIR Data fi echo 'do_landDA: calling fv3-jedi' diff --git a/scripts/exlandda_post_anal.sh b/scripts/exlandda_post_anal.sh index 40ba2c46..92891d46 100755 --- a/scripts/exlandda_post_anal.sh +++ b/scripts/exlandda_post_anal.sh @@ -11,6 +11,10 @@ YYYY=${PDY:0:4} MM=${PDY:4:2} DD=${PDY:6:2} HH=${cyc} +nYYYY=${NTIME:0:4} +nMM=${NTIME:4:2} +nDD=${NTIME:6:2} +nHH=${NTIME:8:2} FREQ=$((${FCSTHR}*3600)) RDD=$((${FCSTHR}/24)) @@ -31,7 +35,6 @@ FILEDATE=${YYYY}${MM}${DD}.${HH}0000 for itile in {1..6} do cp ${DATA_SHARE}/${FILEDATE}.sfc_data.tile${itile}.nc . - cp ${DATA_SHARE}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc . done # convert back to vector, run model (all members) convert back to vector, run model (all members) @@ -39,6 +42,8 @@ if [[ ${ATMOS_FORC} == "era5" ]]; then echo '************************************************' echo 'calling tile2vector' + cp ${DATA_SHARE}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc . + cp ${PARMlandda}/templates/template.tile2vector tile2vector.namelist sed -i "s|FIXlandda|${FIXlandda}|g" tile2vector.namelist @@ -46,7 +51,7 @@ if [[ ${ATMOS_FORC} == "era5" ]]; then sed -i -e "s/XXMM/${MM}/g" tile2vector.namelist sed -i -e "s/XXDD/${DD}/g" tile2vector.namelist sed -i -e "s/XXHH/${HH}/g" tile2vector.namelist - sed -i -e "s/MODEL_FORCING/${ATMOS_FORC}/g" vector2tile.namelist + sed -i -e "s/MODEL_FORCING/${ATMOS_FORC}/g" tile2vector.namelist sed -i -e "s/XXRES/${RES}/g" tile2vector.namelist sed -i -e "s/XXTSTUB/${TSTUB}/g" tile2vector.namelist sed -i -e "s#XXTPATH#${TPATH}#g" tile2vector.namelist @@ -96,11 +101,16 @@ if [[ ${ATMOS_FORC} == "era5" ]]; then # link restart for next cycle ln -nsf ${COMOUT}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc ${DATA_RESTART} -# convert back to UFS tile, run model (all members) +# convert back to UFS tile elif [[ ${ATMOS_FORC} == "gswp3" ]]; then echo '************************************************' echo 'calling tile2tile' + for itile in {1..6} + do + cp ${DATA_SHARE}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc . + done + cp ${PARMlandda}/templates/template.jedi2ufs jedi2ufs.namelist sed -i "s|FIXlandda|${FIXlandda}|g" jedi2ufs.namelist diff --git a/scripts/exlandda_pre_anal.sh b/scripts/exlandda_pre_anal.sh index 790808fd..c54472ca 100755 --- a/scripts/exlandda_pre_anal.sh +++ b/scripts/exlandda_pre_anal.sh @@ -69,7 +69,6 @@ if [[ $ATMOS_FORC == "era5" ]]; then exit 12 fi - elif [[ $ATMOS_FORC == "gswp3" ]]; then # tile2tile for DA echo '************************************************' @@ -116,10 +115,11 @@ elif [[ $ATMOS_FORC == "gswp3" ]]; then exit 22 fi - #stage restarts for applying JEDI update to intermediate directory - for itile in {1..6} - do - cp -p ${DATA}/${FILEDATE}.sfc_data.tile${itile}.nc ${DATA_SHARE}/${FILEDATE}.sfc_data.tile${itile}.nc - done fi +#stage restarts for applying JEDI update to intermediate directory +for itile in {1..6} +do + cp -p ${DATA}/${FILEDATE}.sfc_data.tile${itile}.nc ${DATA_SHARE}/${FILEDATE}.sfc_data.tile${itile}.nc +done + diff --git a/scripts/exlandda_prep_obs.sh b/scripts/exlandda_prep_obs.sh index 1ec3b88a..83e19ff2 100755 --- a/scripts/exlandda_prep_obs.sh +++ b/scripts/exlandda_prep_obs.sh @@ -27,7 +27,7 @@ for obs in "${OBS_TYPES[@]}"; do # obs are within DA window. 
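# A brief aside on the ${OBSDIR_SUBDIR:-...} defaults used in the branches below: with
# the ':-' form, the fallback sub-path is used when OBSDIR_SUBDIR is unset or empty,
# so the empty OBSDIR_SUBDIR: "" entity in the Rocoto YAML still selects the default.
# A self-contained check of that behavior:
OBSDIR_SUBDIR=""
echo "${OBSDIR_SUBDIR:-snow_depth/GHCN/data_proc/v3}"   # -> snow_depth/GHCN/data_proc/v3
unset OBSDIR_SUBDIR
echo "${OBSDIR_SUBDIR:-snow_depth/GHCN/data_proc/v3}"   # -> snow_depth/GHCN/data_proc/v3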
elif [ $ATMOS_FORC == "era5" ] && [ ${obs} == "GHCN" ]; then OBSDIR_SUBDIR="${OBSDIR_SUBDIR:-snow_depth/GHCN/data_proc/v3}" - obsfile="${OBSDIR}/${OBSDIR_SUBDIR}/${YYYY}/ghcn_snwd_ioda_${YYYP}${MP}${DP}.nc" + obsfile="${OBSDIR}/${OBSDIR_SUBDIR}/${YYYY}/ghcn_snwd_ioda_${YYYP}${MP}${DP}_jediv7.nc" elif [ $ATMOS_FORC == "gswp3" ] && [ ${obs} == "GHCN" ]; then OBSDIR_SUBDIR="${OBSDIR_SUBDIR:-snow_depth/GHCN/data_proc/v3}" obsfile="${OBSDIR}/${OBSDIR_SUBDIR}/${YYYY}/ghcn_snwd_ioda_${YYYP}${MP}${DP}.nc" From 8907a991a4e473ddf3e82ee867525b85329e537b Mon Sep 17 00:00:00 2001 From: Chan-Hoo Jeon Date: Sun, 19 May 2024 12:15:30 -0500 Subject: [PATCH 12/18] remove cyc from comin/comout --- jobs/JLANDDA_ANALYSIS | 20 +++++++------- jobs/JLANDDA_FORECAST | 22 ++++++++------- jobs/JLANDDA_POST_ANAL | 20 +++++++------- jobs/JLANDDA_PREP_OBS | 20 +++++++------- jobs/JLANDDA_PRE_ANAL | 20 +++++++------- scripts/exlandda_forecast.sh | 52 +++++++++++++++++++++--------------- 6 files changed, 82 insertions(+), 72 deletions(-) diff --git a/jobs/JLANDDA_ANALYSIS b/jobs/JLANDDA_ANALYSIS index 910b6262..30ddb154 100755 --- a/jobs/JLANDDA_ANALYSIS +++ b/jobs/JLANDDA_ANALYSIS @@ -56,25 +56,25 @@ cd $DATA export NET="${NET:-landda}" export RUN="${RUN:-landda}" +# Run setpdy to initialize PDYm and PDYp variables +export cycle="${cycle:-t${cyc}z}" +setpdy.sh +. ./PDY + [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT if [ "${MACHINE}" = "WCOSS2" ]; then - export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" - export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" else - export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" - export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}}" fi mkdir -p ${COMOUT} # Create a teomporary share directory -export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" +export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}}" mkdir -p ${DATA_SHARE} - -# Run setpdy to initialize PDYm and PDYp variables -export cycle="${cycle:-t${cyc}z}" -setpdy.sh -. ./PDY # #----------------------------------------------------------------------- # diff --git a/jobs/JLANDDA_FORECAST b/jobs/JLANDDA_FORECAST index 337cd273..1c7c328f 100755 --- a/jobs/JLANDDA_FORECAST +++ b/jobs/JLANDDA_FORECAST @@ -56,27 +56,29 @@ cd $DATA export NET="${NET:-landda}" export RUN="${RUN:-landda}" +# Run setpdy to initialize PDYm and PDYp variables +export cycle="${cycle:-t${cyc}z}" +setpdy.sh +. 
./PDY + [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT if [ "${MACHINE}" = "WCOSS2" ]; then - export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" - export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" + export COMINm1="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" else - export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" - export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}}" + export COMINm1="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDYm1}}" fi mkdir -p ${COMOUT} # Create a teomporary share directory -export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" +export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}}" mkdir -p ${DATA_SHARE} export DATA_RESTART="${DATA_RESTART:-${DATAROOT}/DATA_SHARE/RESTART}" mkdir -p ${DATA_RESTART} - -# Run setpdy to initialize PDYm and PDYp variables -export cycle="${cycle:-t${cyc}z}" -setpdy.sh -. ./PDY # #----------------------------------------------------------------------- # diff --git a/jobs/JLANDDA_POST_ANAL b/jobs/JLANDDA_POST_ANAL index 1a896664..38e25d72 100755 --- a/jobs/JLANDDA_POST_ANAL +++ b/jobs/JLANDDA_POST_ANAL @@ -56,27 +56,27 @@ cd $DATA export NET="${NET:-landda}" export RUN="${RUN:-landda}" +# Run setpdy to initialize PDYm and PDYp variables +export cycle="${cycle:-t${cyc}z}" +setpdy.sh +. ./PDY + [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT if [ "${MACHINE}" = "WCOSS2" ]; then - export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" - export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" else - export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" - export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}}" fi mkdir -p ${COMOUT} # Create a teomporary share directory -export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" +export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}}" mkdir -p ${DATA_SHARE} export DATA_RESTART="${DATA_RESTART:-${DATAROOT}/DATA_SHARE/RESTART}" mkdir -p ${DATA_RESTART} - -# Run setpdy to initialize PDYm and PDYp variables -export cycle="${cycle:-t${cyc}z}" -setpdy.sh -. ./PDY # #----------------------------------------------------------------------- # diff --git a/jobs/JLANDDA_PREP_OBS b/jobs/JLANDDA_PREP_OBS index dd2422fc..d3e0a720 100755 --- a/jobs/JLANDDA_PREP_OBS +++ b/jobs/JLANDDA_PREP_OBS @@ -56,25 +56,25 @@ cd $DATA export NET="${NET:-landda}" export RUN="${RUN:-landda}" +# Run setpdy to initialize PDYm and PDYp variables +export cycle="${cycle:-t${cyc}z}" +setpdy.sh +. 
./PDY + [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT if [ "${MACHINE}" = "WCOSS2" ]; then - export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" - export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" else - export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" - export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}}" fi mkdir -p ${COMOUT} # Create a teomporary share directory -export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" +export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}}" mkdir -p ${DATA_SHARE} - -# Run setpdy to initialize PDYm and PDYp variables -export cycle="${cycle:-t${cyc}z}" -setpdy.sh -. ./PDY # #----------------------------------------------------------------------- # diff --git a/jobs/JLANDDA_PRE_ANAL b/jobs/JLANDDA_PRE_ANAL index f15e2323..16858ba6 100755 --- a/jobs/JLANDDA_PRE_ANAL +++ b/jobs/JLANDDA_PRE_ANAL @@ -56,27 +56,27 @@ cd $DATA export NET="${NET:-landda}" export RUN="${RUN:-landda}" +# Run setpdy to initialize PDYm and PDYp variables +export cycle="${cycle:-t${cyc}z}" +setpdy.sh +. ./PDY + [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT if [ "${MACHINE}" = "WCOSS2" ]; then - export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" - export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" else - export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" - export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}}" fi mkdir -p ${COMOUT} # Create a teomporary share directory -export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" +export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}}" mkdir -p ${DATA_SHARE} export DATA_RESTART="${DATA_RESTART:-${DATAROOT}/DATA_SHARE/RESTART}" mkdir -p ${DATA_RESTART} - -# Run setpdy to initialize PDYm and PDYp variables -export cycle="${cycle:-t${cyc}z}" -setpdy.sh -. ./PDY # #----------------------------------------------------------------------- # diff --git a/scripts/exlandda_forecast.sh b/scripts/exlandda_forecast.sh index 3cbf1f51..9ebc6c6c 100755 --- a/scripts/exlandda_forecast.sh +++ b/scripts/exlandda_forecast.sh @@ -31,11 +31,6 @@ if [[ ${ATMOS_FORC} == "gswp3" ]]; then echo '************************************************' echo 'running the forecast model' - for itile in {1..6} - do - cp ${COMIN}/ufs_land_restart.anal.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${itile}.nc - done - TEST_NAME=datm_cdeps_lnd_gswp3 TEST_NAME_RST=datm_cdeps_lnd_gswp3_rst PATHRT=${HOMElandda}/sorc/ufs_model.fd/tests @@ -100,24 +95,35 @@ if [[ ${ATMOS_FORC} == "gswp3" ]]; then cd - SUFFIX=${RT_SUFFIX} - # restart - if [ $WARM_START = .true. 
]; then - # NoahMP restart files - for itile in {1..6} - do - ln -nsf ${COMIN}/ufs_land_restart.anal.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc RESTART/ufs.cpld.lnd.out.${RESTART_FILE_SUFFIX_SECS}.tile${itile}.nc - done - - # CMEPS restart and pointer files - RFILE1=ufs.cpld.cpl.r.${RESTART_FILE_SUFFIX_SECS}.nc - cp ${FIXlandda}/restarts/gswp3/${RFILE1} RESTART/. - ls -1 "RESTART/${RFILE1}">rpointer.cpl - - # CDEPS restart and pointer files - RFILE2=ufs.cpld.datm.r.${RESTART_FILE_SUFFIX_SECS}.nc - cp ${FIXlandda}/restarts/gswp3/${RFILE2} RESTART/. - ls -1 "RESTART/${RFILE2}">rpointer.atm + + # Retrieve input files for restart + # NoahMP restart files + for itile in {1..6} + do + ln -nsf ${COMIN}/ufs_land_restart.anal.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc RESTART/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}_${HH}-0000.tile${itile}.nc + done + + # CMEPS restart and pointer files + rfile1=ufs.cpld.cpl.r.${YYYY}-${MM}-${DD}_${HH}-0000.nc + if [[ -e "${COMINm1}/${rfile1}" ]]; then + cp "${COMINm1}/${rfile1}" RESTART/. + elif [[ -e "${WARMSTART_DIR}/${rfile1}" ]]; then + cp "${WARMSTART_DIR}/${rfile1}" RESTART/. + else + cp ${FIXlandda}/restarts/gswp3/${rfile1} RESTART/. + fi + ls -1 "RESTART/${rfile11}">rpointer.cpl + + # CDEPS restart and pointer files + rfile2=ufs.cpld.datm.r.${YYYY}-${MM}-${DD}_${HH}-0000.nc + if [[ -e "${COMINm1}/${rfile2}" ]]; then + cp "${COMINm1}/${rfile2}" RESTART/. + elif [[ -e "${WARMSTART_DIR}/${rfile2}" ]]; then + cp "${WARMSTART_DIR}/${rfile2}" RESTART/. + else + cp ${FIXlandda}/restarts/gswp3/${rfile2} RESTART/. fi + ls -1 "RESTART/${rfile2}">rpointer.atm cd INPUT ln -nsf ${FIXlandda}/UFS_WM/NOAHMP_IC/ufs-land_C96_init_fields.tile1.nc C96.initial.tile1.nc @@ -168,6 +174,8 @@ if [[ ${ATMOS_FORC} == "gswp3" ]]; then do cp -p ${DATA}/ufs.cpld.lnd.out.${nYYYY}-${nMM}-${nDD}-00000.tile${itile}.nc ${COMOUT}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.tile${itile}.nc done + cp -p ${DATA}/ufs.cpld.datm.r.${nYYYY}-${nMM}-${nHH}-0000.nc ${COMOUT} + cp -p ${DATA}/RESTART/ufs.cpld.cpl.r.${nYYYY}-${nMM}-${nHH}-0000.nc ${COMOUT} # link restart for next cycle for itile in {1..6} From 3b1f1e1af0c355bc845a0a0f314886ff77a872c4 Mon Sep 17 00:00:00 2001 From: Chan-Hoo Jeon Date: Sun, 19 May 2024 12:42:24 -0500 Subject: [PATCH 13/18] fix typo in forecast script --- parm/land_analysis_gswp3_hera.yaml | 1 + parm/land_analysis_gswp3_orion.yaml | 1 + scripts/exlandda_forecast.sh | 10 +++++----- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/parm/land_analysis_gswp3_hera.yaml b/parm/land_analysis_gswp3_hera.yaml index 63930aea..1ebcba8d 100644 --- a/parm/land_analysis_gswp3_hera.yaml +++ b/parm/land_analysis_gswp3_hera.yaml @@ -202,6 +202,7 @@ workflow: ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" ATMOS_FORC: "&FORCING;" + WARMSTART_DIR: "&WARMSTART_DIR;" model_ver: "&model_ver;" HOMElandda: "&HOMElandda;" COMROOT: "&COMROOT;" diff --git a/parm/land_analysis_gswp3_orion.yaml b/parm/land_analysis_gswp3_orion.yaml index 76941b00..db219e42 100644 --- a/parm/land_analysis_gswp3_orion.yaml +++ b/parm/land_analysis_gswp3_orion.yaml @@ -202,6 +202,7 @@ workflow: ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" ATMOS_FORC: "&FORCING;" + WARMSTART_DIR: "&WARMSTART_DIR;" model_ver: "&model_ver;" HOMElandda: "&HOMElandda;" COMROOT: "&COMROOT;" diff --git a/scripts/exlandda_forecast.sh b/scripts/exlandda_forecast.sh index 9ebc6c6c..4e2560c0 100755 --- a/scripts/exlandda_forecast.sh +++ b/scripts/exlandda_forecast.sh @@ -104,7 +104,7 @@ if [[ ${ATMOS_FORC} == "gswp3" ]]; then 
done # CMEPS restart and pointer files - rfile1=ufs.cpld.cpl.r.${YYYY}-${MM}-${DD}_${HH}-0000.nc + rfile1="ufs.cpld.cpl.r.${YYYY}-${MM}-${DD}-00000.nc" if [[ -e "${COMINm1}/${rfile1}" ]]; then cp "${COMINm1}/${rfile1}" RESTART/. elif [[ -e "${WARMSTART_DIR}/${rfile1}" ]]; then @@ -112,10 +112,10 @@ if [[ ${ATMOS_FORC} == "gswp3" ]]; then else cp ${FIXlandda}/restarts/gswp3/${rfile1} RESTART/. fi - ls -1 "RESTART/${rfile11}">rpointer.cpl + ls -1 "RESTART/${rfile1}">rpointer.cpl # CDEPS restart and pointer files - rfile2=ufs.cpld.datm.r.${YYYY}-${MM}-${DD}_${HH}-0000.nc + rfile2="ufs.cpld.datm.r.${YYYY}-${MM}-${DD}-00000.nc" if [[ -e "${COMINm1}/${rfile2}" ]]; then cp "${COMINm1}/${rfile2}" RESTART/. elif [[ -e "${WARMSTART_DIR}/${rfile2}" ]]; then @@ -174,8 +174,8 @@ if [[ ${ATMOS_FORC} == "gswp3" ]]; then do cp -p ${DATA}/ufs.cpld.lnd.out.${nYYYY}-${nMM}-${nDD}-00000.tile${itile}.nc ${COMOUT}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.tile${itile}.nc done - cp -p ${DATA}/ufs.cpld.datm.r.${nYYYY}-${nMM}-${nHH}-0000.nc ${COMOUT} - cp -p ${DATA}/RESTART/ufs.cpld.cpl.r.${nYYYY}-${nMM}-${nHH}-0000.nc ${COMOUT} + cp -p ${DATA}/ufs.cpld.datm.r.${nYYYY}-${nMM}-${nDD}-00000.nc ${COMOUT} + cp -p ${DATA}/RESTART/ufs.cpld.cpl.r.${nYYYY}-${nMM}-${nDD}-00000.nc ${COMOUT} # link restart for next cycle for itile in {1..6} From f80a878e2205871e16584f05e5a46a70aaacc6cd Mon Sep 17 00:00:00 2001 From: Chan-Hoo Jeon Date: Sun, 19 May 2024 12:55:08 -0500 Subject: [PATCH 14/18] fix typo --- scripts/exlandda_forecast.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/exlandda_forecast.sh b/scripts/exlandda_forecast.sh index 4e2560c0..947e070c 100755 --- a/scripts/exlandda_forecast.sh +++ b/scripts/exlandda_forecast.sh @@ -100,7 +100,7 @@ if [[ ${ATMOS_FORC} == "gswp3" ]]; then # NoahMP restart files for itile in {1..6} do - ln -nsf ${COMIN}/ufs_land_restart.anal.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc RESTART/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}_${HH}-0000.tile${itile}.nc + ln -nsf ${COMIN}/ufs_land_restart.anal.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${itile}.nc RESTART/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${itile}.nc done # CMEPS restart and pointer files From 589e3d01d53f44123f0d48d0f455b2eb5578211d Mon Sep 17 00:00:00 2001 From: Chan-Hoo Jeon Date: Sun, 19 May 2024 13:58:30 -0500 Subject: [PATCH 15/18] add templates for diag_table and model_configure --- jobs/JLANDDA_FORECAST | 4 +- parm/land_analysis_gswp3_hera.yaml | 1 + parm/land_analysis_gswp3_orion.yaml | 1 + parm/templates/template.diag_table | 283 ++++++++++++++++++++++++ parm/templates/template.model_configure | 9 + scripts/exlandda_forecast.sh | 18 +- 6 files changed, 311 insertions(+), 5 deletions(-) create mode 100644 parm/templates/template.diag_table create mode 100644 parm/templates/template.model_configure diff --git a/jobs/JLANDDA_FORECAST b/jobs/JLANDDA_FORECAST index 1c7c328f..007d0568 100755 --- a/jobs/JLANDDA_FORECAST +++ b/jobs/JLANDDA_FORECAST @@ -65,11 +65,11 @@ setpdy.sh if [ "${MACHINE}" = "WCOSS2" ]; then export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY})}" - export COMINm1="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" + export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" else export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}}" export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}}" - export 
COMINm1="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDYm1}}" + export COMINm1="${COMINm1:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDYm1}}" fi mkdir -p ${COMOUT} diff --git a/parm/land_analysis_gswp3_hera.yaml b/parm/land_analysis_gswp3_hera.yaml index 1ebcba8d..c62e9f9a 100644 --- a/parm/land_analysis_gswp3_hera.yaml +++ b/parm/land_analysis_gswp3_hera.yaml @@ -213,6 +213,7 @@ workflow: cyc: "&cyc;" NTIME: "&NTIME;" DAtype: "&DAtype;" + FCSTHR: "&FCSTHR;" account: "&ACCOUNT;" command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "forecast" "&HOMElandda;" "&MACHINE;"' jobname: forecast diff --git a/parm/land_analysis_gswp3_orion.yaml b/parm/land_analysis_gswp3_orion.yaml index db219e42..4642e7f8 100644 --- a/parm/land_analysis_gswp3_orion.yaml +++ b/parm/land_analysis_gswp3_orion.yaml @@ -213,6 +213,7 @@ workflow: cyc: "&cyc;" NTIME: "&NTIME;" DAtype: "&DAtype;" + FCSTHR: "&FCSTHR;" account: "&ACCOUNT;" command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "forecast" "&HOMElandda;" "&MACHINE;"' jobname: forecast diff --git a/parm/templates/template.diag_table b/parm/templates/template.diag_table new file mode 100644 index 00000000..68a515fd --- /dev/null +++ b/parm/templates/template.diag_table @@ -0,0 +1,283 @@ +XXYYYYMMDD.00Z.1760x880.64bit.non-mono +XXYYYY XXMM XXDD XXHH 0 0 + +"fv3_history", 0, "hours", 1, "hours", "time" +"fv3_history2d", 0, "hours", 1, "hours", "time" +###################### +"ocn%4yr%2mo%2dy%2hr", 6, "hours", 1, "hours", "time", 6, "hours", "1901 1 1 0 0 0" +"SST%4yr%2mo%2dy", 1, "days", 1, "days", "time", 1, "days", "1901 1 1 0 0 0" +############################################## +# static fields + "ocean_model", "geolon", "geolon", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 + "ocean_model", "geolat", "geolat", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 + "ocean_model", "geolon_c", "geolon_c", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 + "ocean_model", "geolat_c", "geolat_c", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 + "ocean_model", "geolon_u", "geolon_u", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 + "ocean_model", "geolat_u", "geolat_u", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 + "ocean_model", "geolon_v", "geolon_v", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 + "ocean_model", "geolat_v", "geolat_v", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +# "ocean_model", "depth_ocean", "depth_ocean", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +# "ocean_model", "wet", "wet", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 + "ocean_model", "wet_c", "wet_c", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 + "ocean_model", "wet_u", "wet_u", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 + "ocean_model", "wet_v", "wet_v", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 + "ocean_model", "sin_rot", "sin_rot", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 + "ocean_model", "cos_rot", "cos_rot", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 + +# ocean output TSUV and others + "ocean_model", "SSH", "SSH", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "SST", "SST", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "SSS", "SSS", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "speed", "speed", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "SSU", "SSU", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "SSV", "SSV", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "frazil", "frazil", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + 
"ocean_model", "ePBL_h_ML", "ePBL", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "MLD_003", "MLD_003", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "MLD_0125", "MLD_0125", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + +# save daily SST + "ocean_model", "geolon", "geolon", "SST%4yr%2mo%2dy", "all", .false., "none", 2 + "ocean_model", "geolat", "geolat", "SST%4yr%2mo%2dy", "all", .false., "none", 2 + "ocean_model", "SST", "sst", "SST%4yr%2mo%2dy", "all", .true., "none", 2 + +# Z-Space Fields Provided for CMIP6 (CMOR Names): +#=============================================== + "ocean_model_z","uo","uo" ,"ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model_z","vo","vo" ,"ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model_z","so","so" ,"ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model_z","temp","temp" ,"ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + +# forcing + "ocean_model", "taux", "taux", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "tauy", "tauy", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "latent", "latent", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "sensible", "sensible", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "SW", "SW", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "LW", "LW", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "evap", "evap", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "lprec", "lprec", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "lrunoff", "lrunoff", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +# "ocean_model", "frunoff", "frunoff", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "fprec", "fprec", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "LwLatSens", "LwLatSens", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + "ocean_model", "Heat_PmE", "Heat_PmE", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +# +### +# FV3 variabls needed for NGGPS evaluation +### +"gfs_dyn", "ucomp", "ugrd", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "vcomp", "vgrd", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "sphum", "spfh", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "temp", "tmp", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "liq_wat", "clwmr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "o3mr", "o3mr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "delp", "dpres", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "delz", "delz", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "ice_wat", "icmr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "rainwat", "rwmr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "snowwat", "snmr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "graupel", "grle", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 +#"gfs_dyn", "ice_nc", "nicp", "fv3_history", "all", .false., "none", 2 +#"gfs_dyn", "rain_nc", "ntrnc", "fv3_history", "all", .false., "none", 2 + +"gfs_phys", "frzr", "frzr", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "frzrb", "frzrb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "frozr", "frozr", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "frozrb", "frozrb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tsnowp", "tsnowp", 
"fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tsnowpb", "tsnowpb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "rhonewsn", "rhonewsn", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ALBDO_ave", "albdo_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cnvprcp_ave", "cprat_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cnvprcpb_ave", "cpratb_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "totprcp_ave", "prate_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "totprcpb_ave", "prateb_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DLWRF", "dlwrf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DLWRFI", "dlwrf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ULWRF", "ulwrf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ULWRFI", "ulwrf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DSWRF", "dswrf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DSWRFI", "dswrf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "USWRF", "uswrf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "USWRFI", "uswrf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DSWRFtoa", "dswrf_avetoa","fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "USWRFtoa", "uswrf_avetoa","fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ULWRFtoa", "ulwrf_avetoa","fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "gflux_ave", "gflux_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "hpbl", "hpbl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "lhtfl_ave", "lhtfl_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "shtfl_ave", "shtfl_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "pwat", "pwat", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "soilm", "soilm", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_aveclm", "tcdc_aveclm", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_avebndcl", "tcdc_avebndcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_avehcl", "tcdc_avehcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_avelcl", "tcdc_avelcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_avemcl", "tcdc_avemcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDCcnvcl", "tcdccnvcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PREScnvclt", "prescnvclt", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PREScnvclb", "prescnvclb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avehct", "pres_avehct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avehcb", "pres_avehcb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TEMP_avehct", "tmp_avehct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avemct", "pres_avemct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avemcb", "pres_avemcb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TEMP_avemct", "tmp_avemct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avelct", "pres_avelct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avelcb", "pres_avelcb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TEMP_avelct", "tmp_avelct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "u-gwd_ave", "u-gwd_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "v-gwd_ave", "v-gwd_ave", "fv3_history2d", "all", 
.false., "none", 2 +"gfs_phys", "dusfc", "uflx_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "dvsfc", "vflx_ave", "fv3_history2d", "all", .false., "none", 2 +#"gfs_phys", "cnvw", "cnvcldwat", "fv3_history2d", "all", .false., "none", 2 + +"gfs_phys", "psurf", "pressfc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "u10m", "ugrd10m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "v10m", "vgrd10m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "crain", "crain", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tprcp", "tprcp", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "hgtsfc", "orog", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "weasd", "weasd", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "f10m", "f10m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "q2m", "spfh2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "t2m", "tmp2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tsfc", "tmpsfc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "vtype", "vtype", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "stype", "sotyp", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slmsksfc", "land", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "vfracsfc", "veg", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "zorlsfc", "sfcr", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "uustar", "fricv", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt1", "soilt1" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt2", "soilt2" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt3", "soilt3" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt4", "soilt4" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw1", "soilw1" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw2", "soilw2" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw3", "soilw3" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw4", "soilw4" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_1", "soill1", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_2", "soill2", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_3", "soill3", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_4", "soill4", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slope", "sltyp", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alnsf", "alnsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alnwf", "alnwf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alvsf", "alvsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alvwf", "alvwf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "canopy", "cnwat", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "facsf", "facsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "facwf", "facwf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "ffhh", "ffhh", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "ffmm", "ffmm", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "fice", "icec", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "hice", "icetk", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "snoalb", "snoalb", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "shdmax", "shdmax", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "shdmin", "shdmin", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "snowd", "snod", "fv3_history2d", "all", .false., "none", 
2 +"gfs_sfc", "tg3", "tg3", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tisfc", "tisfc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tref", "tref", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "z_c", "zc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "c_0", "c0", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "c_d", "cd", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "w_0", "w0", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "w_d", "wd", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xt", "xt", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xz", "xz", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "dt_cool", "dtcool", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xs", "xs", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xu", "xu", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xv", "xv", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xtts", "xtts", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xzts", "xzts", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "d_conv", "dconv", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "qrain", "qrain", "fv3_history2d", "all", .false., "none", 2 + +"gfs_phys", "acond", "acond", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cduvb_ave", "cduvb_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cpofp", "cpofp", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "duvb_ave", "duvb_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csdlf_ave", "csdlf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csusf_ave", "csusf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csusf_avetoa", "csusftoa", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csdsf_ave", "csdsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csulf_ave", "csulf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csulf_avetoa", "csulftoa", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cwork_ave", "cwork_aveclm", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "evbs_ave", "evbs_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "evcw_ave", "evcw_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "fldcp", "fldcp", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "hgt_hyblev1", "hgt_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "spfh_hyblev1", "spfh_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ugrd_hyblev1", "ugrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "vgrd_hyblev1", "vgrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tmp_hyblev1", "tmp_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "gfluxi", "gflux", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "lhtfl", "lhtfl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "shtfl", "shtfl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "pevpr", "pevpr", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "pevpr_ave", "pevpr_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "sbsno_ave", "sbsno_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "sfexc", "sfexc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "snohf", "snohf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "snowc_ave", "snowc_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "spfhmax2m", "spfhmax_max2m", "fv3_history2d", 
"all", .false., "none", 2 +"gfs_phys", "spfhmin2m", "spfhmin_min2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tmpmax2m", "tmax_max2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tmpmin2m", "tmin_min2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ssrun_acc", "ssrun_acc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "sunsd_acc", "sunsd_acc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "watr_acc", "watr_acc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "wilt", "wilt", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "vbdsf_ave", "vbdsf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "vddsf_ave", "vddsf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "nbdsf_ave", "nbdsf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "nddsf_ave", "nddsf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "trans_ave", "trans_ave", "fv3_history2d", "all", .false., "none", 2 + +"gfs_phys", "AOD_550", "aod550", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DU_AOD_550", "du_aod550", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "SU_AOD_550", "su_aod550", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "BC_AOD_550", "bc_aod550", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "OC_AOD_550", "oc_aod550", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "SS_AOD_550", "ss_aod550", "fv3_history2d", "all", .false., "none", 2 +#============================================================================================= +# +#====> This file can be used with diag_manager/v2.0a (or higher) <==== +# +# +# FORMATS FOR FILE ENTRIES (not all input values are used) +# ------------------------ +# +#"file_name", output_freq, "output_units", format, "time_units", "long_name", +# +# +#output_freq: > 0 output frequency in "output_units" +# = 0 output frequency every time step +# =-1 output frequency at end of run +# +#output_units = units used for output frequency +# (years, months, days, minutes, hours, seconds) +# +#time_units = units used to label the time axis +# (days, minutes, hours, seconds) +# +# +# FORMAT FOR FIELD ENTRIES (not all input values are used) +# ------------------------ +# +#"module_name", "field_name", "output_name", "file_name" "time_sampling", time_avg, "other_opts", packing +# +#time_avg = .true. or .false. +# +#packing = 1 double precision +# = 2 float +# = 4 packed 16-bit integers +# = 8 packed 1-byte (not tested?) 
diff --git a/parm/templates/template.model_configure b/parm/templates/template.model_configure
new file mode 100644
index 00000000..568b2f86
--- /dev/null
+++ b/parm/templates/template.model_configure
@@ -0,0 +1,9 @@
+start_year: XXYYYY
+start_month: XXMM
+start_day: XXDD
+start_hour: XXHH
+start_minute: 0
+start_second: 0
+nhours_fcst: XXFCSTHR
+dt_atmos: 900
+fhrot: 0
diff --git a/scripts/exlandda_forecast.sh b/scripts/exlandda_forecast.sh
index 947e070c..df31292c 100755
--- a/scripts/exlandda_forecast.sh
+++ b/scripts/exlandda_forecast.sh
@@ -8,6 +8,7 @@ YYYY=${PDY:0:4}
 MM=${PDY:4:2}
 DD=${PDY:6:2}
 HH=${cyc}
+YYYYMMDD=${PDY}
 nYYYY=${NTIME:0:4}
 nMM=${NTIME:4:2}
 nDD=${NTIME:6:2}
@@ -69,15 +70,26 @@ if [[ ${ATMOS_FORC} == "gswp3" ]]; then
   fi
 fi
- atparse < ${PATHRT}/parm/${MODEL_CONFIGURE:-model_configure.IN} > model_configure
+ # Set model_configure
+ cp ${PARMlandda}/templates/template.model_configure model_configure
+ sed -i -e "s/XXYYYY/${YYYY}/g" model_configure
+ sed -i -e "s/XXMM/${MM}/g" model_configure
+ sed -i -e "s/XXDD/${DD}/g" model_configure
+ sed -i -e "s/XXHH/${HH}/g" model_configure
+ sed -i -e "s/XXFCSTHR/${FCSTHR}/g" model_configure
 compute_petbounds_and_tasks
 atparse < ${PATHRT}/parm/${UFS_CONFIGURE:-ufs.configure} > ufs.configure
- # diag table
+ # set diag table
 if [[ "Q${DIAG_TABLE:-}" != Q ]] ; then
-   atparse < ${PATHRT}/parm/diag_table/${DIAG_TABLE} > diag_table
+   cp ${PARMlandda}/templates/template.diag_table diag_table
+   sed -i -e "s/XXYYYYMMDD/${YYYYMMDD}/g" diag_table
+   sed -i -e "s/XXYYYY/${YYYY}/g" diag_table
+   sed -i -e "s/XXMM/${MM}/g" diag_table
+   sed -i -e "s/XXDD/${DD}/g" diag_table
+   sed -i -e "s/XXHH/${HH}/g" diag_table
 fi
 # Field table
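To illustrate the templating introduced above: for a cycle date of 20191221 with ``cyc=00`` and ``FCSTHR=24`` (the values used in the sample configuration), the ``sed`` substitutions would be expected to render ``template.model_configure`` roughly as follows (a sketch of the expected result, not a file captured from a run directory):

   start_year: 2019
   start_month: 12
   start_day: 21
   start_hour: 00
   start_minute: 0
   start_second: 0
   nhours_fcst: 24
   dt_atmos: 900
   fhrot: 0

Note that in the ``diag_table`` block the ``XXYYYYMMDD`` pattern is substituted before ``XXYYYY``, so the longer placeholder is not partially consumed by the shorter one.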
From 3b4fac0da34c708cc5f68c13ba840cb91f31e558 Mon Sep 17 00:00:00 2001
From: Chan-Hoo Jeon
Date: Sun, 19 May 2024 14:41:01 -0500
Subject: [PATCH 16/18] change cp to ln

---
 scripts/exlandda_forecast.sh | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/scripts/exlandda_forecast.sh b/scripts/exlandda_forecast.sh
index df31292c..38be885b 100755
--- a/scripts/exlandda_forecast.sh
+++ b/scripts/exlandda_forecast.sh
@@ -118,22 +118,22 @@ if [[ ${ATMOS_FORC} == "gswp3" ]]; then
   # CMEPS restart and pointer files
   rfile1="ufs.cpld.cpl.r.${YYYY}-${MM}-${DD}-00000.nc"
   if [[ -e "${COMINm1}/${rfile1}" ]]; then
-    cp "${COMINm1}/${rfile1}" RESTART/.
+    ln -nsf "${COMINm1}/${rfile1}" RESTART/.
   elif [[ -e "${WARMSTART_DIR}/${rfile1}" ]]; then
-    cp "${WARMSTART_DIR}/${rfile1}" RESTART/.
+    ln -nsf "${WARMSTART_DIR}/${rfile1}" RESTART/.
   else
-    cp ${FIXlandda}/restarts/gswp3/${rfile1} RESTART/.
+    ln -nsf ${FIXlandda}/restarts/gswp3/${rfile1} RESTART/.
   fi
   ls -1 "RESTART/${rfile1}">rpointer.cpl

   # CDEPS restart and pointer files
   rfile2="ufs.cpld.datm.r.${YYYY}-${MM}-${DD}-00000.nc"
   if [[ -e "${COMINm1}/${rfile2}" ]]; then
-    cp "${COMINm1}/${rfile2}" RESTART/.
+    ln -nsf "${COMINm1}/${rfile2}" RESTART/.
   elif [[ -e "${WARMSTART_DIR}/${rfile2}" ]]; then
-    cp "${WARMSTART_DIR}/${rfile2}" RESTART/.
+    ln -nsf "${WARMSTART_DIR}/${rfile2}" RESTART/.
   else
-    cp ${FIXlandda}/restarts/gswp3/${rfile2} RESTART/.
+    ln -nsf ${FIXlandda}/restarts/gswp3/${rfile2} RESTART/.
   fi
   ls -1 "RESTART/${rfile2}">rpointer.atm
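The ``cp``-to-``ln -nsf`` change above keeps the same three-way fallback for staging restart files (previous cycle's ``COM`` directory, then a warm-start directory, then the fixed GSWP3 restarts) but links instead of copies. A condensed sketch of that pattern is shown below; the loop form is illustrative rather than the literal script code, and it assumes the variables from the surrounding script are already set:

   # Link the first available copy of a restart file into RESTART/ and record it in the
   # coupler pointer file (the real script always falls back to ${FIXlandda} in its else branch).
   mkdir -p RESTART
   rfile="ufs.cpld.cpl.r.${YYYY}-${MM}-${DD}-00000.nc"
   for src in "${COMINm1}" "${WARMSTART_DIR}" "${FIXlandda}/restarts/gswp3"; do
     if [[ -e "${src}/${rfile}" ]]; then
       ln -nsf "${src}/${rfile}" RESTART/.
       break
     fi
   done
   ls -1 "RESTART/${rfile}" > rpointer.cpl

Symbolic links avoid duplicating large NetCDF restart files every cycle; the trade-off is that the link targets must remain in place for the duration of the forecast job.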
From 8914d21185b06e9e845907d76ffc887113abc315 Mon Sep 17 00:00:00 2001
From: Chan-Hoo Jeon
Date: Mon, 20 May 2024 07:48:15 -0500
Subject: [PATCH 17/18] change name of sample yaml files

---
 parm/{land_analysis_gswp3_hera.yaml => land_analysis_hera.yaml} | 0
 parm/{land_analysis_gswp3_orion.yaml => land_analysis_orion.yaml} | 0
 2 files changed, 0 insertions(+), 0 deletions(-)
 rename parm/{land_analysis_gswp3_hera.yaml => land_analysis_hera.yaml} (100%)
 rename parm/{land_analysis_gswp3_orion.yaml => land_analysis_orion.yaml} (100%)

diff --git a/parm/land_analysis_gswp3_hera.yaml b/parm/land_analysis_hera.yaml
similarity index 100%
rename from parm/land_analysis_gswp3_hera.yaml
rename to parm/land_analysis_hera.yaml
diff --git a/parm/land_analysis_gswp3_orion.yaml b/parm/land_analysis_orion.yaml
similarity index 100%
rename from parm/land_analysis_gswp3_orion.yaml
rename to parm/land_analysis_orion.yaml

From 898af4becbed1942c2fb942ec7f71d59ea9ffe89 Mon Sep 17 00:00:00 2001
From: Chan-Hoo Jeon
Date: Mon, 20 May 2024 10:09:52 -0500
Subject: [PATCH 18/18] update users guide

---
 .../BuildingRunningTesting/BuildRunLandDA.rst | 64 ++++++-----------
 .../BuildingRunningTesting/TestingLandDA.rst  |  7 +-
 2 files changed, 32 insertions(+), 39 deletions(-)

diff --git a/doc/source/BuildingRunningTesting/BuildRunLandDA.rst b/doc/source/BuildingRunningTesting/BuildRunLandDA.rst
index 70dae2cf..9e85c4f6 100644
--- a/doc/source/BuildingRunningTesting/BuildRunLandDA.rst
+++ b/doc/source/BuildingRunningTesting/BuildRunLandDA.rst
@@ -111,12 +111,9 @@ Copy the experiment settings into ``land_analysis.yaml``:
 .. code-block:: console

    cd $LANDDAROOT/land-DA_workflow/parm
-   cp land_analysis__.yaml land_analysis.yaml
+   cp land_analysis_.yaml land_analysis.yaml

-where:
-
- * ```` is either ``gswp3`` or ``era5`` forcing data.
- * ```` is ``hera`` or ``orion``.
+where ```` is ``hera`` or ``orion``.

 Users will need to configure certain elements of their experiment in ``land_analysis.yaml``:
@@ -124,7 +121,8 @@ Users will need to configure certain elements of their experiment in ``land_anal
    * ``ACCOUNT:`` A valid account name. Hera, Orion, and most NOAA RDHPCS systems require a valid account name; other systems may not
    * ``EXP_BASEDIR:`` The full path to the directory where land-DA_workflow was cloned (i.e., ``$LANDDAROOT``)
    * ``JEDI_INSTALL:`` The full path to the system's ``jedi-bundle`` installation
-   * ``LANDDA_INPUTS:`` The full path to the experiment data. See :ref:`Data ` below for information on prestaged data on Level 1 platforms.
+   * ``FORCING:`` Forcing options; ``gswp3`` or ``era5``
+   * ``cycledef/spec:`` Cycle specification

 .. note::
@@ -151,7 +149,7 @@ Data
    | Orion     | /work/noaa/epic/UFS_Land-DA/inputs |
    +-----------+--------------------------------------------------+

-Users who have difficulty accessing the data on Hera or Orion may download it according to the instructions in :numref:`Section %s ` and set ``LANDDA_INPUTS`` to point to the location of the downloaded data. Similarly, users with access to data for additional experiments may set the path to that data in ``LANDDA_INPUTS``.
+Users who have difficulty accessing the data on Hera or Orion may download it according to the instructions in :numref:`Section %s `. Its sub-directories are soft-linked to the ``fix`` directory of the land-DA workflow by the build script ``sorc/app_build.sh``.

 .. _generate-wflow:
@@ -186,16 +184,16 @@ Each Land DA experiment includes multiple tasks that must be run in order to sat
    * - J-job Task
      - Description
-   * - JLANDDA_PREP_EXP
-     - Sets up the experiment
    * - JLANDDA_PREP_OBS
-     - Sets up the observation files
-   * - JLANDDA_PREP_BMAT
-     - Sets up the :term:`JEDI` run
+     - Sets up the observation data files
+   * - JLANDDA_PRE_ANAL
+     - Transfers the snow data from the restart files to the surface data files
    * - JLANDDA_ANALYSIS
-     - Runs JEDI
+     - Runs :term:`JEDI` and adds the increment to the surface data files
+   * - JLANDDA_POST_ANAL
+     - Transfers the JEDI result from the surface data files to the restart files
    * - JLANDDA_FORECAST
-     - Runs forecast
+     - Runs the forecast model

 Users may run these tasks :ref:`using the Rocoto workflow manager ` or :ref:`using a batch script `.
@@ -246,7 +244,7 @@ The experiment has successfully completed when all tasks say SUCCEEDED under STA
 Run Without Rocoto
 --------------------

-Users may choose not to run the workflow with uwtools and Rocoto. To run the :term:`J-jobs` scripts in the ``jobs`` directory, navigate to the ``parm`` directory and edit ``run_without_rocoto.sh`` (e.g., using vim or preferred command line editor). Users will likely need to change the ``MACHINE``, ``ACCOUNT``, and ``EXP_BASEDIR`` variables to match their system. Then, run ``run_without_rocoto.sh``:
+Users may choose not to run the workflow with uwtools and Rocoto for a non-cycled run. To run the :term:`J-jobs` scripts in the ``jobs`` directory, navigate to the ``parm`` directory and edit ``run_without_rocoto.sh`` (e.g., using vim or preferred command line editor). Users will likely need to change the ``MACHINE``, ``ACCOUNT``, and ``EXP_BASEDIR`` variables to match their system. Then, run ``run_without_rocoto.sh``:

 .. code-block:: console
@@ -265,32 +263,24 @@ As the experiment progresses, it will generate a number of directories to hold i
    ├── ptmp ()
    │    └── test ()
    │         └── com
-   │              ├── landda ()
-   │              │    └── vX.Y.Z ()
-   │              │         └── landda.YYYYMMDD (.)
-   │              │              └── HH ()
-   │              │                   ├── DA: Directory containing the output files of JEDI run
-   │              │                   │    ├── hofx
-   │              │                   │    └── jedi_incr
-   │              │                   └── mem000: Directory containing the output files
-   │              └── output
-   │                   └── logs
-   │                        └── run_ (): Directory containing the log file of the Rocoto workflow
-   └── workdir()
-        └── run_
-             └── mem000: Working directory
+   │              ├── landda ()
+   │              │    └── vX.Y.Z ()
+   │              │         └── landda.YYYYMMDD (.): Directory containing the output files
+   │              └── output
+   │                   └── logs
+   │                        └── run_ (): Directory containing the log file of the Rocoto workflow
+   └── tmp ()
+        ├── (): Working directory
+        └── DATA_SHARE
+             ├── YYYYMMDD (): Directory containing the intermediate or temporary files
+             └── DATA_RESTART: Directory containing the soft-links to the restart files for the next cycles

 ```` refers to the type of forcing data used (``gswp3`` or ``era5``). Each variable in parentheses and angle brackets (e.g., ``()``) is the name for the directory defined in the file ``land_analysis.yaml``. In the future, this directory structure will be further modified to meet the :nco:`NCO Implementation Standards<>`.

-Check for the background and analysis files in the experiment directory:
+Check for the output files for each cycle in the experiment directory:

 .. code-block:: console

-   ls -l $LANDDAROOT/ptmp/test/com/landda/v1.2.1/landda.//run_/mem000/restarts/
-
-where:
-
- * ```` is either ``era5`` or ``gswp3``, and
- * ```` is either ``vector`` or ``tile`` depending on whether ERA5 or GSWP3 forcing data was used, respectively.
+   ls -l $LANDDAROOT/ptmp/test/com/landda/v1.2.1/landda.YYYYMMDD

-The experiment should generate several restart files.
+where ``YYYYMMDD`` is the cycle date. The experiment should generate several restart files.
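For the default test setup, which defines a single cycle starting at 201912210000, the generic ``landda.YYYYMMDD`` path above resolves to a concrete directory such as the one below (illustrative only; the ``model_ver`` and cycle date depend on the settings in ``land_analysis.yaml``):

   ls -l $LANDDAROOT/ptmp/test/com/landda/v1.2.1/landda.20191221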
diff --git a/doc/source/BuildingRunningTesting/TestingLandDA.rst b/doc/source/BuildingRunningTesting/TestingLandDA.rst
index 986a2245..ba7d3cd6 100644
--- a/doc/source/BuildingRunningTesting/TestingLandDA.rst
+++ b/doc/source/BuildingRunningTesting/TestingLandDA.rst
@@ -18,12 +18,15 @@ From the working directory (``$LANDDAROOT``), navigate to ``build``. Then run:
 .. code-block:: console

    salloc --ntasks 8 --exclusive --qos=debug --partition=debug --time=00:30:00 --account=
-   module use modulefiles && module load landda_.intel
+   cd land-DA_workflow/sorc/build
+   source ../../versions/build.ver_
+   module use ../../modulefiles
+   module load build__intel
    ctest

 where ```` corresponds to the user's actual account name and ```` is ``hera`` or ``orion``.

-This will allocate a compute node, load the appropriate modulefiles, and run the CTests.
+This will submit an interactive job, load the appropriate modulefiles, and run the CTests.

 Tests
 *******