diff --git a/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_cape_past31days_plots.sh b/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_cape_last31days_plots.sh similarity index 87% rename from dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_cape_past31days_plots.sh rename to dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_cape_last31days_plots.sh index 9c7cd914d3..56b87302d5 100755 --- a/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_cape_past31days_plots.sh +++ b/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_cape_last31days_plots.sh @@ -1,32 +1,27 @@ -#PBS -N jevs_cam_href_grid2obs_cape_past31days_plots +#PBS -N jevs_cam_href_grid2obs_cape_last31days_plots #PBS -j oe #PBS -q dev #PBS -S /bin/bash #PBS -A VERF-DEV #PBS -l walltime=00:15:00 -#PBS -l place=vscatter,select=10:ncpus=84:mem=100GB +#PBS -l place=vscatter,select=9:ncpus=85:mem=20GB #PBS -l debug=true set -x export OMP_NUM_THREADS=1 +export NET=evs export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS - source $HOMEevs/versions/run.ver - - export envir=prod - -export NET=evs export STEP=plots export COMPONENT=cam export RUN=atmos export VERIF_CASE=grid2obs_cape export MODELNAME=href - module reset module load prod_envir/${prod_envir_ver} source $HOMEevs/dev/modulefiles/$COMPONENT/${COMPONENT}_${STEP}.sh @@ -37,7 +32,7 @@ export SENDMAIL=YES export SENDDBN=NO export vhr=00 -export past_days=31 +export last_days=31 export run_mpi=yes diff --git a/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_cape_past90days_plots.sh b/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_cape_last90days_plots.sh similarity index 87% rename from dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_cape_past90days_plots.sh rename to dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_cape_last90days_plots.sh index 9bc0028bd0..89ab91ad28 100755 --- a/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_cape_past90days_plots.sh +++ 
b/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_cape_last90days_plots.sh @@ -1,32 +1,27 @@ -#PBS -N jevs_cam_href_grid2obs_cape_past90days_plots +#PBS -N jevs_cam_href_grid2obs_cape_last90days_plots #PBS -j oe #PBS -q dev #PBS -S /bin/bash #PBS -A VERF-DEV #PBS -l walltime=00:15:00 -#PBS -l place=vscatter,select=10:ncpus=84:mem=100GB +#PBS -l place=vscatter,select=9:ncpus=85:mem=30GB #PBS -l debug=true set -x export OMP_NUM_THREADS=1 +export NET=evs export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS - source $HOMEevs/versions/run.ver - - export envir=prod - -export NET=evs export STEP=plots export COMPONENT=cam export RUN=atmos export VERIF_CASE=grid2obs_cape export MODELNAME=href - module reset module load prod_envir/${prod_envir_ver} source $HOMEevs/dev/modulefiles/$COMPONENT/${COMPONENT}_${STEP}.sh @@ -37,7 +32,7 @@ export SENDMAIL=YES export SENDDBN=NO export vhr=00 -export past_days=90 +export last_days=90 export run_mpi=yes diff --git a/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_past31days_plots.sh b/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_last31days_plots.sh similarity index 88% rename from dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_past31days_plots.sh rename to dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_last31days_plots.sh index a5f49254b1..a4f224fb1e 100755 --- a/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_past31days_plots.sh +++ b/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_last31days_plots.sh @@ -1,25 +1,21 @@ -#PBS -N jevs_cam_href_grid2obs_ctc_past31days_plots +#PBS -N jevs_cam_href_grid2obs_ctc_last31days_plots #PBS -j oe #PBS -q dev #PBS -S /bin/bash #PBS -A VERF-DEV #PBS -l walltime=00:15:00 -#PBS -l place=vscatter,select=10:ncpus=82:mem=100GB +#PBS -l place=vscatter,select=6:ncpus=85:mem=30GB #PBS -l debug=true set -x export OMP_NUM_THREADS=1 +export NET=evs export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS - source $HOMEevs/versions/run.ver - - export 
envir=prod - -export NET=evs export STEP=plots export COMPONENT=cam export RUN=atmos @@ -37,7 +33,7 @@ export SENDMAIL=YES export SENDDBN=NO export vhr=00 -export past_days=31 +export last_days=31 export run_mpi=yes diff --git a/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_past90days_plots.sh b/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_last90days_plots.sh similarity index 88% rename from dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_past90days_plots.sh rename to dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_last90days_plots.sh index 79ffa97839..b864f97e1d 100755 --- a/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_past90days_plots.sh +++ b/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_last90days_plots.sh @@ -1,25 +1,21 @@ -#PBS -N jevs_cam_href_grid2obs_ctc_past90days_plots +#PBS -N jevs_cam_href_grid2obs_ctc_last90days_plots #PBS -j oe #PBS -q dev #PBS -S /bin/bash #PBS -A VERF-DEV #PBS -l walltime=00:15:00 -#PBS -l place=vscatter,select=10:ncpus=82:mem=100GB +#PBS -l place=vscatter,select=6:ncpus=85:mem=50GB #PBS -l debug=true set -x export OMP_NUM_THREADS=1 +export NET=evs export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS - source $HOMEevs/versions/run.ver - - export envir=prod - -export NET=evs export STEP=plots export COMPONENT=cam export RUN=atmos @@ -37,7 +33,7 @@ export SENDMAIL=YES export SENDDBN=NO export vhr=00 -export past_days=90 +export last_days=90 export run_mpi=yes diff --git a/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_past31days_plots.sh b/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_last31days_plots.sh similarity index 87% rename from dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_past31days_plots.sh rename to dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_last31days_plots.sh index 14a23d08fd..61654446e4 100755 --- a/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_past31days_plots.sh +++ 
b/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_last31days_plots.sh @@ -1,25 +1,21 @@ -#PBS -N jevs_cam_href_grid2obs_ecnt_past31days_plots +#PBS -N jevs_cam_href_grid2obs_ecnt_last31days_plots #PBS -j oe #PBS -q dev #PBS -S /bin/bash #PBS -A VERF-DEV #PBS -l walltime=00:15:00 -#PBS -l place=vscatter:shared,select=2:ncpus=66:mem=500GB +#PBS -l place=vscatter,select=2:ncpus=33:mem=40GB #PBS -l debug=true set -x export OMP_NUM_THREADS=1 +export NET=evs export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS - source $HOMEevs/versions/run.ver - - export envir=prod - -export NET=evs export STEP=plots export COMPONENT=cam export RUN=atmos @@ -36,7 +32,7 @@ export SENDMAIL=YES export SENDDBN=NO export vhr=00 -export past_days=31 +export last_days=31 export run_mpi=yes diff --git a/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_past90days_plots.sh b/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_last90days_plots.sh similarity index 88% rename from dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_past90days_plots.sh rename to dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_last90days_plots.sh index 4e76879c05..38e91e73d5 100755 --- a/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_past90days_plots.sh +++ b/dev/drivers/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_last90days_plots.sh @@ -1,25 +1,21 @@ -#PBS -N jevs_cam_href_grid2obs_ecnt_past90days_plots +#PBS -N jevs_cam_href_grid2obs_ecnt_last90days_plots #PBS -j oe #PBS -q dev #PBS -S /bin/bash #PBS -A VERF-DEV #PBS -l walltime=00:15:00 -#PBS -l place=vscatter:shared,select=2:ncpus=66:mem=500GB +#PBS -l place=vscatter,select=2:ncpus=33:mem=100GB #PBS -l debug=true set -x export OMP_NUM_THREADS=1 +export NET=evs export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS - source $HOMEevs/versions/run.ver - - export envir=prod - -export NET=evs export STEP=plots export COMPONENT=cam export RUN=atmos @@ -37,7 +33,7 @@ export SENDMAIL=YES export SENDDBN=NO export vhr=00 
-export past_days=90 +export last_days=90 export run_mpi=yes diff --git a/dev/drivers/scripts/plots/cam/jevs_cam_href_precip_past31days_plots.sh b/dev/drivers/scripts/plots/cam/jevs_cam_href_precip_last31days_plots.sh similarity index 88% rename from dev/drivers/scripts/plots/cam/jevs_cam_href_precip_past31days_plots.sh rename to dev/drivers/scripts/plots/cam/jevs_cam_href_precip_last31days_plots.sh index 21fa05f19b..e38b34690d 100755 --- a/dev/drivers/scripts/plots/cam/jevs_cam_href_precip_past31days_plots.sh +++ b/dev/drivers/scripts/plots/cam/jevs_cam_href_precip_last31days_plots.sh @@ -1,25 +1,21 @@ -#PBS -N jevs_cam_href_precip_past31days_plots +#PBS -N jevs_cam_href_precip_last31days_plots #PBS -j oe #PBS -q dev #PBS -S /bin/bash #PBS -A VERF-DEV #PBS -l walltime=00:15:00 -#PBS -l place=vscatter,select=4:ncpus=78:mem=100GB +#PBS -l place=vscatter,select=4:ncpus=76:mem=100GB #PBS -l debug=true set -x export OMP_NUM_THREADS=1 +export NET=evs export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS - source $HOMEevs/versions/run.ver - - export envir=prod - -export NET=evs export STEP=plots export COMPONENT=cam export RUN=atmos @@ -37,7 +33,7 @@ export SENDMAIL=YES export SENDDBN=NO export vhr=00 -export past_days=31 +export last_days=31 export run_mpi=yes diff --git a/dev/drivers/scripts/plots/cam/jevs_cam_href_precip_past90days_plots.sh b/dev/drivers/scripts/plots/cam/jevs_cam_href_precip_last90days_plots.sh similarity index 88% rename from dev/drivers/scripts/plots/cam/jevs_cam_href_precip_past90days_plots.sh rename to dev/drivers/scripts/plots/cam/jevs_cam_href_precip_last90days_plots.sh index c9879f385d..a764eb592d 100755 --- a/dev/drivers/scripts/plots/cam/jevs_cam_href_precip_past90days_plots.sh +++ b/dev/drivers/scripts/plots/cam/jevs_cam_href_precip_last90days_plots.sh @@ -1,25 +1,21 @@ -#PBS -N jevs_cam_href_precip_past90days_plots +#PBS -N jevs_cam_href_precip_last90days_plots #PBS -j oe #PBS -q dev #PBS -S /bin/bash #PBS -A VERF-DEV #PBS -l 
walltime=00:15:00 -#PBS -l place=vscatter,select=4:ncpus=78:mem=100GB +#PBS -l place=vscatter,select=4:ncpus=76:mem=150GB #PBS -l debug=true set -x export OMP_NUM_THREADS=1 +export NET=evs export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS - source $HOMEevs/versions/run.ver - - export envir=prod - -export NET=evs export STEP=plots export COMPONENT=cam export RUN=atmos @@ -36,7 +32,7 @@ export SENDMAIL=YES export SENDDBN=NO export vhr=00 -export past_days=90 +export last_days=90 export run_mpi=yes export valid_time=both diff --git a/dev/drivers/scripts/plots/cam/jevs_cam_href_precip_spatial_plots.sh b/dev/drivers/scripts/plots/cam/jevs_cam_href_precip_spatial_plots.sh index 753160771a..e87733d7ee 100755 --- a/dev/drivers/scripts/plots/cam/jevs_cam_href_precip_spatial_plots.sh +++ b/dev/drivers/scripts/plots/cam/jevs_cam_href_precip_spatial_plots.sh @@ -4,7 +4,7 @@ #PBS -S /bin/bash #PBS -A VERF-DEV #PBS -l walltime=00:15:00 -#PBS -l place=vscatter,select=1:ncpus=2:mem=100GB +#PBS -l place=vscatter,select=1:ncpus=1:mem=2GB #PBS -l debug=true set -x diff --git a/dev/drivers/scripts/plots/cam/jevs_cam_href_profile_past90days_plots.sh b/dev/drivers/scripts/plots/cam/jevs_cam_href_profile_last31days_plots.sh similarity index 86% rename from dev/drivers/scripts/plots/cam/jevs_cam_href_profile_past90days_plots.sh rename to dev/drivers/scripts/plots/cam/jevs_cam_href_profile_last31days_plots.sh index 1ad9ab6c53..ba45c633a6 100755 --- a/dev/drivers/scripts/plots/cam/jevs_cam_href_profile_past90days_plots.sh +++ b/dev/drivers/scripts/plots/cam/jevs_cam_href_profile_last31days_plots.sh @@ -1,25 +1,21 @@ -#PBS -N jevs_cam_href_profile_past90days_plots +#PBS -N jevs_cam_href_profile_last31days_plots #PBS -j oe #PBS -q dev #PBS -S /bin/bash #PBS -A VERF-DEV -#PBS -l walltime=00:10:00 -#PBS -l place=vscatter,select=1:ncpus=60:mem=100GB +#PBS -l walltime=00:15:00 +#PBS -l place=vscatter,select=1:ncpus=60:mem=50GB #PBS -l debug=true set -x export OMP_NUM_THREADS=1 +export NET=evs 
export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS - source $HOMEevs/versions/run.ver - - export envir=prod - -export NET=evs export STEP=plots export COMPONENT=cam export RUN=atmos @@ -36,7 +32,7 @@ export SENDMAIL=YES export SENDDBN=NO export vhr=00 -export past_days=90 +export last_days=31 export run_mpi=yes export valid_time=both diff --git a/dev/drivers/scripts/plots/cam/jevs_cam_href_profile_past31days_plots.sh b/dev/drivers/scripts/plots/cam/jevs_cam_href_profile_last90days_plots.sh similarity index 86% rename from dev/drivers/scripts/plots/cam/jevs_cam_href_profile_past31days_plots.sh rename to dev/drivers/scripts/plots/cam/jevs_cam_href_profile_last90days_plots.sh index eb43473b12..d9d9c18de6 100755 --- a/dev/drivers/scripts/plots/cam/jevs_cam_href_profile_past31days_plots.sh +++ b/dev/drivers/scripts/plots/cam/jevs_cam_href_profile_last90days_plots.sh @@ -1,26 +1,21 @@ -#PBS -N jevs_cam_href_profile_past31days_plots +#PBS -N jevs_cam_href_profile_last90days_plots #PBS -j oe #PBS -q dev #PBS -S /bin/bash #PBS -A VERF-DEV -#PBS -l walltime=00:10:00 -#PBS -l place=vscatter,select=1:ncpus=60:mem=100GB +#PBS -l walltime=00:15:00 +#PBS -l place=vscatter,select=1:ncpus=60:mem=50GB #PBS -l debug=true set -x export OMP_NUM_THREADS=1 +export NET=evs export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS - source $HOMEevs/versions/run.ver - - - export envir=prod - -export NET=evs export STEP=plots export COMPONENT=cam export RUN=atmos @@ -37,7 +32,7 @@ export SENDMAIL=YES export SENDDBN=NO export vhr=00 -export past_days=31 +export last_days=90 export run_mpi=yes export valid_time=both diff --git a/dev/drivers/scripts/plots/cam/jevs_cam_href_snowfall_past90days_plots.sh b/dev/drivers/scripts/plots/cam/jevs_cam_href_snowfall_last31days_plots.sh similarity index 88% rename from dev/drivers/scripts/plots/cam/jevs_cam_href_snowfall_past90days_plots.sh rename to dev/drivers/scripts/plots/cam/jevs_cam_href_snowfall_last31days_plots.sh index 176fd030dc..29d47ee805 100755 
--- a/dev/drivers/scripts/plots/cam/jevs_cam_href_snowfall_past90days_plots.sh +++ b/dev/drivers/scripts/plots/cam/jevs_cam_href_snowfall_last31days_plots.sh @@ -1,25 +1,21 @@ -#PBS -N jevs_cam_href_snowfall_past90days_plots +#PBS -N jevs_cam_href_snowfall_last31days_plots #PBS -j oe #PBS -q dev #PBS -S /bin/bash #PBS -A VERF-DEV #PBS -l walltime=00:15:00 -#PBS -l place=vscatter,select=1:ncpus=30:mem=100GB +#PBS -l place=vscatter,select=1:ncpus=30:mem=10GB #PBS -l debug=true set -x export OMP_NUM_THREADS=1 +export NET=evs export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS - source $HOMEevs/versions/run.ver - - export envir=prod - -export NET=evs export STEP=plots export COMPONENT=cam export RUN=atmos @@ -35,8 +31,9 @@ export KEEPDATA=YES export SENDMAIL=YES export SENDDBN=NO + export vhr=00 -export past_days=90 +export last_days=31 export run_mpi=yes diff --git a/dev/drivers/scripts/plots/cam/jevs_cam_href_snowfall_past31days_plots.sh b/dev/drivers/scripts/plots/cam/jevs_cam_href_snowfall_last90days_plots.sh similarity index 85% rename from dev/drivers/scripts/plots/cam/jevs_cam_href_snowfall_past31days_plots.sh rename to dev/drivers/scripts/plots/cam/jevs_cam_href_snowfall_last90days_plots.sh index 748b800c40..f4084bdc4c 100755 --- a/dev/drivers/scripts/plots/cam/jevs_cam_href_snowfall_past31days_plots.sh +++ b/dev/drivers/scripts/plots/cam/jevs_cam_href_snowfall_last90days_plots.sh @@ -1,25 +1,21 @@ -#PBS -N jevs_cam_href_snowfall_past31days_plots +#PBS -N jevs_cam_href_snowfall_last90days_plots #PBS -j oe #PBS -q dev #PBS -S /bin/bash #PBS -A VERF-DEV -#PBS -l walltime=00:10:00 -#PBS -l place=vscatter,select=1:ncpus=30:mem=100GB +#PBS -l walltime=00:15:00 +#PBS -l place=vscatter,select=1:ncpus=30:mem=20GB #PBS -l debug=true set -x export OMP_NUM_THREADS=1 +export NET=evs export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS - source $HOMEevs/versions/run.ver - - export envir=prod - -export NET=evs export STEP=plots export COMPONENT=cam export RUN=atmos @@ -35,9 
+31,8 @@ export KEEPDATA=YES export SENDMAIL=YES export SENDDBN=NO - export vhr=00 -export past_days=31 +export last_days=90 export run_mpi=yes diff --git a/dev/drivers/scripts/plots/cam/jevs_cam_href_spcoutlook_past90days_plots.sh b/dev/drivers/scripts/plots/cam/jevs_cam_href_spcoutlook_last31days_plots.sh similarity index 86% rename from dev/drivers/scripts/plots/cam/jevs_cam_href_spcoutlook_past90days_plots.sh rename to dev/drivers/scripts/plots/cam/jevs_cam_href_spcoutlook_last31days_plots.sh index e463bbf3c1..0c14dc8c82 100755 --- a/dev/drivers/scripts/plots/cam/jevs_cam_href_spcoutlook_past90days_plots.sh +++ b/dev/drivers/scripts/plots/cam/jevs_cam_href_spcoutlook_last31days_plots.sh @@ -1,25 +1,21 @@ -#PBS -N jevs_cam_href_spcoutlook_past90days_plots +#PBS -N jevs_cam_href_spcoutlook_last31days_plots #PBS -j oe #PBS -q dev #PBS -S /bin/bash #PBS -A VERF-DEV -#PBS -l walltime=00:20:00 -#PBS -l place=vscatter,select=1:ncpus=12:mem=100GB +#PBS -l walltime=00:15:00 +#PBS -l place=vscatter,select=1:ncpus=6:mem=5GB #PBS -l debug=true set -x export OMP_NUM_THREADS=1 +export NET=evs export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS - source $HOMEevs/versions/run.ver - - export envir=prod - -export NET=evs export STEP=plots export COMPONENT=cam export RUN=atmos @@ -36,7 +32,7 @@ export SENDMAIL=YES export SENDDBN=NO export vhr=00 -export past_days=90 +export last_days=31 export run_mpi=no export valid_time=both diff --git a/dev/drivers/scripts/plots/cam/jevs_cam_href_spcoutlook_past31days_plots.sh b/dev/drivers/scripts/plots/cam/jevs_cam_href_spcoutlook_last90days_plots.sh similarity index 86% rename from dev/drivers/scripts/plots/cam/jevs_cam_href_spcoutlook_past31days_plots.sh rename to dev/drivers/scripts/plots/cam/jevs_cam_href_spcoutlook_last90days_plots.sh index daabb9cec9..d592e41a50 100755 --- a/dev/drivers/scripts/plots/cam/jevs_cam_href_spcoutlook_past31days_plots.sh +++ b/dev/drivers/scripts/plots/cam/jevs_cam_href_spcoutlook_last90days_plots.sh @@ 
-1,26 +1,21 @@ -#PBS -N jevs_cam_href_spcoutlook_past31days_plots +#PBS -N jevs_cam_href_spcoutlook_last90days_plots #PBS -j oe #PBS -q dev #PBS -S /bin/bash #PBS -A VERF-DEV -#PBS -l walltime=00:10:00 -#PBS -l place=vscatter,select=1:ncpus=12:mem=100GB +#PBS -l walltime=00:15:00 +#PBS -l place=vscatter,select=1:ncpus=6:mem=5GB #PBS -l debug=true set -x export OMP_NUM_THREADS=1 +export NET=evs export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS - source $HOMEevs/versions/run.ver - - - export envir=prod - -export NET=evs export STEP=plots export COMPONENT=cam export RUN=atmos @@ -37,7 +32,7 @@ export SENDMAIL=YES export SENDDBN=NO export vhr=00 -export past_days=31 +export last_days=90 export run_mpi=no export valid_time=both diff --git a/dev/drivers/scripts/stats/cam/jevs_cam_href_grid2obs_stats.sh b/dev/drivers/scripts/stats/cam/jevs_cam_href_grid2obs_stats.sh index c704933f14..1ede4150de 100755 --- a/dev/drivers/scripts/stats/cam/jevs_cam_href_grid2obs_stats.sh +++ b/dev/drivers/scripts/stats/cam/jevs_cam_href_grid2obs_stats.sh @@ -3,18 +3,18 @@ #PBS -q dev #PBS -S /bin/bash #PBS -A VERF-DEV -#PBS -l walltime=04:30:00 -#PBS -l place=vscatter:exclhost,select=1:ncpus=72:mem=500GB +#PBS -l walltime=02:00:00 +#PBS -l place=vscatter,select=1:ncpus=72:mem=300GB #PBS -l debug=true set -x export OMP_NUM_THREADS=1 +export NET=evs export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS source $HOMEevs/versions/run.ver -export NET=evs export STEP=stats export COMPONENT=cam export RUN=atmos diff --git a/dev/drivers/scripts/stats/cam/jevs_cam_href_precip_stats.sh b/dev/drivers/scripts/stats/cam/jevs_cam_href_precip_stats.sh index 66a6348112..3720c77911 100755 --- a/dev/drivers/scripts/stats/cam/jevs_cam_href_precip_stats.sh +++ b/dev/drivers/scripts/stats/cam/jevs_cam_href_precip_stats.sh @@ -4,7 +4,7 @@ #PBS -S /bin/bash #PBS -A VERF-DEV #PBS -l walltime=02:00:00 -#PBS -l place=vscatter:exclhost,select=1:ncpus=88:mem=60GB +#PBS -l place=vscatter,select=1:ncpus=72:mem=100GB 
#PBS -l debug=true set -x @@ -13,10 +13,10 @@ export OMP_NUM_THREADS=1 ## 3x7 conus(ccpa) + 3x7 alaska(mrms) + 2 snow = 44 jobs +export NET=evs export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS source $HOMEevs/versions/run.ver -export NET=evs export STEP=stats export COMPONENT=cam export RUN=atmos diff --git a/dev/drivers/scripts/stats/cam/jevs_cam_href_spcoutlook_stats.sh b/dev/drivers/scripts/stats/cam/jevs_cam_href_spcoutlook_stats.sh index 8034106829..049e0b70f5 100755 --- a/dev/drivers/scripts/stats/cam/jevs_cam_href_spcoutlook_stats.sh +++ b/dev/drivers/scripts/stats/cam/jevs_cam_href_spcoutlook_stats.sh @@ -3,18 +3,18 @@ #PBS -q dev #PBS -S /bin/bash #PBS -A VERF-DEV -#PBS -l walltime=01:00:00 -#PBS -l place=vscatter,select=1:ncpus=2:mem=100GB +#PBS -l walltime=00:40:00 +#PBS -l place=vscatter,select=1:ncpus=2:mem=10GB #PBS -l debug=true set -x export OMP_NUM_THREADS=1 +export NET=evs export HOMEevs=/lfs/h2/emc/vpppg/noscrub/${USER}/EVS source $HOMEevs/versions/run.ver -export NET=evs export STEP=stats export COMPONENT=cam export RUN=atmos diff --git a/ecf/defs/evs-nco.def b/ecf/defs/evs-nco.def index ade47e1178..39c953ef6a 100644 --- a/ecf/defs/evs-nco.def +++ b/ecf/defs/evs-nco.def @@ -1616,35 +1616,35 @@ suite evs_nco trigger :TIME >= 1355 and ../../stats/global_det/jevs_global_det_gfs_wave_grid2obs_stats == complete endfamily family cam - task jevs_cam_href_grid2obs_ecnt_past90days_plots + task jevs_cam_href_grid2obs_ecnt_last90days_plots trigger :TIME >= 1230 and ../../../../06/evs/stats/cam/jevs_cam_href_grid2obs_stats == complete - task jevs_cam_href_grid2obs_ecnt_past31days_plots + task jevs_cam_href_grid2obs_ecnt_last31days_plots trigger :TIME >= 1230 and ../../../../06/evs/stats/cam/jevs_cam_href_grid2obs_stats == complete - task jevs_cam_href_grid2obs_ctc_past90days_plots + task jevs_cam_href_grid2obs_ctc_last90days_plots trigger :TIME >= 1230 and ../../../../06/evs/stats/cam/jevs_cam_href_grid2obs_stats == complete - task 
jevs_cam_href_grid2obs_ctc_past31days_plots + task jevs_cam_href_grid2obs_ctc_last31days_plots trigger :TIME >= 1230 and ../../../../06/evs/stats/cam/jevs_cam_href_grid2obs_stats == complete - task jevs_cam_href_snowfall_past90days_plots + task jevs_cam_href_snowfall_last90days_plots trigger :TIME >= 1230 and ../../../../06/evs/stats/cam/jevs_cam_href_precip_stats == complete - task jevs_cam_href_snowfall_past31days_plots + task jevs_cam_href_snowfall_last31days_plots trigger :TIME >= 1230 and ../../../../06/evs/stats/cam/jevs_cam_href_precip_stats == complete - task jevs_cam_href_profile_past90days_plots + task jevs_cam_href_profile_last90days_plots trigger :TIME >= 1230 and ../../../../06/evs/stats/cam/jevs_cam_href_grid2obs_stats == complete - task jevs_cam_href_profile_past31days_plots + task jevs_cam_href_profile_last31days_plots trigger :TIME >= 1230 and ../../../../06/evs/stats/cam/jevs_cam_href_grid2obs_stats == complete - task jevs_cam_href_precip_past90days_plots + task jevs_cam_href_precip_last90days_plots trigger :TIME >= 1230 and ../../../../06/evs/stats/cam/jevs_cam_href_precip_stats == complete - task jevs_cam_href_precip_past31days_plots + task jevs_cam_href_precip_last31days_plots trigger :TIME >= 1230 and ../../../../06/evs/stats/cam/jevs_cam_href_precip_stats == complete task jevs_cam_href_precip_spatial_plots trigger :TIME >= 1230 and ../../../../06/evs/stats/cam/jevs_cam_href_precip_stats == complete - task jevs_cam_href_spcoutlook_past90days_plots + task jevs_cam_href_spcoutlook_last90days_plots trigger :TIME >= 1230 and ../../../../06/evs/stats/cam/jevs_cam_href_spcoutlook_stats == complete - task jevs_cam_href_spcoutlook_past31days_plots + task jevs_cam_href_spcoutlook_last31days_plots trigger :TIME >= 1230 and ../../../../06/evs/stats/cam/jevs_cam_href_spcoutlook_stats == complete - task jevs_cam_href_grid2obs_cape_past90days_plots + task jevs_cam_href_grid2obs_cape_last90days_plots trigger :TIME >= 1430 and 
../../../../06/evs/stats/cam/jevs_cam_href_grid2obs_stats == complete - task jevs_cam_href_grid2obs_cape_past31days_plots + task jevs_cam_href_grid2obs_cape_last31days_plots trigger :TIME >= 1430 and ../../../../06/evs/stats/cam/jevs_cam_href_grid2obs_stats == complete endfamily endfamily # 12 plots diff --git a/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_cape_past31days_plots.ecf b/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_cape_last31days_plots.ecf similarity index 93% rename from ecf/scripts/plots/cam/jevs_cam_href_grid2obs_cape_past31days_plots.ecf rename to ecf/scripts/plots/cam/jevs_cam_href_grid2obs_cape_last31days_plots.ecf index 125869451f..958a4eb572 100755 --- a/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_cape_past31days_plots.ecf +++ b/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_cape_last31days_plots.ecf @@ -1,10 +1,10 @@ -#PBS -N evs_cam_href_grid2obs_cape_past31days_plots +#PBS -N evs_cam_href_grid2obs_cape_last31days_plots #PBS -j oe #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:15:00 -#PBS -l place=vscatter:exclhost,select=10:ncpus=84:mem=100GB +#PBS -l place=vscatter,select=9:ncpus=85:mem=20GB #PBS -l debug=true export model=evs @@ -55,7 +55,7 @@ export NET="evs" export RUN="atmos" export VERIF_CASE="grid2obs_cape" export MODELNAME=href -export past_days=31 +export last_days=31 export run_mpi=yes ############################################################ diff --git a/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_cape_past90days_plots.ecf b/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_cape_last90days_plots.ecf similarity index 93% rename from ecf/scripts/plots/cam/jevs_cam_href_grid2obs_cape_past90days_plots.ecf rename to ecf/scripts/plots/cam/jevs_cam_href_grid2obs_cape_last90days_plots.ecf index d545ec041d..3b4dac0155 100755 --- a/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_cape_past90days_plots.ecf +++ b/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_cape_last90days_plots.ecf @@ -1,10 +1,10 @@ -#PBS -N 
evs_cam_href_grid2obs_cape_past90days_plots +#PBS -N evs_cam_href_grid2obs_cape_last90days_plots #PBS -j oe #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:15:00 -#PBS -l place=vscatter:exclhost,select=10:ncpus=84:mem=100GB +#PBS -l place=vscatter,select=9:ncpus=85:mem=30GB #PBS -l debug=true export model=evs @@ -55,7 +55,7 @@ export NET="evs" export RUN="atmos" export VERIF_CASE="grid2obs_cape" export MODELNAME=href -export past_days=90 +export last_days=90 export run_mpi=yes ############################################################ diff --git a/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_past31days_plots.ecf b/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_last31days_plots.ecf similarity index 93% rename from ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_past31days_plots.ecf rename to ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_last31days_plots.ecf index 16371ae5ab..c19823cadd 100755 --- a/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_past31days_plots.ecf +++ b/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_last31days_plots.ecf @@ -1,10 +1,10 @@ -#PBS -N evs_cam_href_grid2obs_ctc_past31days_plots +#PBS -N evs_cam_href_grid2obs_ctc_last31days_plots #PBS -j oe #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:15:00 -#PBS -l place=vscatter:exclhost,select=10:ncpus=82:mem=100GB +#PBS -l place=vscatter,select=6:ncpus=85:mem=30GB #PBS -l debug=true export model=evs @@ -51,7 +51,7 @@ else export vhr=00 fi export OMP_NUM_THREADS=1 -export past_days=31 +export last_days=31 export NET="evs" export RUN="atmos" export VERIF_CASE="grid2obs_ctc" diff --git a/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_past90days_plots.ecf b/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_last90days_plots.ecf similarity index 93% rename from ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_past90days_plots.ecf rename to ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_last90days_plots.ecf index 
a9af3487a4..49061e896a 100755 --- a/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_past90days_plots.ecf +++ b/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ctc_last90days_plots.ecf @@ -1,10 +1,10 @@ -#PBS -N evs_cam_href_grid2obs_ctc_past90days_plots +#PBS -N evs_cam_href_grid2obs_ctc_last90days_plots #PBS -j oe #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:15:00 -#PBS -l place=vscatter:exclhost,select=10:ncpus=82:mem=100GB +#PBS -l place=vscatter,select=6:ncpus=85:mem=50GB #PBS -l debug=true export model=evs @@ -51,7 +51,7 @@ else export vhr=00 fi export OMP_NUM_THREADS=1 -export past_days=90 +export last_days=90 export NET="evs" export RUN="atmos" export VERIF_CASE="grid2obs_ctc" diff --git a/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_past31days_plots.ecf b/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_last31days_plots.ecf similarity index 93% rename from ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_past31days_plots.ecf rename to ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_last31days_plots.ecf index d7d85b76a3..8505b2e2c7 100755 --- a/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_past31days_plots.ecf +++ b/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_last31days_plots.ecf @@ -1,10 +1,10 @@ -#PBS -N evs_cam_href_grid2obs_ecnt_past31days_plots +#PBS -N evs_cam_href_grid2obs_ecnt_last31days_plots #PBS -j oe #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:15:00 -#PBS -l place=vscatter:shared,select=2:ncpus=66:mem=500GB +#PBS -l place=vscatter,select=2:ncpus=33:mem=40GB #PBS -l debug=true export model=evs @@ -51,7 +51,7 @@ else export vhr=00 fi export OMP_NUM_THREADS=1 -export past_days=31 +export last_days=31 export NET="evs" export RUN="atmos" export VERIF_CASE="grid2obs_ecnt" diff --git a/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_past90days_plots.ecf b/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_last90days_plots.ecf similarity index 93% rename from 
ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_past90days_plots.ecf rename to ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_last90days_plots.ecf index b1bf4b11e3..d74f6f2a2d 100755 --- a/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_past90days_plots.ecf +++ b/ecf/scripts/plots/cam/jevs_cam_href_grid2obs_ecnt_last90days_plots.ecf @@ -1,10 +1,10 @@ -#PBS -N evs_cam_href_grid2obs_ecnt_past90days_plots +#PBS -N evs_cam_href_grid2obs_ecnt_last90days_plots #PBS -j oe #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:15:00 -#PBS -l place=vscatter:shared,select=2:ncpus=66:mem=500GB +#PBS -l place=vscatter,select=2:ncpus=33:mem=100GB #PBS -l debug=true export model=evs @@ -51,7 +51,7 @@ else export vhr=00 fi export OMP_NUM_THREADS=1 -export past_days=90 +export last_days=90 export NET="evs" export RUN="atmos" export VERIF_CASE="grid2obs_ecnt" diff --git a/ecf/scripts/plots/cam/jevs_cam_href_precip_past31days_plots.ecf b/ecf/scripts/plots/cam/jevs_cam_href_precip_last31days_plots.ecf similarity index 93% rename from ecf/scripts/plots/cam/jevs_cam_href_precip_past31days_plots.ecf rename to ecf/scripts/plots/cam/jevs_cam_href_precip_last31days_plots.ecf index 8d832cf44b..67cdf17890 100755 --- a/ecf/scripts/plots/cam/jevs_cam_href_precip_past31days_plots.ecf +++ b/ecf/scripts/plots/cam/jevs_cam_href_precip_last31days_plots.ecf @@ -1,10 +1,10 @@ -#PBS -N evs_cam_href_precip_past31days_plots +#PBS -N evs_cam_href_precip_last31days_plots #PBS -j oe #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:15:00 -#PBS -l place=vscatter:shared,select=4:ncpus=78:mem=200GB +#PBS -l place=vscatter,select=4:ncpus=76:mem=100GB #PBS -l debug=true export model=evs @@ -51,7 +51,7 @@ else export vhr=00 fi export OMP_NUM_THREADS=1 -export past_days=31 +export last_days=31 export NET="evs" export RUN="atmos" export VERIF_CASE="precip" diff --git a/ecf/scripts/plots/cam/jevs_cam_href_precip_past90days_plots.ecf 
b/ecf/scripts/plots/cam/jevs_cam_href_precip_last90days_plots.ecf similarity index 93% rename from ecf/scripts/plots/cam/jevs_cam_href_precip_past90days_plots.ecf rename to ecf/scripts/plots/cam/jevs_cam_href_precip_last90days_plots.ecf index deabe9904f..cae7354501 100755 --- a/ecf/scripts/plots/cam/jevs_cam_href_precip_past90days_plots.ecf +++ b/ecf/scripts/plots/cam/jevs_cam_href_precip_last90days_plots.ecf @@ -1,10 +1,10 @@ -#PBS -N evs_cam_href_precip_past90days_plots +#PBS -N evs_cam_href_precip_last90days_plots #PBS -j oe #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:15:00 -#PBS -l place=vscatter:shared,select=4:ncpus=78:mem=400GB +#PBS -l place=vscatter,select=4:ncpus=76:mem=150GB #PBS -l debug=true export model=evs @@ -51,7 +51,7 @@ else export vhr=00 fi export OMP_NUM_THREADS=1 -export past_days=90 +export last_days=90 export NET="evs" export RUN="atmos" export VERIF_CASE="precip" diff --git a/ecf/scripts/plots/cam/jevs_cam_href_precip_spatial_plots.ecf b/ecf/scripts/plots/cam/jevs_cam_href_precip_spatial_plots.ecf index 86088952b9..3240fd74cd 100755 --- a/ecf/scripts/plots/cam/jevs_cam_href_precip_spatial_plots.ecf +++ b/ecf/scripts/plots/cam/jevs_cam_href_precip_spatial_plots.ecf @@ -4,7 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:15:00 -#PBS -l place=vscatter:shared,select=1:ncpus=2:mem=100GB +#PBS -l place=vscatter,select=1:ncpus=1:mem=2GB #PBS -l debug=true export model=evs diff --git a/ecf/scripts/plots/cam/jevs_cam_href_profile_past31days_plots.ecf b/ecf/scripts/plots/cam/jevs_cam_href_profile_last31days_plots.ecf similarity index 92% rename from ecf/scripts/plots/cam/jevs_cam_href_profile_past31days_plots.ecf rename to ecf/scripts/plots/cam/jevs_cam_href_profile_last31days_plots.ecf index 37ea101112..9a4f1d2641 100755 --- a/ecf/scripts/plots/cam/jevs_cam_href_profile_past31days_plots.ecf +++ b/ecf/scripts/plots/cam/jevs_cam_href_profile_last31days_plots.ecf @@ -1,10 +1,10 @@ -#PBS -N 
evs_cam_href_profile_past31days_plots +#PBS -N evs_cam_href_profile_last31days_plots #PBS -j oe #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=00:10:00 -#PBS -l place=vscatter:shared,select=1:ncpus=60:mem=100GB +#PBS -l walltime=00:15:00 +#PBS -l place=vscatter,select=1:ncpus=60:mem=50GB #PBS -l debug=true export model=evs @@ -51,7 +51,7 @@ else export vhr=00 fi export OMP_NUM_THREADS=1 -export past_days=31 +export last_days=31 export NET="evs" export RUN="atmos" export VERIF_CASE="profile" diff --git a/ecf/scripts/plots/cam/jevs_cam_href_profile_past90days_plots.ecf b/ecf/scripts/plots/cam/jevs_cam_href_profile_last90days_plots.ecf similarity index 92% rename from ecf/scripts/plots/cam/jevs_cam_href_profile_past90days_plots.ecf rename to ecf/scripts/plots/cam/jevs_cam_href_profile_last90days_plots.ecf index 0c104163ae..05e320d9dd 100755 --- a/ecf/scripts/plots/cam/jevs_cam_href_profile_past90days_plots.ecf +++ b/ecf/scripts/plots/cam/jevs_cam_href_profile_last90days_plots.ecf @@ -1,10 +1,10 @@ -#PBS -N evs_cam_href_profile_past90days_plots +#PBS -N evs_cam_href_profile_last90days_plots #PBS -j oe #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=00:10:00 -#PBS -l place=vscatter:shared,select=1:ncpus=60:mem=100GB +#PBS -l walltime=00:15:00 +#PBS -l place=vscatter,select=1:ncpus=60:mem=50GB #PBS -l debug=true export model=evs @@ -51,7 +51,7 @@ else export vhr=00 fi export OMP_NUM_THREADS=1 -export past_days=90 +export last_days=90 export NET="evs" export RUN="atmos" export VERIF_CASE="profile" diff --git a/ecf/scripts/plots/cam/jevs_cam_href_snowfall_past31days_plots.ecf b/ecf/scripts/plots/cam/jevs_cam_href_snowfall_last31days_plots.ecf similarity index 92% rename from ecf/scripts/plots/cam/jevs_cam_href_snowfall_past31days_plots.ecf rename to ecf/scripts/plots/cam/jevs_cam_href_snowfall_last31days_plots.ecf index f1d9f48263..321c8b11a4 100755 --- 
a/ecf/scripts/plots/cam/jevs_cam_href_snowfall_past31days_plots.ecf +++ b/ecf/scripts/plots/cam/jevs_cam_href_snowfall_last31days_plots.ecf @@ -1,10 +1,10 @@ -#PBS -N evs_cam_href_snowfall_past31days_plots +#PBS -N evs_cam_href_snowfall_last31days_plots #PBS -j oe #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=00:10:00 -#PBS -l place=vscatter:shared,select=1:ncpus=30:mem=100GB +#PBS -l walltime=00:15:00 +#PBS -l place=vscatter,select=1:ncpus=30:mem=10GB #PBS -l debug=true export model=evs @@ -51,7 +51,7 @@ else export vhr=00 fi export OMP_NUM_THREADS=1 -export past_days=31 +export last_days=31 export NET="evs" export RUN="atmos" export VERIF_CASE="snowfall" diff --git a/ecf/scripts/plots/cam/jevs_cam_href_snowfall_past90days_plots.ecf b/ecf/scripts/plots/cam/jevs_cam_href_snowfall_last90days_plots.ecf similarity index 93% rename from ecf/scripts/plots/cam/jevs_cam_href_snowfall_past90days_plots.ecf rename to ecf/scripts/plots/cam/jevs_cam_href_snowfall_last90days_plots.ecf index fb31e371ac..695751235d 100755 --- a/ecf/scripts/plots/cam/jevs_cam_href_snowfall_past90days_plots.ecf +++ b/ecf/scripts/plots/cam/jevs_cam_href_snowfall_last90days_plots.ecf @@ -1,10 +1,10 @@ -#PBS -N evs_cam_href_snowfall_past90days_plots +#PBS -N evs_cam_href_snowfall_last90days_plots #PBS -j oe #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:15:00 -#PBS -l place=vscatter:shared,select=1:ncpus=30:mem=100GB +#PBS -l place=vscatter,select=1:ncpus=30:mem=20GB #PBS -l debug=true export model=evs @@ -51,7 +51,7 @@ else export vhr=00 fi export OMP_NUM_THREADS=1 -export past_days=90 +export last_days=90 export NET="evs" export RUN="atmos" export VERIF_CASE="snowfall" diff --git a/ecf/scripts/plots/cam/jevs_cam_href_spcoutlook_past31days_plots.ecf b/ecf/scripts/plots/cam/jevs_cam_href_spcoutlook_last31days_plots.ecf similarity index 92% rename from ecf/scripts/plots/cam/jevs_cam_href_spcoutlook_past31days_plots.ecf rename to 
ecf/scripts/plots/cam/jevs_cam_href_spcoutlook_last31days_plots.ecf index ce3ae1c3ee..413bc2543e 100755 --- a/ecf/scripts/plots/cam/jevs_cam_href_spcoutlook_past31days_plots.ecf +++ b/ecf/scripts/plots/cam/jevs_cam_href_spcoutlook_last31days_plots.ecf @@ -1,10 +1,10 @@ -#PBS -N evs_cam_href_spcoutlook_past31days_plots +#PBS -N evs_cam_href_spcoutlook_last31days_plots #PBS -j oe #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=00:10:00 -#PBS -l place=vscatter:shared,select=1:ncpus=12:mem=100GB +#PBS -l walltime=00:15:00 +#PBS -l place=vscatter,select=1:ncpus=6:mem=5GB #PBS -l debug=true export model=evs @@ -55,7 +55,7 @@ export NET="evs" export RUN="atmos" export VERIF_CASE="spcoutlook" export MODELNAME=href -export past_days=31 +export last_days=31 export run_mpi=no export valid_time=both diff --git a/ecf/scripts/plots/cam/jevs_cam_href_spcoutlook_past90days_plots.ecf b/ecf/scripts/plots/cam/jevs_cam_href_spcoutlook_last90days_plots.ecf similarity index 90% rename from ecf/scripts/plots/cam/jevs_cam_href_spcoutlook_past90days_plots.ecf rename to ecf/scripts/plots/cam/jevs_cam_href_spcoutlook_last90days_plots.ecf index 354fe054ef..7afd0dfc64 100755 --- a/ecf/scripts/plots/cam/jevs_cam_href_spcoutlook_past90days_plots.ecf +++ b/ecf/scripts/plots/cam/jevs_cam_href_spcoutlook_last90days_plots.ecf @@ -1,11 +1,10 @@ -#PBS -N evs_cam_href_spcoutlook_past90days_plots +#PBS -N evs_cam_href_spcoutlook_last90days_plots #PBS -j oe #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=00:20:00 -##PBS -l walltime=00:15:00 -#PBS -l place=vscatter:shared,select=1:ncpus=12:mem=100GB +#PBS -l walltime=00:15:00 +#PBS -l place=vscatter,select=1:ncpus=6:mem=5GB #PBS -l debug=true export model=evs @@ -56,7 +55,7 @@ export NET="evs" export RUN="atmos" export VERIF_CASE="spcoutlook" export MODELNAME=href -export past_days=90 +export last_days=90 export run_mpi=no export valid_time=both diff --git 
a/ecf/scripts/stats/cam/jevs_cam_href_grid2obs_stats.ecf b/ecf/scripts/stats/cam/jevs_cam_href_grid2obs_stats.ecf index 88850738bb..4259893344 100755 --- a/ecf/scripts/stats/cam/jevs_cam_href_grid2obs_stats.ecf +++ b/ecf/scripts/stats/cam/jevs_cam_href_grid2obs_stats.ecf @@ -3,9 +3,8 @@ #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=05:30:00 -##PBS -l walltime=04:30:00 -#PBS -l place=vscatter:exclhost,select=1:ncpus=72:mem=500GB +#PBS -l walltime=02:00:00 +#PBS -l place=vscatter,select=1:ncpus=72:mem=300GB #PBS -l debug=true export model=evs diff --git a/ecf/scripts/stats/cam/jevs_cam_href_precip_stats.ecf b/ecf/scripts/stats/cam/jevs_cam_href_precip_stats.ecf index 77d9c261df..c32eae190d 100755 --- a/ecf/scripts/stats/cam/jevs_cam_href_precip_stats.ecf +++ b/ecf/scripts/stats/cam/jevs_cam_href_precip_stats.ecf @@ -3,8 +3,8 @@ #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=02:10:00 -#PBS -l place=vscatter:exclhost,select=1:ncpus=88:mem=100GB +#PBS -l walltime=02:00:00 +#PBS -l place=vscatter,select=1:ncpus=72:mem=100GB #PBS -l debug=true export model=evs diff --git a/ecf/scripts/stats/cam/jevs_cam_href_spcoutlook_stats.ecf b/ecf/scripts/stats/cam/jevs_cam_href_spcoutlook_stats.ecf index 3daa562b32..a6f8551789 100755 --- a/ecf/scripts/stats/cam/jevs_cam_href_spcoutlook_stats.ecf +++ b/ecf/scripts/stats/cam/jevs_cam_href_spcoutlook_stats.ecf @@ -3,9 +3,8 @@ #PBS -S /bin/bash #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=01:00:00 -##PBS -l walltime=00:40:00 -#PBS -l place=vscatter:shared,select=1:ncpus=4:mem=100GB +#PBS -l walltime=00:40:00 +#PBS -l place=vscatter,select=1:ncpus=2:mem=10GB #PBS -l debug=true export model=evs diff --git a/parm/metplus_config/stats/cam/grid2obs/EnsembleStat_fcstHREF_obsPREPBUFR_PROFILE.conf b/parm/metplus_config/stats/cam/grid2obs/EnsembleStat_fcstHREF_obsPREPBUFR_PROFILE.conf index 774a0731d3..9426b16945 100755 --- 
a/parm/metplus_config/stats/cam/grid2obs/EnsembleStat_fcstHREF_obsPREPBUFR_PROFILE.conf +++ b/parm/metplus_config/stats/cam/grid2obs/EnsembleStat_fcstHREF_obsPREPBUFR_PROFILE.conf @@ -19,16 +19,8 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # Increment between METplus runs in seconds. Must be >= 60 -# 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. -LOOP_ORDER = times - # List of applications to run PROCESS_LIST = EnsembleStat @@ -165,7 +157,7 @@ METPLUS_CONF = {OUTPUT_BASE}/final_{model}_g2o_{domain}_{ENV[vbeg]}_profile.conf FCST_ENSEMBLE_STAT_INPUT_DATATYPE = GRIB OBS_ENSEMBLE_STAT_INPUT_POINT_DATATYPE = NETCDF -ENSEMBLE_STAT_N_MEMBERS = 10 +ENSEMBLE_STAT_N_MEMBERS = {ENV[nmbrs]} ENSEMBLE_STAT_ENS_THRESH = 0.4 ENSEMBLE_STAT_ENS_VLD_THRESH = 0.4 diff --git a/parm/metplus_config/stats/cam/grid2obs/EnsembleStat_fcstHREF_obsPREPBUFR_SFC.conf b/parm/metplus_config/stats/cam/grid2obs/EnsembleStat_fcstHREF_obsPREPBUFR_SFC.conf index 298d8f94e7..82a4b1cea9 100755 --- a/parm/metplus_config/stats/cam/grid2obs/EnsembleStat_fcstHREF_obsPREPBUFR_SFC.conf +++ b/parm/metplus_config/stats/cam/grid2obs/EnsembleStat_fcstHREF_obsPREPBUFR_SFC.conf @@ -14,22 +14,12 @@ VALID_BEG = {ENV[vday]}{ENV[vbeg]} # End time for METplus run VALID_END = {ENV[vday]}{ENV[vend]} -#VALID_END = {now?fmt=%Y%m%d}12 - METPLUS_PATH = {ENV[METPLUS_PATH]} # Increment between METplus runs in seconds. Must be >= 60 -# 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. 
-# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. -LOOP_ORDER = times - # List of applications to run PROCESS_LIST = EnsembleStat @@ -70,10 +60,10 @@ OBS_VAR6_LEVELS = {FCST_VAR6_LEVELS} FCST_VAR7_NAME = CAPE FCST_VAR7_LEVELS = L0 -FCST_VAR7_OPTIONS = GRIB_lvl_typ = 1 +FCST_VAR7_OPTIONS = GRIB_lvl_typ = 1; cnt_thresh = [ NA ]; cnt_logic = INTERSECTION; OBS_VAR7_NAME = {FCST_VAR7_NAME} OBS_VAR7_LEVELS = {FCST_VAR7_LEVELS} -OBS_VAR7_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = UNION +OBS_VAR7_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = INTERSECTION; FCST_VAR8_NAME = RH FCST_VAR8_LEVELS = Z2 @@ -82,10 +72,10 @@ OBS_VAR8_LEVELS = {FCST_VAR8_LEVELS} FCST_VAR9_NAME = CAPE FCST_VAR9_LEVELS = P90-0 -FCST_VAR9_OPTIONS = cnt_thresh = [ >0 ] +FCST_VAR9_OPTIONS = cnt_thresh = [ NA ]; cnt_logic = INTERSECTION; OBS_VAR9_NAME = MLCAPE OBS_VAR9_LEVELS = L0-100000 -OBS_VAR9_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = UNION +OBS_VAR9_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = INTERSECTION; FCST_VAR10_NAME = GUST FCST_VAR10_LEVELS = L0 @@ -198,7 +188,7 @@ METPLUS_CONF = {OUTPUT_BASE}/final_{model}_g2o_{domain}_{ENV[valid_at]}_sfc.conf CST_ENSEMBLE_STAT_INPUT_DATATYPE = GRIB OBS_ENSEMBLE_STAT_INPUT_POINT_DATATYPE = NETCDF -ENSEMBLE_STAT_N_MEMBERS = 10 +ENSEMBLE_STAT_N_MEMBERS = {ENV[nmbrs]} ENSEMBLE_STAT_ENS_THRESH = 0.4 ENSEMBLE_STAT_ENS_VLD_THRESH = 0.4 diff --git a/parm/metplus_config/stats/cam/grid2obs/GenEnsProd_fcstHREF_obsPREPBUFR_PROFILE.conf b/parm/metplus_config/stats/cam/grid2obs/GenEnsProd_fcstHREF_obsPREPBUFR_PROFILE.conf index dbd310a05d..5c97ffa74c 100644 --- a/parm/metplus_config/stats/cam/grid2obs/GenEnsProd_fcstHREF_obsPREPBUFR_PROFILE.conf +++ b/parm/metplus_config/stats/cam/grid2obs/GenEnsProd_fcstHREF_obsPREPBUFR_PROFILE.conf @@ -20,8 +20,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} VALID_INCREMENT = {ENV[valid_increment]} -LOOP_ORDER = times - PROCESS_LIST = GenEnsProd ENS_VAR1_NAME = HGT @@ 
-95,7 +93,7 @@ extradir = {ENV[extradir]} METPLUS_CONF = {OUTPUT_BASE}/final_g2o_profile_{domain}_{ENV[vbeg]}_gen_ens_prod.conf -GEN_ENS_PROD_N_MEMBERS = 10 +GEN_ENS_PROD_N_MEMBERS = {ENV[nmbrs]} GEN_ENS_PROD_ENS_THRESH = 0.4 GEN_ENS_PROD_ENS_VLD_THRESH = 0.4 diff --git a/parm/metplus_config/stats/cam/grid2obs/GenEnsProd_fcstHREF_obsPREPBUFR_SFC.conf b/parm/metplus_config/stats/cam/grid2obs/GenEnsProd_fcstHREF_obsPREPBUFR_SFC.conf index 8b65ea1ba4..f44ef20402 100644 --- a/parm/metplus_config/stats/cam/grid2obs/GenEnsProd_fcstHREF_obsPREPBUFR_SFC.conf +++ b/parm/metplus_config/stats/cam/grid2obs/GenEnsProd_fcstHREF_obsPREPBUFR_SFC.conf @@ -20,8 +20,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} VALID_INCREMENT = {ENV[valid_increment]} -LOOP_ORDER = times - PROCESS_LIST = GenEnsProd ENS_VAR1_NAME = VIS @@ -49,7 +47,7 @@ ENS_VAR6_THRESH = lt152, lt305, lt914, lt1524, lt3048, ge914 ENS_VAR7_NAME = CAPE ENS_VAR7_LEVELS = L0 ENS_VAR7_THRESH = ge250, ge500, ge1000, ge2000 -ENS_VAR7_OPTIONS = GRIB_lvl_typ = 1 +ENS_VAR7_OPTIONS = GRIB_lvl_typ = 1; cnt_thresh = [ NA ]; cnt_logic = INTERSECTION; ENS_VAR8_NAME = RH ENS_VAR8_LEVELS = Z2 @@ -57,7 +55,7 @@ ENS_VAR8_THRESH = le15, le20, le25, le30 ENS_VAR9_NAME = CAPE ENS_VAR9_LEVELS = P90-0 -ENS_VAR9_OPTIONS = cnt_thresh = [ >0 ] +ENS_VAR9_OPTIONS = cnt_thresh = [ NA ]; cnt_logic = INTERSECTION; ENS_VAR9_THRESH = ge250, ge500, ge1000, ge2000 ENS_VAR10_NAME = GUST @@ -124,7 +122,7 @@ extradir = {ENV[extradir]} METPLUS_CONF = {OUTPUT_BASE}/final_g2o_sfc_{domain}_{ENV[vbeg]}_gen_ens_prod.conf -GEN_ENS_PROD_N_MEMBERS = 10 +GEN_ENS_PROD_N_MEMBERS = {ENV[nmbrs]} GEN_ENS_PROD_ENS_THRESH = 0.4 GEN_ENS_PROD_ENS_VLD_THRESH = 0.4 diff --git a/parm/metplus_config/stats/cam/grid2obs/Pb2nc_obsGDAS_Prepbufr_href_profile.conf b/parm/metplus_config/stats/cam/grid2obs/Pb2nc_obsGDAS_Prepbufr_href_profile.conf index c71276cef6..d4331d5cd9 100644 --- a/parm/metplus_config/stats/cam/grid2obs/Pb2nc_obsGDAS_Prepbufr_href_profile.conf +++ 
b/parm/metplus_config/stats/cam/grid2obs/Pb2nc_obsGDAS_Prepbufr_href_profile.conf @@ -36,16 +36,6 @@ VALID_INCREMENT = 1M # If unset, defaults to 0 (don't loop through forecast leads) LEAD_SEQ = 0 -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -LOOP_ORDER = processes - # Location of MET config file to pass to PB2NC # References CONFIG_DIR from the [dir] section PB2NC_CONFIG_FILE = {METPLUS_BASE}/parm/met_config/PB2NCConfig_wrapped diff --git a/parm/metplus_config/stats/cam/grid2obs/Pb2nc_obsRAP_Prepbufr_href.conf b/parm/metplus_config/stats/cam/grid2obs/Pb2nc_obsRAP_Prepbufr_href.conf index 868a098303..132772cc68 100644 --- a/parm/metplus_config/stats/cam/grid2obs/Pb2nc_obsRAP_Prepbufr_href.conf +++ b/parm/metplus_config/stats/cam/grid2obs/Pb2nc_obsRAP_Prepbufr_href.conf @@ -76,7 +76,7 @@ PB2NC_OBS_BUFR_MAP = { key = "ZOB"; val = "HGT"; }, { key = "TOB"; val = "TMP" CONFIG_DIR = {PARM_BASE}/met_config # directory containing input to PB2NC -PB2NC_INPUT_DIR = {ENV[PREPBUFR]} +PB2NC_INPUT_DIR = {ENV[bufrpath]} # directory to write output from PB2NC PB2NC_OUTPUT_DIR = {OUTPUT_BASE}/prepbufr_nc @@ -85,7 +85,7 @@ METPLUS_CONF = {OUTPUT_BASE}/final_pb2nc_{ENV[vbeg]}_{ENV[verif_grid]}.conf # End of [dir] section and start of [filename_templates] section [filename_templates] # Template to look for forecast input to PB2NC relative to PB2NC_INPUT_DIR -PB2NC_INPUT_TEMPLATE = rap.{da_init?fmt=%Y%m%d}/rap.t{da_init?fmt=%H}z.prepbufr.tm00 +PB2NC_INPUT_TEMPLATE = prepbufr.{da_init?fmt=%Y%m%d}/rap.t{da_init?fmt=%H}z.{ENV[verif_grid]}.prepbufr # Template to use to write output 
from PB2NC PB2NC_OUTPUT_TEMPLATE = prepbufr.t{da_init?fmt=%H}z.{ENV[verif_grid]}.nc diff --git a/parm/metplus_config/stats/cam/grid2obs/Pb2nc_obsRAP_Prepbufr_href_profile.conf b/parm/metplus_config/stats/cam/grid2obs/Pb2nc_obsRAP_Prepbufr_href_profile.conf index 6d68fe77ae..73abc62bc2 100644 --- a/parm/metplus_config/stats/cam/grid2obs/Pb2nc_obsRAP_Prepbufr_href_profile.conf +++ b/parm/metplus_config/stats/cam/grid2obs/Pb2nc_obsRAP_Prepbufr_href_profile.conf @@ -56,7 +56,7 @@ PB2NC_SKIP_IF_OUTPUT_EXISTS = True # Values to pass to pb2nc config file using environment variables of the same name. # See MET User's Guide for more information -PB2NC_GRID = {ENV[verif_grid]} +PB2NC_GRID = {ENV[verif_grid]} PB2NC_POLY = PB2NC_STATION_ID = PB2NC_MESSAGE_TYPE = ADPUPA @@ -75,7 +75,7 @@ PB2NC_OBS_BUFR_MAP = { key = "ZOB"; val = "HGT"; }, { key = "TOB"; val = "TMP" CONFIG_DIR = {PARM_BASE}/met_config # directory containing input to PB2NC -PB2NC_INPUT_DIR = {ENV[PREPBUFR]} +PB2NC_INPUT_DIR = {ENV[bufrpath]} # directory to write output from PB2NC PB2NC_OUTPUT_DIR = {OUTPUT_BASE}/prepbufr_nc @@ -84,7 +84,7 @@ METPLUS_CONF = {OUTPUT_BASE}/final_pb2nc_{ENV[vbeg]}_{ENV[verif_grid]}.conf # End of [dir] section and start of [filename_templates] section [filename_templates] # Template to look for forecast input to PB2NC relative to PB2NC_INPUT_DIR -PB2NC_INPUT_TEMPLATE = rap.{da_init?fmt=%Y%m%d}/rap.t{da_init?fmt=%H}z.prepbufr.tm00 +PB2NC_INPUT_TEMPLATE = prepbufr.{da_init?fmt=%Y%m%d}/rap.t{da_init?fmt=%H}z.{ENV[verif_grid]}.prepbufr # Template to use to write output from PB2NC PB2NC_OUTPUT_TEMPLATE = prepbufr_profile.t{da_init?fmt=%H}z.{ENV[verif_grid]}.nc diff --git a/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREF_obsPREPBUFR_PROFILE_prob.conf b/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREF_obsPREPBUFR_PROFILE_prob.conf index b42b139fb9..1e6245f0cf 100755 --- a/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREF_obsPREPBUFR_PROFILE_prob.conf +++ 
b/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREF_obsPREPBUFR_PROFILE_prob.conf @@ -20,13 +20,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. -LOOP_ORDER = times - # List of applications to run PROCESS_LIST = PointStat @@ -121,19 +114,6 @@ POINT_STAT_OUTPUT_FLAG_NBRCNT = NONE POINT_STAT_OUTPUT_FLAG_GRAD = NONE POINT_STAT_OUTPUT_FLAG_DMAP = NONE -POINT_STAT_NC_PAIRS_FLAG_LATLON = TRUE -POINT_STAT_NC_PAIRS_FLAG_RAW = TRUE -POINT_STAT_NC_PAIRS_FLAG_DIFF = TRUE -POINT_STAT_NC_PAIRS_FLAG_CLIMO = -POINT_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -POINT_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -POINT_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -POINT_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -POINT_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -POINT_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -POINT_STAT_NC_PAIRS_FLAG_APPLY_MASK = TRUE - - ############################################################### modelpath = {ENV[modelpath]} modelgrid = {ENV[modelgrid]} diff --git a/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREF_obsPREPBUFR_SFC_prob.conf b/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREF_obsPREPBUFR_SFC_prob.conf index 182e3a4896..a0404587ee 100755 --- a/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREF_obsPREPBUFR_SFC_prob.conf +++ b/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREF_obsPREPBUFR_SFC_prob.conf @@ -20,13 +20,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. 
-# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. -LOOP_ORDER = times - # List of applications to run PROCESS_LIST = PointStat @@ -260,19 +253,6 @@ POINT_STAT_OUTPUT_FLAG_NBRCNT = NONE POINT_STAT_OUTPUT_FLAG_GRAD = NONE POINT_STAT_OUTPUT_FLAG_DMAP = NONE -POINT_STAT_NC_PAIRS_FLAG_LATLON = TRUE -POINT_STAT_NC_PAIRS_FLAG_RAW = TRUE -POINT_STAT_NC_PAIRS_FLAG_DIFF = TRUE -POINT_STAT_NC_PAIRS_FLAG_CLIMO = -POINT_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -POINT_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -POINT_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -POINT_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -POINT_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -POINT_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -POINT_STAT_NC_PAIRS_FLAG_APPLY_MASK = TRUE - - ############################################################### modelpath = {ENV[modelpath]} modelgrid = {ENV[modelgrid]} diff --git a/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREFmean_obsPREPBUFR_SFC.conf b/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREFmean_obsPREPBUFR_SFC.conf index d2e2b73ceb..c610bc2828 100755 --- a/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREFmean_obsPREPBUFR_SFC.conf +++ b/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREFmean_obsPREPBUFR_SFC.conf @@ -20,13 +20,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. 
-LOOP_ORDER = times - # List of applications to run PROCESS_LIST = PointStat @@ -52,7 +45,8 @@ FCST_VAR4_OPTIONS = GRIB_lvl_typ = 215 BOTH_VAR5_NAME = CAPE BOTH_VAR5_LEVELS = L0 BOTH_VAR5_THRESH = >=250, >=500, >=1000, >=2000 -FCST_VAR5_OPTIONS = GRIB_lvl_typ = 1 +FCST_VAR5_OPTIONS = GRIB_lvl_typ = 1; cnt_thresh = [ NA ]; cnt_logic = INTERSECTION; +OBS_VAR5_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = INTERSECTION; BOTH_VAR6_NAME = VIS BOTH_VAR6_LEVELS = L0 @@ -60,10 +54,10 @@ BOTH_VAR6_THRESH = <805, <1609, <4828, <8045, <16090, >=8045 FCST_VAR7_NAME = CAPE FCST_VAR7_LEVELS = P90-0 -FCST_VAR7_OPTIONS = cnt_thresh = [ >0 ] +FCST_VAR7_OPTIONS = cnt_thresh = [ NA ]; cnt_logic = INTERSECTION; OBS_VAR7_NAME = MLCAPE OBS_VAR7_LEVELS = L0-100000 -OBS_VAR7_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = UNION +OBS_VAR7_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = INTERSECTION; BOTH_VAR7_THRESH = >=250, >=500, >=1000, >=2000 BOTH_VAR8_NAME = TCDC @@ -131,19 +125,6 @@ POINT_STAT_OUTPUT_FLAG_NBRCNT = NONE POINT_STAT_OUTPUT_FLAG_GRAD = NONE POINT_STAT_OUTPUT_FLAG_DMAP = NONE -POINT_STAT_NC_PAIRS_FLAG_LATLON = TRUE -POINT_STAT_NC_PAIRS_FLAG_RAW = TRUE -POINT_STAT_NC_PAIRS_FLAG_DIFF = TRUE -POINT_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -POINT_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -POINT_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -POINT_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -POINT_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -POINT_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -POINT_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -POINT_STAT_NC_PAIRS_FLAG_APPLY_MASK = TRUE - - ############################################################### modelpath = {ENV[modelpath]} modelgrid = {ENV[modelgrid]} diff --git a/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREFmean_obsPREPBUFR_SPCoutlook.conf b/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREFmean_obsPREPBUFR_SPCoutlook.conf index a62aa9f54b..9cf3288d8f 100755 --- a/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREFmean_obsPREPBUFR_SPCoutlook.conf +++ 
b/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREFmean_obsPREPBUFR_SPCoutlook.conf @@ -20,31 +20,22 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. -LOOP_ORDER = times - # List of applications to run PROCESS_LIST = PointStat # list of variables to compare - - - BOTH_VAR1_NAME = CAPE BOTH_VAR1_LEVELS = L0 BOTH_VAR1_THRESH = >=250, >=500, >=1000, >=2000 -FCST_VAR1_OPTIONS = GRIB_lvl_typ = 1 +FCST_VAR1_OPTIONS = GRIB_lvl_typ = 1; cnt_thresh = [ NA ]; cnt_logic = INTERSECTION; +OBS_VAR1_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = INTERSECTION; FCST_VAR2_NAME = CAPE FCST_VAR2_LEVELS = P0-90 -FCST_VAR2_OPTIONS = cnt_thresh = [ >0 ] +FCST_VAR2_OPTIONS = cnt_thresh = [ NA ]; cnt_logic = INTERSECTION; OBS_VAR2_NAME = MLCAPE OBS_VAR2_LEVELS = L0-90000 -OBS_VAR2_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = UNION +OBS_VAR2_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = INTERSECTION; BOTH_VAR2_THRESH = >=250, >=500, >=1000, >=2000 @@ -107,19 +98,6 @@ POINT_STAT_OUTPUT_FLAG_NBRCNT = NONE POINT_STAT_OUTPUT_FLAG_GRAD = NONE POINT_STAT_OUTPUT_FLAG_DMAP = NONE -POINT_STAT_NC_PAIRS_FLAG_LATLON = TRUE -POINT_STAT_NC_PAIRS_FLAG_RAW = TRUE -POINT_STAT_NC_PAIRS_FLAG_DIFF = TRUE -POINT_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -POINT_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -POINT_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -POINT_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -POINT_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -POINT_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -POINT_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -POINT_STAT_NC_PAIRS_FLAG_APPLY_MASK = TRUE - - ############################################################### modelpath = {ENV[modelpath]} modelgrid = {ENV[modelgrid]} diff --git 
a/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREFprob_obsPREPBUFR_SFC.conf b/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREFprob_obsPREPBUFR_SFC.conf index 9fa0955a4f..14e5c50942 100755 --- a/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREFprob_obsPREPBUFR_SFC.conf +++ b/parm/metplus_config/stats/cam/grid2obs/PointStat_fcstHREFprob_obsPREPBUFR_SFC.conf @@ -22,13 +22,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. -LOOP_ORDER = times - # List of applications to run PROCESS_LIST = PointStat @@ -130,19 +123,6 @@ POINT_STAT_OUTPUT_FLAG_NBRCNT = NONE POINT_STAT_OUTPUT_FLAG_GRAD = NONE POINT_STAT_OUTPUT_FLAG_DMAP = NONE -POINT_STAT_NC_PAIRS_FLAG_LATLON = TRUE -POINT_STAT_NC_PAIRS_FLAG_RAW = TRUE -POINT_STAT_NC_PAIRS_FLAG_DIFF = TRUE -POINT_STAT_NC_PAIRS_FLAG_CLIMO = -POINT_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -POINT_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -POINT_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -POINT_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -POINT_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -POINT_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -POINT_STAT_NC_PAIRS_FLAG_APPLY_MASK = TRUE - - ############################################################### modelpath = {ENV[modelpath]} modelgrid = {ENV[modelgrid]} diff --git a/parm/metplus_config/stats/cam/grid2obs/StatAnlysis_fcstHREF_obsPREPBUFR_GatherByDay.conf b/parm/metplus_config/stats/cam/grid2obs/StatAnlysis_fcstHREF_obsPREPBUFR_GatherByDay.conf index 886fa50490..780de28ab5 100755 --- a/parm/metplus_config/stats/cam/grid2obs/StatAnlysis_fcstHREF_obsPREPBUFR_GatherByDay.conf +++ b/parm/metplus_config/stats/cam/grid2obs/StatAnlysis_fcstHREF_obsPREPBUFR_GatherByDay.conf @@ -15,7 
+15,6 @@ MET_CONFIG = {ENV[MET_CONFIG]} #VERIFY: grid2grid, gridobs, precip VERIFY = {ENV[verify]} -LOOP_ORDER = times PROCESS_LIST = StatAnalysis @@ -72,14 +71,8 @@ GROUP_LIST_ITEMS = # Full path to output stat files, can use wildcards if in multiple directories STAT_ANALYSIS_OUTPUT_DIR = {OUTPUT_BASE} -#Location of input stat files and the stat file pattern to be gathered -#verify_type:ensemble_stat, grid_stat, point_stat -#MODEL1: GEFS, CMCE, ECME, NAEFS -#preci_type=BIN1, APCP24, '' - MODEL1_STAT_ANALYSIS_LOOKIN_DIR = {ENV[stat_file_dir]}/*{MODEL1}* - [filename_templates] STAT_ANALYSIS_DUMP_ROW_TEMPLATE = {valid_beg?fmt=%Y%m%d}/{MODEL1}_{VERIFY}_{valid_beg?fmt=%Y%m%d}.stat STAT_ANALYSIS_OUT_STAT_TEMPLATE = diff --git a/parm/metplus_config/stats/cam/grid2obs/readme.txt b/parm/metplus_config/stats/cam/grid2obs/readme.txt deleted file mode 100644 index 0e0c098726..0000000000 --- a/parm/metplus_config/stats/cam/grid2obs/readme.txt +++ /dev/null @@ -1,4 +0,0 @@ -HREFmean -> directly from HREF mean product files -HREF_*_mean -> from EnsembleStat -HREFprob -> directly from HREF prob product files -HREF_*_prob -> from EnsembleStat diff --git a/parm/metplus_config/stats/cam/precip/EnsembleStat_fcstHREF_obsCCPA.conf b/parm/metplus_config/stats/cam/precip/EnsembleStat_fcstHREF_obsCCPA.conf index 7c064a928b..48699a3faa 100755 --- a/parm/metplus_config/stats/cam/precip/EnsembleStat_fcstHREF_obsCCPA.conf +++ b/parm/metplus_config/stats/cam/precip/EnsembleStat_fcstHREF_obsCCPA.conf @@ -22,13 +22,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. 
-LOOP_ORDER = times - # List of applications to run PROCESS_LIST = EnsembleStat @@ -141,7 +134,7 @@ METPLUS_CONF = {OUTPUT_BASE}/final_{model}_ens_{obsvhead}.conf FCST_ENSEMBLE_STAT_INPUT_DATATYPE = GRIB OBS_ENSEMBLE_STAT_INPUT_GRID_DATATYPE = GRIB -ENSEMBLE_STAT_N_MEMBERS = 10 +ENSEMBLE_STAT_N_MEMBERS = {ENV[mbrs]} ENSEMBLE_STAT_ENS_THRESH = 0.5 LOG_ENSEMBLE_STAT_VERBOSITY = {LOG_MET_VERBOSITY} diff --git a/parm/metplus_config/stats/cam/precip/EnsembleStat_fcstHREF_obsCCPA_G227.conf b/parm/metplus_config/stats/cam/precip/EnsembleStat_fcstHREF_obsCCPA_G227.conf index bc7ac18ab2..2dac6de18b 100755 --- a/parm/metplus_config/stats/cam/precip/EnsembleStat_fcstHREF_obsCCPA_G227.conf +++ b/parm/metplus_config/stats/cam/precip/EnsembleStat_fcstHREF_obsCCPA_G227.conf @@ -21,13 +21,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. 
-LOOP_ORDER = times - # List of applications to run PROCESS_LIST = EnsembleStat @@ -118,7 +111,7 @@ METPLUS_CONF = {OUTPUT_BASE}/final_{model}_ens_{obsvhead}.conf FCST_ENSEMBLE_STAT_INPUT_DATATYPE = GRIB OBS_ENSEMBLE_STAT_INPUT_GRID_DATATYPE = GRIB -ENSEMBLE_STAT_N_MEMBERS = {ENV[nmem]} +ENSEMBLE_STAT_N_MEMBERS = {ENV[mbrs]} ENSEMBLE_STAT_ENS_THRESH = 0.3 LOG_METPLUS = {LOG_DIR}/EnsembleStat_fcst{MODEL}_obs{ENV[obsv]}_for{ENV[VDATE]}{ENV[vend]}_f{lead}_runon{CLOCK_TIME}.log diff --git a/parm/metplus_config/stats/cam/precip/EnsembleStat_fcstHREF_obsMRMS_G255.conf b/parm/metplus_config/stats/cam/precip/EnsembleStat_fcstHREF_obsMRMS_G255.conf index 461f6e3787..dcce4612a5 100755 --- a/parm/metplus_config/stats/cam/precip/EnsembleStat_fcstHREF_obsMRMS_G255.conf +++ b/parm/metplus_config/stats/cam/precip/EnsembleStat_fcstHREF_obsMRMS_G255.conf @@ -21,13 +21,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. 
-LOOP_ORDER = times - # List of applications to run PROCESS_LIST = EnsembleStat @@ -116,7 +109,7 @@ METPLUS_CONF = {OUTPUT_BASE}/final_{model}_ens_{obsvhead}.conf FCST_ENSEMBLE_STAT_INPUT_DATATYPE = GRIB OBS_ENSEMBLE_STAT_INPUT_GRID_DATATYPE = NETCDF -ENSEMBLE_STAT_N_MEMBERS = {ENV[nmem]} +ENSEMBLE_STAT_N_MEMBERS = {ENV[mbrs]} ENSEMBLE_STAT_ENS_THRESH = 0.3 LOG_METPLUS = {LOG_DIR}/EnsembleStat_fcst{MODEL}_obs{ENV[obsv]}_for{ENV[VDATE]}{ENV[vend]}_f{lead}_runon{CLOCK_TIME}.log diff --git a/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsCCPA.conf b/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsCCPA.conf index 421b64a597..c7fd680342 100755 --- a/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsCCPA.conf +++ b/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsCCPA.conf @@ -20,13 +20,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. -LOOP_ORDER = times - # List of applications to run PROCESS_LIST = GridStat diff --git a/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsCCPA_G212.conf b/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsCCPA_G212.conf index 3d630d01fe..c817a47693 100755 --- a/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsCCPA_G212.conf +++ b/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsCCPA_G212.conf @@ -20,13 +20,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. 
-# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. -LOOP_ORDER = times - # List of applications to run PROCESS_LIST = GridStat diff --git a/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsCCPA_G240.conf b/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsCCPA_G240.conf index 3c09b301ac..61e7664708 100755 --- a/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsCCPA_G240.conf +++ b/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsCCPA_G240.conf @@ -21,13 +21,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. -LOOP_ORDER = times - # List of applications to run PROCESS_LIST = GridStat diff --git a/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsMRMS_G216.conf b/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsMRMS_G216.conf index 8ca44b867a..99f7a49a27 100755 --- a/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsMRMS_G216.conf +++ b/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsMRMS_G216.conf @@ -22,13 +22,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. 
-LOOP_ORDER = times - # List of applications to run PROCESS_LIST = GridStat diff --git a/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsMRMS_G91.conf b/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsMRMS_G91.conf index 651a6aed6c..6cb55b3e57 100755 --- a/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsMRMS_G91.conf +++ b/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFmean_obsMRMS_G91.conf @@ -20,13 +20,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. -LOOP_ORDER = times - # List of applications to run PROCESS_LIST = GridStat diff --git a/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFprob_obsCCPA.conf b/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFprob_obsCCPA.conf index af0f199111..2c84f46d66 100755 --- a/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFprob_obsCCPA.conf +++ b/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFprob_obsCCPA.conf @@ -22,13 +22,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. 
-LOOP_ORDER = times - # List of applications to run PROCESS_LIST = GridStat diff --git a/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFprob_obsCCPA_G227.conf b/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFprob_obsCCPA_G227.conf index 36111dc815..58fff8e373 100755 --- a/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFprob_obsCCPA_G227.conf +++ b/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFprob_obsCCPA_G227.conf @@ -20,13 +20,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. -LOOP_ORDER = times - # List of applications to run PROCESS_LIST = GridStat diff --git a/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFprob_obsMRMS_G255.conf b/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFprob_obsMRMS_G255.conf index 4a3f3c653a..3a4259596e 100755 --- a/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFprob_obsMRMS_G255.conf +++ b/parm/metplus_config/stats/cam/precip/GridStat_fcstHREFprob_obsMRMS_G255.conf @@ -20,13 +20,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. 
-LOOP_ORDER = times - # List of applications to run PROCESS_LIST = GridStat diff --git a/parm/metplus_config/stats/cam/precip/PcpCombine_fcstHREF_APCP24h.conf b/parm/metplus_config/stats/cam/precip/PcpCombine_fcstHREF_APCP24h.conf index ed569fd82b..45f4f65830 100644 --- a/parm/metplus_config/stats/cam/precip/PcpCombine_fcstHREF_APCP24h.conf +++ b/parm/metplus_config/stats/cam/precip/PcpCombine_fcstHREF_APCP24h.conf @@ -12,8 +12,6 @@ VALID_INCREMENT = 1H LEAD_SEQ = {ENV[fhr]} -LOOP_ORDER = times - FCST_PCP_COMBINE_CONSTANT_INIT = True FCST_PCP_COMBINE_RUN = True diff --git a/parm/metplus_config/stats/cam/precip/StatAnlysis_fcstHREF_obsAnalysis_GatherByDay.conf b/parm/metplus_config/stats/cam/precip/StatAnlysis_fcstHREF_obsAnalysis_GatherByDay.conf index 6295dc98ab..5ef0d8a096 100755 --- a/parm/metplus_config/stats/cam/precip/StatAnlysis_fcstHREF_obsAnalysis_GatherByDay.conf +++ b/parm/metplus_config/stats/cam/precip/StatAnlysis_fcstHREF_obsAnalysis_GatherByDay.conf @@ -15,7 +15,6 @@ MET_CONFIG = {ENV[MET_CONFIG]} #VERIFY: grid2grid, gridobs, precip VERIFY = {ENV[verify]} -LOOP_ORDER = times PROCESS_LIST = StatAnalysis diff --git a/parm/metplus_config/stats/cam/precip/grib2_mrms.txt b/parm/metplus_config/stats/cam/precip/grib2_mrms.txt deleted file mode 100644 index 23edb34906..0000000000 --- a/parm/metplus_config/stats/cam/precip/grib2_mrms.txt +++ /dev/null @@ -1,4 +0,0 @@ -GRIB2 -209 0 0 0 161 1 6 37 "APCP_01" "1hr Accumulated precip" "mm" -209 0 0 0 161 1 6 38 "APCP_03" "3hr Accumulated precip" "mm" -209 0 0 0 161 1 6 41 "APCP_24" "24hr Accumulated precip" "mm" diff --git a/parm/metplus_config/stats/cam/precip/note.txt b/parm/metplus_config/stats/cam/precip/note.txt deleted file mode 100644 index 2596a43797..0000000000 --- a/parm/metplus_config/stats/cam/precip/note.txt +++ /dev/null @@ -1,74 +0,0 @@ -HREF APCP over CONUS - 01h 03h 06h 12h 24h -MEAN y y x x x -PROB y y y y y -EAS x y y y y -Members x y x x x - -HREF APCP over Alaska - 01h 03h 06h 12h 24h -MEAN y 
y x x x -PROB x y y y y -EAS x y y y y -Members x y x x x - -only 03, 06, 09, .... 21, 24 ,,,,45,48fhr have APCP/03h -HREF(conus): cycle: 00, 06, 12 and 18Z -HREF(ak): cycle: 06Z - - -Precip over CONUS (CCPA) -APCP_03 - fcst_grid obs_grid verif_grid -MEAN G227(conus) G212 G212 - G240 G240 - -PROB G227(conus) G240 G227 - -System G227(conus) G240 G227 - - -APCP_24 - - fcst_grid obs_grid verif_grid -MEAN G227(conus) G212 G212 - G240 G240 - -PROB G227(conus) G240 G227 - -System G227(conus) G240 G227 - -Precip over CONUS (MRMS) -MRMS_03 - fcst_grid obs_grid verif_grid -MEAN G255(ak) G216 G216 - G91 G91 - -PROB G255(ak) G255 G255 - -System G255(ak) G255 G255 - - -APCP_24 - - fcst_grid obs_grid verif_grid -MEAN G255(ak) G216 G216 - G91 G91 - -PROB G255(ak) G255(mrms) G255(ak) - -System G255(ak) G255(mrms) G255(ak) - - -Snow over CONUS (NOHRSC) - - -Snow 6hr and 24h - - fcst_grid obs_grid verif_grid -System G227 (conus) G227 G227 - -MEAN G227 (conus) G212 G212 -(derived NOHRSCgrid NOHRSCgrid -from Ystem) - diff --git a/parm/metplus_config/stats/cam/snowfall/EnsembleStat_fcstHREF_obsNOHRSC.conf b/parm/metplus_config/stats/cam/snowfall/EnsembleStat_fcstHREF_obsNOHRSC.conf index 9695a55931..0c32fcb872 100755 --- a/parm/metplus_config/stats/cam/snowfall/EnsembleStat_fcstHREF_obsNOHRSC.conf +++ b/parm/metplus_config/stats/cam/snowfall/EnsembleStat_fcstHREF_obsNOHRSC.conf @@ -23,13 +23,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. 
-LOOP_ORDER = times - # List of applications to run PROCESS_LIST = EnsembleStat @@ -118,7 +111,7 @@ METPLUS_CONF = {OUTPUT_BASE}/final_{MODEL}_ens_snow{obsv}.conf FCST_ENSEMBLE_STAT_INPUT_DATATYPE = GRIB OBS_ENSEMBLE_STAT_INPUT_GRID_DATATYPE = GRIB -ENSEMBLE_STAT_N_MEMBERS = {ENV[nmem]} +ENSEMBLE_STAT_N_MEMBERS = {ENV[mbrs]} ENSEMBLE_STAT_ENS_THRESH = 0.3 LOG_METPLUS = {LOG_DIR}/EnsembleStat_fcst{MODEL}_obs{ENV[obsv]}_for{ENV[vday]}{ENV[vend]}_f{lead}_runon{CLOCK_TIME}.log diff --git a/parm/metplus_config/stats/cam/snowfall/GenEnsProd_fcstHREF_obsNOHRSC.conf b/parm/metplus_config/stats/cam/snowfall/GenEnsProd_fcstHREF_obsNOHRSC.conf index c6141e5174..d08ce78f0e 100644 --- a/parm/metplus_config/stats/cam/snowfall/GenEnsProd_fcstHREF_obsNOHRSC.conf +++ b/parm/metplus_config/stats/cam/snowfall/GenEnsProd_fcstHREF_obsNOHRSC.conf @@ -23,8 +23,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} VALID_INCREMENT = {ENV[valid_increment]} -LOOP_ORDER = times - PROCESS_LIST = GenEnsProd ENS_VAR1_NAME = {ENV[name]} @@ -94,7 +92,7 @@ extradir = {ENV[extradir]} METPLUS_CONF = {OUTPUT_BASE}/final_{MODEL}_ens_snow{obsv}_gen_ens_prod.conf -GEN_ENS_PROD_N_MEMBERS = {ENV[nmem]} +GEN_ENS_PROD_N_MEMBERS = {ENV[mbrs]} GEN_ENS_PROD_ENS_THRESH = 0.3 GEN_ENS_PROD_ENS_VLD_THRESH = 0.3 diff --git a/parm/metplus_config/stats/cam/snowfall/GridStat_fcstHREFmean_obsNOHRSC.conf b/parm/metplus_config/stats/cam/snowfall/GridStat_fcstHREFmean_obsNOHRSC.conf index bfb3b94775..0e334b31fd 100755 --- a/parm/metplus_config/stats/cam/snowfall/GridStat_fcstHREFmean_obsNOHRSC.conf +++ b/parm/metplus_config/stats/cam/snowfall/GridStat_fcstHREFmean_obsNOHRSC.conf @@ -20,13 +20,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. 
-# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. -LOOP_ORDER = times - # List of applications to run PROCESS_LIST = GridStat diff --git a/parm/metplus_config/stats/cam/snowfall/GridStat_fcstHREFmean_obsNOHRSC_G212.conf b/parm/metplus_config/stats/cam/snowfall/GridStat_fcstHREFmean_obsNOHRSC_G212.conf index a1f3e28cc3..0ea6d61b03 100755 --- a/parm/metplus_config/stats/cam/snowfall/GridStat_fcstHREFmean_obsNOHRSC_G212.conf +++ b/parm/metplus_config/stats/cam/snowfall/GridStat_fcstHREFmean_obsNOHRSC_G212.conf @@ -21,13 +21,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. -LOOP_ORDER = times - # List of applications to run PROCESS_LIST = GridStat diff --git a/parm/metplus_config/stats/cam/snowfall/GridStat_fcstHREFmean_obsNOHRSC_NOHRSCgrid.conf b/parm/metplus_config/stats/cam/snowfall/GridStat_fcstHREFmean_obsNOHRSC_NOHRSCgrid.conf index 5214a8ec9f..064eef0228 100755 --- a/parm/metplus_config/stats/cam/snowfall/GridStat_fcstHREFmean_obsNOHRSC_NOHRSCgrid.conf +++ b/parm/metplus_config/stats/cam/snowfall/GridStat_fcstHREFmean_obsNOHRSC_NOHRSCgrid.conf @@ -21,13 +21,6 @@ METPLUS_PATH = {ENV[METPLUS_PATH]} # 86400 sec=24h VALID_INCREMENT = {ENV[valid_increment]} -# Options are times, processes -# times = run all items in the PROCESS_LIST for a single initialization -# time, then repeat until all times have been evaluated. -# processes = run each item in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST. 
-LOOP_ORDER = times - # List of applications to run PROCESS_LIST = GridStat diff --git a/parm/metplus_config/stats/cam/snowfall/note.txt b/parm/metplus_config/stats/cam/snowfall/note.txt deleted file mode 100644 index dd7e72251e..0000000000 --- a/parm/metplus_config/stats/cam/snowfall/note.txt +++ /dev/null @@ -1,8 +0,0 @@ -GridStat_fcstHREFmean_obsNOHRSC.conf - The mean of snow is from EnsembleStat -GridStat_fcstHREFmean_obsCCPA.conf - The mean, pmmn, plmm, avrg of APCP, all are from HREF mean/pmmn/plmm/avrg files -GridStat_fcstHREFmean_obsCCPA.conf - The prob of APCP are from HREF prob files - - diff --git a/scripts/plots/cam/exevs_href_grid2obs_cape_plots.sh b/scripts/plots/cam/exevs_href_grid2obs_cape_plots.sh index 40817607b6..6223514531 100755 --- a/scripts/plots/cam/exevs_href_grid2obs_cape_plots.sh +++ b/scripts/plots/cam/exevs_href_grid2obs_cape_plots.sh @@ -2,27 +2,28 @@ #******************************************************************************* # Purpose: setup environment, paths, and run the href cape plotting python script # Last updated: +# 01/10/2025, add MPMD, by Binbin Zhou Lynker@EMC/NCEP # 07/09/2024, add restart, by Binbin Zhou Lynker@EMC/NCEP # 05/30/2023, Binbin Zhou Lynker@EMC/NCEP #****************************************************************************** set -x -cd $DATA +mkdir -p $DATA/scripts +cd $DATA/scripts export machine=${machine:-"WCOSS2"} -export prune_dir=$DATA/data -export save_dir=$DATA/out export output_base_dir=$DATA/stat_archive -export log_metplus=$DATA/logs/GENS_verif_plotting_job -mkdir -p $prune_dir -mkdir -p $save_dir mkdir -p $output_base_dir -mkdir -p $DATA/logs -restart=$COMOUT/restart/$past_days/href_cape_plots -if [ ! -d $restart ] ; then +all_plots=$DATA/plots/all_plots +mkdir -p $all_plots +if [ $SENDCOM = YES ] ; then + restart=$COMOUT/restart/$last_days/href_cape_plots + if [ ! 
-d $restart ] ; then mkdir -p $restart -fi + fi +fi + export eval_period='TEST' export interp_pnts='' @@ -34,7 +35,7 @@ model_list='HREF_MEAN' models='HREF_MEAN' n=0 -while [ $n -le $past_days ] ; do +while [ $n -le $last_days ] ; do hrs=$((n*24)) first_day=`$NDATE -$hrs ${VDATE}00|cut -c1-8` n=$((n+1)) @@ -44,11 +45,10 @@ export init_beg=$first_day export valid_beg=$first_day #************************************************************* -# Virtual link the href's stat data files of past 31/90 days +# Virtual link the href's stat data files of last 31/90 days #************************************************************* n=0 -while [ $n -le $past_days ] ; do - #hrs=`expr $n \* 24` +while [ $n -le $last_days ] ; do hrs=$((n*24)) day=`$NDATE -$hrs ${VDATE}00|cut -c1-8` echo $day @@ -56,15 +56,6 @@ while [ $n -le $past_days ] ; do n=$((n+1)) done - -export fcst_init_hour="0,6,12,18" - -export plot_dir=$DATA/out/sfc_upper/${valid_beg}-${valid_end} -#For restart: -if [ ! -d $plot_dir ] ; then - mkdir -p $plot_dir -fi - export fcst_init_hour="0,6,12,18" init_time='init00z_06z_12z_18z' line_type=ctc @@ -158,6 +149,20 @@ for valid_time in 00 12 ; do level=`echo $FCST_LEVEL_value | tr '[A-Z]' '[a-z]'` + if [ $score_type = lead_average ] ; then + thresh_fcst=">=${threshold}" + thresh_obs=$thresh_fcst + tail=ge${thresh} + elif [ $score_type = threshold_average ] ; then + thresh_fcst=${threshold} + thresh_obs=$thresh_fcst + tail=f${lead} + else + thresh_fcst=' ' + thresh_obs=' ' + tail='other' + fi + #********************* # Build sub-jobs #********************* @@ -169,8 +174,18 @@ for valid_time in 00 12 ; do #*********************************************************************************************************************************** echo "#!/bin/ksh" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh + echo "set -x" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh 
verif_type=conus_sfc + save_dir=$DATA/plots/run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time} + plot_dir=$save_dir/sfc_upper/${valid_beg}-${valid_end} + mkdir -p $plot_dir + mkdir -p $save_dir/data + + echo "export save_dir=$save_dir" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh + echo "export log_metplus=$save_dir/log_verif_plotting_job.out" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh + echo "export prune_dir=$save_dir/data" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh + if [ $score_type = lead_average ] ; then echo "export PLOT_TYPE=lead_average_valid" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh else @@ -203,40 +218,36 @@ for valid_time in 00 12 ; do echo "export interp=BILIN" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh echo "export score_py=$score_type" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh - - if [ $score_type = lead_average ] ; then - thresh_fcst=">=${threshold}" - thresh_obs=$thresh_fcst - elif [ $score_type = threshold_average ] ; then - thresh_fcst=${threshold} - thresh_obs=$thresh_fcst - else - thresh_fcst=' ' - thresh_obs=' ' - fi - sed -e "s!model_list!$models!g" -e "s!stat_list!$stat_list!g" -e "s!thresh_fcst!$thresh_fcst!g" -e "s!thresh_obs!$thresh_obs!g" -e "s!fcst_init_hour!$fcst_init_hour!g" -e "s!fcst_valid_hour!$valid_time!g" -e "s!fcst_lead!$fcst_lead!g" -e "s!interp_pnts!$interp_pnts!g" $USHevs/cam/evs_href_plots_config.sh > run_py.${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh chmod +x run_py.${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh - echo 
"${DATA}/run_py.${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh + echo "${DATA}/scripts/run_py.${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh - #Save for restart + #Save for restart and tar files echo "for domain in $subregions ; do " >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh - echo " if [ -s ${plot_dir}/${score_type}_regional_\${domain}_valid_${valid_time}z_${variable}_${stats}_*.png ] ; then " >>run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh - echo " cp -v ${plot_dir}/${score_type}_regional_\${domain}_valid_${valid_time}z_${variable}_${stats}_*.png $restart" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh - echo " >$restart/run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.completed" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh - echo " fi" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh - echo "done" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh + echo " plot=${plot_dir}/${score_type}_regional_\${domain}_valid_${valid_time}z_${variable}_${stats}_${tail}.png" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh + echo " if [ -s \$plot ] ; then " >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh + echo " cp -v \$plot $all_plots" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh + echo " 
>$all_plots/run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.completed" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh + + #Copy files to restart directory + echo " if [ $SENDCOM = YES ] ; then" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh + echo " cp -v $all_plots/${score_type}_regional_\${domain}_valid_${valid_time}z_${variable}_${stats}_${tail}.png $restart" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh + echo " cp -v $all_plots/run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.completed $restart" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh + echo " fi" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh + echo " fi" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh + echo "done" >> run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh chmod +x run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh - echo "${DATA}/run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh" >> run_all_poe.sh + echo "${DATA}/scripts/run_${stats}.${thresh}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${valid_time}.sh" >> run_all_poe.sh else - #Restart from png files of previous runs - for domain in $subregions ; do - cp $restart/${score_type}_regional_${domain}_valid_${valid_time}z_${variable}_${stats}_*.png ${plot_dir}/. 
- done + for domain in $subregions ; do + if [ -s ${restart}/${score_type}_regional_${domain}_valid_${valid_time}z_${variable}_${stats}_${tail}.png ] ; then + cp -v ${restart}/${score_type}_regional_${domain}_valid_${valid_time}z_${variable}_${stats}_${tail}.png $all_plots + fi + done fi done #end of FCST_LEVEL_value @@ -261,17 +272,17 @@ chmod +x run_all_poe.sh # Run the POE script in parallel or in sequence order to generate png files #************************************************************************** if [ $run_mpi = yes ] ; then - mpiexec -np 840 -ppn 84 --cpu-bind verbose,depth cfp ${DATA}/run_all_poe.sh + mpiexec -np 765 -ppn 85 --cpu-bind verbose,depth cfp ${DATA}/scripts/run_all_poe.sh else - ${DATA}/run_all_poe.sh + ${DATA}/scripts/run_all_poe.sh fi export err=$?; err_chk #************************************************** # Change plot file names to meet the EVS standard #************************************************** -cd $plot_dir - +cd $all_plots + for score_type in lead_average threshold_average; do for valid in 00z 12z ; do @@ -315,12 +326,12 @@ for score_type in lead_average threshold_average; do for thresh in ge250 ge500 ge1000 ge2000 ; do if [ -s ${score_type}_regional_${domain}_valid_${valid}_${var}_${stat}_${thresh}.png ] ; then - mv ${score_type}_regional_${domain}_valid_${valid}_${var}_${stat}_${thresh}.png evs.href.${stat}.${var}_${level}.${thresh}.last${past_days}days.${scoretype}_valid_${valid}.${new_domain}.png + mv ${score_type}_regional_${domain}_valid_${valid}_${var}_${stat}_${thresh}.png evs.href.${stat}.${var}_${level}.${thresh}.last${last_days}days.${scoretype}_valid${valid}.${new_domain}.png fi done else if [ -s ${score_type}_regional_${domain}_valid_${valid}_${var}_${stat}_${lead}.png ] ; then - mv ${score_type}_regional_${domain}_valid_${valid}_${var}_${stat}_${lead}.png evs.href.${stat}.${var}_${level}.last${past_days}days.${scoretype}_valid_${valid}.${new_lead}.${new_domain}.png + mv 
${score_type}_regional_${domain}_valid_${valid}_${var}_${stat}_${lead}.png evs.href.${stat}.${var}_${level}.last${last_days}days.${scoretype}_valid${valid}_${new_lead}.${new_domain}.png fi fi @@ -332,32 +343,27 @@ for score_type in lead_average threshold_average; do done #valid done #score_type -if [ -s *.png ] ; then - tar -cvf evs.plots.href.grid2obs.cape.past${past_days}days.v${VDATE}.tar *.png +if [ -s evs*.png ] ; then + tar -cvf evs.plots.href.grid2obs.cape.last${last_days}days.v${VDATE}.tar evs*.png fi # Cat the plotting log files -log_dir="$DATA/logs" -if [ -d $log_dir ]; then - log_file_count=$(find $log_dir -type f | wc -l) - if [[ $log_file_count -ne 0 ]]; then - log_files=("$log_dir"/*) - for log_file in "${log_files[@]}"; do - if [ -f "$log_file" ]; then - echo "Start: $log_file" - cat "$log_file" - echo "End: $log_file" - fi - done - fi +log_dir="$DATA/plots" +if [ -s $log_dir/*/log*.out ]; then + log_files=`ls $log_dir/*/log*.out` + for log_file in $log_files ; do + echo "Start: $log_file" + cat "$log_file" + echo "End: $log_file" + done fi -if [ $SENDCOM = YES ] && [ -s evs.plots.href.grid2obs.cape.past${past_days}days.v${VDATE}.tar ] ; then - cp -v evs.plots.href.grid2obs.cape.past${past_days}days.v${VDATE}.tar $COMOUT/. +if [ $SENDCOM = YES ] && [ -s evs.plots.href.grid2obs.cape.last${last_days}days.v${VDATE}.tar ] ; then + cp -v evs.plots.href.grid2obs.cape.last${last_days}days.v${VDATE}.tar $COMOUT/. 
fi if [ $SENDDBN = YES ] ; then - $DBNROOT/bin/dbn_alert MODEL EVS_RZDM $job $COMOUT/evs.plots.href.grid2obs.cape.past${past_days}days.v${VDATE}.tar + $DBNROOT/bin/dbn_alert MODEL EVS_RZDM $job $COMOUT/evs.plots.href.grid2obs.cape.last${last_days}days.v${VDATE}.tar fi diff --git a/scripts/plots/cam/exevs_href_grid2obs_ctc_plots.sh b/scripts/plots/cam/exevs_href_grid2obs_ctc_plots.sh index a01337f7f1..62c22ca77e 100755 --- a/scripts/plots/cam/exevs_href_grid2obs_ctc_plots.sh +++ b/scripts/plots/cam/exevs_href_grid2obs_ctc_plots.sh @@ -2,27 +2,27 @@ #******************************************************************************* # Purpose: setup environment, paths, and run the href ctc plotting python script # Last updated: +# 01/10/2025, add MPMD, by Binbin Zhou Lynker@EMC/NCEP # 07/09/2024, add restart, by Binbin Zhou Lynker@EMC/NCEP # 05/30/2024, Binbin Zhou Lynker@EMC/NCEP #****************************************************************************** set -x -cd $DATA +mkdir -p $DATA/scripts +cd $DATA/scripts export machine=${machine:-"WCOSS2"} -export prune_dir=$DATA/data -export save_dir=$DATA/out export output_base_dir=$DATA/stat_archive -export log_metplus=$DATA/logs/GENS_verif_plotting_job -mkdir -p $prune_dir -mkdir -p $save_dir mkdir -p $output_base_dir -mkdir -p $DATA/logs -restart=$COMOUT/restart/$past_days/href_ctc_plots -if [ ! -d $restart ] ; then - mkdir -p $restart -fi +all_plots=$DATA/plots/all_plots +mkdir -p $all_plots +if [ $SENDCOM = YES ] ; then + restart=$COMOUT/restart/$last_days/href_ctc_plots + if [ ! 
-d $restart ] ; then + mkdir -p $restart + fi +fi export eval_period='TEST' @@ -35,7 +35,7 @@ model_list='HREF_MEAN' models='HREF_MEAN' n=0 -while [ $n -le $past_days ] ; do +while [ $n -le $last_days ] ; do hrs=$((n*24)) first_day=`$NDATE -$hrs ${VDATE}00|cut -c1-8` n=$((n+1)) @@ -45,11 +45,10 @@ export init_beg=$first_day export valid_beg=$first_day #************************************************************* -# Virtual link the href's stat data files of past 31/90 days +# Virtual link the href's stat data files of last 31/90 days #************************************************************* n=0 -while [ $n -le $past_days ] ; do - #hrs=`expr $n \* 24` +while [ $n -le $last_days ] ; do hrs=$((n*24)) day=`$NDATE -$hrs ${VDATE}00|cut -c1-8` echo $day @@ -58,15 +57,6 @@ while [ $n -le $past_days ] ; do n=$((n+1)) done - -export fcst_init_hour="0,6,12,18" - -export plot_dir=$DATA/out/sfc_upper/${valid_beg}-${valid_end} -#For restart: -if [ ! -d $plot_dir ] ; then - mkdir -p $plot_dir -fi - export fcst_init_hour="0,6,12,18" init_time='init00z_06z_12z_18z' @@ -209,8 +199,17 @@ for stats in $stats_list ; do #*********************************************************************************************************************************** verif_type=conus_sfc - echo "#!/bin/ksh" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo "set -x" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + + save_dir=$DATA/plots/run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour} + plot_dir=$save_dir/sfc_upper/${valid_beg}-${valid_end} + mkdir -p $plot_dir + mkdir -p $save_dir/data + + echo "export save_dir=$save_dir" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo "export log_metplus=$save_dir/log_verif_plotting_job.out" >> 
run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo "export prune_dir=$save_dir/data" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh if [ $score_type = lead_average ] ; then echo "export PLOT_TYPE=lead_average_valid" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh @@ -261,24 +260,32 @@ for stats in $stats_list ; do chmod +x run_py.${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh - echo "${DATA}/run_py.${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo "${DATA}/scripts/run_py.${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh #Save for restart echo "for domain in $subregions ; do " >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh echo " if [ -s ${plot_dir}/${score_type}_regional_\${domain}_valid_${fcst_valid_hour}z_${new_var}_*.png ] ; then " >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh - echo " cp -v ${plot_dir}/${score_type}_regional_\${domain}_valid_${fcst_valid_hour}z_${new_var}_*.png $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh - echo " >$restart/run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.completed" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo " cp -v ${plot_dir}/${score_type}_regional_\${domain}_valid_${fcst_valid_hour}z_${new_var}_*.png $all_plots" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo " 
>$all_plots/run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.completed" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + + #Copy files to restart directory + echo " if [ $SENDCOM = YES ] ; then" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo " cp -v $all_plots/${score_type}_regional_\${domain}_valid_${fcst_valid_hour}z_${new_var}_*.png $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo " cp -v $all_plots/run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.completed $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo " fi" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh echo " fi" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh echo "done" >> run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + chmod +x run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh - echo "${DATA}/run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh" >> run_all_poe.sh + echo "${DATA}/scripts/run_${stats}.${score_type}.${lead}.${VAR}.${dom}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh" >> run_all_poe.sh else - #Restart from png files of previous runs - for domain in $subregions ; do - cp $restart/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_${new_var}_*.png ${plot_dir}/. 
- done + for domain in $subregions ; do + if [ -s $restart/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_${new_var}_*.png ] ; then + cp -v $restart/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_${new_var}_*.png $all_plots + fi + done fi done #end of FCST_LEVEL_value @@ -302,16 +309,16 @@ chmod +x run_all_poe.sh # Run the POE script in parallel or in sequence order to generate png files #************************************************************************** if [ $run_mpi = yes ] ; then - mpiexec -np 820 -ppn 82 --cpu-bind verbose,depth cfp ${DATA}/run_all_poe.sh + mpiexec -np 510 -ppn 85 --cpu-bind verbose,depth cfp ${DATA}/scripts/run_all_poe.sh else - ${DATA}/run_all_poe.sh + ${DATA}/scripts/run_all_poe.sh fi export err=$?; err_chk #************************************************** # Change plot file names to meet the EVS standard #************************************************** -cd $plot_dir +cd $all_plots for valid in 00z 03z 06z 09z 12z 15z 18z 21z ; do @@ -342,7 +349,7 @@ for valid in 00z 03z 06z 09z 12z 15z 18z 21z ; do fi if [ -s performance_diagram_regional_${domain}_valid_${valid}_${var}_*.png ] ; then - mv performance_diagram_regional_${domain}_valid_${valid}_${var}_*.png evs.href.ctc.${var_new}_${level}.last${past_days}days.perfdiag_valid_${valid}.${new_domain}.png + mv performance_diagram_regional_${domain}_valid_${valid}_${var}_*.png evs.href.ctc.${var_new}_${level}.last${last_days}days.perfdiag_valid${valid}.${new_domain}.png fi done @@ -385,7 +392,7 @@ for valid in 00z 03z 06z 09z 12z 15z 18z 21z ; do fi if [ -s ${score_type}_regional_${domain}_valid_${valid}_${var}_${stat}*.png ] ; then - mv ${score_type}_regional_${domain}_valid_${valid}_${var}_${stat}*.png evs.href.${stat}.${var_new}_${level}.last${past_days}days.${scoretype}_valid_${valid}.${new_domain}.png + mv ${score_type}_regional_${domain}_valid_${valid}_${var}_${stat}*.png 
evs.href.${stat}.${var_new}_${level}.last${last_days}days.${scoretype}_valid${valid}.${new_domain}.png fi done #domain @@ -394,31 +401,28 @@ for valid in 00z 03z 06z 09z 12z 15z 18z 21z ; do done #score_type done -tar -cvf evs.plots.href.grid2obs.ctc.past${past_days}days.v${VDATE}.tar *.png +if [ -s evs*.png ] ; then + tar -cvf evs.plots.href.grid2obs.ctc.last${last_days}days.v${VDATE}.tar evs*.png +fi # Cat the plotting log files -log_dir="$DATA/logs" -if [ -d $log_dir ]; then - log_file_count=$(find $log_dir -type f | wc -l) - if [[ $log_file_count -ne 0 ]]; then - log_files=("$log_dir"/*) - for log_file in "${log_files[@]}"; do - if [ -f "$log_file" ]; then - echo "Start: $log_file" - cat "$log_file" - echo "End: $log_file" - fi - done - fi +log_dir="$DATA/plots" +if [ -s $log_dir/*/log*.out ]; then + log_files=`ls $log_dir/*/log*.out` + for log_file in $log_files ; do + echo "Start: $log_file" + cat "$log_file" + echo "End: $log_file" + done fi -if [ $SENDCOM = YES ] && [ -s evs.plots.href.grid2obs.ctc.past${past_days}days.v${VDATE}.tar ] ; then - cp -v evs.plots.href.grid2obs.ctc.past${past_days}days.v${VDATE}.tar $COMOUT/. +if [ $SENDCOM = YES ] && [ -s evs.plots.href.grid2obs.ctc.last${last_days}days.v${VDATE}.tar ] ; then + cp -v evs.plots.href.grid2obs.ctc.last${last_days}days.v${VDATE}.tar $COMOUT/. 
fi if [ $SENDDBN = YES ] ; then - $DBNROOT/bin/dbn_alert MODEL EVS_RZDM $job $COMOUT/evs.plots.href.grid2obs.ctc.past${past_days}days.v${VDATE}.tar + $DBNROOT/bin/dbn_alert MODEL EVS_RZDM $job $COMOUT/evs.plots.href.grid2obs.ctc.last${last_days}days.v${VDATE}.tar fi diff --git a/scripts/plots/cam/exevs_href_grid2obs_ecnt_plots.sh b/scripts/plots/cam/exevs_href_grid2obs_ecnt_plots.sh index d105290bdc..df1b6fd1c5 100755 --- a/scripts/plots/cam/exevs_href_grid2obs_ecnt_plots.sh +++ b/scripts/plots/cam/exevs_href_grid2obs_ecnt_plots.sh @@ -2,28 +2,27 @@ #******************************************************************************* # Purpose: setup environment, paths, and run the href ecnt plotting python script # Last updated: +# 01/10/2025, add MPMD, by Binbin Zhou Lynker@EMC/NCEP # 07/09/2024, add restart, by Binbin Zhou Lynker@EMC/NCEP # 05/30/2024, Binbin Zhou Lynker@EMC/NCEP ##****************************************************************************** set -x -cd $DATA +mkdir -p $DATA/scripts +cd $DATA/scripts export machine=${machine:-"WCOSS2"} -export prune_dir=$DATA/data -export save_dir=$DATA/out export output_base_dir=$DATA/stat_archive -export log_metplus=$DATA/logs/GENS_verif_plotting_job -mkdir -p $prune_dir -mkdir -p $save_dir mkdir -p $output_base_dir -mkdir -p $DATA/logs -restart=$COMOUT/restart/$past_days/href_ecnt_plots -if [ ! -d $restart ] ; then +all_plots=$DATA/plots/all_plots +mkdir -p $all_plots +if [ $SENDCOM = YES ] ; then + restart=$COMOUT/restart/$last_days/href_ecnt_plots + if [ ! 
-d $restart ] ; then mkdir -p $restart + fi fi - export eval_period='TEST' export interp_pnts='' @@ -35,7 +34,7 @@ model_list='HREF' models='HREF' n=0 -while [ $n -le $past_days ] ; do +while [ $n -le $last_days ] ; do hrs=$((n*24)) first_day=`$NDATE -$hrs ${VDATE}00|cut -c1-8` n=$((n+1)) @@ -45,11 +44,10 @@ export init_beg=$first_day export valid_beg=$first_day #************************************************************* -# Virtual link the href's stat data files of past 31/90 days +# Virtual link the href's stat data files of last 31/90 days #************************************************************* n=0 -while [ $n -le $past_days ] ; do - #hrs=`expr $n \* 24` +while [ $n -le $last_days ] ; do hrs=$((n*24)) day=`$NDATE -$hrs ${VDATE}00|cut -c1-8` echo $day @@ -63,13 +61,6 @@ VX_MASK_LIST="CONUS, CONUS_East, CONUS_West, CONUS_South, CONUS_Central, Alaska, export fcst_init_hour="0,6,12,18" -export plot_dir=$DATA/out/sfc_upper/${valid_beg}-${valid_end} -#For restart: -if [ ! -d $plot_dir ] ; then - mkdir -p $plot_dir -fi - - verif_case=grid2obs #***************************************** @@ -151,8 +142,19 @@ for fcst_valid_hour in 00 03 06 09 12 15 18 21 ; do #*********************************************************************************************************************************** echo "#!/bin/ksh" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo "set -x" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh verif_type=conus_sfc + save_dir=$DATA/plots/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour} + plot_dir=$save_dir/sfc_upper/${valid_beg}-${valid_end} + mkdir -p $plot_dir + mkdir -p $save_dir/data + + echo "export save_dir=$save_dir" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo "export log_metplus=$save_dir/log_verif_plotting_job.out" >> 
run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo "export prune_dir=$save_dir/data" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + + echo "export PLOT_TYPE=lead_average_valid" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh echo "export field=${var}_${level}" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh @@ -188,20 +190,26 @@ for fcst_valid_hour in 00 03 06 09 12 15 18 21 ; do chmod +x run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh - echo "${DATA}/run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo "${DATA}/scripts/run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh - #Save for restart + #Save for restart and tar files echo "if [ -s ${plot_dir}/${score_type}_regional_*_valid_${fcst_valid_hour}z_*${new_var}_${stats}.png ] ; then" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh - echo " cp -v ${plot_dir}/${score_type}_regional_*_valid_${fcst_valid_hour}z_*${new_var}_${stats}.png $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh - echo " >$restart/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.completed" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo " cp -v ${plot_dir}/${score_type}_regional_*_valid_${fcst_valid_hour}z_*${new_var}_${stats}.png $all_plots" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo " >$all_plots/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.completed" >> 
run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + #Copy files to restart directory + echo " if [ $SENDCOM = YES ] ; then" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo " cp -v $all_plots/${score_type}_regional_*_valid_${fcst_valid_hour}z_*${new_var}_${stats}.png $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo " cp -v $all_plots/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.completed $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh + echo " fi" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh echo "fi" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh chmod +x run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh - echo "${DATA}/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh" >> run_all_poe.sh + echo "${DATA}/scripts/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${fcst_valid_hour}.sh" >> run_all_poe.sh else - #Restart from existing png files of previous run - cp $restart/${score_type}_regional_*_valid_${fcst_valid_hour}z_*${new_var}_${stats}.png ${plot_dir}/. 
+ if [ -s $restart/${plot_dir}/${score_type}_regional_*_valid_${fcst_valid_hour}z_*${new_var}_${stats}.png ] ; then + cp -v $restart/${plot_dir}/${score_type}_regional_*_valid_${fcst_valid_hour}z_*${new_var}_${stats}.png $all_plots + fi fi done #end of FCST_LEVEL_value @@ -222,17 +230,16 @@ chmod +x run_all_poe.sh # Run the POE script in parallel or in sequence order to generate png files #************************************************************************** if [ $run_mpi = yes ] ; then - mpiexec -np 66 -ppn 33 -depth 2 --cpu-bind verbose,depth cfp ${DATA}/run_all_poe.sh + mpiexec -np 66 -ppn 66 --cpu-bind verbose,depth cfp ${DATA}/scripts/run_all_poe.sh else - ${DATA}/run_all_poe.sh + ${DATA}/scripts/run_all_poe.sh fi export err=$?; err_chk - #************************************************** # Change plot file names to meet the EVS standard #************************************************** -cd $plot_dir +cd $all_plots for valid in 00z 03z 06z 09z 12z 15z 18z 21z ; do for stats in rmse_spread ; do @@ -269,11 +276,11 @@ for stats in rmse_spread ; do if [ $var = mslet ] || [ $var = gust ] || [ $var = hpbl ] ; then if [ -s ${score_type}_regional_${domain}_valid_${valid}_${var}_${stats}.png ] ; then - mv ${score_type}_regional_${domain}_valid_${valid}_${var}_${stats}.png evs.href.${stats}.${var}_${level}.last${past_days}days.${scoretype}_valid_${valid}.${new_domain}.png + mv ${score_type}_regional_${domain}_valid_${valid}_${var}_${stats}.png evs.href.${stats}.${var}_${level}.last${last_days}days.${scoretype}_valid${valid}.${new_domain}.png fi else if [ -s ${score_type}_regional_${domain}_valid_${valid}_${level}_${var}_${stats}.png ] ; then - mv ${score_type}_regional_${domain}_valid_${valid}_${level}_${var}_${stats}.png evs.href.${stats}.${var}_${level}.last${past_days}days.${scoretype}_valid_${valid}.${new_domain}.png + mv ${score_type}_regional_${domain}_valid_${valid}_${level}_${var}_${stats}.png 
evs.href.${stats}.${var}_${level}.last${last_days}days.${scoretype}_valid${valid}.${new_domain}.png fi fi @@ -283,31 +290,28 @@ for stats in rmse_spread ; do done #stats done #valid - -tar -cvf evs.plots.href.grid2obs.ecnt.past${past_days}days.v${VDATE}.tar *.png +if [ -s evs*.png ] ; then + tar -cvf evs.plots.href.grid2obs.ecnt.last${last_days}days.v${VDATE}.tar evs*.png +fi # Cat the plotting log files -log_dir="$DATA/logs" -if [ -d $log_dir ]; then - log_file_count=$(find $log_dir -type f | wc -l) - if [[ $log_file_count -ne 0 ]]; then - log_files=("$log_dir"/*) - for log_file in "${log_files[@]}"; do - if [ -f "$log_file" ]; then - echo "Start: $log_file" - cat "$log_file" - echo "End: $log_file" - fi - done - fi + +log_dir="$DATA/plots" +if [ -s $log_dir/*/log*.out ]; then + log_files=`ls $log_dir/*/log*.out` + for log_file in $log_files ; do + echo "Start: $log_file" + cat "$log_file" + echo "End: $log_file" + done fi -if [ $SENDCOM = YES ] && [ -s evs.plots.href.grid2obs.ecnt.past${past_days}days.v${VDATE}.tar ] ; then - cp -v evs.plots.href.grid2obs.ecnt.past${past_days}days.v${VDATE}.tar $COMOUT/. +if [ $SENDCOM = YES ] && [ -s evs.plots.href.grid2obs.ecnt.last${last_days}days.v${VDATE}.tar ] ; then + cp -v evs.plots.href.grid2obs.ecnt.last${last_days}days.v${VDATE}.tar $COMOUT/. 
fi if [ $SENDDBN = YES ] ; then - $DBNROOT/bin/dbn_alert MODEL EVS_RZDM $job $COMOUT/evs.plots.href.grid2obs.ecnt.past${past_days}days.v${VDATE}.tar + $DBNROOT/bin/dbn_alert MODEL EVS_RZDM $job $COMOUT/evs.plots.href.grid2obs.ecnt.last${last_days}days.v${VDATE}.tar fi diff --git a/scripts/plots/cam/exevs_href_precip_plots.sh b/scripts/plots/cam/exevs_href_precip_plots.sh index 1f5dcd8966..50d862f7a5 100755 --- a/scripts/plots/cam/exevs_href_precip_plots.sh +++ b/scripts/plots/cam/exevs_href_precip_plots.sh @@ -2,27 +2,27 @@ #******************************************************************************* # Purpose: setup environment, paths, and run the href cape plotting python script # Last updated: +# 01/10/2025, add MPMD, by Binbin Zhou Lynker@EMC/NCEP # 07/09/2024, add restart, by Binbin Zhou Lynker@EMC/NCEP # 05/30/2024, Binbin Zhou Lynker@EMC/NCEP #****************************************************************************** set -x -cd $DATA +mkdir -p $DATA/scripts +cd $DATA/scripts export machine=${machine:-"WCOSS2"} -export prune_dir=$DATA/data -export save_dir=$DATA/out export output_base_dir=$DATA/stat_archive -export log_metplus=$DATA/logs/GENS_verif_plotting_job -mkdir -p $prune_dir -mkdir -p $save_dir mkdir -p $output_base_dir -mkdir -p $DATA/logs -restart=$COMOUT/restart/$past_days/href_precip_plots -if [ ! -d $restart ] ; then +all_plots=$DATA/plots/all_plots +mkdir -p $all_plots +if [ $SENDCOM = YES ] ; then + restart=$COMOUT/restart/$last_days/href_precip_plots + if [ ! 
-d $restart ] ; then mkdir -p $restart -fi + fi +fi export eval_period='TEST' @@ -37,7 +37,7 @@ models='HREF_MEAN, HREF_AVRG, HREF_LPMM, HREF_PMMN' VX_MASK_LISTs='CONUS CONUS_East CONUS_West CONUS_South CONUS_Central Alaska' n=0 -while [ $n -le $past_days ] ; do +while [ $n -le $last_days ] ; do hrs=$((n*24)) first_day=`$NDATE -$hrs ${VDATE}00|cut -c1-8` n=$((n+1)) @@ -47,10 +47,10 @@ export init_beg=$first_day export valid_beg=$first_day #************************************************************* -# Virtual link the href's stat data files of past 31/90 days +# Virtual link the href's stat data files of last 31/90 days #************************************************************* n=0 -while [ $n -le $past_days ] ; do +while [ $n -le $last_days ] ; do #hrs=`expr $n \* 24` hrs=$((n*24)) day=`$NDATE -$hrs ${VDATE}00|cut -c1-8` @@ -60,12 +60,6 @@ while [ $n -le $past_days ] ; do done -export plot_dir=$DATA/out/precip/${valid_beg}-${valid_end} -#For restart: -if [ ! -d $plot_dir ] ; then - mkdir -p $plot_dir -fi - verif_case=precip verif_type=ccpa @@ -149,7 +143,17 @@ for stats in ets_fbias ratio_pod_csi fss ; do #*********************************************************************************************************************************** echo "#!/bin/ksh" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh - echo "export PLOT_TYPE=$score_type" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo "set -x" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + save_dir=$DATA/plots/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour} + plot_dir=$save_dir/precip/${valid_beg}-${valid_end} + mkdir -p $plot_dir + mkdir -p $save_dir/data + + echo "export save_dir=$save_dir" >> 
run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo "export log_metplus=$save_dir/log_verif_plotting_job.out" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo "export prune_dir=$save_dir/data" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + + echo "export PLOT_TYPE=$score_type" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh echo "export vx_mask_list='$VX_MASK_LIST'" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh echo "export verif_case=$verif_case" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh echo "export verif_type=$verif_type" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh @@ -194,22 +198,25 @@ for stats in ets_fbias ratio_pod_csi fss ; do chmod +x run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh - echo "${DATA}/run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo "${DATA}/scripts/run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh - #threshold: ${score_type}_regional_${domain}_valid_${valid}_${level}_${var}_${stats}_${lead}.png - #performance: ${score_type}_regional_${domain}_valid_${valid}_${level}_${var}_${lead}.png - #Save for restart echo "if [ -s 
${plot_dir}/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_*${var}*.png ] ; then " >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh - echo " cp -v ${plot_dir}/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_*${var}*.png $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh - echo " >$restart/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.completed" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo " cp -v ${plot_dir}/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_*${var}*.png $all_plots" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo " >$all_plots/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.completed" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + #Copy files to restart directory + echo " if [ $SENDCOM = YES ] ; then" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo " cp -v $all_plots/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_*${var}*.png $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo " cp -v $all_plots/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.completed $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo " fi" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh echo 
"fi" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh chmod +x run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh - echo "${DATA}/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh" >> run_all_poe.sh + echo "${DATA}/scripts/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh" >> run_all_poe.sh else - #Restart from existing png files of previous run - cp $restart/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_*${var}*.png ${plot_dir}/. + if [ -s $restart/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_*${var}*.png ] ; then + cp -v $restart/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_*${var}*.png $all_plots + fi fi done # end of fcst_valid_hour @@ -233,18 +240,17 @@ chmod +x run_all_poe.sh # Run the POE script in parallel or in sequence order to generate png files #************************************************************************** if [ $run_mpi = yes ] ; then - mpiexec -np 312 -ppn 78 --cpu-bind verbose,depth cfp ${DATA}/run_all_poe.sh + mpiexec -np 304 -ppn 76 --cpu-bind verbose,depth cfp ${DATA}/scripts/run_all_poe.sh else - ${DATA}/run_all_poe.sh + ${DATA}/scripts/run_all_poe.sh fi export err=$?; err_chk - #************************************************** # Change plot file names to meet the EVS standard #************************************************** -cd $plot_dir +cd $all_plots for stats in ets fbias fss ; do score_type='threshold_average' @@ -288,7 +294,7 @@ for stats in ets fbias fss ; do if [ -s ${score_type}_regional_${domain}_valid_${valid}_${level}_${var}_${stats}_${lead}.png ] ; then ls ${score_type}_regional_${domain}_valid_${valid}_${level}_${var}_${stats}_${lead}.png - mv 
${score_type}_regional_${domain}_valid_${valid}_${level}_${var}_${stats}_${lead}.png evs.href.${stats}.${var}h.last${past_days}days.${scoretype}_valid_${valid}.${new_domain}.png + mv ${score_type}_regional_${domain}_valid_${valid}_${level}_${var}_${stats}_${lead}.png evs.href.${stats}.${var}h.last${last_days}days.${scoretype}_valid${valid}.${new_domain}.png fi done done @@ -327,39 +333,35 @@ for var in apcp_01 apcp_03 apcp_24 ; do fi if [ -s ${score_type}_regional_${domain}_valid_${valid}_${level}_${var}_${lead}.png ] ; then - mv ${score_type}_regional_${domain}_valid_${valid}_${level}_${var}_${lead}.png evs.href.ctc.${var}h.last${past_days}days.${scoretype}_valid_${valid}.${new_domain}.png + mv ${score_type}_regional_${domain}_valid_${valid}_${level}_${var}_${lead}.png evs.href.ctc.${var}h.last${last_days}days.${scoretype}_valid${valid}.${new_domain}.png fi done done done - -tar -cvf evs.plots.href.precip.past${past_days}days.v${VDATE}.tar *.png +if [ -s evs*.png ] ; then + tar -cvf evs.plots.href.precip.last${last_days}days.v${VDATE}.tar evs*.png +fi # Cat the plotting log files -log_dir="$DATA/logs" -if [ -d $log_dir ]; then - log_file_count=$(find $log_dir -type f | wc -l) - if [[ $log_file_count -ne 0 ]]; then - log_files=("$log_dir"/*) - for log_file in "${log_files[@]}"; do - if [ -f "$log_file" ]; then - echo "Start: $log_file" - cat "$log_file" - echo "End: $log_file" - fi - done - fi +log_dir="$DATA/plots" +if [ -s $log_dir/*/log*.out ]; then + log_files=`ls $log_dir/*/log*.out` + for log_file in $log_files ; do + echo "Start: $log_file" + cat "$log_file" + echo "End: $log_file" + done fi -if [ $SENDCOM = YES ] && [ -s evs.plots.href.precip.past${past_days}days.v${VDATE}.tar ] ; then - cp -v evs.plots.href.precip.past${past_days}days.v${VDATE}.tar $COMOUT/. +if [ $SENDCOM = YES ] && [ -s evs.plots.href.precip.last${last_days}days.v${VDATE}.tar ] ; then + cp -v evs.plots.href.precip.last${last_days}days.v${VDATE}.tar $COMOUT/. 
fi if [ $SENDDBN = YES ] ; then - $DBNROOT/bin/dbn_alert MODEL EVS_RZDM $job $COMOUT/evs.plots.href.precip.past${past_days}days.v${VDATE}.tar + $DBNROOT/bin/dbn_alert MODEL EVS_RZDM $job $COMOUT/evs.plots.href.precip.last${last_days}days.v${VDATE}.tar fi diff --git a/scripts/plots/cam/exevs_href_profile_plots.sh b/scripts/plots/cam/exevs_href_profile_plots.sh index ea26d7ed0b..40cff37f13 100755 --- a/scripts/plots/cam/exevs_href_profile_plots.sh +++ b/scripts/plots/cam/exevs_href_profile_plots.sh @@ -2,29 +2,28 @@ #******************************************************************************* # Purpose: setup environment, paths, and run the href profile plotting python script # Last updated: +# 01/10/2025, add MPMD, by Binbin Zhou Lynker@EMC/NCEP # 07/09/2024, add restart, by Binbin Zhou Lynker@EMC/NCEP # 05/30/2024, Binbin Zhou Lynker@EMC/NCEP #****************************************************************************** set -x -cd $DATA +mkdir -p $DATA/scripts +cd $DATA/scripts export machine=${machine:-"WCOSS2"} -export prune_dir=$DATA/data -export save_dir=$DATA/out export output_base_dir=$DATA/stat_archive -export log_metplus=$DATA/logs/GENS_verif_plotting_job -mkdir -p $prune_dir -mkdir -p $save_dir mkdir -p $output_base_dir -mkdir -p $DATA/logs -restart=$COMOUT/restart/$past_days/href_profile_plots -if [ ! -d $restart ] ; then +all_plots=$DATA/plots/all_plots +mkdir -p $all_plots +if [ $SENDCOM = YES ] ; then + restart=$COMOUT/restart/$last_days/href_profile_plots + if [ ! 
-d $restart ] ; then mkdir -p $restart + fi fi - export eval_period='TEST' export interp_pnts='' @@ -36,7 +35,7 @@ model_list='HREF' models='HREF' n=0 -while [ $n -le $past_days ] ; do +while [ $n -le $last_days ] ; do hrs=$((n*24)) first_day=`$NDATE -$hrs ${VDATE}00|cut -c1-8` n=$((n+1)) @@ -46,10 +45,10 @@ export init_beg=$first_day export valid_beg=$first_day #************************************************************* -# Virtual link the href's stat data files of past 31/90 days +# Virtual link the href's stat data files of last 31/90 days #************************************************************* n=0 -while [ $n -le $past_days ] ; do +while [ $n -le $last_days ] ; do #hrs=`expr $n \* 24` hrs=$((n*24)) day=`$NDATE -$hrs ${VDATE}00|cut -c1-8` @@ -63,13 +62,6 @@ export fcst_init_hour="0,6,12,18" valid_time='valid00_12z' init_time='init00z_06z_12z_18Z' -export plot_dir=$DATA/out/sfc_upper/${valid_beg}-${valid_end} -#For restart: -if [ ! -d $plot_dir ] ; then - mkdir -p $plot_dir -fi - - verif_case=grid2obs verif_type=upper_air @@ -156,6 +148,17 @@ fi #*********************************************************************************************************************************** echo "#!/bin/ksh" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + echo "set -x" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + + save_dir=$DATA/plots/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour} + plot_dir=$save_dir/sfc_upper/${valid_beg}-${valid_end} + mkdir -p $plot_dir + mkdir -p $save_dir/data + + echo "export save_dir=$save_dir" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + echo "export log_metplus=$save_dir/log_verif_plotting_job.out" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + echo "export 
prune_dir=$save_dir/data" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + if [ $score_type = lead_average ] ; then echo "export PLOT_TYPE=lead_average_valid" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh else @@ -208,20 +211,30 @@ fi chmod +x run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh - echo "${DATA}/run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + echo "${DATA}/scripts/run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh - #Save for restart - echo "if [ -s ${plot_dir}/${score_type}_regional_*_valid_${fcst_valid_hour}z_*${var_rst}*_${stats_rst}*.png ] ; then" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh - echo " cp -v ${plot_dir}/${score_type}_regional_*_valid_${fcst_valid_hour}z_*${var_rst}*_${stats_rst}*.png $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh - echo " >$restart/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.completed" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh - echo "fi" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + #Save for restart and tar files + echo "for domain in conus alaska hawaii prico ; do" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + echo " if [ -s ${plot_dir}/${score_type}_regional_\${domain}_valid_${fcst_valid_hour}z_*${var_rst}*_${stats_rst}*.png ] ; 
then" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + echo " cp -v ${plot_dir}/${score_type}_regional_\${domain}_valid_${fcst_valid_hour}z_*${var_rst}*_${stats_rst}*.png $all_plots" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + echo " >$all_plots/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.completed" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + #Copy files to restart directory" + echo " if [ $SENDCOM = YES ] ; then" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + echo " cp -v $all_plots/${score_type}_regional_\${domain}_valid_${fcst_valid_hour}z_*${var_rst}*_${stats_rst}*.png $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + echo " cp -v $all_plots/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.completed $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + echo " fi" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + echo " fi" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + echo "done" >>run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh - chmod +x run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh - echo "${DATA}/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh" >> run_all_poe.sh + chmod +x run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh + echo 
"${DATA}/scripts/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${fcst_valid_hour}.sh" >> run_all_poe.sh else - #Restart from existing png files of previous run - cp $restart/${score_type}_regional_*_valid_${fcst_valid_hour}z_*${var_rst}*_${stats_rst}*.png ${plot_dir}/. + for domain in conus alaska hawaii prico ; do + if [ -s $restart/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_*${var_rst}*_${stats_rst}*.png ] ; then + cp -v $restart/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_*${var_rst}*_${stats_rst}*.png $all_plots + fi + done fi done #end of line_type @@ -244,16 +257,16 @@ chmod +x run_all_poe.sh # Run the POE script in parallel or in sequence order to generate png files #************************************************************************** if [ $run_mpi = yes ] ; then - mpiexec -np 60 -depth 1 --cpu-bind verbose,depth cfp ${DATA}/run_all_poe.sh + mpiexec -np 60 -ppn 60 --cpu-bind verbose,depth cfp ${DATA}/scripts/run_all_poe.sh else - ${DATA}/run_all_poe.sh + ${DATA}/scripts/run_all_poe.sh fi export err=$?; err_chk #************************************************** # Change plot file names to meet the EVS standard #************************************************** -cd $plot_dir +cd $all_plots for valid in 00z 12z ; do @@ -320,16 +333,16 @@ for valid in 00z 12z ; do if [ $var = 850mb_tmp_ens_freq_lt273.15 ] ; then if [ -s ${score_type}_regional_${domain}_valid_${valid}_${var}_${end} ] ; then - mv ${score_type}_regional_${domain}_valid_${valid}_${var}_${end} evs.href.${stats}.${var_new}.last${past_days}days.${scoretype}_valid_${valid}.${new_domain}.png + mv ${score_type}_regional_${domain}_valid_${valid}_${var}_${end} evs.href.${stats}.${var_new}.last${last_days}days.${scoretype}_valid${valid}.${new_domain}.png fi else if [ -s ${score_type}_regional_${domain}_valid_${valid}_${var}_${stats}_${end} ] ; then - mv ${score_type}_regional_${domain}_valid_${valid}_${var}_${stats}_${end} 
evs.href.${stats}.${var_new}.last${past_days}days.${scoretype}_valid_${valid}.${new_domain}.png + mv ${score_type}_regional_${domain}_valid_${valid}_${var}_${stats}_${end} evs.href.${stats}.${var_new}.last${last_days}days.${scoretype}_valid${valid}.${new_domain}.png fi fi else if [ -s ${score_type}_regional_${domain}_valid_${valid}_${var}_${stats}_${lead}.png ] ; then - mv ${score_type}_regional_${domain}_valid_${valid}_${var}_${stats}_${lead}.png evs.href.${stats}.${var_new}.last${past_days}days.${scoretype}_valid_${valid}_${new_lead}.${new_domain}.png + mv ${score_type}_regional_${domain}_valid_${valid}_${var}_${stats}_${lead}.png evs.href.${stats}.${var_new}.last${last_days}days.${scoretype}_valid${valid}_${new_lead}.${new_domain}.png fi fi done #lead @@ -340,31 +353,27 @@ for valid in 00z 12z ; do done #stats done #vlaid - -tar -cvf evs.plots.href.profile.past${past_days}days.v${VDATE}.tar *.png +if [ -s evs*.png ] ; then + tar -cvf evs.plots.href.profile.last${last_days}days.v${VDATE}.tar evs*.png +fi # Cat the plotting log files -log_dir="$DATA/logs" -if [ -d $log_dir ]; then - log_file_count=$(find $log_dir -type f | wc -l) - if [[ $log_file_count -ne 0 ]]; then - log_files=("$log_dir"/*) - for log_file in "${log_files[@]}"; do - if [ -f "$log_file" ]; then - echo "Start: $log_file" - cat "$log_file" - echo "End: $log_file" - fi - done - fi +log_dir="$DATA/plots" +if [ -s $log_dir/*/log*.out ]; then + log_files=`ls $log_dir/*/log*.out` + for log_file in $log_files ; do + echo "Start: $log_file" + cat "$log_file" + echo "End: $log_file" + done fi -if [ $SENDCOM = YES ] && [ -s evs.plots.href.profile.past${past_days}days.v${VDATE}.tar ] ; then - cp -v evs.plots.href.profile.past${past_days}days.v${VDATE}.tar $COMOUT/. +if [ $SENDCOM = YES ] && [ -s evs.plots.href.profile.last${last_days}days.v${VDATE}.tar ] ; then + cp -v evs.plots.href.profile.last${last_days}days.v${VDATE}.tar $COMOUT/. 
fi if [ $SENDDBN = YES ] ; then - $DBNROOT/bin/dbn_alert MODEL EVS_RZDM $job $COMOUT/evs.plots.href.profile.past${past_days}days.v${VDATE}.tar + $DBNROOT/bin/dbn_alert MODEL EVS_RZDM $job $COMOUT/evs.plots.href.profile.last${last_days}days.v${VDATE}.tar fi diff --git a/scripts/plots/cam/exevs_href_snowfall_plots.sh b/scripts/plots/cam/exevs_href_snowfall_plots.sh index ba714036ea..ddc9fb276f 100755 --- a/scripts/plots/cam/exevs_href_snowfall_plots.sh +++ b/scripts/plots/cam/exevs_href_snowfall_plots.sh @@ -2,27 +2,27 @@ #******************************************************************************* # Purpose: setup environment, paths, and run the href snowfall plotting python script # Last updated: +# 01/10/2025, add MPMD, by Binbin Zhou Lynker@EMC/NCEP # 07/09/2024, add restart, by Binbin Zhou Lynker@EMC/NCEP # 05/30/2024, Binbin Zhou Lynker@EMC/NCEP #****************************************************************************** set -x -cd $DATA +mkdir -p $DATA/scripts +cd $DATA/scripts export machine=${machine:-"WCOSS2"} -export prune_dir=$DATA/data -export save_dir=$DATA/out export output_base_dir=$DATA/stat_archive -export log_metplus=$DATA/logs/GENS_verif_plotting_job -mkdir -p $prune_dir -mkdir -p $save_dir mkdir -p $output_base_dir -mkdir -p $DATA/logs -restart=$COMOUT/restart/$past_days/href_snowfall_plots -if [ ! -d $restart ] ; then +all_plots=$DATA/plots/all_plots +mkdir -p $all_plots +if [ $SENDCOM = YES ] ; then + restart=$COMOUT/restart/$last_days/href_snowfall_plots + if [ ! 
-d $restart ] ; then mkdir -p $restart -fi + fi +fi export eval_period='TEST' @@ -37,7 +37,7 @@ models='HREF_SNOW' VX_MASK_LISTs='CONUS CONUS_East CONUS_West CONUS_South CONUS_Central' n=0 -while [ $n -le $past_days ] ; do +while [ $n -le $last_days ] ; do hrs=$((n*24)) first_day=`$NDATE -$hrs ${VDATE}00|cut -c1-8` n=$((n+1)) @@ -47,10 +47,10 @@ export init_beg=$first_day export valid_beg=$first_day #************************************************************* -# Virtual link the href's stat data files of past 31/90 days +# Virtual link the href's stat data files of last 31/90 days #************************************************************* n=0 -while [ $n -le $past_days ] ; do +while [ $n -le $last_days ] ; do #hrs=`expr $n \* 24` hrs=$((n*24)) day=`$NDATE -$hrs ${VDATE}00|cut -c1-8` @@ -60,12 +60,6 @@ while [ $n -le $past_days ] ; do n=$((n+1)) done -export plot_dir=$DATA/out/precip/${valid_beg}-${valid_end} -#For restart: -if [ ! -d $plot_dir ] ; then - mkdir -p $plot_dir -fi - verif_case=precip verif_type=ccpa @@ -113,12 +107,12 @@ for stats in ets_fbias ratio_pod_csi fss ; do if [ $FCST_LEVEL_value = A06 ] ; then export fcst_leads='6,12,18,24,30,36,42,48' - export fcst_valid_hour='0,6,12,18' - valid_rst=00z_06z_12z_18z + export fcst_valid_hours='00 06 12 18' + accum=06h elif [ $FCST_LEVEL_value = A24 ] ; then export fcst_leads='24,30,36,42,48' - export fcst_valid_hour='0,12' - valid_rst=00z_12z + export fcst_valid_hours='00 12' + accum=24h fi @@ -128,72 +122,92 @@ for stats in ets_fbias ratio_pod_csi fss ; do for line_type in $line_tp ; do + for fcst_valid_hour in $fcst_valid_hours ; do + #***************************** # Build sub-jobs # **************************** - > run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh + > run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh 
#*********************************************************************************************************************************** # Check if this sub-job has been completed in the previous run for restart - if [ ! -e $restart/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.completed ] ; then + if [ ! -e $restart/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.completed ] ; then #*********************************************************************************************************************************** - echo "#!/bin/ksh" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh - echo "export PLOT_TYPE=$score_type" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh - echo "export vx_mask_list='$VX_MASK_LIST'" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh - echo "export verif_case=$verif_case" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh - echo "export verif_type=$verif_type" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh + echo "#!/bin/ksh" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo "set -x" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh - echo "export log_level=DEBUG" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh + save_dir=$DATA/plots/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST} + plot_dir=$save_dir/precip/${valid_beg}-${valid_end} + mkdir -p $plot_dir + mkdir -p $save_dir/data - echo "export eval_period=TEST" >> 
run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh + echo "export save_dir=$save_dir" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo "export log_metplus=$save_dir/log_verif_plotting_job.out" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo "export prune_dir=$save_dir/data" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + + echo "export PLOT_TYPE=$score_type" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo "export vx_mask_list='$VX_MASK_LIST'" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo "export verif_case=$verif_case" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo "export verif_type=$verif_type" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + + echo "export log_level=DEBUG" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + + echo "export eval_period=TEST" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh if [ $score_type = valid_hour_average ] ; then - echo "export date_type=INIT" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh + echo "export date_type=INIT" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh else - echo "export date_type=VALID" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh + echo "export date_type=VALID" 
>> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh fi - echo "export var_name=$VAR" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh - echo "export fcts_level=$FCST_LEVEL_value" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh - echo "export obs_level=$OBS_LEVEL_value" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh + echo "export var_name=$VAR" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo "export fcts_level=$FCST_LEVEL_value" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo "export obs_level=$OBS_LEVEL_value" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh - echo "export line_type=$line_type" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh + echo "export line_type=$line_type" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh if [ $stats = fss ] ; then - echo "export interp=NBRHD_SQUARE" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh + echo "export interp=NBRHD_SQUARE" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh else - echo "export interp=NEAREST" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh + echo "export interp=NEAREST" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh fi - echo "export score_py=$score_type" >> 
run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh + echo "export score_py=$score_type" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh thresh_fcst='>=0.0254, >=0.1016, >=0.2032, >=0.3048' thresh_obs=$thresh_fcst - sed -e "s!model_list!$models!g" -e "s!stat_list!$stat_list!g" -e "s!thresh_fcst!$thresh_fcst!g" -e "s!thresh_obs!$thresh_obs!g" -e "s!fcst_init_hour!$fcst_init_hour!g" -e "s!fcst_valid_hour!$fcst_valid_hour!g" -e "s!fcst_lead!$lead!g" -e "s!interp_pnts!$interp_pnts!g" $USHevs/cam/evs_href_plots_config.sh > run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh + sed -e "s!model_list!$models!g" -e "s!stat_list!$stat_list!g" -e "s!thresh_fcst!$thresh_fcst!g" -e "s!thresh_obs!$thresh_obs!g" -e "s!fcst_init_hour!$fcst_init_hour!g" -e "s!fcst_valid_hour!$fcst_valid_hour!g" -e "s!fcst_lead!$lead!g" -e "s!interp_pnts!$interp_pnts!g" $USHevs/cam/evs_href_plots_config.sh > run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh - chmod +x run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh + chmod +x run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh - echo "${DATA}/run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh + echo "${DATA}/scripts/run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh - #Save for restart - echo "if [ -s ${plot_dir}/${score_type}_regional_${domain}_valid_${valid_rst}_*${var}*.png ] ; then " >> 
run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh - echo " cp -v ${plot_dir}/${score_type}_regional_${domain}_valid_${valid_rst}_*${var}*.png $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh - echo " >$restart/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.completed" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh - echo "fi" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh + echo "if [ -s ${plot_dir}/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_${accum}_${var}*.png ] ; then " >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo " cp -v ${plot_dir}/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_${accum}_${var}*.png $all_plots" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo " >$all_plots/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.completed" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + #Copy files to restart directory + echo " if [ $SENDCOM = YES ] ; then" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo " cp -v $all_plots/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_${accum}_${var}*.png $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo " cp -v $all_plots/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.completed $restart" >> 
run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo " fi" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo "fi" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh - chmod +x run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh - echo "${DATA}/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.sh" >> run_all_poe.sh + chmod +x run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh + echo "${DATA}/scripts/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.${VX_MASK_LIST}.${fcst_valid_hour}.sh" >> run_all_poe.sh - else - #Restart from existing png files of previous run - cp $restart/${score_type}_regional_${domain}_valid_${valid_rst}_*${var}*.png ${plot_dir}/. 
+ else + if [ -s $restart/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_${accum}_${var}*.png ] ; then + cp -v $restart/${score_type}_regional_${domain}_valid_${fcst_valid_hour}z_${accum}_${var}*.png $all_plots + fi fi + done #end of fcst_valid_hour + done #end of line_type done #end of FCST_LEVEL_value @@ -213,17 +227,16 @@ chmod +x run_all_poe.sh # Run the POE script in parallel or in sequence order to generate png files #************************************************************************** if [ $run_mpi = yes ] ; then - mpiexec -np 30 -ppn 30 --cpu-bind verbose,depth cfp ${DATA}/run_all_poe.sh + mpiexec -np 30 -ppn 30 --cpu-bind verbose,depth cfp ${DATA}/scripts/run_all_poe.sh else - ${DATA}/run_all_poe.sh + ${DATA}/scripts/run_all_poe.sh fi export err=$?; err_chk - #************************************************** # Change plot file names to meet the EVS standard #************************************************** -cd $plot_dir +cd $all_plots for stats in ets fbias fss ; do score_type='threshold_average' @@ -233,28 +246,29 @@ for stats in ets fbias fss ; do for level in 06h 24h ; do if [ $stats = fss ] ; then if [ $level = 06h ] ; then - valid=valid_00z_06z_12z_18z + valids="00z 06z 12z 18z" lead=width1-3-5-7-9-11_f6-12-18-24-30-36-42-48 elif [ $level = 24h ] ; then - valid=valid_00z_12z + valids="00z 12z" lead=width1-3-5-7-9-11_f24-30-36-42-48 fi else if [ $level = 06h ] ; then - valid=valid_00z_06z_12z_18z + valids="00z 06z 12z 18z" lead=f6-12-18-24-30-36-42-48 elif [ $level = 24h ] ; then - valid=valid_00z_12z + valids="00z 12z" lead=f24-30-36-42-48 fi fi - for domain in conus conus_east conus_west conus_south conus_central ; do - if [ -s ${score_type}_regional_${domain}_${valid}_${level}_${var}_${stats}_${lead}.png ] ; then - mv ${score_type}_regional_${domain}_${valid}_${level}_${var}_${stats}_${lead}.png evs.href.${stats}.${var}_${level}.last${past_days}days.${scoretype}_valid_all_times.buk_${domain}.png + for valid in $valids ; do + 
for domain in conus conus_east conus_west conus_south conus_central ; do + if [ -s ${score_type}_regional_${domain}_valid_${valid}_${level}_${var}_${stats}_${lead}.png ] ; then + mv ${score_type}_regional_${domain}_valid_${valid}_${level}_${var}_${stats}_${lead}.png evs.href.${stats}.${var}_${level}.last${last_days}days.${scoretype}_valid${valid}.buk_${domain}.png fi + done done - done done done @@ -266,47 +280,44 @@ scoretype='perfdiag' for var in weasd ; do for level in 06h 24h ; do if [ $level = 06h ] ; then - valid=valid_00z_06z_12z_18z + valids="00z 06z 12z 18z" lead=f6-12-18-24-30-36-42-48__ge0.0254ge0.1016ge0.2032ge0.3048 elif [ $level = 24h ] ; then - valid=valid_00z_12z + valids="00z 12z" lead=f24-30-36-42-48__ge0.0254ge0.1016ge0.2032ge0.3048 fi - for domain in conus conus_east conus_west conus_south conus_central ; do - if [ -s ${score_type}_regional_${domain}_${valid}_${level}_${var}_${lead}.png ] ; then - mv ${score_type}_regional_${domain}_${valid}_${level}_${var}_${lead}.png evs.href.ctc.${var}_${level}.last${past_days}days.${scoretype}_valid_all_times.buk_${domain}.png + for valid in $valids ; do + for domain in conus conus_east conus_west conus_south conus_central ; do + if [ -s ${score_type}_regional_${domain}_valid_${valid}_${level}_${var}_${lead}.png ] ; then + mv ${score_type}_regional_${domain}_valid_${valid}_${level}_${var}_${lead}.png evs.href.ctc.${var}_${level}.last${last_days}days.${scoretype}_valid${valid}.buk_${domain}.png fi + done done done done - -tar -cvf evs.plots.href.snowfall.past${past_days}days.v${VDATE}.tar *.png - -# Cat the plotting log files -log_dir="$DATA/logs" -if [ -d $log_dir ]; then - log_file_count=$(find $log_dir -type f | wc -l) - if [[ $log_file_count -ne 0 ]]; then - log_files=("$log_dir"/*) - for log_file in "${log_files[@]}"; do - if [ -f "$log_file" ]; then - echo "Start: $log_file" - cat "$log_file" - echo "End: $log_file" - fi - done - fi +if [ -s evs*.png ] ; then + tar -cvf 
evs.plots.href.snowfall.last${last_days}days.v${VDATE}.tar evs*.png fi +# Cat the plotting log files +log_dir="$DATA/plots" +if [ -s $log_dir/*/log*.out ]; then + log_files=`ls $log_dir/*/log*.out` + for log_file in $log_files ; do + echo "Start: $log_file" + cat "$log_file" + echo "End: $log_file" + done +fi -if [ $SENDCOM = YES ] && [ -s evs.plots.href.snowfall.past${past_days}days.v${VDATE}.tar ] ; then - cp -v evs.plots.href.snowfall.past${past_days}days.v${VDATE}.tar $COMOUT/. +if [ $SENDCOM = YES ] && [ -s evs.plots.href.snowfall.last${last_days}days.v${VDATE}.tar ] ; then + cp -v evs.plots.href.snowfall.last${last_days}days.v${VDATE}.tar $COMOUT/. fi if [ $SENDDBN = YES ] ; then - $DBNROOT/bin/dbn_alert MODEL EVS_RZDM $job $COMOUT/evs.plots.href.snowfall.past${past_days}days.v${VDATE}.tar + $DBNROOT/bin/dbn_alert MODEL EVS_RZDM $job $COMOUT/evs.plots.href.snowfall.last${last_days}days.v${VDATE}.tar fi diff --git a/scripts/plots/cam/exevs_href_spcoutlook_plots.sh b/scripts/plots/cam/exevs_href_spcoutlook_plots.sh index ae73f230a3..b2d5b2edcc 100755 --- a/scripts/plots/cam/exevs_href_spcoutlook_plots.sh +++ b/scripts/plots/cam/exevs_href_spcoutlook_plots.sh @@ -3,26 +3,26 @@ # Purpose: setup environment, paths, and run the href spcoutlook plotting python # script # Last updated: +# 01/10/2025, add MPMD, by Binbin Zhou Lynker@EMC/NCEP # 07/09/2024, add restart, by Binbin Zhou Lynker@EMC/NCEP # 05/30/2025, Binbin Zhou Lynker@EMC/NCEP #****************************************************************************** set -x -cd $DATA +mkdir -p $DATA/scripts +cd $DATA/scripts export machine=${machine:-"WCOSS2"} -export prune_dir=$DATA/data -export save_dir=$DATA/out export output_base_dir=$DATA/stat_archive -export log_metplus=$DATA/logs/GENS_verif_plotting_job -mkdir -p $prune_dir -mkdir -p $save_dir mkdir -p $output_base_dir -mkdir -p $DATA/logs -restart=$COMOUT/restart/$past_days/href_spcoutlook_plots -if [ ! 
-d $restart ] ; then +all_plots=$DATA/plots/all_plots +mkdir -p $all_plots +if [ $SENDCOM = YES ] ; then + restart=$COMOUT/restart/$last_days/href_spcoutlook_plots + if [ ! -d $restart ] ; then mkdir -p $restart + fi fi export eval_period='TEST' @@ -36,7 +36,7 @@ model_list='HREF_MEAN' models='HREF_MEAN' n=0 -while [ $n -le $past_days ] ; do +while [ $n -le $last_days ] ; do hrs=$((n*24)) first_day=`$NDATE -$hrs ${VDATE}00|cut -c1-8` n=$((n+1)) @@ -46,10 +46,10 @@ export init_beg=$first_day export valid_beg=$first_day #************************************************************* -# Virtual link the href's stat data files of past 31/90 days +# Virtual link the href's stat data files of last 31/90 days #************************************************************* n=0 -while [ $n -le $past_days ] ; do +while [ $n -le $last_days ] ; do #hrs=`expr $n \* 24` hrs=$((n*24)) day=`$NDATE -$hrs ${VDATE}00|cut -c1-8` @@ -58,15 +58,6 @@ while [ $n -le $past_days ] ; do n=$((n+1)) done -export fcst_init_hour="0,6,12,18" -export fcst_valid_hour="0,12" - -export plot_dir=$DATA/out/sfc_upper/${valid_beg}-${valid_end} -#For restart: -if [ ! 
-d $plot_dir ] ; then - mkdir -p $plot_dir -fi - export fcst_init_hour="0,6,12,18" export fcst_valid_hour="0,12" valid_time='valid00z_12z' @@ -143,7 +134,17 @@ for stats in csi_fbias ratio_pod_csi ; do verif_type=conus_sfc echo "#!/bin/ksh" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh - echo "export PLOT_TYPE=$score_type" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh + echo "set -x" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh + save_dir=$DATA/plots/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type} + plot_dir=$save_dir/sfc_upper/${valid_beg}-${valid_end} + mkdir -p $plot_dir + mkdir -p $save_dir/data + + echo "export save_dir=$save_dir" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh + echo "export log_metplus=$save_dir/log_verif_plotting_job.out" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh + echo "export prune_dir=$save_dir/data" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh + + echo "export PLOT_TYPE=$score_type" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh echo "export vx_mask_list='$VX_MASK_LIST'" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh echo "export verif_case=$verif_case" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh echo "export verif_type=$verif_type" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh @@ -174,20 +175,25 @@ for stats in csi_fbias ratio_pod_csi ; do chmod +x run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh - echo "${DATA}/run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh + echo 
"${DATA}/scripts/run_py.${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh - #Save for restart echo "if [ -s ${plot_dir}/${score_type}_regional_*_${valid_rst}_${var_rst}*.png ] ; then" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh - echo " cp -v ${plot_dir}/${score_type}_regional_*_${valid_rst}_${var_rst}*.png $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh - echo " >$restart/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.completed" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh + echo " cp -v ${plot_dir}/${score_type}_regional_*_${valid_rst}_${var_rst}*.png $all_plots" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh + echo " >$all_plots/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.completed" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh + + #Copy files to restart directory + echo " if [ $SENDCOM = YES ] ; then" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh + echo " cp -v $all_plots/${score_type}_regional_*_${valid_rst}_${var_rst}*.png $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh + echo " cp -v $all_plots/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.completed $restart" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh + echo " fi" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh echo "fi" >> run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh chmod +x run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh - echo "${DATA}/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh" >> 
run_all_poe.sh + echo "${DATA}/scripts/run_${stats}.${score_type}.${lead}.${VAR}.${FCST_LEVEL_value}.${line_type}.sh" >> run_all_poe.sh + + else + cp -v ${restart}/${score_type}_regional_*_${valid_rst}_${var_rst}*.png $all_plots - else - #Restart from png files of previous runs - cp $restart/${score_type}_regional_*_${valid_rst}_${var_rst}*.png ${plot_dir}/. fi done #end of line_type @@ -208,17 +214,17 @@ chmod +x run_all_poe.sh # Run the POE script in parallel or in sequence order to generate png files #************************************************************************** if [ $run_mpi = yes ] ; then - mpiexec -np 6 -ppn 6 --cpu-bind verbose,depth cfp ${DATA}/run_all_poe.sh + mpiexec -np 6 -ppn 6 --cpu-bind verbose,depth cfp ${DATA}/scripts/run_all_poe.sh else - ${DATA}/run_all_poe.sh + ${DATA}/scripts/run_all_poe.sh fi export err=$?; err_chk + #************************************************** # Change plot file names to meet the EVS standard #************************************************** - -cd $plot_dir +cd $all_plots for domain in day1_mrgl day1_slgt day1_tstm day1_enh day1_mdt day1_high day2_mrgl day2_slgt day2_tstm day2_enh day2_mdt day2_high day3_mrgl day3_slgt day3_tstm day3_enh day3_mdt day3_high ; do for var in cape mlcape ; do @@ -232,47 +238,43 @@ for domain in day1_mrgl day1_slgt day1_tstm day1_enh day1_mdt day1_high day2_mrg valid=valid_00z_12z fi if ls lead_average_regional_${domain}_valid_all_times_${var}*.png 1> /dev/null 2>&1; then - mv lead_average_regional_${domain}_valid_all_times_${var}*.png evs.href.csi_fbias.${var_new}_${level}.last${past_days}days.fhrmean_${valid}.${domain}.png + mv lead_average_regional_${domain}_valid_all_times_${var}*.png evs.href.csi_fbias.${var_new}_${level}.last${last_days}days.fhrmean_valid00z12z.${domain}.png fi if ls threshold_average_regional_${domain}_${valid}_${var}_csi*.png 1> /dev/null 2>&1; then - mv threshold_average_regional_${domain}_${valid}_${var}_csi*.png 
evs.href.csi.${var_new}_${level}.last${past_days}days.threshmean_${valid}.${domain}.png + mv threshold_average_regional_${domain}_${valid}_${var}_csi*.png evs.href.csi.${var_new}_${level}.last${last_days}days.threshmean_valid00z12z.${domain}.png fi if ls threshold_average_regional_${domain}_${valid}_${var}_fbias*.png 1> /dev/null 2>&1; then - mv threshold_average_regional_${domain}_${valid}_${var}_fbias*.png evs.href.fbias.${var_new}_${level}.last${past_days}days.threshmean_${valid}.${domain}.png + mv threshold_average_regional_${domain}_${valid}_${var}_fbias*.png evs.href.fbias.${var_new}_${level}.last${last_days}days.threshmean_valid00z12z.${domain}.png fi if ls performance_diagram_regional_${domain}_${valid}_${var}*.png 1> /dev/null 2>&1; then - mv performance_diagram_regional_${domain}_${valid}_${var}*.png evs.href.ctc.${var_new}_${level}.last${past_days}days.perfdiag_${valid}.${domain}.png + mv performance_diagram_regional_${domain}_${valid}_${var}*.png evs.href.ctc.${var_new}_${level}.last${last_days}days.perfdiag_valid00z12z.${domain}.png fi done done - -tar -cvf evs.plots.href.spcoutlook.past${past_days}days.v${VDATE}.tar *.png +if [ -s evs*.png ] ; then + tar -cvf evs.plots.href.spcoutlook.last${last_days}days.v${VDATE}.tar evs*.png +fi # Cat the plotting log files -log_dir="$DATA/logs" -if [ -d $log_dir ]; then - log_file_count=$(find $log_dir -type f | wc -l) - if [[ $log_file_count -ne 0 ]]; then - log_files=("$log_dir"/*) - for log_file in "${log_files[@]}"; do - if [ -f "$log_file" ]; then - echo "Start: $log_file" - cat "$log_file" - echo "End: $log_file" - fi - done - fi +log_dir="$DATA/plots" +if [ -s $log_dir/*/log*.out ]; then + log_files=`ls $log_dir/*/log*.out` + for log_file in $log_files ; do + echo "Start: $log_file" + cat "$log_file" + echo "End: $log_file" + done fi -if [ $SENDCOM = YES ] && [ -s evs.plots.href.spcoutlook.past${past_days}days.v${VDATE}.tar ] ; then - cp -v evs.plots.href.spcoutlook.past${past_days}days.v${VDATE}.tar 
$COMOUT/. +if [ $SENDCOM = YES ] && [ -s evs.plots.href.spcoutlook.last${last_days}days.v${VDATE}.tar ] ; then + cp -v evs.plots.href.spcoutlook.last${last_days}days.v${VDATE}.tar $COMOUT/. fi if [ $SENDDBN = YES ] ; then - $DBNROOT/bin/dbn_alert MODEL EVS_RZDM $job $COMOUT/evs.plots.href.spcoutlook.past${past_days}days.v${VDATE}.tar + $DBNROOT/bin/dbn_alert MODEL EVS_RZDM $job $COMOUT/evs.plots.href.spcoutlook.last${last_days}days.v${VDATE}.tar fi diff --git a/scripts/stats/cam/exevs_href_grid2obs_stats.sh b/scripts/stats/cam/exevs_href_grid2obs_stats.sh index 769050da07..6899057c11 100755 --- a/scripts/stats/cam/exevs_href_grid2obs_stats.sh +++ b/scripts/stats/cam/exevs_href_grid2obs_stats.sh @@ -3,6 +3,7 @@ # Purpose: Setup some paths and run href grid2obs stat ush scripts # # Last updated +# 01/10/2025: Add MPMD: by Binbin Zhou, Lynker@EMC/NCEP # 06/25/2024: add restart: by Binbin Zhou, Lynker@EMC/NCEP # 10/30/2023: by Binbin Zhou, Lynker@EMC/NCEP ##################################################################### @@ -12,12 +13,7 @@ set -x export machine=${machine:-"WCOSS2"} export WORK=$DATA cd $WORK - -#************************************* -#check input data are available: -#************************************* -source $USHevs/$COMPONENT/evs_check_href_files.sh -export err=$?; err_chk +mkdir -p $WORK/scripts #lvl = profile or sfc or both export lvl='both' @@ -34,6 +30,21 @@ export gather=${gather:-'yes'} export verify=$VERIF_CASE export run_mpi=${run_mpi:-'yes'} +#************************************* +#check input data are available: +#************************************* +source $USHevs/$COMPONENT/evs_check_href_files.sh +export err=$?; err_chk +if [ -e $DATA/verif_all.no ] ; then + export prepare='no' + export verif_system='no' + export verif_profile='no' + export verif_product='no' + export gather='no' + echo "Either prepbufr or HREF forecast files do not exist, skip grid2obs verification!" 
+fi + + export COMHREF=$COMINhref export PREPBUFR=$COMINobsproc @@ -46,7 +57,6 @@ export vday=$VDATE # domain = conus or alaska or all export domain="all" -#export domain="HI" export COMOUTrestart=$COMOUTsmall/restart [[ ! -d $COMOUTrestart ]] && mkdir -p $COMOUTrestart @@ -56,7 +66,6 @@ export COMOUTrestart=$COMOUTsmall/restart [[ ! -d $COMOUTrestart/profile ]] && mkdir -p $COMOUTrestart/profile [[ ! -d $COMOUTrestart/product ]] && mkdir -p $COMOUTrestart/product - #*************************************** # Prepare the prepbufr data # ************************************** @@ -87,43 +96,40 @@ fi #***************************************** # Build a POE script to collect sub-jobs #**************************************** ->run_href_all_grid2obs_poe +>$DATA/scripts/run_href_all_grid2obs_poe -#system: 10 jobs (8 on CONUS, 2 on Alaska) if [ $verif_system = yes ] ; then $USHevs/cam/evs_href_grid2obs_system.sh export err=$?; err_chk - cat ${DATA}/run_all_href_system_poe.sh >> run_href_all_grid2obs_poe + cat ${DATA}/scripts/run_all_href_system_poe.sh >> $DATA/scripts/run_href_all_grid2obs_poe fi -#profile: total 10 jobs (4 for conus and 2 for alaska) if [ $verif_profile = yes ] ; then $USHevs/cam/evs_href_grid2obs_profile.sh $domain export err=$?; err_chk - cat ${DATA}/run_all_href_profile_poe.sh >> run_href_all_grid2obs_poe + cat ${DATA}/scripts/run_all_href_profile_poe.sh >> $DATA/scripts/run_href_all_grid2obs_poe fi -#Product: 16 jobs if [ $verif_product = yes ] ; then $USHevs/cam/evs_href_grid2obs_product.sh export err=$?; err_chk - cat ${DATA}/run_all_href_product_poe.sh >> run_href_all_grid2obs_poe + cat ${DATA}/scripts/run_all_href_product_poe.sh >> $DATA/scripts/run_href_all_grid2obs_poe fi -#totall: 36 jobs for all (both conus and alaska, profile, system and product) -chmod 775 run_href_all_grid2obs_poe +#totall: 72 jobs for all (both conus and alaska, profile, system and product) +chmod 775 $DATA/scripts/run_href_all_grid2obs_poe 
#************************************************* # Run the POE script to generate small stat files #************************************************* -if [ -s run_href_all_grid2obs_poe ] ; then +if [ -s $DATA/scripts/run_href_all_grid2obs_poe ] ; then if [ $run_mpi = yes ] ; then - mpiexec -np 72 -ppn 72 --cpu-bind verbose,depth cfp ${DATA}/run_href_all_grid2obs_poe + mpiexec -np 72 -ppn 72 --cpu-bind verbose,depth cfp ${DATA}/scripts/run_href_all_grid2obs_poe export err=$?; err_chk else - ${DATA}/run_href_all_grid2obs_poe + ${DATA}/scripts/run_href_all_grid2obs_poe export err=$?; err_chk fi fi diff --git a/scripts/stats/cam/exevs_href_precip_stats.sh b/scripts/stats/cam/exevs_href_precip_stats.sh index f95f7400b2..efdc5fed12 100755 --- a/scripts/stats/cam/exevs_href_precip_stats.sh +++ b/scripts/stats/cam/exevs_href_precip_stats.sh @@ -3,6 +3,7 @@ # Purpose: Setup some paths and run href precip stat ush scripts # # Last updated +# 01/10/2025: Add MPMD: by Binbin Zhou, Lynker@EMC/NCEP # 06/25/2024: Add restart, Binbin Zhou, Lynker@EMC/NCEP # 10/30/2023: by Binbin Zhou, Lynker@EMC/NCEP ################################################################### @@ -18,6 +19,7 @@ export err=$?; err_chk export WORK=$DATA cd $WORK +mkdir -p $WORK/scripts export run_mpi=${run_mpi:-'yes'} export verif_precip=${verif_precip:-'yes'} @@ -70,33 +72,33 @@ fi #*************************************** # Build a POE script to collect sub-jobs # ************************************** -> run_all_precip_poe.sh +>$DATA/scripts/run_all_precip_poe.sh # Build sub-jobs for precip if [ $verif_precip = yes ] ; then $USHevs/cam/evs_href_precip.sh export err=$?; err_chk - cat ${DATA}/run_all_href_precip_poe.sh >> run_all_precip_poe.sh + cat ${DATA}/scripts/run_all_href_precip_poe.sh >> $DATA/scripts/run_all_precip_poe.sh fi # Build sub-jobs for snowfall if [ $verif_snowfall = yes ] ; then $USHevs/cam/evs_href_snowfall.sh export err=$?; err_chk - cat ${DATA}/run_all_href_snowfall_poe.sh >> 
run_all_precip_poe.sh + cat ${DATA}/scripts/run_all_href_snowfall_poe.sh >> $DATA/scripts/run_all_precip_poe.sh fi #************************************************* # Run the POE script to generate small stat files #************************************************* -if [ -s ${DATA}/run_all_precip_poe.sh ] ; then - chmod 775 run_all_precip_poe.sh +if [ -s ${DATA}/scripts/run_all_precip_poe.sh ] ; then + chmod 775 ${DATA}/scripts/run_all_precip_poe.sh if [ $run_mpi = yes ] ; then - mpiexec -n 44 -ppn 44 --cpu-bind core --depth=2 cfp ${DATA}/run_all_precip_poe.sh + mpiexec -n 72 -ppn 72 --cpu-bind verbose,depth cfp ${DATA}/scripts/run_all_precip_poe.sh export err=$?; err_chk else - ${DATA}/run_all_precip_poe.sh + ${DATA}/scripts/run_all_precip_poe.sh export err=$?; err_chk fi diff --git a/scripts/stats/cam/exevs_href_spcoutlook_stats.sh b/scripts/stats/cam/exevs_href_spcoutlook_stats.sh index dea23d4cc7..ec554590b7 100755 --- a/scripts/stats/cam/exevs_href_spcoutlook_stats.sh +++ b/scripts/stats/cam/exevs_href_spcoutlook_stats.sh @@ -3,6 +3,7 @@ # Purpose: Setup some paths and run href spcoutlook job # # Last updated +# 01/10/2025: Add MPMD: by Binbin Zhou, Lynker@EMC/NCEP # 05/04/2024: add restart, Binbin Zhou, Lynker@EMC/NCEP # 10/30/2023: by Binbin Zhou, Lynker@EMC/NCEP ############################################################## @@ -12,6 +13,7 @@ set -x export machine=${machine:-"WCOSS2"} export WORK=$DATA cd $WORK +mkdir -p $WORK/scripts #********************************* #check input data are available: @@ -61,7 +63,6 @@ export COMOUTrestart=$COMOUTsmall/restart [[ ! 
-d $COMOUTrestart/spcoutlook ]] && mkdir -p $COMOUTrestart/spcoutlook - #********************************* # Prepare prepbufr data files # ******************************** @@ -73,26 +74,26 @@ fi #**************************************** # Build a POE script to collect sub-jobs # *************************************** ->run_href_all_grid2obs_poe +>$DATA/scripts/run_href_all_grid2obs_poe #Spc_outlook: 2 job if [ $verif_spcoutlook = yes ] ; then $USHevs/cam/evs_href_spcoutlook.sh export err=$?; err_chk - cat ${DATA}/run_all_href_spcoutlook_poe.sh >> run_href_all_grid2obs_poe + cat ${DATA}/scripts/run_all_href_spcoutlook_poe.sh >> $DATA/scripts/run_href_all_grid2obs_poe fi -chmod 775 run_href_all_grid2obs_poe #**************************************** # Run POE script to get small stat files # *************************************** -if [ -s run_href_all_grid2obs_poe ] ; then +if [ -s $DATA/scripts/run_href_all_grid2obs_poe ] ; then + chmod 775 $DATA/scripts/run_href_all_grid2obs_poe if [ $run_mpi = yes ] ; then - mpiexec -np 2 -ppn 2 --cpu-bind verbose,core cfp ${DATA}/run_href_all_grid2obs_poe + mpiexec -np 2 -ppn 2 --cpu-bind verbose,core cfp ${DATA}/scripts/run_href_all_grid2obs_poe export err=$?; err_chk else - ${DATA}/run_href_all_grid2obs_poe + ${DATA}/scripts/run_href_all_grid2obs_poe export err=$?; err_chk fi fi diff --git a/ush/cam/evs_check_href_files.sh b/ush/cam/evs_check_href_files.sh index d831bdf9e0..00ac3736c2 100755 --- a/ush/cam/evs_check_href_files.sh +++ b/ush/cam/evs_check_href_files.sh @@ -2,7 +2,7 @@ #************************************************************************** # Purpose: check the required input forecast and validation data files # for href stat jobs -# Last update: 10/30/2023, by Binbin Zhou Lynker@EMC/NCEP +# Last update: 11/01/2024, by Binbin Zhou Lynker@EMC/NCEP #************************************************************************ set -x @@ -22,8 +22,16 @@ if [ $VERIF_CASE = grid2obs ] || [ $VERIF_CASE = spcoutlook ] ; 
then done echo "Missing prepbufr files = " $missing if [ $missing -eq 24 ] ; then - echo "WARNING: All of the preppbufr files are missing." + echo "WARNING: All of the RAP prepbufr files are missing for EVS ${COMPONENT}" export verif_all=no + >$DATA/verif_all.no + if [ "$SENDMAIL" = "YES" ] ; then + export subject="RAP Prepbufr Data Missing for EVS ${COMPONENT}" + echo "WARNING: No RAP Prepbufr data available for ${vday}" > mailmsg + echo "All of $COMINobsproc/rap.${vday}/rap.txxz.prepbufr.tm00" files are missing >> mailmsg + echo "Job ID: $jobid" >> mailmsg + cat mailmsg | mail -s "$subject" $MAILTO + fi fi fi @@ -60,8 +68,17 @@ if [ $VERIF_CASE = precip ] ; then done echo "Missing ccpa01h files = " $missing if [ $missing -eq 24 ] ; then - echo "WARNING: All of the ccpa files are missing" + echo "WARNING: All of the ccpa01h files are missing for EVS ${COMPONENT}" export verif_precip=no + >$DATA/verif_precip.no + if [ "$SENDMAIL" = "YES" ] ; then + export subject="CCPA_01h Data Missing for EVS ${COMPONENT}" + echo "WARNING: No CCPA_01h data available for ${vday}" > mailmsg + echo "All of $COMINccpa/ccpa.${vday}/cycle/ccpa.txxz.01h.hrap.conus.gb2 are missing" >> mailmsg + echo "Job ID: $jobid" >> mailmsg + cat mailmsg | mail -s "$subject" $MAILTO + fi + fi missing=0 @@ -90,8 +107,17 @@ if [ $VERIF_CASE = precip ] ; then done echo "Missing ccpa03h files = " $missing if [ $missing -eq 8 ] ; then - echo "WARNING: All of the ccpa03h files are missing" + echo "WARNING: All of the ccpa03h files are missing for EVS ${COMPONENT}" export verif_precip=no + >$DATA/verif_precip.no + if [ "$SENDMAIL" = "YES" ] ; then + export subject="CCPA_03h Data Missing for EVS ${COMPONENT}" + echo "WARNING: No CCPA_03h data available for ${VDATE}" > mailmsg + echo "All of $COMINccpa/ccpa.${vday}/cycle/ccpa.txxz.03h.hrap.conus.gb2 are missing" >> mailmsg + echo "Job ID: $jobid" >> mailmsg + cat mailmsg | mail -s "$subject" $MAILTO + fi + fi missing=0 @@ -112,8 +138,18 @@ if [ $VERIF_CASE = 
precip ] ; then done echo "Missing ccpa06h files = " $missing if [ $missing -ge 1 ] ; then - echo "WARNING: At least one of the ccpa06h files are missing" + echo "WARNING: At least one of the ccpa06h files are missing for EVS ${COMPONENT}" export verif_precip=no + >$DATA/verif_precip.no + + if [ "$SENDMAIL" = "YES" ] ; then + export subject="CCPA_06h Data Missing for EVS ${COMPONENT}" + echo "WARNING: No CCPA_06h data available for ${vday}" > mailmsg + echo "All of $COMINccpa/ccpa.${vday}/cycle/ccpa.txxz.06h.hrap.conus.gb2 are missing" >> mailmsg + echo "Job ID: $jobid" >> mailmsg + cat mailmsg | mail -s "$subject" $MAILTO + fi + fi accum=01 @@ -127,8 +163,18 @@ if [ $VERIF_CASE = precip ] ; then done echo "Missing mrms01h files = " $missing if [ $missing -eq 24 ] ; then - echo "WARNING: All of mrms01h files are missing" + echo "WARNING: All of mrms01h files are missing for EVS ${COMPONENT}" export verif_precip=no + >$DATA/verif_precip.no + + if [ "$SENDMAIL" = "YES" ] ; then + export subject="MRMS_01h Data Missing for EVS ${COMPONENT}" + echo "WARNING: No MRMS_01h data available for ${vday}" > mailmsg + echo "All of $DCOMINmrms/MultiSensor_QPE_${accum}H_Pass2_00.00_${vday}-vhr0000.grib2.gz are missing" >> mailmsg + echo "Job ID: $jobid" >> mailmsg + cat mailmsg | mail -s "$subject" $MAILTO + fi + fi accum=03 @@ -142,8 +188,18 @@ if [ $VERIF_CASE = precip ] ; then done echo "Missing mrms03h files = " $missing if [ $missing -eq 8 ] ; then - echo "WARNING: All of mrms03h files are missing" + echo "WARNING: All of mrms03h files are missing for EVS ${COMPONENT}" export verif_precip=no + >$DATA/verif_precip.no + + if [ "$SENDMAIL" = "YES" ] ; then + export subject="MRMS_03h Data Missing for EVS ${COMPONENT}" + echo "WARNING: No MRMS_03h data available for ${vday}" > mailmsg + echo "All of $DCOMINmrms/MultiSensor_QPE_${accum}H_Pass2_00.00_${vday}-vhr0000.grib2.gz are missing" >> mailmsg + echo "Job ID: $jobid" >> mailmsg + cat mailmsg | mail -s "$subject" $MAILTO + fi + 
fi accum=24 @@ -157,140 +213,18 @@ if [ $VERIF_CASE = precip ] ; then done echo "Missing mrms24h files = " $missing if [ $missing -eq 4 ] ; then - echo "WARNING: All of the mrms24h files are missing" + echo "WARNING: All of the mrms24h files are missing for EVS ${COMPONENT}" export verif_precip=no - fi -fi - -echo "Checking HREF members files ..." - -domain=conus -for obsv_cyc in 00 03 06 09 12 15 18 21 ; do - for fhr in 03 06 09 12 15 18 21 24 27 30 33 36 39 42 45 48 ; do - fcst_time=`$NDATE -$fhr ${vday}${obsv_cyc}` - fday=${fcst_time:0:8} - fcyc=${fcst_time:8:2} - if [ $fcyc = 00 ] || [ $fcyc = 06 ] || [ $fcyc = 12 ] || [ $fcyc = 18 ] ; then - href_mbrs=0 - for mb in 01 02 03 04 05 06 07 08 09 10 ; do - if ! ([ "$mb" = "04" ] && (( fhr >= 45 ))) && \ - ! ([ "$mb" = "06" ] && ([ "$fcyc" = "06" ] || [ "$fcyc" = "18" ]) && (( fhr >= 45 ))) && \ - ! ( ([ "$mb" = "07" ] || [ "$mb" = "08" ]) && ([ "$fcyc" = "06" ] || [ "$fcyc" = "18" ]) && (( fhr >= 45 )) ) && \ - ! ( ([ "$mb" = "09" ] || [ "$mb" = "10" ]) && ([ "$fcyc" = "00" ] || [ "$fcyc" = "12" ]) && (( fhr >= 39 )) ) && \ - ! 
( ([ "$mb" = "09" ] || [ "$mb" = "10" ]) && ([ "$fcyc" = "06" ] || [ "$fcyc" = "18" ]) && (( fhr >= 33 )) ) - then - href=$COMINhref/href.${fday}/verf_g2g/href.m${mb}.t${fcyc}z.conus.f${fhr} - if [ -s $href ] ; then - href_mbrs=$((href_mbrs+1)) - else - echo "WARNING: $href is missing" - fi - fi - done - if [ $href_mbrs -lt 4 ] ; then - echo "WARNING: HREF members = " $href_mbrs " which < 4" - export verif_precip=no - export verif_snowfall=no - export verif_all=no - fi + >$DATA/verif_precip.no + + if [ "$SENDMAIL" = "YES" ] ; then + export subject="MRMS_24h Data Missing for EVS ${COMPONENT}" + echo "WARNING: No MRMS_24h data available for ${vday}" > mailmsg + echo "All of $DCOMINmrms/MultiSensor_QPE_${accum}H_Pass2_00.00_${vday}-vhr0000.grib2.gz are missing" >> mailmsg + echo "Job ID: $jobid" >> mailmsg + cat mailmsg | mail -s "$subject" $MAILTO fi - done -done - -echo "All HREF member files in CONUS are available. Continue checking ..." -domain=ak -href_mbrs=0 -for obsv_cyc in 00 03 06 09 12 15 18 21 ; do - for fhr in 03 06 09 12 15 18 21 24 27 30 33 36 39 42 45 48 ; do - fcst_time=`$NDATE -$fhr ${vday}${obsv_cyc}` - fday=${fcst_time:0:8} - fcyc=${fcst_time:8:2} - if [ $fcyc = 06 ] ; then - href_mbrs=0 - for mb in 01 02 03 04 05 06 07 08 09 10 ; do - if ! ([ "$mb" = "02" ] && (( fhr >= 45 ))) && \ - ! (([ "$mb" = "07" ] || [ "$mb" = "08" ]) && (( fhr >= 39 ))) && \ - ! ([ "$mb" = "09" ] || [ "$mb" = "10" ]) - then - href=$COMINhref/href.${fday}/verf_g2g/href.m${mb}.t${fcyc}z.ak.f${fhr} - if [ -s $href ] ; then - href_mbrs=$((href_mbrs+1)) - else - echo "WARNING: $href is missing" - fi - fi - done - if [ $href_mbrs -lt 4 ] ; then - echo "WARNING: HREF members = " $href_mbrs " which < 4" - export verif_precip=no - export verif_snowfall=no - export verif_all=no - fi - fi - done -done - -echo "All HREF member files in Alaska are available. Continue checking ..." 
- -domain=conus -for obsv_cyc in 00 01 02 03 04 05 06 07 08 09 10 11 12 13 14 15 16 17 18 19 20 21 22 23 ; do - typeset -Z2 fhr - fhr=01 - while [ $fhr -le 48 ] ; do - fcst_time=`$NDATE -$fhr ${vday}${obsv_cyc}` - fday=${fcst_time:0:8} - fcyc=${fcst_time:8:2} - if [ $fcyc = 00 ] || [ $fcyc = 06 ] || [ $fcyc = 12 ] || [ $fcyc = 18 ] ; then - href_prod=0 - for prod in mean prob eas pmmn lpmm avrg ; do - href=$COMINhref/href.${fday}/ensprod/href.t${fcyc}z.conus.${prod}.f${fhr}.grib2 - if [ -s $href ] ; then - href_prod=$((href_prod+1)) - else - echo "WARNING: $href is missing" - fi - done - if [ $href_prod -lt 4 ] ; then - echo "WARNING: HREF Products = " $href_prod " which < 4, some products are missing" - export verif_precip=no - export verif_snowfall=no - export verif_all=no - fi - fi - fhr=$((fhr+1)) - done -done - -echo "All HREF ensemble products files in CONUS are available. Continue checking ..." + fi +fi -domain=ak -for obsv_cyc in 00 01 02 03 04 05 06 07 08 09 10 11 12 13 14 15 16 17 18 19 20 21 22 23 ; do - typeset -Z2 fhr - fhr=01 - while [ $fhr -le 48 ] ; do - fcst_time=`$NDATE -$fhr ${vday}${obsv_cyc}` - fday=${fcst_time:0:8} - fcyc=${fcst_time:8:2} - if [ $fcyc = 06 ] ; then - href_prod=0 - for prod in mean prob eas pmmn lpmm avrg ; do - href=$COMINhref/href.${fday}/ensprod/href.t${fcyc}z.ak.${prod}.f${fhr}.grib2 - if [ -s $href ] ; then - href_prod=$((href_prod+1)) - else - echo "WARNING: $href is missing" - fi - done - if [ $href_prod -lt 4 ] ; then - echo "WARNING: HREF Products = " $href_prod " which < 4, some products are missing" - export verif_precip=no - export verif_snowfall=no - export verif_all=no - fi - fi - fhr=$((fhr+1)) - done -done -echo "All HREF ensemble products files in Alaska are available." -echo "File checks are complete." 
diff --git a/ush/cam/evs_href_gather.sh b/ush/cam/evs_href_gather.sh index 099bfad8f4..005be3df55 100755 --- a/ush/cam/evs_href_gather.sh +++ b/ush/cam/evs_href_gather.sh @@ -36,14 +36,16 @@ for MODL in $MODELS ; do #************************************************ # Build sub-jobs #*********************************************** +cd $DATA/scripts >run_gather_${verify}_${MODL}.sh - + + echo "set -x" >> run_gather_${verify}_${MODL}.sh echo "export output_base=${WORK}/gather" >> run_gather_${verify}_${MODL}.sh echo "export verify=$verify" >> run_gather_${verify}_${MODL}.sh echo "export vbeg=00" >> run_gather_${verify}_${MODL}.sh - echo "export vend=23" >> run_gather_${verify}_${MODL}.sh + echo "export vend=00" >> run_gather_${verify}_${MODL}.sh echo "export valid_increment=3600" >> run_gather_${verify}_${MODL}.sh echo "export model=$modnam" >> run_gather_${verify}_${MODL}.sh echo "export stat_file_dir=${COMOUTsmall}" >> run_gather_${verify}_${MODL}.sh @@ -53,16 +55,17 @@ for MODL in $MODELS ; do if [ $verify = grid2obs ] || [ $verify = spcoutlook ] ; then echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/StatAnlysis_fcstHREF_obsPREPBUFR_GatherByDay.conf " >> run_gather_${verify}_${MODL}.sh + echo "export err=$?; err_chk" >> run_gather_${verify}_${MODL}.sh elif [ $verify = precip ] ; then echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${PRECIP_CONF}/StatAnlysis_fcstHREF_obsAnalysis_GatherByDay.conf " >> run_gather_${verify}_${MODL}.sh - fi + echo "export err=$?; err_chk" >> run_gather_${verify}_${MODL}.sh + fi echo "if [[ $SENDCOM = YES && -s ${WORK}/gather/${vday}/${MODL}_${verify}_${vday}.stat ]]; then cp -v ${WORK}/gather/${vday}/${MODL}_${verify}_${vday}.stat $COMOUTfinal/evs.stats.${modl}.${verify}.v${vday}.stat" >> run_gather_${verify}_${MODL}.sh echo "else echo ${WORK}/gather/${vday}/${MODL}_${verify}_${vday}.stat empty; fi" >> run_gather_${verify}_${MODL}.sh - chmod +x 
run_gather_${verify}_${MODL}.sh - echo "${DATA}/run_gather_${verify}_${MODL}.sh" >> run_gather_all_poe.sh + echo "${DATA}/scripts/run_gather_${verify}_${MODL}.sh" >> run_gather_all_poe.sh done @@ -72,9 +75,9 @@ chmod 775 run_gather_all_poe.sh # Run the POE script #***************************** if [ $run_mpi = yes ] ; then - mpiexec -np 3 -ppn 3 --cpu-bind verbose,depth cfp ${DATA}/run_gather_all_poe.sh + mpiexec -np 3 -ppn 3 --cpu-bind verbose,depth cfp ${DATA}/scripts/run_gather_all_poe.sh export err=$?; err_chk else - ${DATA}/run_gather_all_poe.sh + ${DATA}/scripts/run_gather_all_poe.sh export err=$?; err_chk fi diff --git a/ush/cam/evs_href_grid2obs_product.sh b/ush/cam/evs_href_grid2obs_product.sh index 0d2bbdb6f3..181aa94d46 100755 --- a/ush/cam/evs_href_grid2obs_product.sh +++ b/ush/cam/evs_href_grid2obs_product.sh @@ -3,14 +3,15 @@ # Purpose: Generate href grid2obs product joe and sub-jobs files by directly using href # operational ensemble mean and probability product files # Last update: -# 04/25/2024, add restart, by Binbin Zhou Lynker@EMC/NCEP -# 10/30/2023, by Binbin Zhou Lynker@EMC/NCEP +# 01/10/2025, add MPMD, by Binbin Zhou Lynker@EMC/NCEP +# 10/30/2024, by Binbin Zhou Lynker@EMC/NCEP #*************************************************************************************** set -x #******************************************* # Build POE script to collect sub-jobs #****************************************** +cd $DATA/scripts >run_all_href_product_poe.sh obsv='prepbufr' @@ -28,64 +29,65 @@ for prod in mean prob ; do if [ $domain = CONUS ] ; then - for valid_run in run1 run2 run3 run4 ; do + for valid_run in 00 03 06 09 12 15 18 21 ; do + if [ $valid_run = 00 ] || [ $valid_run = 06 ] || [ $valid_run = 12 ] || [ $valid_run = 18 ] ; then + fhrs="06 12 18 24 30 36 42 48" + elif [ $valid_run = 03 ] || [ $valid_run = 09 ] || [ $valid_run = 15 ] || [ $valid_run = 21 ] ; then + fhrs="03 09 15 21 27 33 39 45" + fi + + for fhr in $fhrs ; do + # Build sub-jobs # 
********************** - >run_href_${model}.${dom}.${valid_run}_product.sh + >run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh ###################################################################################################### - #Restart: check if this CONUS task has been completed in the previous run + #Restart: check if this CONUS task has been completed in the previous run # if not, run this task, and then mark its completion, # otherwise, skip this task # ################################################################################################### - if [ ! -e $COMOUTrestart/product/run_href_${model}.${dom}.${valid_run}_product.completed ] ; then - - echo "export model=HREF${prod} " >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export domain=$dom " >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export regrid=G227" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export output_base=${WORK}/grid2obs/run_href_${model}.${dom}.${valid_run}_product" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export OBTYPE='PREPBUFR'" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export domain=CONUS" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export obsvgrid=G227" >> run_href_${model}.${dom}.${valid_run}_product.sh + if [ !
-e $COMOUTrestart/product/run_href_${model}.${dom}.${valid_run}.${fhr}_product.completed ] ; then + + ihr=`$NDATE -$fhr $VDATE$valid_run|cut -c 9-10` + iday=`$NDATE -$fhr $VDATE$valid_run|cut -c 1-8` + + input_fcst="$COMINhref/href.${iday}/ensprod/href.t${ihr}z.conus.${prod}.f${fhr}.grib2" + input_obsv="$WORK/prepbufr.${VDATE}/prepbufr.t${valid_run}z.G227.nc" + + if [ -s $input_fcst ] && [ -s $input_obsv ] ; then + + echo "#!/bin/ksh" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "set -x" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export model=HREF${prod} " >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export domain=$dom " >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export regrid=G227" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export output_base=${WORK}/grid2obs/run_href_${model}.${dom}.${valid_run}.${fhr}_product" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export OBTYPE='PREPBUFR'" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export domain=CONUS" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export obsvgrid=G227" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh if [ $prod = sclr ] ; then - echo "export modelgrid=conus.prob" >> run_href_${model}.${dom}.${valid_run}_product.sh + echo "export modelgrid=conus.prob" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh else - echo "export modelgrid=conus.${prod}" >> run_href_${model}.${dom}.${valid_run}_product.sh + echo "export modelgrid=conus.${prod}" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh fi - echo "export obsvhead=$obsv" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export obsvpath=$WORK" >> run_href_${model}.${dom}.${valid_run}_product.sh - - if [ $valid_run = run1 ] ; then - echo "export vbeg=0" >>run_href_${model}.${dom}.${valid_run}_product.sh - echo "export vend=23" 
>>run_href_${model}.${dom}.${valid_run}_product.sh - echo "export valid_increment=3600" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export lead='1,2,3,4,5,6,7,8'" >> run_href_${model}.${dom}.${valid_run}_product.sh - elif [ $valid_run = run2 ] ; then - echo "export vbeg=0" >>run_href_${model}.${dom}.${valid_run}_product.sh - echo "export vend=23" >>run_href_${model}.${dom}.${valid_run}_product.sh - echo "export valid_increment=3600" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export lead='9,10,11,12,13,14,15,16'" >> run_href_${model}.${dom}.${valid_run}_product.sh - elif [ $valid_run = run3 ] ; then - echo "export vbeg=0" >>run_href_${model}.${dom}.${valid_run}_product.sh - echo "export vend=23" >>run_href_${model}.${dom}.${valid_run}_product.sh - echo "export valid_increment=3600" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export lead='17,18,19,20,21,22,23,24'" >> run_href_${model}.${dom}.${valid_run}_product.sh - elif [ $valid_run = run4 ] ; then - echo "export vbeg=0" >>run_href_${model}.${dom}.${valid_run}_product.sh - echo "export vend=21" >>run_href_${model}.${dom}.${valid_run}_product.sh - echo "export valid_increment=10800" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export lead='27,30,33,36,39,42,45,48'" >> run_href_${model}.${dom}.${valid_run}_product.sh - fi + echo "export obsvhead=$obsv" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export obsvpath=$WORK" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + + echo "export vbeg=$valid_run" >>run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export vend=$valid_run" >>run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export valid_increment=3600" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export lead=$fhr" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh - echo "export MODEL=HREF_${PROD}" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export 
regrid=G227" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export modelhead=$model" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export modelpath=$COMHREF" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export modeltail='.grib2'" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export extradir='ensprod/'" >> run_href_${model}.${dom}.${valid_run}_product.sh + echo "export MODEL=HREF_${PROD}" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export regrid=G227" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export modelhead=$model" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export modelpath=$COMHREF" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export modeltail='.grib2'" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export extradir='ensprod/'" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh - echo "export verif_grid=''" >> run_href_${model}.${dom}.${valid_run}_product.sh + echo "export verif_grid=''" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh echo "export verif_poly='${maskpath}/Bukovsky_G227_CONUS.nc, ${maskpath}/Bukovsky_G227_CONUS_East.nc, ${maskpath}/Bukovsky_G227_CONUS_West.nc, @@ -107,99 +109,105 @@ for prod in mean prob ; do ${maskpath}/Bukovsky_G227_Southeast.nc, ${maskpath}/Bukovsky_G227_Southwest.nc, ${maskpath}/Bukovsky_G227_SPlains.nc, - ${maskpath}/Bukovsky_G227_SRockies.nc'" >> run_href_${model}.${dom}.${valid_run}_product.sh + ${maskpath}/Bukovsky_G227_SRockies.nc'" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh - echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/PointStat_fcstHREF${prod}_obsPREPBUFR_SFC.conf " >> run_href_${model}.${dom}.${valid_run}_product.sh - - echo "cp \$output_base/stat/\${MODEL}/*.stat $COMOUTsmall" >> run_href_${model}.${dom}.${valid_run}_product.sh + echo 
"${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/PointStat_fcstHREF${prod}_obsPREPBUFR_SFC.conf " >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export err=\$?; err_chk" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "if [ $SENDCOM = YES ] ; then " >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo " for FILEn in \$output_base/stat/\${MODEL}/*.stat; do if [ -f \"\$FILEn\" ]; then cp -v \$FILEn $COMOUTsmall; fi; done" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh #Mark this CONUS task is completed - echo "[[ \$? = 0 ]] && >$COMOUTrestart/product/run_href_${model}.${dom}.${valid_run}_product.completed" >> run_href_${model}.${dom}.${valid_run}_product.sh + echo " [[ \$? = 0 ]] && >$COMOUTrestart/product/run_href_${model}.${dom}.${valid_run}.${fhr}_product.completed" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "fi" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh - chmod +x run_href_${model}.${dom}.${valid_run}_product.sh - echo "${DATA}/run_href_${model}.${dom}.${valid_run}_product.sh" >> run_all_href_product_poe.sh + chmod +x run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "${DATA}/scripts/run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh" >> run_all_href_product_poe.sh + fi - fi + fi + done # end of fhr done # end of valid_run elif [ $domain = Alaska ] ; then - for valid_run in run1 run2 run3 run4 ; do + for valid_run in 00 03 06 09 12 15 18 21 ; do + if [ $valid_run = 00 ] || [ $valid_run = 06 ] || [ $valid_run = 12 ] || [ $valid_run = 18 ] ; then + fhrs="06 12 18 24 30 36 42 48" + elif [ $valid_run = 03 ] || [ $valid_run = 09 ] || [ $valid_run = 15 ] || [ $valid_run = 21 ] ; then + fhrs="03 09 15 21 27 33 39 45" + fi + + for fhr in $fhrs ; do - >run_href_${model}.${dom}.${valid_run}_product.sh + >run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh 
####################################################################### #Restart check: - # check if this Alaska task has been completed in the previous run + # check if this Alaska task has been completed in the previous run # if not, run this task, and then mark its completion, # otherwise, skip this task ######################################################################## - if [ ! -e $COMOUTrestart/product/run_href_${model}.${dom}.${valid_run}_product.completed ] ; then + if [ ! -e $COMOUTrestart/product/run_href_${model}.${dom}.${valid_run}.${fhr}_product.completed ] ; then - echo "export model=HREF${prod} " >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export domain=$dom " >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export regrid=NONE" >> run_href_${model}.${dom}.${valid_run}_product.sh + ihr=`$NDATE -$fhr $VDATE$valid_run|cut -c 9-10` + iday=`$NDATE -$fhr $VDATE$valid_run|cut -c 1-8` - echo "export output_base=${WORK}/grid2obs/run_href_${model}.${dom}.${valid_run}_product" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export OBTYPE='PREPBUFR'" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export domain=Alaska" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export obsvgrid=G198" >> run_href_${model}.${dom}.${valid_run}_product.sh + input_fcst="$COMINhref/href.${iday}/ensprod/href.t${ihr}z.ak.${prod}.f${fhr}.grib2" + input_obsv="$WORK/prepbufr.${VDATE}/prepbufr.t${valid_run}z.G198.nc" + + if [ -s $input_fcst ] && [ -s $input_obsv ] ; then + + echo "#!/bin/ksh" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "set -x" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export model=HREF${prod} " >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export domain=$dom " >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export regrid=NONE" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + + echo "export
output_base=${WORK}/grid2obs/run_href_${model}.${dom}.${valid_run}.${fhr}_product" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export OBTYPE='PREPBUFR'" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export domain=Alaska" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export obsvgrid=G198" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh if [ $prod = sclr ] ; then - echo "export modelgrid=ak.prob" >> run_href_${model}.${dom}.${valid_run}_product.sh + echo "export modelgrid=ak.prob" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh else - echo "export modelgrid=ak.${prod}" >> run_href_${model}.${dom}.${valid_run}_product.sh + echo "export modelgrid=ak.${prod}" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh fi - echo "export verif_grid=''" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export verif_poly='${maskpath}/Alaska_HREF.nc' " >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export obsvhead=$obsv" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export obsvpath=$WORK" >> run_href_${model}.${dom}.${valid_run}_product.sh - - if [ $valid_run = run1 ] ; then - echo "export vbeg=00" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export vend=11" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export valid_increment=3600" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export lead='1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16'" >> run_href_${model}.${dom}.${valid_run}_product.sh - elif [ $valid_run = run2 ] ; then - echo "export vbeg=00" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export vend=12" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export valid_increment=3600" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export lead='17,18,19,20,21,22,23,24,27,30,33,36,39,42,45,48'" >> run_href_${model}.${dom}.${valid_run}_product.sh - - elif [ $valid_run = run3 ] ; then - 
echo "export vbeg=12" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export vend=23" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export valid_increment=3600" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export lead='1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16'" >> run_href_${model}.${dom}.${valid_run}_product.sh - elif [ $valid_run = run4 ] ; then - echo "export vbeg=13" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export vend=23" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export valid_increment=3600" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export lead='17,18,19,20,21,22,23,24,27,30,33,36,39,42,45,48'" >> run_href_${model}.${dom}.${valid_run}_product.sh - - else - err_exit "$valid_run is not a valid valid_run setting" - fi - - echo "export MODEL=HREF_${PROD}" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export regrid=NONE" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export modelhead=$model" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export modelpath=$COMHREF" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export modeltail='.grib2'" >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "export extradir='ensprod/'" >> run_href_${model}.${dom}.${valid_run}_product.sh + echo "export verif_grid=''" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export verif_poly='${maskpath}/Alaska_HREF.nc' " >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export obsvhead=$obsv" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export obsvpath=$WORK" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + + echo "export vbeg=$valid_run" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export vend=$valid_run" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export valid_increment=3600" >> 
run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export lead=$fhr" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + + echo "export MODEL=HREF_${PROD}" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export regrid=NONE" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export modelhead=$model" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export modelpath=$COMHREF" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export modeltail='.grib2'" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export extradir='ensprod/'" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh - echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/PointStat_fcstHREF${prod}_obsPREPBUFR_SFC.conf " >> run_href_${model}.${dom}.${valid_run}_product.sh - echo "cp \$output_base/stat/\${MODEL}/*.stat $COMOUTsmall" >> run_href_${model}.${dom}.${valid_run}_product.sh - + echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/PointStat_fcstHREF${prod}_obsPREPBUFR_SFC.conf " >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "export err=\$?; err_chk" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + + echo "if [ $SENDCOM = YES ] ; then " >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "for FILEn in \$output_base/stat/\${MODEL}/*.stat ; do if [ -f \"\$FILEn\" ] ; then cp -v \$FILEn $COMOUTsmall; fi; done" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh #Mark this Alaska task is completed - echo "[[ \$? = 0 ]] && >$COMOUTrestart/product/run_href_${model}.${dom}.${valid_run}_product.completed" >> run_href_${model}.${dom}.${valid_run}_product.sh + echo "[[ \$? 
= 0 ]] && >$COMOUTrestart/product/run_href_${model}.${dom}.${valid_run}.${fhr}_product.completed" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "fi" >> run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + + chmod +x run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh + echo "${DATA}/scripts/run_href_${model}.${dom}.${valid_run}.${fhr}.product.sh" >> run_all_href_product_poe.sh - chmod +x run_href_${model}.${dom}.${valid_run}_product.sh - echo "${DATA}/run_href_${model}.${dom}.${valid_run}_product.sh" >> run_all_href_product_poe.sh + fi - fi #end if check restart + fi #end if check restart + done #end of fhr done # end of valid_run else diff --git a/ush/cam/evs_href_grid2obs_profile.sh b/ush/cam/evs_href_grid2obs_profile.sh index 5146f2d73a..2189a448fc 100755 --- a/ush/cam/evs_href_grid2obs_profile.sh +++ b/ush/cam/evs_href_grid2obs_profile.sh @@ -1,7 +1,10 @@ #!/bin/ksh #************************************************************************* # Purpose: Generate href grid2obs profile poe and sub-jobs files -# Last update: 10/30/2023, by Binbin Zhou Lynker@EMC/NCEP +# +# Last update: +# 01/10/2025, add MPMD, by Binbin Zhou Lynker@EMC/NCEP +# 10/30/2024, by Binbin Zhou Lynker@EMC/NCEP #************************************************************************* set -x @@ -16,37 +19,69 @@ fi #******************************************* # Build POE script to collect sub-jobs #****************************************** +cd $DATA/scripts >run_all_href_profile_poe.sh export obsv=prepbufr +typeset -Z2 hh for dom in $domains ; do if [ $dom = CONUS ] ; then export domain=CONUS + for valid_at in 00 12 ; do - - for fhr in fhr1 fhr2 ; do - + for fhr in 06 12 18 24 30 36 42 48 ; do + + #**************************** # Build sub-jobs #**************************** >run_href_${domain}.${valid_at}.${fhr}_profile.sh ######################################################################################### - # Restart: check if this CONUS task has been 
completed in the previous run + # Restart: check if this CONUS task has been completed in the previous run # if not, do this task, and mark it is completed after it is done # otherwise, skip this task ######################################################################################### - if [ ! -e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.completed ] ; then - + if [ ! -e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.completed ] ; then + + ihr=`$NDATE -$fhr $VDATE$valid_at|cut -c 9-10` + iday=`$NDATE -$fhr $VDATE$valid_at|cut -c 1-8` + + input_fcst="$COMINhref/href.${iday}/verf_g2g/href.*.t${ihr}z.conus.f${fhr}" + input_obsv="$WORK/prepbufr.${VDATE}/prepbufr_profile.t${valid_at}z.G227.nc" + + if [ -s $input_fcst ] && [ -s $input_obsv ] ; then + + if [ $ihr = 00 ] || [ $ihr = 12 ] ; then + if [ $fhr -ge 45 ] ; then + mbrs=7 + elif [ $fhr -eq 42 ] || [ $fhr -eq 39 ] ; then + mbrs=8 + else + mbrs=10 + fi + elif [ $ihr = 06 ] || [ $ihr = 18 ] ; then + if [ $fhr -ge 45 ] ; then + mbrs=4 + elif [ $fhr -le 42 ] && [ $fhr -ge 33 ] ; then + mbrs=8 + else + mbrs=10 + fi + fi + + echo "#!/bin/ksh" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "set -x" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export regrid=NONE" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export obsv=prepbufr" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export domain=CONUS" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "export nmbrs=$mbrs" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export output_base=$WORK/grid2obs/run_href_${domain}.${valid_at}.${fhr}_profile" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export OBTYPE='PREPBUFR'" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh @@ -59,14 +94,8 @@ for dom in $domains ; do echo "export vend=${valid_at}" >>run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export valid_increment=10800" >> 
run_href_${domain}.${valid_at}.${fhr}_profile.sh - if [ $valid_at = 00 ] || [ $valid_at = 06 ] || [ $valid_at = 12 ] || [ $valid_at = 18 ] ; then - if [ $fhr = fhr1 ] ; then - echo "export lead=' 6,12,18,24'" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - elif [ $fhr = fhr2 ] ; then - echo "export lead='30,36,42,48'" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - fi - fi + echo "export lead=$fhr" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export domain=CONUS" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export model=href" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh @@ -80,28 +109,7 @@ for dom in $domains ; do echo "export verif_grid=''" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo "export verif_poly='${maskpath}/Bukovsky_G227_CONUS.nc, - ${maskpath}/Bukovsky_G227_CONUS_East.nc, - ${maskpath}/Bukovsky_G227_CONUS_West.nc, - ${maskpath}/Bukovsky_G227_CONUS_South.nc, - ${maskpath}/Bukovsky_G227_CONUS_Central.nc, - ${maskpath}/Bukovsky_G227_Appalachia.nc, - ${maskpath}/Bukovsky_G227_CPlains.nc, - ${maskpath}/Bukovsky_G227_DeepSouth.nc, - ${maskpath}/Bukovsky_G227_GreatBasin.nc, - ${maskpath}/Bukovsky_G227_GreatLakes.nc, - ${maskpath}/Bukovsky_G227_Mezquital.nc, - ${maskpath}/Bukovsky_G227_MidAtlantic.nc, - ${maskpath}/Bukovsky_G227_NorthAtlantic.nc, - ${maskpath}/Bukovsky_G227_NPlains.nc, - ${maskpath}/Bukovsky_G227_NRockies.nc, - ${maskpath}/Bukovsky_G227_PacificNW.nc, - ${maskpath}/Bukovsky_G227_PacificSW.nc, - ${maskpath}/Bukovsky_G227_Prairie.nc, - ${maskpath}/Bukovsky_G227_Southeast.nc, - ${maskpath}/Bukovsky_G227_Southwest.nc, - ${maskpath}/Bukovsky_G227_SPlains.nc, - ${maskpath}/Bukovsky_G227_SRockies.nc'" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "export verif_poly='${maskpath}/Bukovsky_G227_CONUS.nc'" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh ################################################################################################################ # Adding following 
"if blocks" for restart capability for CONUS: @@ -112,8 +120,9 @@ for dom in $domains ; do ################################################################################################################# echo "if [ ! -e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.GenEnsProd.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/GenEnsProd_fcstHREF_obsPREPBUFR_PROFILE.conf " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo " cp \$output_base/stat/\${MODEL}/GenEnsProd*CONUS*.nc $COMOUTrestart/profile" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/GenEnsProd_fcstHREF_obsPREPBUFR_PROFILE.conf " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " for FILEn in \$output_base/stat/\${MODEL}/GenEnsProd*CONUS*.nc; do if [ -f \"\$FILEn\" ] && [ $SENDCOM = YES ] ; then cp -v \$FILEn $COMOUTrestart/profile; fi; done" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " [[ \$? = 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.GenEnsProd.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "else " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " mkdir -p \$output_base/stat/\${MODEL}" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh @@ -122,21 +131,27 @@ for dom in $domains ; do echo "if [ ! 
-e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.EnsembleStat.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/EnsembleStat_fcstHREF_obsPREPBUFR_PROFILE.conf " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " [[ \$? = 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.EnsembleStat.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "if [ ! -e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.PointStat.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/PointStat_fcstHREF_obsPREPBUFR_PROFILE_prob.conf " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo " [[ \$? = 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.PointStat.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " [[ \$? 
= 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.PointStat.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo "cp \$output_base/stat/\${MODEL}/*.stat $COMOUTsmall" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "if [ $SENDCOM = YES ] ; then " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "for FILEn in \$output_base/stat/\${MODEL}/*.stat; do if [ -f \"\$FILEn\" ]; then cp -v \$FILEn $COMOUTsmall; fi; done" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh #Mark that all of the 3 METplus processes for this task have been completed for next restart run: echo "[[ \$? = 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh chmod +x run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo "${DATA}/run_href_${domain}.${valid_at}.${fhr}_profile.sh" >> run_all_href_profile_poe.sh + echo "${DATA}/scripts/run_href_${domain}.${valid_at}.${fhr}_profile.sh" >> run_all_href_profile_poe.sh + + fi fi @@ -150,22 +165,40 @@ for dom in $domains ; do for valid_at in 00 12 ; do - for fhr in fhr1 ; do + for fhr in 06 12 18 24 30 36 42 48 ; do >run_href_${domain}.${valid_at}.${fhr}_profile.sh ######################################################################################### - # Restart: check if this Alaska task has been completed in the previous run + # Restart: check if this Alaska task has been completed in the previous run # if not, do this task, and mark it is completed after it is done # otherwise, skip this task ######################################################################################### - if [ ! -e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.completed ] ; then - - + if [ ! 
-e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.completed ] ; then + + ihr=`$NDATE -$fhr $VDATE$valid_at|cut -c 9-10` + iday=`$NDATE -$fhr $VDATE$valid_at|cut -c 1-8` + + input_fcst="$COMINhref/href.${iday}/verf_g2g/href.*.t${ihr}z.ak.f${fhr}" + input_obsv="$WORK/prepbufr.${VDATE}/prepbufr_profile.t${valid_at}z.G198.nc" + + if [ -s $input_fcst ] && [ -s $input_obsv ] ; then + + if [ $ihr = 06 ] || [ $ihr = 18 ] ; then + if [ $fhr -ge 45 ] ; then + mbrs=5 + elif [ $fhr -eq 42 ] || [ $fhr -eq 39 ] ; then + mbrs=6 + else + mbrs=8 + fi + fi + echo "#!/bin/ksh" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "set -x" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export regrid=NONE" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export obsv=prepbufr" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export domain=Alaska" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - + echo "export nmbrs=$mbrs" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export output_base=$WORK/grid2obs/run_href_${domain}.${valid_at}.${fhr}_profile" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh @@ -181,10 +214,8 @@ for dom in $domains ; do echo "export valid_increment=10800" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - if [ $valid_at = 00 ] || [ $valid_at = 12 ] ; then - #Alaska run cycles are 06Z and 18Z - echo "export lead=' 6,18,30,42'" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - fi + echo "export lead=$fhr" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "export model=href" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export MODEL=HREF" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh @@ -209,7 +240,8 @@ for dom in $domains ; do echo "if [ ! 
-e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.GenEnsProd.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/GenEnsProd_fcstHREF_obsPREPBUFR_PROFILE.conf " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo " cp \$output_base/stat/\${MODEL}/GenEnsProd*Alaska*.nc $COMOUTrestart/profile" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " for FILEn in \$output_base/stat/\${MODEL}/GenEnsProd*Alaska*.nc; do if [ -f \"\$FILEn\" ] && [ $SENDCOM = YES ] ; then cp -v \$FILEn $COMOUTrestart/profile; fi; done" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " [[ \$? = 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.GenEnsProd.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "else " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " mkdir -p \$output_base/stat/\${MODEL}" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh @@ -218,27 +250,32 @@ for dom in $domains ; do echo "if [ ! -e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.EnsembleStat.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/EnsembleStat_fcstHREF_obsPREPBUFR_PROFILE.conf " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo " [[ \$? = 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.EnsembleStat.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " [[ \$? 
= 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.EnsembleStat.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "if [ ! -e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.PointStat.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/PointStat_fcstHREF_obsPREPBUFR_PROFILE_prob.conf " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo " [[ \$? = 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.PointStat.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " [[ \$? = 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.PointStat.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo "cp \$output_base/stat/\${MODEL}/*.stat $COMOUTsmall" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "if [ $SENDCOM = YES ] ; then " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "for FILEn in \$output_base/stat/\${MODEL}/*.stat; do if [ -f \"\$FILEn\" ]; then cp -v \$FILEn $COMOUTsmall; fi; done" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh #Mark that all of the 3 METplus processes for this task have been completed for next restart run: echo "[[ \$? 
= 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh chmod +x run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo "${DATA}/run_href_${domain}.${valid_at}.${fhr}_profile.sh" >> run_all_href_profile_poe.sh + echo "${DATA}/scripts/run_href_${domain}.${valid_at}.${fhr}_profile.sh" >> run_all_href_profile_poe.sh - fi + fi + fi - done done + done elif [ $dom = HI ] ; then @@ -247,22 +284,39 @@ for dom in $domains ; do for valid_at in 00 12 ; do - for fhr in fhr1 ; do + for fhr in 06 12 18 24 30 36 42 48 ; do >run_href_${domain}.${valid_at}.${fhr}_profile.sh ######################################################################################### - # Restart: check if this Hawaii task has been completed in the previous run + # Restart: check if this Hawaii task has been completed in the previous run # if not, do this task, and mark it is completed after it is done # otherwise, skip this task ######################################################################################### - if [ ! -e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.completed ] ; then + if [ ! 
-e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.completed ] ; then + ihr=`$NDATE -$fhr $VDATE$valid_at|cut -c 9-10` + iday=`$NDATE -$fhr $VDATE$valid_at|cut -c 1-8` + input_fcst="$COMINhref/href.${iday}/verf_g2g/href.*.t${ihr}z.hi.f${fhr}" + input_obsv="$WORK/prepbufr.${VDATE}/prepbufr_profile.t${valid_at}z.G139.nc" + + if [ -s $input_fcst ] && [ -s $input_obsv ] ; then + + if [ $ihr = 00 ] || [ $ihr = 12 ] ; then + if [ $fhr -ge 42 ] ; then + mbrs=4 + else + mbrs=6 + fi + fi + + echo "#!/bin/ksh" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "set -x" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export regrid=NONE" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export obsv=prepbufr_profile" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export domain=HI" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - + echo "export nmbrs=$mbrs" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export output_base=$WORK/grid2obs/run_href_${domain}.${valid_at}.${fhr}_profile" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh @@ -275,10 +329,8 @@ for dom in $domains ; do echo "export vbeg=${valid_at}" >>run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export vend=${valid_at}" >>run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export valid_increment=10800" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - if [ $valid_at = 00 ] || [ $valid_at = 12 ] ; then - #Hawaii run only has 00Z cycle, and validaded at 00Z and 12Z Raobs (sounding) - echo "export lead='12, 24, 36, 48 '" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - fi + + echo "export lead=$fhr" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export model=href" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export MODEL=HREF" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh @@ -302,7 +354,8 @@ for dom in $domains ; do echo "if [ ! 
-e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.GenEnsProd.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/GenEnsProd_fcstHREF_obsPREPBUFR_PROFILE.conf " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo " cp \$output_base/stat/\${MODEL}/GenEnsProd*_HI_*.nc $COMOUTrestart/profile" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " for FILEn in \$output_base/stat/\${MODEL}/GenEnsProd*_HI_*.nc; do if [ -f \"\$FILEn\" ] && [ $SENDCOM = YES ] ; then cp -v \$FILEn $COMOUTrestart/profile; fi; done" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " [[ \$? = 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.GenEnsProd.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "else " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " mkdir -p \$output_base/stat/\${MODEL}" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh @@ -311,23 +364,28 @@ for dom in $domains ; do echo "if [ ! -e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.EnsembleStat.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/EnsembleStat_fcstHREF_obsPREPBUFR_PROFILE.conf " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " [[ \$? = 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.EnsembleStat.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "if [ ! 
-e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.PointStat.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/PointStat_fcstHREF_obsPREPBUFR_PROFILE_prob.conf " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo " [[ \$? = 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.PointStat.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " [[ \$? = 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.PointStat.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo "cp \$output_base/stat/\${MODEL}/*.stat $COMOUTsmall" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "if [ $SENDCOM = YES ] ; then " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "for FILEn in \$output_base/stat/\${MODEL}/*.stat; do if [ -f \"\$FILEn\" ]; then cp -v \$FILEn $COMOUTsmall; fi; done" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh #Mark that all of the 3 METplus processes for this task have been completed for next restart run: echo "[[ \$? 
= 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh chmod +x run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo "${DATA}/run_href_${domain}.${valid_at}.${fhr}_profile.sh" >> run_all_href_profile_poe.sh - - fi + echo "${DATA}/scripts/run_href_${domain}.${valid_at}.${fhr}_profile.sh" >> run_all_href_profile_poe.sh + + fi + fi done done @@ -339,22 +397,38 @@ for dom in $domains ; do for valid_at in 00 12 ; do - for fhr in fhr1 ; do + for fhr in 06 12 18 24 30 36 42 48 ; do >run_href_${domain}.${valid_at}.${fhr}_profile.sh ######################################################################################### - # Restart: check if this Puerto Rico task has been completed in the previous run + # Restart: check if this Puerto Rico task has been completed in the previous run # if not, do this task, and mark it is completed after it is done # otherwise, skip this task ######################################################################################### if [ ! 
-e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.completed ] ; then + ihr=`$NDATE -$fhr $VDATE$valid_at|cut -c 9-10` + iday=`$NDATE -$fhr $VDATE$valid_at|cut -c 1-8` + input_fcst="$COMINhref/href.${iday}/verf_g2g/href.*.t${ihr}z.pr.f${fhr}" + input_obsv="$WORK/prepbufr.${VDATE}/prepbufr_profile.t${valid_at}z.G200.nc" + + if [ -s $input_fcst ] && [ -s $input_obsv ] ; then + if [ $ihr = 06 ] || [ $ihr = 18 ] ; then + if [ $fhr -ge 42 ] ; then + mbrs=4 + else + mbrs=6 + fi + fi + + echo "#!/bin/ksh" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "set -x" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export regrid=NONE" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export obsv=prepbufr_profile" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export domain=PR" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - + echo "export nmbrs=$mbrs" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export output_base=$WORK/grid2obs/run_href_${domain}.${valid_at}.${fhr}_profile" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh @@ -367,10 +441,9 @@ for dom in $domains ; do echo "export vbeg=${valid_at}" >>run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export vend=${valid_at}" >>run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export valid_increment=10800" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - #Puerto Rico run only has 06Z and 18Z run , and validated at 00Z and 12Z Raobs (sounding) - if [ $valid_at = 00 ] || [ $valid_at = 12 ] ; then - echo "export lead='6, 18, 30, 42'" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - fi + + echo "export lead=$fhr" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "export model=href" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "export MODEL=HREF" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh @@ -394,7 +467,8 @@ for dom in $domains ; do echo "if [ ! 
-e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.GenEnsProd.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/GenEnsProd_fcstHREF_obsPREPBUFR_PROFILE.conf " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo " cp \$output_base/stat/\${MODEL}/GenEnsProd*_PR_*.nc $COMOUTrestart/profile" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " for FILEn in \$output_base/stat/\${MODEL}/GenEnsProd*_PR_*.nc; do if [ -f \"\$FILEn\" ] && [ $SENDCOM = YES ] ; then cp -v \$FILEn $COMOUTrestart/profile; fi; done" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " [[ \$? = 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.GenEnsProd.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "else " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " mkdir -p \$output_base/stat/\${MODEL}" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh @@ -403,23 +477,28 @@ for dom in $domains ; do echo "if [ ! -e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.EnsembleStat.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/EnsembleStat_fcstHREF_obsPREPBUFR_PROFILE.conf " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " [[ \$? = 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.EnsembleStat.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "if [ ! 
-e $COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.PointStat.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/PointStat_fcstHREF_obsPREPBUFR_PROFILE_prob.conf " >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo " [[ \$? = 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.PointStat.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo "cp \$output_base/stat/\${MODEL}/*.stat $COMOUTsmall" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "if [ $SENDCOM = YES ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "for FILEn in \$output_base/stat/\${MODEL}/*.stat; do if [ -f \"\$FILEn\" ]; then cp -v \$FILEn $COMOUTsmall; fi; done" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh + echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh - #Mark that all of the 3 METplus processes for this task have been completed for next restart run: + #Mark that all of the 3 METplus processes for this task have been completed for next restart run: echo "[[ \$? 
= 0 ]] && >$COMOUTrestart/profile/run_href_${domain}.${valid_at}.${fhr}_profile.completed" >> run_href_${domain}.${valid_at}.${fhr}_profile.sh chmod +x run_href_${domain}.${valid_at}.${fhr}_profile.sh - echo "${DATA}/run_href_${domain}.${valid_at}.${fhr}_profile.sh" >> run_all_href_profile_poe.sh - - fi + echo "${DATA}/scripts/run_href_${domain}.${valid_at}.${fhr}_profile.sh" >> run_all_href_profile_poe.sh + + fi + fi done done diff --git a/ush/cam/evs_href_grid2obs_system.sh b/ush/cam/evs_href_grid2obs_system.sh index 007cd8438c..8ad5a3db15 100755 --- a/ush/cam/evs_href_grid2obs_system.sh +++ b/ush/cam/evs_href_grid2obs_system.sh @@ -2,17 +2,17 @@ #************************************************************************* # Purpose: Generate href grid2obs ecnt poe and sub-jobs files # Last update: -# 04/25/2024, add restart, by Binbin Zhou Lynker@EMC/NCEP -# 10/30/2023, by Binbin Zhou Lynker@EMC/NCEP +# 01/10/2025, add MPMD, by Binbin Zhou Lynker@EMC/NCEP +# 10/30/2024, by Binbin Zhou Lynker@EMC/NCEP ##************************************************************************* set -x #******************************************* # Build POE script to collect sub-jobs #****************************************** +cd $DATA/scripts >run_all_href_system_poe.sh - export obsv=prepbufr for dom in CONUS Alaska ; do @@ -21,69 +21,103 @@ for dom in CONUS Alaska ; do export domain=CONUS - for valid_at in 1fhr 2fhr 3fhr 4fhr 5fhr 6fhr 7fhr 8fhr ; do - + for valid_at in 00 03 06 09 12 15 18 21 ; do + + if [ $valid_at = 00 ] || [ $valid_at = 06 ] || [ $valid_at = 12 ] || [ $valid_at = 18 ] ; then + fhrs='06 12 18 24 30 36 42 48' + elif [ $valid_at = 03 ] || [ $valid_at = 09 ] || [ $valid_at = 15 ] || [ $valid_at = 21 ] ; then + fhrs='03 09 15 21 27 33 39 45' + fi + + for fhr in $fhrs ; do + #********************** # Build sub-jobs #********************** - >run_href_${domain}.${valid_at}_system.sh + >run_href_${domain}.${valid_at}.${fhr}_system.sh 
######################################################################################### - # Restart: check if this CONUS task has been completed in the previous run # if not, do this task, and mark it is completed after it is done # otherwise, skip this task ######################################################################################### - if [ ! -e $COMOUTrestart/system/run_href_${domain}.${valid_at}_system.completed ] ; then - - echo "export regrid=G227" >> run_href_${domain}.${valid_at}_system.sh - echo "export obsv=prepbufr" >> run_href_${domain}.${valid_at}_system.sh - echo "export domain=CONUS" >> run_href_${domain}.${valid_at}_system.sh - - echo "export output_base=$WORK/grid2obs/run_href_${domain}.${valid_at}_system" >> run_href_${domain}.${valid_at}_system.sh - - echo "export OBTYPE='PREPBUFR'" >> run_href_${domain}.${valid_at}_system.sh - - echo "export obsvhead=$obsv" >> run_href_${domain}.${valid_at}_system.sh - echo "export obsvgrid=G227" >> run_href_${domain}.${valid_at}_system.sh - echo "export obsvpath=$WORK" >> run_href_${domain}.${valid_at}_system.sh - - echo "export vbeg=00" >>run_href_${domain}.${valid_at}_system.sh - echo "export vend=21" >>run_href_${domain}.${valid_at}_system.sh - echo "export valid_increment=10800" >> run_href_${domain}.${valid_at}_system.sh - - if [ $valid_at = 1fhr ] ; then - echo "export lead='3,6'" >> run_href_${domain}.${valid_at}_system.sh - elif [ $valid_at = 2fhr ] ; then - echo "export lead='9,12'" >> run_href_${domain}.${valid_at}_system.sh - elif [ $valid_at = 3fhr ] ; then - echo "export lead='15,18'" >> run_href_${domain}.${valid_at}_system.sh - elif [ $valid_at = 4fhr ] ; then - echo "export lead='21,24'" >> run_href_${domain}.${valid_at}_system.sh - elif [ $valid_at = 5fhr ] ; then - echo "export lead='27,30'" >> run_href_${domain}.${valid_at}_system.sh - elif [ $valid_at = 6fhr ] ; then - echo "export lead='33,36'" >>
run_href_${domain}.${valid_at}_system.sh - elif [ $valid_at = 7fhr ] ; then - echo "export lead='39,42'" >> run_href_${domain}.${valid_at}_system.sh - elif [ $valid_at = 8fhr ] ; then - echo "export lead='45,48'" >> run_href_${domain}.${valid_at}_system.sh - elif [ $valid_at = test ] ; then - echo "export vbeg=18" >>run_href_${domain}.${valid_at}_system.sh - echo "export vend=18" >>run_href_${domain}.${valid_at}_system.sh - echo "export lead='12'" >> run_href_${domain}.${valid_at}_system.sh + if [ ! -e $COMOUTrestart/system/run_href_${domain}.${valid_at}.${fhr}_system.completed ] ; then + + ihr=`$NDATE -$fhr $VDATE$valid_at|cut -c 9-10` + iday=`$NDATE -$fhr $VDATE$valid_at|cut -c 1-8` + + if [ $ihr = 00 ] || [ $ihr = 12 ] ; then + if [ $fhr -ge 45 ] ; then + mbrs=7 + elif [ $fhr -eq 42 ] || [ $fhr -eq 39 ] ; then + mbrs=8 + else + mbrs=10 + fi + elif [ $ihr = 06 ] || [ $ihr = 18 ] ; then + if [ $fhr -ge 45 ] ; then + mbrs=4 + elif [ $fhr -le 42 ] || [ $fhr -ge 33 ] ; then + mbrs=8 + else + mbrs=10 + fi + fi + + input_fcst="$COMINhref/href.${iday}/verf_g2g/href.*.t${ihr}z.conus.f${fhr}" + input_obsv="$WORK/prepbufr.${VDATE}/prepbufr.t${valid_at}z.G227.nc" + + if [ -s $input_fcst ] && [ -s $input_obsv ] ; then + + if [ $ihr = 00 ] || [ $ihr = 12 ] ; then + if [ $fhr -ge 45 ] ; then + mbrs=7 + elif [ $fhr -eq 42 ] || [ $fhr -eq 39 ] ; then + mbrs=8 + else + mbrs=10 + fi + elif [ $ihr = 06 ] || [ $ihr = 18 ] ; then + if [ $fhr -ge 45 ] ; then + mbrs=4 + elif [ $fhr -le 42 ] && [ $fhr -ge 33 ] ; then + mbrs=8 + else + mbrs=10 + fi fi - - echo "export domain=CONUS" >> run_href_${domain}.${valid_at}_system.sh - echo "export model=href" >> run_href_${domain}.${valid_at}_system.sh - echo "export MODEL=HREF" >> run_href_${domain}.${valid_at}_system.sh - echo "export regrid=G227 " >> run_href_${domain}.${valid_at}_system.sh - echo "export modelhead=href" >> run_href_${domain}.${valid_at}_system.sh - echo "export modelpath=$COMHREF" >> run_href_${domain}.${valid_at}_system.sh 
- echo "export modelgrid=conus.f" >> run_href_${domain}.${valid_at}_system.sh - echo "export modeltail=''" >> run_href_${domain}.${valid_at}_system.sh - echo "export extradir='verf_g2g/'" >> run_href_${domain}.${valid_at}_system.sh + + echo "#!/bin/ksh" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "set -x " >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export regrid=G227" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export obsv=prepbufr" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export domain=CONUS" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export nmbrs=$mbrs" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + + echo "export output_base=$WORK/grid2obs/run_href_${domain}.${valid_at}.${fhr}_system" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + + echo "export OBTYPE='PREPBUFR'" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + + echo "export obsvhead=$obsv" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export obsvgrid=G227" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export obsvpath=$WORK" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + + echo "export vbeg=$valid_at" >>run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export vend=$valid_at" >>run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export valid_increment=10800" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export lead='$fhr'" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + + echo "export domain=CONUS" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export model=href" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export MODEL=HREF" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export regrid=G227 " >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export modelhead=href" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export modelpath=$COMHREF" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export 
modelgrid=conus.f" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export modeltail=''" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export extradir='verf_g2g/'" >> run_href_${domain}.${valid_at}.${fhr}_system.sh - echo "export verif_grid=''" >> run_href_${domain}.${valid_at}_system.sh + echo "export verif_grid=''" >> run_href_${domain}.${valid_at}.${fhr}_system.sh echo "export verif_poly='${maskpath}/Bukovsky_G227_CONUS.nc, ${maskpath}/Bukovsky_G227_CONUS_East.nc, @@ -106,9 +140,9 @@ for dom in CONUS Alaska ; do ${maskpath}/Bukovsky_G227_Southeast.nc, ${maskpath}/Bukovsky_G227_Southwest.nc, ${maskpath}/Bukovsky_G227_SPlains.nc, - ${maskpath}/Bukovsky_G227_SRockies.nc'" >> run_href_${domain}.${valid_at}_system.sh + ${maskpath}/Bukovsky_G227_SRockies.nc'" >> run_href_${domain}.${valid_at}.${fhr}_system.sh - echo "export valid_at=$valid_at" >> run_href_${domain}.${valid_at}_system.sh + echo "export valid_at=$valid_at" >> run_href_${domain}.${valid_at}.${fhr}_system.sh ################################################################################################################ # Adding following "if blocks" for restart capability for CONUS: @@ -117,85 +151,124 @@ for dom in CONUS Alaska ; do # 3. if any one of the 3 exits, skip it. But for GenEnsProd, all of the nc files generated from previous run # are copied back to the output_base/stat directory ################################################################################################################# - echo "if [ ! 
-e $COMOUTrestart/system/run_href_${domain}.${valid_at}_system.GenEnsProd.completed ] ; then" >> run_href_${domain}.${valid_at}_system.sh - echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/GenEnsProd_fcstHREF_obsPREPBUFR_SFC.conf " >> run_href_${domain}.${valid_at}_system.sh - echo " cp \$output_base/stat/\${MODEL}/GenEnsProd*CONUS*.nc $COMOUTrestart/system" >> run_href_${domain}.${valid_at}_system.sh - echo " [[ \$? = 0 ]] && >$COMOUTrestart/system/run_href_${domain}.${valid_at}_system.GenEnsProd.completed" >> run_href_${domain}.${valid_at}_system.sh - echo "else " >> run_href_${domain}.${valid_at}_system.sh - echo " mkdir -p \$output_base/stat/\${MODEL}" >> run_href_${domain}.${valid_at}_system.sh - echo " cp $COMOUTrestart/system/GenEnsProd*CONUS*.nc \$output_base/stat/\${MODEL}" >> run_href_${domain}.${valid_at}_system.sh - echo "fi" >> run_href_${domain}.${valid_at}_system.sh - - echo "if [ ! -e $COMOUTrestart/system/run_href_${domain}.${valid_at}_system.EnsembleStat.completed ] ; then" >> run_href_${domain}.${valid_at}_system.sh - echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/EnsembleStat_fcstHREF_obsPREPBUFR_SFC.conf " >> run_href_${domain}.${valid_at}_system.sh - echo " [[ \$? = 0 ]] && >$COMOUTrestart/system/run_href_${domain}.${valid_at}_system.EnsembleStat.completed" >> run_href_${domain}.${valid_at}_system.sh - echo "fi" >> run_href_${domain}.${valid_at}_system.sh - - echo "if [ ! -e $COMOUTrestart/system/run_href_${domain}.${valid_at}_system.PointStat.completed ] ; then" >> run_href_${domain}.${valid_at}_system.sh - echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/PointStat_fcstHREF_obsPREPBUFR_SFC_prob.conf " >> run_href_${domain}.${valid_at}_system.sh - echo " [[ \$? 
= 0 ]] && >$COMOUTrestart/system/run_href_${domain}.${valid_at}_system.PointStat.completed" >> run_href_${domain}.${valid_at}_system.sh - echo "fi" >> run_href_${domain}.${valid_at}_system.sh - - echo "cp \$output_base/stat/\${MODEL}/*.stat $COMOUTsmall" >> run_href_${domain}.${valid_at}_system.sh + echo "if [ ! -e $COMOUTrestart/system/run_href_${domain}.${valid_at}.${fhr}system.GenEnsProd.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/GenEnsProd_fcstHREF_obsPREPBUFR_SFC.conf " >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " if [ $SENDCOM = YES ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " for FILEn in \$output_base/stat/\${MODEL}/GenEnsProd*CONUS*.nc; do if [ -f \"\$FILEn\" ]; then cp -v \$FILEn $COMOUTrestart/system; fi; done" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " [[ \$? = 0 ]] && >$COMOUTrestart/system/run_href_${domain}.${valid_at}.${fhr}system.GenEnsProd.completed" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " fi" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "else " >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " mkdir -p \$output_base/stat/\${MODEL}" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " cp $COMOUTrestart/system/GenEnsProd*CONUS*.nc \$output_base/stat/\${MODEL}" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + + echo "if [ ! 
-e $COMOUTrestart/system/run_href_${domain}.${valid_at}.${fhr}system.EnsembleStat.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/EnsembleStat_fcstHREF_obsPREPBUFR_SFC.conf " >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " if [ $SENDCOM = YES ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " [[ \$? = 0 ]] && >$COMOUTrestart/system/run_href_${domain}.${valid_at}.${fhr}system.EnsembleStat.completed" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " fi" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + + echo "if [ ! -e $COMOUTrestart/system/run_href_${domain}.${valid_at}.${fhr}system.PointStat.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " export modelpath=$COMHREF" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/PointStat_fcstHREF_obsPREPBUFR_SFC_prob.conf " >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " if [ $SENDCOM = YES ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " [[ \$? 
= 0 ]] && >$COMOUTrestart/system/run_href_${domain}.${valid_at}.${fhr}system.PointStat.completed" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " fi" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + + echo "if [ $SENDCOM = YES ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " for FILEn in \$output_base/stat/\${MODEL}/*.stat; do if [ -f \"\$FILEn\" ]; then cp -v \$FILEn $COMOUTsmall; fi; done" >> run_href_${domain}.${valid_at}.${fhr}_system.sh #Mark that all of the 3 METplus processes for this task have been completed for next restart run: - echo "[[ \$? = 0 ]] && >$COMOUTrestart/system/run_href_${domain}.${valid_at}_system.completed" >> run_href_${domain}.${valid_at}_system.sh + echo " [[ \$? = 0 ]] && >$COMOUTrestart/system/run_href_${domain}.${valid_at}.${fhr}_system.completed" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + + chmod +x run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "${DATA}/scripts/run_href_${domain}.${valid_at}.${fhr}_system.sh" >> run_all_href_system_poe.sh - chmod +x run_href_${domain}.${valid_at}_system.sh - echo "${DATA}/run_href_${domain}.${valid_at}_system.sh" >> run_all_href_system_poe.sh + fi fi #end if check CONUS completed + done + done elif [ $dom = Alaska ] ; then export domain=Alaska - for valid_at in 1fhr 2fhr ; do + for valid_at in 00 03 06 09 12 15 18 21 ; do + + if [ $valid_at = 00 ] || [ $valid_at = 06 ] || [ $valid_at = 12 ] || [ $valid_at = 18 ] ; then + fhrs='06 12 18 24 30 36 42 48' + elif [ $valid_at = 03 ] || [ $valid_at = 09 ] || [ $valid_at = 15 ] || [ $valid_at = 21 ] ; then + fhrs='03 09 15 21 27 33 39 45' + fi + + for fhr in $fhrs ; do - >run_href_${domain}.${valid_at}_system.sh + >run_href_${domain}.${valid_at}.${fhr}_system.sh ######################################################################################### - #Restart: check if this 
Alaska task has been completed in the previous tun - if [ ! -e $COMOUTrestart/system/run_href_${domain}.${valid_at}_system.completed ] ; then + #Restart: check if this Alaska task has been completed in the previous run + if [ ! -e $COMOUTrestart/system/run_href_${domain}.${valid_at}.${fhr}_system.completed ] ; then ########################################################################################## - echo "export regrid=NONE" >> run_href_${domain}.${valid_at}_system.sh - echo "export obsv=prepbufr" >> run_href_${domain}.${valid_at}_system.sh - echo "export domain=Alaska" >> run_href_${domain}.${valid_at}_system.sh - - echo "export output_base=$WORK/grid2obs/run_href_${domain}.${valid_at}_system" >> run_href_${domain}.${valid_at}_system.sh - - echo "export OBTYPE='PREPBUFR'" >> run_href_${domain}.${valid_at}_system.sh - echo "export obsvhead=$obsv " >> run_href_${domain}.${valid_at}_system.sh - echo "export obsvgrid=G198" >> run_href_${domain}.${valid_at}_system.sh - echo "export obsvpath=$WORK" >> run_href_${domain}.${valid_at}_system.sh - echo "export domain=Alaska " >> run_href_${domain}.${valid_at}_system.sh - - echo "export vbeg=00" >>run_href_${domain}.${valid_at}_system.sh - echo "export vend=21" >>run_href_${domain}.${valid_at}_system.sh - echo "export valid_increment=10800" >> run_href_${domain}.${valid_at}_system.sh - - if [ $valid_at = 1fhr ] ; then - echo "export lead='3,6,9,12,15,18,21,24'" >> run_href_${domain}.${valid_at}_system.sh - elif [ $valid_at = 2fhr ] ; then - echo "export lead='27,30,33,36,39,42,45,48'" >> run_href_${domain}.${valid_at}_system.sh - fi + ihr=`$NDATE -$fhr $VDATE$valid_at|cut -c 9-10` + iday=`$NDATE -$fhr $VDATE$valid_at|cut -c 1-8` + + input_fcst="$COMINhref/href.${iday}/verf_g2g/href.*.t${ihr}z.ak.f${fhr}" + input_obsv="$WORK/prepbufr.${VDATE}/prepbufr.t${valid_at}z.G198.nc" + + if [ -s $input_fcst ] && [ -s $input_obsv ] ; then + + if [ $ihr = 06 ] || [ $ihr = 18 ] ; then + if [ $fhr -ge 45 ] ; then + mbrs=5 + elif [
$fhr -eq 42 ] || [ $fhr -eq 39 ] ; then + mbrs=6 + else + mbrs=8 + fi + fi + + echo "#!/bin/ksh" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "set -x " >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export regrid=NONE" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export obsv=prepbufr" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export domain=Alaska" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export nmbrs=$mbrs" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + + echo "export output_base=$WORK/grid2obs/run_href_${domain}.${valid_at}.${fhr}_system" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + + echo "export OBTYPE='PREPBUFR'" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export obsvhead=$obsv " >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export obsvgrid=G198" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export obsvpath=$WORK" >> run_href_${domain}.${valid_at}.${fhr}_system.sh - echo "export model=href" >> run_href_${domain}.${valid_at}_system.sh - echo "export MODEL=HREF" >> run_href_${domain}.${valid_at}_system.sh - echo "export regrid=NONE " >> run_href_${domain}.${valid_at}_system.sh - echo "export modelhead=href" >> run_href_${domain}.${valid_at}_system.sh - echo "export modelpath=$COMHREF" >> run_href_${domain}.${valid_at}_system.sh - echo "export modelgrid=ak.f" >> run_href_${domain}.${valid_at}_system.sh - echo "export modeltail=''" >> run_href_${domain}.${valid_at}_system.sh - echo "export extradir='verf_g2g/'" >> run_href_${domain}.${valid_at}_system.sh + echo "export vbeg=$valid_at" >>run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export vend=$valid_at" >>run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export valid_increment=10800" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export lead='$fhr'" >> run_href_${domain}.${valid_at}.${fhr}_system.sh - echo "export verif_grid=''" >> 
run_href_${domain}.${valid_at}_system.sh - echo "export verif_poly='${maskpath}/Alaska_HREF.nc'" >> run_href_${domain}.${valid_at}_system.sh + echo "export model=href" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export modelpath=$COMHREF" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export MODEL=HREF" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export regrid=NONE " >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export modelhead=href" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export modelgrid=ak.f" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export modeltail=''" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export extradir='verf_g2g/'" >> run_href_${domain}.${valid_at}.${fhr}_system.sh - echo "export valid_at=$valid_at" >> run_href_${domain}.${valid_at}_system.sh + echo "export verif_grid=''" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "export verif_poly='${maskpath}/Alaska_HREF.nc'" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + + echo "export valid_at=$valid_at" >> run_href_${domain}.${valid_at}.${fhr}_system.sh ################################################################################################################ # Adding following "if blocks" for restart capability for Alaska: @@ -204,38 +277,54 @@ for dom in CONUS Alaska ; do # 3. if any one of the 3 exits, skip it. But for GenEnsProd, all of the nc files generated from previous run # are copied back to the output_base/stat directory ################################################################################################################# - echo "if [ ! 
-e $COMOUTrestart/system/run_href_${domain}.${valid_at}_system.GenEnsProd.completed ] ; then" >> run_href_${domain}.${valid_at}_system.sh - echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/GenEnsProd_fcstHREF_obsPREPBUFR_SFC.conf " >> run_href_${domain}.${valid_at}_system.sh - echo " cp \$output_base/stat/\${MODEL}/GenEnsProd*Alaska*.nc $COMOUTrestart/system" >> run_href_${domain}.${valid_at}_system.sh - echo " [[ \$? = 0 ]] && >$COMOUTrestart/system/run_href_${domain}.${valid_at}_system.GenEnsProd.completed" >> run_href_${domain}.${valid_at}_system.sh - echo "else " >> run_href_${domain}.${valid_at}_system.sh - echo " mkdir -p \$output_base/stat/\${MODEL}" >> run_href_${domain}.${valid_at}_system.sh - echo " cp $COMOUTrestart/system/GenEnsProd*Alaska*.nc \$output_base/stat/HREF" >> run_href_${domain}.${valid_at}_system.sh - echo "fi" >> run_href_${domain}.${valid_at}_system.sh - - echo "if [ ! -e $COMOUTrestart/system/run_href_${domain}.${valid_at}_system.EnsembleStat.completed ] ; then" >> run_href_${domain}.${valid_at}_system.sh - echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/EnsembleStat_fcstHREF_obsPREPBUFR_SFC.conf " >> run_href_${domain}.${valid_at}_system.sh - echo " [[ \$? = 0 ]] && >$COMOUTrestart/system/run_href_${domain}.${valid_at}_system.EnsembleStat.completed" >> run_href_${domain}.${valid_at}_system.sh - echo "fi" >> run_href_${domain}.${valid_at}_system.sh - - echo "if [ ! -e $COMOUTrestart/system/run_href_${domain}.${valid_at}_system.PointStat.completed ] ; then" >> run_href_${domain}.${valid_at}_system.sh - echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/PointStat_fcstHREF_obsPREPBUFR_SFC_prob.conf " >> run_href_${domain}.${valid_at}_system.sh - echo " [[ \$? 
= 0 ]] && >$COMOUTrestart/system/run_href_${domain}.${valid_at}_system.PointStat.completed" >> run_href_${domain}.${valid_at}_system.sh - echo "fi" >> run_href_${domain}.${valid_at}_system.sh - - echo "cp \$output_base/stat/\${MODEL}/*.stat $COMOUTsmall" >> run_href_${domain}.${valid_at}_system.sh + echo "if [ ! -e $COMOUTrestart/system/run_href_${domain}.${valid_at}.${fhr}system.GenEnsProd.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/GenEnsProd_fcstHREF_obsPREPBUFR_SFC.conf " >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " if [ $SENDCOM = YES ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " for FILEn in \$output_base/stat/\${MODEL}/GenEnsProd*CONUS*.nc; do if [ -f \"\$FILEn\" ]; then cp -v \$FILEn $COMOUTrestart/system; fi; done" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " [[ \$? = 0 ]] && >$COMOUTrestart/system/run_href_${domain}.${valid_at}.${fhr}system.GenEnsProd.completed" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " fi" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "else " >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " mkdir -p \$output_base/stat/\${MODEL}" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " cp $COMOUTrestart/system/GenEnsProd*Alaska*.nc \$output_base/stat/HREF" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + + echo "if [ ! 
-e $COMOUTrestart/system/run_href_${domain}.${valid_at}.${fhr}system.EnsembleStat.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/EnsembleStat_fcstHREF_obsPREPBUFR_SFC.conf " >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " if [ $SENDCOM = YES ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " [[ \$? = 0 ]] && >$COMOUTrestart/system/run_href_${domain}.${valid_at}.${fhr}system.EnsembleStat.completed" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " fi" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + + echo "if [ ! -e $COMOUTrestart/system/run_href_${domain}.${valid_at}.${fhr}system.PointStat.completed ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/PointStat_fcstHREF_obsPREPBUFR_SFC_prob.conf " >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " export err=\$?; err_chk" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " if [ $SENDCOM = YES ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " [[ \$? 
= 0 ]] && >$COMOUTrestart/system/run_href_${domain}.${valid_at}.${fhr}system.PointStat.completed" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " fi" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + + echo "if [ $SENDCOM = YES ] ; then" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo " for FILEn in \$output_base/stat/\${MODEL}/*.stat; do if [ -f \"\$FILEn\" ]; then cp -v \$FILEn $COMOUTsmall; fi; done" >> run_href_${domain}.${valid_at}.${fhr}_system.sh #Mark that all of the 3 METplus processes are completed for next restart run: - echo "[[ \$? = 0 ]] && >$COMOUTrestart/system/run_href_${domain}.${valid_at}_system.completed" >> run_href_${domain}.${valid_at}_system.sh + echo " [[ \$? = 0 ]] && >$COMOUTrestart/system/run_href_${domain}.${valid_at}.${fhr}_system.completed" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "fi" >> run_href_${domain}.${valid_at}.${fhr}_system.sh + + + chmod +x run_href_${domain}.${valid_at}.${fhr}_system.sh + echo "${DATA}/scripts/run_href_${domain}.${valid_at}.${fhr}_system.sh" >> run_all_href_system_poe.sh - chmod +x run_href_${domain}.${valid_at}_system.sh - echo "${DATA}/run_href_${domain}.${valid_at}_system.sh" >> run_all_href_system_poe.sh + fi fi # end checking if completed - done + done + + done - fi + fi #end of if dom done #end of dom diff --git a/ush/cam/evs_href_precip.sh b/ush/cam/evs_href_precip.sh index 2bca498701..adbf074ec1 100755 --- a/ush/cam/evs_href_precip.sh +++ b/ush/cam/evs_href_precip.sh @@ -4,26 +4,40 @@ # including 4 mean (mean, pmmn, lpmm and average), probability (prob, eas) # and system (ecnt line type) # Last update: -# 04/29/2024, add restart, Binbin Zhou Lynker@EMC/NCEP -# 10/30/2023, by Binbin Zhou Lynker@EMC/NCEP +# 01/10/2025, add MPMD, by Binbin Zhou Lynker@EMC/NCEP +# 10/30/2024, by Binbin Zhou Lynker@EMC/NCEP #*********************************************************************************** set -x 
#******************************************* # Build POE script to collect sub-jobs #****************************************** +cd $DATA/scripts >run_all_href_precip_poe.sh for obsvtype in ccpa mrms ; do - for acc in 01h 03h 24h ; do + if [ $obsvtype = ccpa ] ; then + domain=conus + grid=G227 + else + domain=ak + grid=G255 + fi - obsv=$obsvtype$acc + for prod in mean pmmn avrg lpmm prob eas system ; do + + if [ $prod = system ] ; then + acum="03h 24h" + else + acum="01h 03h 24h" + fi - for prod in mean pmmn avrg lpmm prob eas system ; do + for acc in $acum ; do + + obsv=$obsvtype$acc - PROD=`echo $prod | tr '[a-z]' '[A-Z]'` #************************************ @@ -31,136 +45,193 @@ for obsvtype in ccpa mrms ; do # *********************************** if [ $acc = 24h ] ; then - if [ $obsvtype = ccpa ] ; then - export fhrs="24 30 36 42 48" - export vhrs="12" - elif [ $obsvtype = mrms ] ; then - export fhrs="24 30 36 42 48" - export vhrs="00 06 12 18" - fi + export vhrs="12" else - if [ $prod = system ] ; then - export fhrs="03 06 09 12 15 18 21 24 27 30 33 36 39 42 45 48" + if [ $prod = system ] ; then + #Note: system is verified every 3 hrs export vhrs="00 03 06 09 12 15 18 21" else if [ $acc = 01h ] ; then - export fhrs="01 02 03 04 05 06 07 08 09 10 11 12 13 14 15 16 17 18 19 20 21 22 23" export vhrs="00 01 02 03 04 05 06 07 08 09 10 11 12 13 14 15 16 17 18 19 20 21 22 23" elif [ $acc = 03h ] ; then - export fhrs="27 30 33 36 39 42 45 48" export vhrs="00 03 06 09 12 15 18 21" fi fi fi - for fhr in $fhrs; do + if [ $acc = 01h ] || [ $acc = 03h ] ; then + modelpath=$COMHREF + if [ $prod = system ] ; then + extra="verf_g2g" + else + extra="ensprod" + fi + else + if [ $prod = prob ] || [ $prod = eas ] ; then + modelpath=$COMHREF + extra="ensprod" + elif [ $prod = system ] ; then + modelpath=$COMHREF + extra="verf_g2g" + else + modelpath=$WORK + extra="empty" + fi + fi + + for vhr in $vhrs; do + if [ $acc = 24h ] ; then + export fhrs="24 30 36 42 48" + else + if [ 
$prod = system ] ; then + if [ $vhr = 00 ] || [ $vhr = 06 ] || [ $vhr = 12 ] || [ $vhr = 18 ] ; then + export fhrs="06 12 18 24 30 36 42 48" + elif [ $vhr = 03 ] || [ $vhr = 09 ] || [ $vhr = 15 ] || [ $vhr = 21 ] ; then + export fhrs="03 09 15 21 27 33 39 45" + fi + else + if [ $acc = 01h ] ; then + if [ $vhr = 00 ] || [ $vhr = 06 ] || [ $vhr = 12 ] || [ $vhr = 18 ] ; then + export fhrs="06 12 18 24" + elif [ $vhr = 01 ] || [ $vhr = 07 ] || [ $vhr = 13 ] || [ $vhr = 19 ] ; then + export fhrs="01 07 13 19" + elif [ $vhr = 02 ] || [ $vhr = 08 ] || [ $vhr = 14 ] || [ $vhr = 20 ] ; then + export fhrs="02 08 14 20" + elif [ $vhr = 03 ] || [ $vhr = 09 ] || [ $vhr = 15 ] || [ $vhr = 21 ] ; then + export fhrs="03 09 15 21" + elif [ $vhr = 04 ] || [ $vhr = 10 ] || [ $vhr = 16 ] || [ $vhr = 22 ] ; then + export fhrs="04 10 16 22" + elif [ $vhr = 05 ] || [ $vhr = 11 ] || [ $vhr = 17 ] || [ $vhr = 23 ] ; then + export fhrs="05 11 17 23" + fi + elif [ $acc = 03h ] ; then + if [ $vhr = 00 ] || [ $vhr = 06 ] || [ $vhr = 12 ] || [ $vhr = 18 ] ; then + export fhrs="06 12 18 24" + elif [ $vhr = 03 ] || [ $vhr = 09 ] || [ $vhr = 15 ] || [ $vhr = 21 ] ; then + export fhrs="03 09 15 21" + fi + fi + fi + fi + for fhr in $fhrs; do - for vhr in $vhrs; do >run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh ######################################################################################### # Restart check: - # check if this sub-task has been completed in the previous run + # check if this sub-task has been completed in the precious run # if not, do this sub-task, and mark it is completed after it is done # if yes, skip this task ######################################################################################### if [ ! 
-e $COMOUTrestart/${prod}/run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.completed ] ; then - ihr=`$NDATE -$fhr $VDATE$vhr|cut -c 9-10` - if [ "$ihr" -eq "00" ] || [ "$ihr" -eq "12" ] ; then - if [ "$fhr" -ge "45" ] ; then - export nmem=7 - export members=7 - elif [ "$fhr" -ge "39" ] ; then - export nmem=8 - export members=8 - else - export nmem=10 - export members=10 - fi - elif [ "$ihr" -eq "06" ] || [ "$ihr" -eq "18" ] ; then - if [ $obsv = ccpa01h ] || [ $obsv = ccpa03h ] || [ $obsv = ccpa24h ] ; then - if [ "$fhr" -ge "45" ] ; then - export nmem=4 - export members=4 - elif [ "$fhr" -ge "33" ] ; then - export nmem=8 - export members=8 - else - export nmem=10 - export members=10 + iday=`$NDATE -$fhr $VDATE$vhr|cut -c 1-8` + + #Check if input fcst and input_obsv files are available + if [ $extra = "verf_g2g" ] ; then + input_fcst=${modelpath}/href.${iday}/verf_g2g/href.m??.t${ihr}z.${domain}.f${fhr} + elif [ $extra = "ensprod" ] ; then + input_fcst=${modelpath}/href.${iday}/ensprod/href.t${ihr}z.${domain}.${prod}.f${fhr}.grib2 + else + input_fcst=${modelpath}/href.${iday}/href${prod}.t${ihr}z.${grid}.24h.f${fhr}.nc + fi + + input_obsv="$WORK/${obsvtype}.${VDATE}/${obsv}.t${vhr}z.*" + + if [ -s $input_fcst ] && [ -s $input_obsv ] ; then + + if [ $prod = system ] && [ $domain = conus ] ; then + + if [ $ihr = 00 ] || [ $ihr = 12 ] ; then + if [ $fhr -ge 45 ] ; then + mbrs=7 + elif [ $fhr -eq 42 ] || [ $fhr -eq 39 ] ; then + mbrs=8 + else + mbrs=10 + fi + elif [ $ihr = 06 ] || [ $ihr = 18 ] ; then + if [ $fhr -ge 45 ] ; then + mbrs=4 + elif [ $fhr -le 42 ] && [ $fhr -ge 33 ] ; then + mbrs=8 + else + mbrs=10 + fi + fi + + elif [ $prod = system ] && [ $domain = ak ] ; then + + if [ $ihr = 06 ] || [ $ihr = 18 ] ; then + if [ $fhr -ge 45 ] ; then + mbrs=5 + elif [ $fhr -eq 42 ] || [ $fhr -eq 39 ] ; then + mbrs=6 + else + mbrs=8 + fi fi else - if [ "$fhr" -ge "45" ] ; then - export nmem=5 - export members=5 - elif [ "$fhr" -ge "39" ] ; then - export nmem=6 - export 
members=6 - else - export nmem=8 - export members=8 - fi - fi - else - export nmem=10 - export members=10 - fi - echo "export nmem=$nmem" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + mbrs=10 + fi + + echo "#!/bin/ksh" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "set -x" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export mbrs=$mbrs" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - if [ $acc = 24h ] ; then - if [ $obsvtype = ccpa ] ; then - echo "export vbeg=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export vend=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export valid_increment=3600" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export lead='$fhr'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - elif [ $obsvtype = mrms ] ; then - echo "export vbeg=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export vend=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export valid_increment=21600" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export lead='$fhr'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - fi - else - if [ $prod = system ] ; then - # Since HREF members are every 3fhr stored in verf_g2g directory - echo "export vbeg=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export vend=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export valid_increment=10800" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export lead='$fhr'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - else - if [ $acc = 01h ] ; then - echo "export vbeg=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export vend=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export valid_increment=3600" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export lead='$fhr'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - 
elif [ $acc = 03h ] ; then - echo "export vbeg=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export vend=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export valid_increment=10800" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export lead='$fhr'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - fi - fi - fi + if [ $acc = 24h ] ; then + if [ $obsvtype = ccpa ] ; then + echo "export vbeg=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export vend=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export valid_increment=3600" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export lead='$fhr'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + elif [ $obsvtype = mrms ] ; then + echo "export vbeg=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export vend=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export valid_increment=21600" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export lead='$fhr'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + fi + else + if [ $prod = system ] ; then + #Since HREF members are every 3fhr stored in verf_g2g directory + echo "export vbeg=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export vend=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export valid_increment=10800" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export lead='$fhr'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + else + if [ $acc = 01h ] ; then + echo "export vbeg=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export vend=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export valid_increment=3600" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export lead='$fhr'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + elif [ $acc = 03h ] ; then + echo "export vbeg=$vhr" 
>>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export vend=$vhr" >>run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export valid_increment=10800" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export lead='$fhr'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + fi + fi + fi - if [ $prod = system ] ; then + if [ $prod = system ] ; then echo "export MODEL=HREF" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "export model=HREF" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh export MODEL=HREF - else + else echo "export MODEL=HREF_${PROD}" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "export model=HREF_${PROD}" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - export MODEL=HREF_${PROD} - fi + export MODEL=HREF_${PROD} + fi - mkdir -p ${COMOUTsmall}/${MODEL} - echo "export output_base=$WORK/precip/run_href_precip_${prod}.${obsv}" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export obsv=${obsv}" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export obsvpath=$WORK" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "export OBTYPE=${obsv}" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + mkdir -p ${COMOUTsmall}/${MODEL} + echo "export output_base=$WORK/precip/run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export obsv=${obsv}" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export obsvpath=$WORK" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export OBTYPE=${obsv}" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - if [ $obsv = ccpa01h ] ; then + if [ $obsv = ccpa01h ] ; then echo "export name=APCP" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "export name_obsv=APCP" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "export level=A01" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh @@ -172,7 +243,7 @@ for obsvtype in 
ccpa mrms ; do echo "export thresh='ge0.254, ge2.54, ge6.35, ge12.7, ge25.4, ge50.8'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh fi - elif [ $obsv = mrms01h ] ; then + elif [ $obsv = mrms01h ] ; then echo "export name=APCP" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "export name_obsv=APCP_01" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "export level=A01" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh @@ -184,7 +255,7 @@ for obsvtype in ccpa mrms ; do echo "export thresh='ge0.254, ge2.54, ge6.35, ge12.7, ge25.4, ge50.8'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh fi - elif [ $obsv = ccpa03h ] ; then + elif [ $obsv = ccpa03h ] ; then echo "export name=APCP" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "export name_obsv=APCP" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "export level=A03" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh @@ -196,7 +267,7 @@ for obsvtype in ccpa mrms ; do echo "export thresh=' ge2.54, ge6.35, ge12.7, ge25.4, ge76.2 '" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh fi - elif [ $obsv = mrms03h ] ; then + elif [ $obsv = mrms03h ] ; then echo "export name=APCP" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "export name_obsv=APCP_03" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "export level=A03" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh @@ -208,7 +279,7 @@ for obsvtype in ccpa mrms ; do echo "export thresh=' ge2.54, ge6.35, ge12.7, ge25.4, ge50.8, ge76.2 '" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh fi - elif [ $obsv = ccpa24h ] ; then + elif [ $obsv = ccpa24h ] ; then if [ $prod = system ] || [ $prod = prob ] || [ $prod = eas ] ; then echo "export name=APCP" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "export name_obsv=APCP_24" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh @@ -225,7 +296,7 @@ for obsvtype in ccpa mrms ; do echo "export thresh='ge12.7, ge25.4, ge50.8'" 
>> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh fi - elif [ $obsv = mrms24h ] ; then + elif [ $obsv = mrms24h ] ; then if [ $prod = system ] || [ $prod = prob ] || [ $prod = eas ] ; then echo "export name=APCP" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "export name_obsv=APCP_24" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh @@ -242,9 +313,9 @@ for obsvtype in ccpa mrms ; do echo "export thresh='ge12.7, ge25.4, ge50.8, ge76.2, ge127'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh fi - fi + fi - if [ $obsv = ccpa01h ] ; then + if [ $obsv = ccpa01h ] ; then echo "export obsvtail=grib2" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh if [ $prod = prob ] || [ $prod = eas ] ; then echo "export modelgrid=conus.${prod}" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh @@ -264,7 +335,7 @@ for obsvtype in ccpa mrms ; do echo "export extradir='ensprod/'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh fi - elif [ $obsv = mrms01h ] ; then + elif [ $obsv = mrms01h ] ; then echo "export obsvtail=nc" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh if [ $prod = prob ] || [ $prod = eas ] ; then echo "export modelgrid=ak.${prod}" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh @@ -284,7 +355,7 @@ for obsvtype in ccpa mrms ; do echo "export extradir='ensprod/'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh fi - elif [ $obsv = ccpa03h ] ; then + elif [ $obsv = ccpa03h ] ; then echo "export obsvtail=grib2" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh if [ $prod = prob ] || [ $prod = eas ] ; then echo "export modelgrid=conus.${prod}" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh @@ -304,7 +375,7 @@ for obsvtype in ccpa mrms ; do echo "export extradir='ensprod/'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh fi - elif [ $obsv = mrms03h ] ; then + elif [ $obsv = mrms03h ] ; then if [ $prod = prob ] || [ $prod = eas ] ; then echo "export modelgrid=ak.${prod}" >> 
run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh elif [ $prod = system ] ; then @@ -323,7 +394,7 @@ for obsvtype in ccpa mrms ; do echo "export extradir='ensprod/'" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh fi - elif [ $obsv = ccpa24h ] ; then + elif [ $obsv = ccpa24h ] ; then echo "export obsvtail=nc" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh if [ $prod = prob ] || [ $prod = eas ] ; then echo "export modelhead=href" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh @@ -344,7 +415,7 @@ for obsvtype in ccpa mrms ; do echo "export modelgrid=G227.24h" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh fi - elif [ $obsv = mrms24h ] ; then + elif [ $obsv = mrms24h ] ; then echo "export obsvtail=nc" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh if [ $prod = prob ] || [ $prod = eas ] ; then echo "export modelhead=href" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh @@ -365,57 +436,70 @@ for obsvtype in ccpa mrms ; do echo "export modelgrid=G255.24h" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh fi - else + else err_exit "$obsv is not a valid obsv" - fi + fi - echo "export verif_grid='' " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - if [ $prod = prob ] || [ $prod = eas ] ; then + echo "export verif_grid='' " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + + if [ $prod = prob ] || [ $prod = eas ] ; then if [ $obsvtype = ccpa ] ; then echo "export verif_poly='${maskpath}/Bukovsky_G227_CONUS.nc, ${maskpath}/Bukovsky_G227_CONUS_East.nc, ${maskpath}/Bukovsky_G227_CONUS_West.nc, ${maskpath}/Bukovsky_G227_CONUS_South.nc, ${maskpath}/Bukovsky_G227_CONUS_Central.nc' " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo " export err=\$?; err_chk" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${PRECIP_CONF}/GridStat_fcstHREFprob_obsCCPA_G227.conf " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo 
" export err=\$?; err_chk" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh else - echo "export verif_poly='${maskpath}/Alaska_HREF.nc' " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export verif_poly='${maskpath}/Alaska_HREF.nc' " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${PRECIP_CONF}/GridStat_fcstHREFprob_obsMRMS_G255.conf " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo " export err=\$?; err_chk" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh fi - elif [ $prod = system ] ; then + elif [ $prod = system ] ; then if [ $obsvtype = ccpa ] ; then echo "export verif_poly='${maskpath}/Bukovsky_G227_CONUS.nc, ${maskpath}/Bukovsky_G227_CONUS_East.nc, ${maskpath}/Bukovsky_G227_CONUS_West.nc, ${maskpath}/Bukovsky_G227_CONUS_South.nc, ${maskpath}/Bukovsky_G227_CONUS_Central.nc' " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${PRECIP_CONF}/EnsembleStat_fcstHREF_obsCCPA_G227.conf " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo " export err=\$?; err_chk" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh else - echo "export verif_poly='${maskpath}/Alaska_HREF.nc' " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export verif_poly='${maskpath}/Alaska_HREF.nc' " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${PRECIP_CONF}/EnsembleStat_fcstHREF_obsMRMS_G255.conf " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo " export err=\$?; err_chk" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh fi - else + else if [ $obsvtype = ccpa ] ; then - echo "export verif_poly='${maskpath}/Bukovsky_G212_CONUS.nc, ${maskpath}/Bukovsky_G212_CONUS_East.nc, ${maskpath}/Bukovsky_G212_CONUS_West.nc, 
${maskpath}/Bukovsky_G212_CONUS_South.nc, ${maskpath}/Bukovsky_G212_CONUS_Central.nc' " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "export verif_poly='${maskpath}/Bukovsky_G212_CONUS.nc, ${maskpath}/Bukovsky_G212_CONUS_East.nc, ${maskpath}/Bukovsky_G212_CONUS_West.nc, ${maskpath}/Bukovsky_G212_CONUS_South.nc, ${maskpath}/Bukovsky_G212_CONUS_Central.nc' " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${PRECIP_CONF}/GridStat_fcstHREFmean_obsCCPA_G212.conf " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "export verif_poly='${maskpath}/Bukovsky_G240_CONUS.nc, ${maskpath}/Bukovsky_G240_CONUS_East.nc, ${maskpath}/Bukovsky_G240_CONUS_West.nc, ${maskpath}/Bukovsky_G240_CONUS_South.nc, ${maskpath}/Bukovsky_G240_CONUS_Central.nc' " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${PRECIP_CONF}/GridStat_fcstHREFmean_obsCCPA_G240.conf " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo " export err=\$?; err_chk" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh else echo "export verif_poly='${maskpath}/Alaska_G216.nc' " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${PRECIP_CONF}/GridStat_fcstHREFmean_obsMRMS_G216.conf " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo " export err=\$?; err_chk" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "export verif_poly='${maskpath}/Alaska_G091.nc' " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${PRECIP_CONF}/GridStat_fcstHREFmean_obsMRMS_G91.conf " >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo " export err=\$?; err_chk" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh fi - fi - 
if [ $prod = system ] ; then + fi + if [ $prod = system ] ; then echo "for FILEn in \$output_base/stat/${MODEL}/ensemble_stat_${MODEL}_*_${obsv}_FHR0${fhr}_${VDATE}_${vhr}0000V.stat; do if [ -f \"\$FILEn\" ]; then cp -v \$FILEn $COMOUTsmall/${MODEL}; fi; done" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - else + else echo "for FILEn in \$output_base/stat/${MODEL}/grid_stat_${MODEL}_${obsv}_*_${fhr}0000L_${VDATE}_${vhr}0000V.stat; do if [ -f \"\$FILEn\" ]; then cp -v \$FILEn $COMOUTsmall/${MODEL}; fi; done" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - fi + fi + + #Mark the completion of this sub-task for restart: + echo "if [ $SENDCOM = YES ] ; then" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo " [[ \$? = 0 ]] && >$COMOUTrestart/${prod}/run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.completed" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "fi" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - #Mark the completion of this sub-task for restart: - echo "[[ \$? 
= 0 ]] && >$COMOUTrestart/${prod}/run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.completed" >> run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + chmod +x run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh + echo "${DATA}/scripts/run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh" >> run_all_href_precip_poe.sh + + fi #end if check input files - chmod +x run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh - echo "${DATA}/run_href_precip_${prod}.${obsv}.f${fhr}.v${vhr}.sh" >> run_all_href_precip_poe.sh - fi #end if check restart done #end of vhr diff --git a/ush/cam/evs_href_prepare.sh b/ush/cam/evs_href_prepare.sh index ca5949ab9c..75f30bb5f6 100755 --- a/ush/cam/evs_href_prepare.sh +++ b/ush/cam/evs_href_prepare.sh @@ -190,11 +190,12 @@ if [ "$data" = "ccpa24h" ] ; then echo "Missing file is ${COMCCPA}/ccpa.${prevday}/18/ccpa.t18z.06h.hrap.conus.gb2\n" >> $DATA/job${data}${domain}_missing_24hrccpa_list fi if [ -s $ccpa24/ccpa1 ] && [ -s $ccpa24/ccpa2 ] && [ -s $ccpa24/ccpa3 ] && [ -s $ccpa24/ccpa4 ] ; then - ${METPLUS_PATH}/ush/master_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${PRECIP_CONF}/PcpCombine_obsCCPA24h.conf + ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${PRECIP_CONF}/PcpCombine_obsCCPA24h.conf export err=$?; err_chk mkdir -p ${COMOUTfinal}/precip_mean24 - cp ${WORK}/ccpa.${vday}/ccpa24h.t12z.G240.nc ${COMOUTfinal}/precip_mean24 - + if [ -s ${WORK}/ccpa.${vday}/ccpa24h.t12z.G240.nc ] ; then + cp ${WORK}/ccpa.${vday}/ccpa24h.t12z.G240.nc ${COMOUTfinal}/precip_mean24 + fi #For restart: [[ ! 
-e $COMOUTrestart/prepare/ccpa.${vday} ]] && mkdir -p $COMOUTrestart/prepare/ccpa.${vday} if [ -s $WORK/ccpa.${vday}/*24h*.nc ] ; then @@ -367,20 +368,33 @@ if [ "$data" = "prepbufr" ] ; then export vend=${vhr} export verif_grid=$grid - if [ "$lvl" = "sfc" ] ; then + >$WORK/prepbufr.$vday/rap.t${vhr}z.${grid}.prepbufr + split_by_subset $COMINobsproc/rap.${VDATE}/rap.t${vhr}z.prepbufr.tm00 + for subset in ADPUPA ADPSFC SFCSHP MSONET ; do + if [ -s ${WORK}/${subset} ] ; then + cat ${WORK}/${subset} >> $WORK/prepbufr.$vday/rap.t${vhr}z.${grid}.prepbufr + rm -f ${WORK}/${subset} + fi + done + + export bufrpath=$WORK + + if [ -s $WORK/prepbufr.$vday/rap.t${vhr}z.${grid}.prepbufr ] ; then + if [ "$lvl" = "sfc" ] ; then ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/Pb2nc_obsRAP_Prepbufr_href.conf export err=$?; err_chk - elif [ "$lvl" = "profile" ] ; then + elif [ "$lvl" = "profile" ] ; then ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/Pb2nc_obsRAP_Prepbufr_href_profile.conf export err=$?; err_chk - elif [ "$lvl" = "both" ] ; then + elif [ "$lvl" = "both" ] ; then ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/Pb2nc_obsRAP_Prepbufr_href.conf export err=$?; err_chk if [ "$vhr" = "00" ] || [ "$vhr" = "12" ] ; then ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/Pb2nc_obsRAP_Prepbufr_href_profile.conf - export err=$?; err_chk + export err=$?; err_chk fi - fi + fi + fi done done @@ -406,8 +420,12 @@ if [ "$data" = "prepbufr" ] ; then else #restart: copy restart files to the working directory [[ ! 
-d $WORK/prepbufr.${vday} ]] && mkdir -p $WORK/prepbufr.${vday} - cp $COMOUTrestart/prepare/prepbufr.${VDATE}/*G227*.nc $WORK/prepbufr.${vday} - cp $COMOUTrestart/prepare/prepbufr.${VDATE}/*G198*.nc $WORK/prepbufr.${vday} + if [ -s $COMOUTrestart/prepare/prepbufr.${VDATE}/*G227*.nc ] ; then + cp $COMOUTrestart/prepare/prepbufr.${VDATE}/*G227*.nc $WORK/prepbufr.${vday} + fi + if [ -s $COMOUTrestart/prepare/prepbufr.${VDATE}/*G198*.nc ] ; then + cp $COMOUTrestart/prepare/prepbufr.${VDATE}/*G198*.nc $WORK/prepbufr.${vday} + fi fi fi @@ -442,11 +460,18 @@ if [ "$data" = "gfs_prepbufr" ] ; then >$WORK/prepbufr.$vday/gdas.t${vhr}z.${grid}.prepbufr split_by_subset $COMINobsproc/gdas.${vday}/${vhr}/atmos/gdas.t${vhr}z.prepbufr - cat $WORK/ADPUPA $WORK/ADPSFC >> $WORK/prepbufr.$vday/gdas.t${vhr}z.${grid}.prepbufr + for subset in ADPUPA ADPSFC SFCSHP MSONET ; do + if [ -s ${WORK}/${subset} ] ; then + cat ${WORK}/${subset} >> $WORK/prepbufr.$vday/gdas.t${vhr}z.${grid}.prepbufr + rm -f ${WORK}/${subset} + fi + done export bufrpath=$WORK - ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/Pb2nc_obsGDAS_Prepbufr_href_profile.conf - export err=$?; err_chk + if [ -s $WORK/prepbufr.$vday/gdas.t${vhr}z.${grid}.prepbufr ] ; then + ${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/Pb2nc_obsGDAS_Prepbufr_href_profile.conf + export err=$?; err_chk + fi done if [ -s ${WORK}/pb2nc/prepbufr_nc/*${grid}.nc ] ; then cp ${WORK}/pb2nc/prepbufr_nc/*${grid}.nc $WORK/prepbufr.$vday @@ -454,7 +479,7 @@ if [ "$data" = "gfs_prepbufr" ] ; then done #For restart - if [ $? 
= 0 ] ; then + if [ -s ${WORK}/pb2nc/prepbufr_nc/*.nc ] ; then cp ${WORK}/pb2nc/prepbufr_nc/*.nc $COMOUTrestart/prepare/prepbufr.${vday} >$COMOUTrestart/prepare/gfs_prepbufr.completed fi diff --git a/ush/cam/evs_href_snowfall.sh b/ush/cam/evs_href_snowfall.sh index 59241c6145..383981c4ed 100755 --- a/ush/cam/evs_href_snowfall.sh +++ b/ush/cam/evs_href_snowfall.sh @@ -2,16 +2,16 @@ #************************************************************************************ # Purpose: Generate href snowfall poe and sub-jobs files # Last update: -# 05/07/2024, add restart, Binbin Zhou Lynker@EMC/NCEP -# 10/30/2023, by Binbin Zhou Lynker@EMC/NCEP +# 01/10/2025, add MPMD, by Binbin Zhou Lynker@EMC/NCEP +# 05/30/2024, by Binbin Zhou Lynker@EMC/NCEP #*********************************************************************************** set -x #******************************************* # Build POE script to collect sub-jobs #****************************************** -export members=10 export write_job_cards=yes +cd $DATA/scripts >run_all_href_snowfall_poe.sh mkdir -p $COMOUTsmall/HREF_SNOW @@ -51,41 +51,43 @@ for obsv in 6h 24h ; do #################################################################################### # Restart check: - # check if this sub-task has been completed in the previous run + # check if this sub-task has been completed in the precious run # if not, do this sub-task, and mark it is completed after it is done # if yes, skip this task ##################################################################################### - if [ ! -e $COMOUTrestart/snow/run_href_snow${obsv}.${fhr}.${vhr}.completed ] ; then - - ihr=`$NDATE -$fhr $VDATE$vhr|cut -c 9-10` - if [ "$ihr" -eq "00" ] || [ "$ihr" -eq "12" ] ; then - if [ "$fhr" -ge "45" ] ; then - export nmem=7 - export members=7 - elif [ "$fhr" -ge "39" ] ; then - export nmem=8 - export members=8 + if [ ! 
-e $COMOUTrestart/snow/run_href_snow${obsv}.${fhr}.${vhr}.completed ] ; then + + ihr=`$NDATE -$fhr $VDATE$vhr|cut -c 9-10` + iday=`$NDATE -$fhr $VDATE$vhr|cut -c 1-8` + + input_fcst=$COMINhref/href.${iday}/verf_g2g/href.*.t${ihr}z.conus.f${fhr} + input_obsv=$DCOMINsnow/${VDATE}/wgrbbul/nohrsc_snowfall/sfav2_CONUS_${obsv}_${VDATE}${vhr}_grid184.grb2 + + if [ -s $input_fcst ] && [ -s $input_obsv ] ; then + + + if [ $ihr = 00 ] || [ $ihr = 12 ] ; then + if [ $fhr -ge 45 ] ; then + mbrs=7 + elif [ $fhr -eq 42 ] || [ $fhr -eq 39 ] ; then + mbrs=8 else - export nmem=10 - export members=10 + mbrs=10 fi - elif [ "$ihr" -eq "06" ] || [ "$ihr" -eq "18" ] ; then - if [ "$fhr" -ge "45" ] ; then - export nmem=4 - export members=4 - elif [ "$fhr" -ge "33" ] ; then - export nmem=8 - export members=8 + elif [ $ihr = 06 ] || [ $ihr = 18 ] ; then + if [ $fhr -ge 45 ] ; then + mbrs=4 + elif [ $fhr -le 42 ] && [ $fhr -ge 33 ] ; then + mbrs=8 else - export nmem=10 - export members=10 + mbrs=10 fi - else - export nmem=10 - export members=10 - fi + fi - echo "export nmem=$nmem" >> run_href_snow${obsv}.${fhr}.${vhr}.sh + + echo "#!/bin/ksh" >> run_href_snow${obsv}.${fhr}.${vhr}.sh + echo "set -x" >> run_href_snow${obsv}.${fhr}.${vhr}.sh + echo "export mbrs=$mbrs" >> run_href_snow${obsv}.${fhr}.${vhr}.sh echo "export regrid=G212" >> run_href_snow${obsv}.${fhr}.${vhr}.sh echo "export output_base=$WORK/precip/run_href_snow${obsv}" >> run_href_snow${obsv}.${fhr}.${vhr}.sh @@ -122,28 +124,30 @@ for obsv in 6h 24h ; do echo "export modeltail=''" >> run_href_snow${obsv}.${fhr}.${vhr}.sh echo "export extradir='verf_g2g/'" >> run_href_snow${obsv}.${fhr}.${vhr}.sh - echo "export verif_grid='' " >> run_href_snow${obsv}.${fhr}.${vhr}.sh echo "export verif_poly='${maskpath}/Bukovsky_NOHRSC_CONUS.nc, ${maskpath}/Bukovsky_NOHRSC_CONUS_East.nc, ${maskpath}/Bukovsky_NOHRSC_CONUS_West.nc, ${maskpath}/Bukovsky_NOHRSC_CONUS_South.nc, ${maskpath}/Bukovsky_NOHRSC_CONUS_Central.nc' " >> 
run_href_snow${obsv}.${fhr}.${vhr}.sh echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${SNOWFALL_CONF}/GenEnsProd_fcstHREF_obsNOHRSC.conf " >> run_href_snow${obsv}.${fhr}.${vhr}.sh - + echo " export err=\$?; err_chk" >> run_href_snow${obsv}.${fhr}.${vhr}.sh echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${SNOWFALL_CONF}/EnsembleStat_fcstHREF_obsNOHRSC.conf " >> run_href_snow${obsv}.${fhr}.${vhr}.sh - + echo " export err=\$?; err_chk" >> run_href_snow${obsv}.${fhr}.${vhr}.sh echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${SNOWFALL_CONF}/GridStat_fcstHREFmean_obsNOHRSC_G212.conf " >> run_href_snow${obsv}.${fhr}.${vhr}.sh - + echo " export err=\$?; err_chk" >> run_href_snow${obsv}.${fhr}.${vhr}.sh echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${SNOWFALL_CONF}/GridStat_fcstHREFmean_obsNOHRSC_NOHRSCgrid.conf " >> run_href_snow${obsv}.${fhr}.${vhr}.sh - + echo " export err=\$?; err_chk" >> run_href_snow${obsv}.${fhr}.${vhr}.sh echo "for FILEn in \$output_base/stat/\${MODEL}/ensemble_stat_\${MODEL}_*_${obsv}_FHR0${fhr}_${VDATE}_${vhr}0000V.stat; do if [ -f \"\$FILEn\" ]; then cp -v \$FILEn $COMOUTsmall/HREF_SNOW; fi; done" >> run_href_snow${obsv}.${fhr}.${vhr}.sh echo "for FILEn in \$output_base/stat/\${MODEL}/grid_stat_\${MODEL}_${obsv}_*_${fhr}0000L_${VDATE}_${vhr}0000V.stat; do if [ -f \"\$FILEn\" ]; then cp -v \$FILEn $COMOUTsmall/HREF_SNOW; fi; done" >> run_href_snow${obsv}.${fhr}.${vhr}.sh echo "for FILEn in \$output_base/stat/\${MODEL}/grid_stat_\${MODEL}${obsv}_*_${fhr}0000L_${VDATE}_${vhr}0000V.stat; do if [ -f \"\$FILEn\" ]; then cp -v \$FILEn $COMOUTsmall/HREF_SNOW; fi; done" >> run_href_snow${obsv}.${fhr}.${vhr}.sh #Mark this task is completed for restart - echo "[[ \$? 
= 0 ]] && >$COMOUTrestart/snow/run_href_snow${obsv}.${fhr}.${vhr}.completed" >> run_href_snow${obsv}.${fhr}.${vhr}.sh + echo "if [ $SENDCOM = YES ] ; then" >> run_href_snow${obsv}.${fhr}.${vhr}.sh + echo " [[ \$? = 0 ]] && >$COMOUTrestart/snow/run_href_snow${obsv}.${fhr}.${vhr}.completed" >> run_href_snow${obsv}.${fhr}.${vhr}.sh + echo "fi" >> run_href_snow${obsv}.${fhr}.${vhr}.sh chmod +x run_href_snow${obsv}.${fhr}.${vhr}.sh - echo "${DATA}/run_href_snow${obsv}.${fhr}.${vhr}.sh" >> run_all_href_snowfall_poe.sh + echo "${DATA}/scripts/run_href_snow${obsv}.${fhr}.${vhr}.sh" >> run_all_href_snowfall_poe.sh + fi fi #end if check restart done #end of vhr diff --git a/ush/cam/evs_href_spcoutlook.sh b/ush/cam/evs_href_spcoutlook.sh index a997484088..d841b8ecf5 100755 --- a/ush/cam/evs_href_spcoutlook.sh +++ b/ush/cam/evs_href_spcoutlook.sh @@ -2,7 +2,10 @@ #************************************************************************************ # Purpose: Generate href spcoutlook job's poe and sub-jobs files # and system (ecnt line type) -# Last update: 10/30/2023, by Binbin Zhou Lynker@EMC/NCEP +# +# Last update: +# 01/10/2025, add MPMD, by Binbin Zhou Lynker@EMC/NCEP +# 10/30/2024, by Binbin Zhou Lynker@EMC/NCEP #*********************************************************************************** set -x @@ -87,6 +90,7 @@ cd $WORK #******************************************* # Build POE script to collect sub-jobs #****************************************** +cd $DATA/scripts >run_all_href_spcoutlook_poe.sh obsv='prepbufr' @@ -99,65 +103,83 @@ for prod in mean ; do for dom in CONUS ; do - for valid in 0 12 ; do + for valid in 00 12 ; do export domain=$dom + for fhr in 06 12 18 24 30 36 42 48 ; do + + ihr=`$NDATE -$fhr $VDATE$valid|cut -c 9-10` + iday=`$NDATE -$fhr $VDATE$valid|cut -c 1-8` + + input_fcst="$COMINhref/href.${iday}/ensprod/href.t${ihr}z.conus.${prod}.f${fhr}.grib2" + input_obsv="$WORK/prepbufr.${VDATE}/prepbufr.t${valid}z.G227.nc" + + if [ -s $input_fcst ] && [ 
-s $input_obsv ] ; then + #****************************** # Build sub-jobs # ***************************** - >run_href_${model}.${dom}.${valid}_spcoutlook.sh + >run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh ####################################################################### #Restart check: - # check if this task has been completed in the previous run + # check if this task has been completed in the precious run # if not, run this task, and then mark its completion, # otherwise, skip this task ######################################################################## - if [ ! -e $COMOUTrestart/spcoutlook/run_href_${model}.${dom}.${valid}_spcoutlook.completed ] ; then + if [ ! -e $COMOUTrestart/spcoutlook/run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.completed ] ; then + + echo "#!/bin/ksh" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "set -x" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "export model=HREF${prod} " >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "export domain=$dom " >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "export regrid=G227" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + + echo "export output_base=${WORK}/grid2obs/run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "export OBTYPE='PREPBUFR'" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "export domain=CONUS" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "export obsvgrid=G227" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh - echo "export model=HREF${prod} " >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export domain=$dom " >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export regrid=G227" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh + echo "export modelgrid=conus.${prod}" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh - echo "export 
output_base=${WORK}/grid2obs/run_href_${model}.${dom}.${valid}_spcoutlook" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export OBTYPE='PREPBUFR'" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export domain=CONUS" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export obsvgrid=G227" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh + echo "export obsvhead=$obsv" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "export obsvpath=$WORK" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh - echo "export modelgrid=conus.${prod}" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh + echo "export vbeg=$valid" >>run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "export vend=$valid" >>run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "export valid_increment=3600" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "export lead=$fhr" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "export MODEL=HREF_${PROD}" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "export regrid=G227" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "export modelhead=$model" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "export modelpath=$COMHREF" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "export modeltail='.grib2'" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "export extradir='ensprod/'" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh - echo "export obsvhead=$obsv" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export obsvpath=$WORK" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh + echo "export verif_grid=''" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh - echo "export vbeg=$valid" >>run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export vend=$valid" >>run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export valid_increment=3600" >> 
run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export lead='6,12,18,24,30,36,42,48'" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export MODEL=HREF_${PROD}" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export regrid=G227" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export modelhead=$model" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export modelpath=$COMHREF" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export modeltail='.grib2'" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export extradir='ensprod/'" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh + echo "export verif_poly='$spc_otlk_masks'" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh - echo "export verif_grid=''" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh + echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/PointStat_fcstHREF${prod}_obsPREPBUFR_SPCoutlook.conf " >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh - echo "export verif_poly='$spc_otlk_masks'" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh + echo "cp \$output_base/stat/\${MODEL}/*.stat $COMOUTsmall" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh - echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/PointStat_fcstHREF${prod}_obsPREPBUFR_SPCoutlook.conf " >> run_href_${model}.${dom}.${valid}_spcoutlook.sh + #Mark this task is completed + echo "if [ $SENDCOM = YES ] ; then" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo " [[ \$? 
= 0 ]] && >$COMOUTrestart/spcoutlook/run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.completed" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "fi" >> run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh - echo "cp \$output_base/stat/\${MODEL}/*.stat $COMOUTsmall" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh + chmod +x run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh + echo "${DATA}/scripts/run_href_${model}.${dom}.${valid}.${fhr}_spcoutlook.sh" >> run_all_href_spcoutlook_poe.sh - #Mark this Alaska task is completed - echo "[[ \$? = 0 ]] && >$COMOUTrestart/spcoutlook/run_href_${model}.${dom}.${valid}_spcoutlook.completed" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh + fi - chmod +x run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "${DATA}/run_href_${model}.${dom}.${valid}_spcoutlook.sh" >> run_all_href_spcoutlook_poe.sh + fi #end if check restart - fi #end if check restart + done #end of fhr - done # end of valid + done # end of valid done #end of dom loop diff --git a/ush/cam/evs_href_spcoutlook_cape.sh b/ush/cam/evs_href_spcoutlook_cape.sh deleted file mode 100755 index 415d8c168c..0000000000 --- a/ush/cam/evs_href_spcoutlook_cape.sh +++ /dev/null @@ -1,101 +0,0 @@ -#!/bin/ksh -set -x - -#Binbin note: If METPLUS_BASE, PARM_BASE not set, then they will be set to $METPLUS_PATH -# by config_launcher.py in METplus-3.0/ush -# why config_launcher.py is not in METplus-3.1/ush ??? 
- - -############################################################ - -cd $SPCoutlookMask - -files=`ls *_00Z.nc` -set -A file $files -len=${#file[@]} - -verif_poly_00Z=$SPCoutlookMask/${file[0]} - -for (( i=1; i<$len; i++ )); do - mask="${file[$i]}" - export verif_poly_00Z="$verif_poly_00Z, $SPCoutlookMask/${mask}" -done - - -files=`ls *_12Z.nc` -set -A file $files -len=${#file[@]} - -verif_poly_12Z=$SPCoutlookMask/${file[0]} -for (( i=1; i<$len; i++ )); do - mask="${file[$i]}" - export verif_poly_12Z="$verif_poly_12Z, $SPCoutlookMask/${mask}" -done - -cd $WORK - ->run_all_href_spcoutlook_poe.sh - -obsv='prepbufr' - -for prod in mean ; do - - PROD=`echo $prod | tr '[a-z]' '[A-Z]'` - - model=HREF${prod} - - for dom in CONUS ; do - - for valid in 0 12 ; do - - export domain=$dom - - >run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export model=HREF${prod} " >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export domain=$dom " >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export regrid=G227" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - - echo "export output_base=${WORK}/grid2obs/run_href_${model}.${dom}.${valid}_spcoutlook" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export OBTYPE='PREPBUFR'" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export domain=CONUS" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export obsvgrid=G227" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - - echo "export modelgrid=conus.${prod}" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - - echo "export obsvhead=$obsv" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export obsvpath=$WORK" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - - echo "export vbeg=$valid" >>run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export vend=$valid" >>run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export valid_increment=3600" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export 
lead='6,12,18,24,30,36,42,48'" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export MODEL=HREF_${PROD}" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export regrid=G227" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export modelhead=$model" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export modelpath=$COMHREF" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export modeltail='.grib2'" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "export extradir='ensprod/'" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - - echo "export verif_grid=''" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - - if [ $valid = 0 ] ; then - echo "export verif_poly='$verif_poly_00Z'" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - elif [ $valid = 12 ] ; then - echo "export verif_poly='$verif_poly_12Z'" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - fi - - echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/PointStat_fcstHREF${prod}_obsPREPBUFR_SPCoutlook.conf " >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - #echo "${METPLUS_PATH}/ush/run_metplus.py -c ${PARMevs}/metplus_config/machine.conf -c ${GRID2OBS_CONF}/PointStat_fcstHREF${prod}_obsPREPBUFR_SFC.conf " >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - - echo "cp \$output_base/stat/\${MODEL}/*.stat $COMOUTsmall" >> run_href_${model}.${dom}.${valid}_spcoutlook.sh - - chmod +x run_href_${model}.${dom}.${valid}_spcoutlook.sh - echo "run_href_${model}.${dom}.${valid}_spcoutlook.sh" >> run_all_href_spcoutlook_poe.sh - - done # end of valid - - done #end of dom loop - -done #end of prod loop - -chmod 775 run_all_href_spcoutlook_poe.sh diff --git a/ush/cam/ush_href_plot_py/href_atmos_plots.py b/ush/cam/ush_href_plot_py/href_atmos_plots.py index 343836828a..254e9757ba 100644 --- a/ush/cam/ush_href_plot_py/href_atmos_plots.py +++ b/ush/cam/ush_href_plot_py/href_atmos_plots.py @@ -14,7 +14,6 @@ 
import subprocess import itertools import shutil -import href_atmos_util as gda_util from href_atmos_plots_specs import PlotSpecs print("BEGIN: "+os.path.basename(__file__)) @@ -106,25 +105,9 @@ logger.info(logger_info) if len(model_list) > 10: - logger.error("FATAL ERROR: TOO MANY MODELS LISTED ("+str(len(model_list)) + logger.error("TOO MANY MODELS LISTED ("+str(len(model_list)) +", ["+', '.join(model_list)+"]), maximum is 10") sys.exit(1) -''' -# Condense .stat files -logger.info("Condensing model .stat files for job") -for model_idx in range(len(model_list)): - model = model_list[model_idx] - condensed_model_stat_file = os.path.join(job_output_dir, 'model' - +str(model_idx+1)+'_'+model - +'.stat') - if VERIF_CASE == 'grid2grid' and VERIF_TYPE == 'pres_levs': - obs_name = truth_name_list[model_idx] - - gda_util.condense_model_stat_files(logger, stat_base_dir, - condensed_model_stat_file, model, - obs_name, grid, vx_mask, fcst_var_name, - obs_var_name, line_type) -''' # Set up model information dictionary original_model_info_dict = {} @@ -184,7 +167,7 @@ for v in range(len(fcst_var_prod)): var_info.append((fcst_var_prod[v], obs_var_prod[v])) else: - logger.error("FATAL ERROR: FORECAST AND OBSERVATION VARIABLE INFORMATION NOT THE " + logger.error("FORECAST AND OBSERVATION VARIABLE INFORMATION NOT THE " +"SAME LENGTH") sys.exit(1) @@ -201,325 +184,7 @@ date_info_dict = original_date_info_dict.copy() plot_info_dict = original_plot_info_dict.copy() met_info_dict = original_met_info_dict.copy() - if plot == 'time_series': - import href_atmos_plots_time_series as gdap_ts - for ts_info in \ - list(itertools.product(valid_hrs, fhrs, var_info, - interp_points_list)): - date_info_dict['valid_hr_start'] = str(ts_info[0]) - date_info_dict['valid_hr_end'] = str(ts_info[0]) - date_info_dict['valid_hr_inc'] = '24' - date_info_dict['forecast_hour'] = str(ts_info[1]) - plot_info_dict['fcst_var_name'] = ts_info[2][0][0] - plot_info_dict['fcst_var_level'] = ts_info[2][0][1] - 
plot_info_dict['fcst_var_thresh'] = ts_info[2][0][2] - plot_info_dict['obs_var_name'] = ts_info[2][1][0] - plot_info_dict['obs_var_level'] = ts_info[2][1][1] - plot_info_dict['obs_var_thresh'] = ts_info[2][1][2] - plot_info_dict['interp_points'] = str(ts_info[3]) - init_hr = gda_util.get_init_hour( - int(date_info_dict['valid_hr_start']), - int(date_info_dict['forecast_hour']) - ) - image_name = plot_specs.get_savefig_name( - os.path.join(job_output_dir, 'images'), - plot_info_dict, date_info_dict - ) - if init_hr in init_hrs: - if not os.path.exists(image_name): - make_ts = True - else: - make_ts = False - else: - make_ts = False - if make_ts: - plot_ts = gdap_ts.TimeSeries(logger, job_output_dir, - job_output_dir, model_info_dict, - date_info_dict, plot_info_dict, - met_info_dict, logo_dir) - plot_ts.make_time_series() - elif plot == 'lead_average': - import href_atmos_plots_lead_average as gdap_la - for la_info in \ - list(itertools.product(valid_hrs, var_info, - interp_points_list)): - date_info_dict['valid_hr_start'] = str(la_info[0]) - date_info_dict['valid_hr_end'] = str(la_info[0]) - date_info_dict['valid_hr_inc'] = '24' - date_info_dict['forecast_hours'] = fhrs - plot_info_dict['fcst_var_name'] = la_info[1][0][0] - plot_info_dict['fcst_var_level'] = la_info[1][0][1] - plot_info_dict['fcst_var_thresh'] = la_info[1][0][2] - plot_info_dict['obs_var_name'] = la_info[1][1][0] - plot_info_dict['obs_var_level'] = la_info[1][1][1] - plot_info_dict['obs_var_thresh'] = la_info[1][1][2] - plot_info_dict['interp_points'] = str(la_info[2]) - image_name = plot_specs.get_savefig_name( - os.path.join(job_output_dir, 'images'), - plot_info_dict, date_info_dict - ) - if not os.path.exists(image_name): - if len(date_info_dict['forecast_hours']) <= 1: - logger.warning("No span of forecast hours to plot, " - +"given 1 forecast hour, skipping " - +"lead_average plots") - make_la = False - else: - if plot_info_dict['stat'] == 'FBAR_OBAR': - make_la = False - else: - make_la = 
True - else: - make_la = False - if make_la: - plot_la = gdap_la.LeadAverage(logger, job_output_dir, - job_output_dir, model_info_dict, - date_info_dict, plot_info_dict, - met_info_dict, logo_dir) - plot_la.make_lead_average() - elif plot == 'valid_hour_average': - import href_atmos_plots_valid_hour_average as gdap_vha - for vha_info in \ - list(itertools.product(var_info, interp_points_list)): - date_info_dict['valid_hr_start'] = valid_hr_start - date_info_dict['valid_hr_end'] = valid_hr_end - date_info_dict['valid_hr_inc'] = valid_hr_inc - date_info_dict['forecast_hours'] = fhrs - plot_info_dict['fcst_var_name'] = vha_info[0][0][0] - plot_info_dict['fcst_var_level'] = vha_info[0][0][1] - plot_info_dict['fcst_var_thresh'] = vha_info[0][0][2] - plot_info_dict['obs_var_name'] = vha_info[0][1][0] - plot_info_dict['obs_var_level'] = vha_info[0][1][1] - plot_info_dict['obs_var_thresh'] = vha_info[0][1][2] - plot_info_dict['interp_points'] = str(vha_info[1]) - image_name = plot_specs.get_savefig_name( - os.path.join(job_output_dir, 'images'), - plot_info_dict, date_info_dict - ) - if not os.path.exists(image_name): - if date_info_dict['valid_hr_start'] \ - == date_info_dict['valid_hr_end']: - logger.warning("No span of valid hours to plot, " - +"valid start hour is the same as " - +"valid end hour, skipping " - +"valid_hour_average plots") - make_vha = False - else: - if plot_info_dict['stat'] == 'FBAR_OBAR': - make_vha = False - else: - make_vha = True - else: - make_vha = False - if make_vha: - plot_vha = gdap_vha.ValidHourAverage(logger, job_output_dir, - job_output_dir, - model_info_dict, - date_info_dict, - plot_info_dict, - met_info_dict, logo_dir) - plot_vha.make_valid_hour_average() - elif plot == 'threshold_average': - import href_atmos_plots_threshold_average as gdap_ta - for ta_info in \ - list(itertools.product(valid_hrs, fhrs, interp_points_list)): - date_info_dict['valid_hr_start'] = str(ta_info[0]) - date_info_dict['valid_hr_end'] = str(ta_info[0]) - 
date_info_dict['valid_hr_inc'] = '24' - date_info_dict['forecast_hour'] = str(ta_info[1]) - plot_info_dict['fcst_var_name'] = fcst_var_name - plot_info_dict['obs_var_name'] = obs_var_name - plot_info_dict['fcst_var_threshs'] = fcst_var_thresh_list - plot_info_dict['obs_var_name'] = obs_var_name - plot_info_dict['obs_var_threshs'] = obs_var_thresh_list - plot_info_dict['interp_points'] = str(ta_info[2]) - init_hr = gda_util.get_init_hour( - int(date_info_dict['valid_hr_start']), - int(date_info_dict['forecast_hour']) - ) - for l in range(len(fcst_var_level_list)): - plot_info_dict['fcst_var_level'] = fcst_var_level_list[l] - plot_info_dict['obs_var_level'] = obs_var_level_list[l] - image_name = plot_specs.get_savefig_name( - os.path.join(job_output_dir, 'images'), - plot_info_dict, date_info_dict - ) - if init_hr in init_hrs: - if not os.path.exists(image_name): - if len(plot_info_dict['fcst_var_threshs']) <= 1: - logger.warning("No span of thresholds to plot, " - +"given 1 threshold, skipping " - +"threshold_average plots") - make_ta = False - else: - if plot_info_dict['stat'] == 'FBAR_OBAR': - make_ta = False - else: - make_ta = True - else: - make_ta = False - else: - make_ta = False - if make_ta: - plot_ta = gdap_ta.ThresholdAverage(logger, job_output_dir, - job_output_dir, - model_info_dict, - date_info_dict, - plot_info_dict, - met_info_dict, - logo_dir) - plot_ta.make_threshold_average() - elif plot == 'lead_by_date': - import href_atmos_plots_lead_by_date as gdap_lbd - for lbd_info in \ - list(itertools.product(valid_hrs, var_info, - interp_points_list)): - date_info_dict['valid_hr_start'] = str(lbd_info[0]) - date_info_dict['valid_hr_end'] = str(lbd_info[0]) - date_info_dict['valid_hr_inc'] = '24' - date_info_dict['forecast_hours'] = fhrs - plot_info_dict['fcst_var_name'] = lbd_info[1][0][0] - plot_info_dict['fcst_var_level'] = lbd_info[1][0][1] - plot_info_dict['fcst_var_thresh'] = lbd_info[1][0][2] - plot_info_dict['obs_var_name'] = lbd_info[1][1][0] - 
plot_info_dict['obs_var_level'] = lbd_info[1][1][1] - plot_info_dict['obs_var_thresh'] = lbd_info[1][1][2] - plot_info_dict['interp_points'] = str(lbd_info[2]) - image_name = plot_specs.get_savefig_name( - os.path.join(job_output_dir, 'images'), - plot_info_dict, date_info_dict - ) - if not os.path.exists(image_name): - if len(date_info_dict['forecast_hours']) <= 1: - logger.warning("No span of forecast hours to plot, " - +"given 1 forecast hour, skipping " - +"lead_by_date plots") - make_lbd = False - else: - if plot_info_dict['stat'] == 'FBAR_OBAR': - make_lbd = False - else: - make_lbd = True - else: - make_lbd = False - if make_lbd: - plot_lbd = gdap_lbd.LeadByDate(logger, job_output_dir, - job_output_dir, model_info_dict, - date_info_dict, plot_info_dict, - met_info_dict, logo_dir) - plot_lbd.make_lead_by_date() - elif plot == 'stat_by_level': - import href_atmos_plots_stat_by_level as gdap_sbl - vert_profiles = ['all', 'trop', 'strat', 'ltrop', 'utrop'] - for sbl_info in \ - list(itertools.product(valid_hrs, fhrs, interp_points_list, - vert_profiles)): - date_info_dict['valid_hr_start'] = str(sbl_info[0]) - date_info_dict['valid_hr_end'] = str(sbl_info[0]) - date_info_dict['valid_hr_inc'] = '24' - date_info_dict['forecast_hour'] = str(sbl_info[1]) - plot_info_dict['fcst_var_name'] = fcst_var_name - plot_info_dict['obs_var_name'] = obs_var_name - plot_info_dict['interp_points'] = str(sbl_info[2]) - plot_info_dict['vert_profile'] = sbl_info[3] - init_hr = gda_util.get_init_hour( - int(date_info_dict['valid_hr_start']), - int(date_info_dict['forecast_hour']) - ) - plot_info_dict['fcst_var_level'] = sbl_info[3] - plot_info_dict['obs_var_level'] = sbl_info[3] - for t in range(len(fcst_var_thresh_list)): - plot_info_dict['fcst_var_thresh'] = fcst_var_thresh_list[t] - plot_info_dict['obs_var_thresh'] = obs_var_thresh_list[t] - image_name = plot_specs.get_savefig_name( - os.path.join(job_output_dir, 'images'), - plot_info_dict, date_info_dict - ) - if init_hr in 
init_hrs: - if not os.path.exists(image_name): - if plot_info_dict['stat'] == 'FBAR_OBAR': - make_sbl = False - else: - make_sbl = True - else: - make_sbl = False - else: - make_sbl = False - del plot_info_dict['fcst_var_level'] - del plot_info_dict['obs_var_level'] - if make_sbl: - plot_sbl = gdap_sbl.StatByLevel(logger, job_output_dir, - job_output_dir, - model_info_dict, - date_info_dict, - plot_info_dict, - met_info_dict, - logo_dir) - plot_sbl.make_stat_by_level() - elif plot == 'lead_by_level': - import href_atmos_plots_lead_by_level as gdap_lbl - if evs_run_mode == 'production' and int(fhr_inc) == 6: - fhrs_lbl = list( - range(int(fhr_start), int(fhr_end)+int(fhr_inc), 12) - ) - else: - fhrs_lbl = fhrs - vert_profiles = ['all', 'trop', 'strat', 'ltrop', 'utrop'] - for lbl_info in \ - list(itertools.product(valid_hrs, interp_points_list, - vert_profiles)): - date_info_dict['valid_hr_start'] = str(lbl_info[0]) - date_info_dict['valid_hr_end'] = str(lbl_info[0]) - date_info_dict['valid_hr_inc'] = '24' - date_info_dict['forecast_hours'] = fhrs_lbl - plot_info_dict['fcst_var_name'] = fcst_var_name - plot_info_dict['obs_var_name'] = obs_var_name - plot_info_dict['interp_points'] = str(lbl_info[1]) - plot_info_dict['vert_profile'] = lbl_info[2] - plot_info_dict['fcst_var_level'] = lbl_info[2] - plot_info_dict['obs_var_level'] = lbl_info[2] - for t in range(len(fcst_var_thresh_list)): - plot_info_dict['fcst_var_thresh'] = fcst_var_thresh_list[t] - plot_info_dict['obs_var_thresh'] = obs_var_thresh_list[t] - image_name = plot_specs.get_savefig_name( - os.path.join(job_output_dir, 'images'), - plot_info_dict, date_info_dict - ) - if not os.path.exists(image_name): - if len(date_info_dict['forecast_hours']) <= 1: - logger.warning("No span of forecast hours to plot, " - +"given 1 forecast hour, skipping " - +"lead_by_level plots") - else: - if plot_info_dict['stat'] == 'FBAR_OBAR': - make_lbl = False - else: - make_lbl = True - else: - make_lbl = False - del 
plot_info_dict['fcst_var_level'] - del plot_info_dict['obs_var_level'] - if make_lbl: - plot_lbl = gdap_lbl.LeadByLevel(logger, job_output_dir, - job_output_dir, - model_info_dict, - date_info_dict, - plot_info_dict, - met_info_dict, logo_dir) - plot_lbl.make_lead_by_level() - elif plot == 'nohrsc_spatial_map': - import href_atmos_plots_nohrsc_spatial_map as gdap_nsm - nohrsc_data_dir = os.path.join(VERIF_CASE_STEP_dir, 'data', 'nohrsc') - date_info_dict['valid_hr_start'] = str(valid_hrs[0]) - date_info_dict['valid_hr_end'] = str(valid_hrs[0]) - date_info_dict['valid_hr_inc'] = '24' - plot_info_dict['obs_var_name'] = obs_var_name - plot_info_dict['obs_var_level'] = obs_var_level_list[0] - plot_nsm = gdap_nsm.NOHRSCSpatialMap(logger, nohrsc_data_dir, - job_output_dir, date_info_dict, - plot_info_dict, logo_dir) - plot_nsm.make_nohrsc_spatial_map() - elif plot == 'precip_spatial_map': + if plot == 'precip_spatial_map': model_info_dict['obs'] = {'name': 'ccpa', 'plot_name': 'ccpa', 'obs_name': '24hrCCPA'} @@ -545,47 +210,6 @@ plot_info_dict, met_info_dict, logo_dir) plot_psm.make_precip_spatial_map() - elif plot == 'performance_diagram': - import href_atmos_plots_performance_diagram as gdap_pd - for pd_info in \ - list(itertools.product(valid_hrs, fhrs, interp_points_list)): - date_info_dict['valid_hr_start'] = str(pd_info[0]) - date_info_dict['valid_hr_end'] = str(pd_info[0]) - date_info_dict['valid_hr_inc'] = '24' - date_info_dict['forecast_hour'] = str(pd_info[1]) - plot_info_dict['fcst_var_name'] = fcst_var_name - plot_info_dict['obs_var_name'] = obs_var_name - plot_info_dict['fcst_var_threshs'] = fcst_var_thresh_list - plot_info_dict['obs_var_name'] = obs_var_name - plot_info_dict['obs_var_threshs'] = obs_var_thresh_list - plot_info_dict['interp_points'] = str(pd_info[2]) - init_hr = gda_util.get_init_hour( - int(date_info_dict['valid_hr_start']), - int(date_info_dict['forecast_hour']) - ) - for l in range(len(fcst_var_level_list)): - 
plot_info_dict['fcst_var_level'] = fcst_var_level_list[l] - plot_info_dict['obs_var_level'] = obs_var_level_list[l] - image_name = plot_specs.get_savefig_name( - os.path.join(job_output_dir, 'images'), - plot_info_dict, date_info_dict - ) - if init_hr in init_hrs: - if not os.path.exists(image_name): - make_pd = True - else: - make_pd = False - else: - make_pd = False - if make_pd: - plot_pd = gdap_pd.PerformanceDiagram(logger, job_output_dir, - job_output_dir, - model_info_dict, - date_info_dict, - plot_info_dict, - met_info_dict, - logo_dir) - plot_pd.make_performance_diagram() else: logger.warning(plot+" not recongized") diff --git a/ush/cam/ush_href_plot_py/href_atmos_plots_precip_spatial_map.py b/ush/cam/ush_href_plot_py/href_atmos_plots_precip_spatial_map.py index 3b3a8f087e..2e51070ee6 100644 --- a/ush/cam/ush_href_plot_py/href_atmos_plots_precip_spatial_map.py +++ b/ush/cam/ush_href_plot_py/href_atmos_plots_precip_spatial_map.py @@ -22,7 +22,6 @@ import cartopy.feature as cfeature from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter from cartopy import config -import href_atmos_util as gda_util from href_atmos_plots_specs import PlotSpecs class PrecipSpatialMap: @@ -183,13 +182,13 @@ def make_precip_spatial_map(self): file_valid_time, '%Y%m%d_%H%M%S' ) if valid_date_dt != file_valid_time_dt: - self.logger.error(f"FATAL ERROR: FILE VALID TIME {file_valid_time_dt} " + self.logger.error(f"FILE VALID TIME {file_valid_time_dt} " +"DOES NOT MATCH EXPECTED VALID TIME " +f"{valid_date_dt}") sys.exit(1) if model_num != 'obs': if init_date_dt != file_init_time_dt: - self.logger.error(f"FATAL ERROR: FILE INIT TIME {file_init_time_dt} " + self.logger.error(f"FILE INIT TIME {file_init_time_dt} " +"DOES NOT MATCH EXPECTED INIT TIME " +f"{init_date_dt}") sys.exit(1) diff --git a/ush/cam/ush_href_plot_py/href_atmos_plots_specs.py b/ush/cam/ush_href_plot_py/href_atmos_plots_specs.py index 4e09cb2cfd..4cea4c72b7 100644 --- 
a/ush/cam/ush_href_plot_py/href_atmos_plots_specs.py +++ b/ush/cam/ush_href_plot_py/href_atmos_plots_specs.py @@ -6,7 +6,6 @@ import sys import os import numpy as np -import href_atmos_util as gda_util class PlotSpecs: def __init__(self, logger, plot_type): @@ -123,7 +122,7 @@ def __init__(self, logger, plot_type): self.legend_font_size = 16 self.fig_title_size = 18 else: - self.logger.error(f"FATAL ERROR: {self.plot_type} NOT RECOGNIZED") + self.logger.error(f"{self.plot_type} NOT RECOGNIZED") sys.exit(1) def set_up_plot(self): diff --git a/ush/cam/ush_href_plot_py/href_atmos_util.py b/ush/cam/ush_href_plot_py/href_atmos_util.py deleted file mode 100644 index 3b5c972b68..0000000000 --- a/ush/cam/ush_href_plot_py/href_atmos_util.py +++ /dev/null @@ -1,2528 +0,0 @@ -#! /usr/bin/env python3 - -import os -import datetime -import numpy as np -import subprocess -import shutil -import sys -import netCDF4 as netcdf -import numpy as np -import glob -import pandas as pd -from time import sleep - -def run_shell_command(command): - """! Run shell command - - Args: - command - list of agrument entries (string) - - Returns: - - """ - print("Running "+' '.join(command)) - if any(mark in ' '.join(command) for mark in ['"', "'", '|', '*', '>']): - run_command = subprocess.run( - ' '.join(command), shell=True - ) - else: - run_command = subprocess.run(command) - if run_command.returncode != 0: - print("FATAL ERROR: "+' '.join(run_command.args)+" gave return code " - +str(run_command.returncode)) - -def metplus_command(conf_file_name): - """! 
Write out full call to METplus - - Args: - conf_file_name - METplus conf file name (string) - - Returns: - metplus_cmd - full call to METplus (string) - - """ - run_metplus = os.path.join(os.environ['METPLUS_PATH'], 'ush', - 'run_metplus.py') - machine_conf = os.path.join(os.environ['PARMevs'], 'metplus_config', - 'machine.conf') - conf_file = os.path.join(os.environ['PARMevs'], 'metplus_config', - os.environ['STEP'], os.environ['COMPONENT'], - os.environ['RUN']+'_'+os.environ['VERIF_CASE'], - conf_file_name) - if not os.path.exists(conf_file): - print("FATAL ERROR: "+conf_file+" DOES NOT EXIST") - sys.exit(1) - metplus_cmd = run_metplus+' -c '+machine_conf+' -c '+conf_file - return metplus_cmd - -def python_command(python_script_name, script_arg_list): - """! Write out full call to python - - Args: - python_script_name - python script name (string) - script_arg_list - list of script agruments (strings) - - Returns: - python_cmd - full call to python (string) - - """ - python_script = os.path.join(os.environ['USHevs'], os.environ['COMPONENT'], - python_script_name) - if not os.path.exists(python_script): - print("FATAL ERROR: "+python_script+" DOES NOT EXIST") - sys.exit(1) - python_cmd = 'python '+python_script - for script_arg in script_arg_list: - python_cmd = python_cmd+' '+script_arg - return python_cmd - -def check_file_exists_size(file_name): - """! Checks to see if file exists and has size greater than 0 - - Args: - file_name - file path (string) - - Returns: - file_good - boolean - - True: file exists,file size >0 - - False: file doesn't exist - OR file size = 0 - """ - if os.path.exists(file_name): - if os.path.getsize(file_name) > 0: - file_good = True - else: - print("WARNING: "+file_name+" empty, 0 sized") - file_good = False - else: - print("WARNING: "+file_name+" does not exist") - file_good = False - return file_good - -def copy_file(source_file, dest_file): - """! 
This copies a file from one location to another - - Args: - source_file - source file path (string) - dest_file - destination file path (string) - - Returns: - """ - if check_file_exists_size(source_file): - print("Copying "+source_file+" to "+dest_file) - shutil.copy2(source_file, dest_file) - -def convert_grib1_grib2(grib1_file, grib2_file): - """! Converts GRIB1 data to GRIB2 - - Args: - grib1_file - string of the path to - the GRIB1 file to - convert (string) - grib2_file - string of the path to - save the converted GRIB2 - file (string) - Returns: - """ - print("Converting GRIB1 file "+grib1_file+" " - +"to GRIB2 file "+grib2_file) - cnvgrib = os.environ['CNVGRIB'] - os.system(cnvgrib+' -g12 '+grib1_file+' ' - +grib2_file+' > /dev/null 2>&1') - -def convert_grib2_grib1(grib2_file, grib1_file): - """! Converts GRIB2 data to GRIB1 - - Args: - grib2_file - string of the path to - the GRIB2 file to - convert - grib1_file - string of the path to - save the converted GRIB1 - file - Returns: - """ - print("Converting GRIB2 file "+grib2_file+" " - +"to GRIB1 file "+grib1_file) - cnvgrib = os.environ['CNVGRIB'] - os.system(cnvgrib+' -g21 '+grib2_file+' ' - +grib1_file+' > /dev/null 2>&1') - -def convert_grib2_grib2(grib2_fileA, grib2_fileB): - """! Converts GRIB2 data to GRIB2 - - Args: - grib2_fileA - string of the path to - the GRIB2 file to - convert - grib2_fileB - string of the path to - save the converted GRIB2 - file - Returns: - """ - print("Converting GRIB2 file "+grib2_fileA+" " - +"to GRIB2 file "+grib2_fileB) - cnvgrib = os.environ['CNVGRIB'] - os.system(cnvgrib+' -g22 '+grib2_fileA+' ' - +grib2_fileB+' > /dev/null 2>&1') - -def get_time_info(date_start, date_end, date_type, init_hr_list, valid_hr_list, - fhr_list): - """! 
Creates a list of dictionaries containing information - on the valid dates and times, the initialization dates - and times, and forecast hour pairings - - Args: - date_start - verification start date - (string, format:YYYYmmdd) - date_end - verification end_date - (string, format:YYYYmmdd) - date_type - how to treat date_start and - date_end (string, values:VALID or INIT) - init_hr_list - list of initialization hours - (string) - valid_hr_list - list of valid hours (string) - fhr_list - list of forecasts hours (string) - - Returns: - time_info - list of dictionaries with the valid, - initalization, and forecast hour - pairings - """ - valid_hr_zfill2_list = [hr.zfill(2) for hr in valid_hr_list] - init_hr_zfill2_list = [hr.zfill(2) for hr in init_hr_list] - if date_type == 'VALID': - date_type_hr_list = valid_hr_zfill2_list - elif date_type == 'INIT': - date_type_hr_list = init_hr_zfill2_list - date_type_hr_start = date_type_hr_list[0] - date_type_hr_end = date_type_hr_list[-1] - if len(date_type_hr_list) > 1: - date_type_hr_inc = np.min( - np.diff(np.array(date_type_hr_list, dtype=int)) - ) - else: - date_type_hr_inc = 24 - date_start_dt = datetime.datetime.strptime(date_start+date_type_hr_start, - '%Y%m%d%H') - date_end_dt = datetime.datetime.strptime(date_end+date_type_hr_end, - '%Y%m%d%H') - time_info = [] - date_dt = date_start_dt - while date_dt <= date_end_dt: - if date_type == 'VALID': - valid_time_dt = date_dt - elif date_type == 'INIT': - init_time_dt = date_dt - for fhr in fhr_list: - if fhr == 'anl': - forecast_hour = 0 - else: - forecast_hour = int(fhr) - if date_type == 'VALID': - init_time_dt = (valid_time_dt - - datetime.timedelta(hours=forecast_hour)) - elif date_type == 'INIT': - valid_time_dt = (init_time_dt - + datetime.timedelta(hours=forecast_hour)) - if valid_time_dt.strftime('%H') in valid_hr_zfill2_list \ - and init_time_dt.strftime('%H') in init_hr_zfill2_list: - t = {} - t['valid_time'] = valid_time_dt - t['init_time'] = init_time_dt - 
t['forecast_hour'] = str(forecast_hour) - time_info.append(t) - date_dt = date_dt + datetime.timedelta(hours=int(date_type_hr_inc)) - return time_info - -def get_init_hour(valid_hour, forecast_hour): - """! Get a initialization hour/cycle - - Args: - valid_hour - valid hour (integer) - forecast_hour - forecast hour (integer) - """ - init_hour = 24 + (valid_hour - (forecast_hour%24)) - if forecast_hour % 24 == 0: - init_hour = valid_hour - else: - init_hour = 24 + (valid_hour - (forecast_hour%24)) - if init_hour >= 24: - init_hour = init_hour - 24 - return init_hour - -def format_filler(unfilled_file_format, valid_time_dt, init_time_dt, - forecast_hour, str_sub_dict): - """! Creates a filled file path from a format - - Args: - unfilled_file_format - file naming convention (string) - valid_time_dt - valid time (datetime) - init_time_dt - initialization time (datetime) - forecast_hour - forecast hour (string) - str_sub_dict - other strings to substitue (dictionary) - Returns: - filled_file_format - file_format filled in with verifying - time information (string) - """ - filled_file_format = '/' - format_opt_list = ['lead', 'lead_shift', 'valid', 'valid_shift', - 'init', 'init_shift'] - if len(list(str_sub_dict.keys())) != 0: - format_opt_list = format_opt_list+list(str_sub_dict.keys()) - for filled_file_format_chunk in unfilled_file_format.split('/'): - for format_opt in format_opt_list: - nformat_opt = ( - filled_file_format_chunk.count('{'+format_opt+'?fmt=') - ) - if nformat_opt > 0: - format_opt_count = 1 - while format_opt_count <= nformat_opt: - if format_opt in ['lead_shift', 'valid_shift', - 'init_shift']: - shift = (filled_file_format_chunk \ - .partition('shift=')[2] \ - .partition('}')[0]) - format_opt_count_fmt = ( - filled_file_format_chunk \ - .partition('{'+format_opt+'?fmt=')[2] \ - .rpartition('?')[0] - ) - else: - format_opt_count_fmt = ( - filled_file_format_chunk \ - .partition('{'+format_opt+'?fmt=')[2] \ - .partition('}')[0] - ) - if format_opt 
def format_filler(unfilled_file_format, valid_time_dt, init_time_dt,
                  forecast_hour, str_sub_dict):
    """! Create a filled file path from a template.

    Template tokens look like {valid?fmt=%Y%m%d%H}, {lead?fmt=%3H},
    {init_shift?fmt=%Y%m%d%H?shift=-12}, or any custom key supplied
    through str_sub_dict.

    Args:
        unfilled_file_format - file naming convention (string)
        valid_time_dt        - valid time (datetime)
        init_time_dt         - initialization time (datetime)
        forecast_hour        - forecast hour (string)
        str_sub_dict         - other strings to substitute (dictionary)

    Returns:
        filled_file_format - file_format filled in with verifying
                             time information (string)
    """
    def fill_lead(fhr_str, fmt):
        # Render a forecast-hour string per the requested width.
        # NOTE(review): the '%1H' case indexes fhr_str[1], which assumes a
        # two-character, zero-padded forecast hour string — confirm callers.
        if fmt == '%1H':
            if int(fhr_str) < 10:
                return fhr_str[1]
            return fhr_str
        if fmt == '%2H':
            return fhr_str.zfill(2)
        if fmt == '%3H':
            return fhr_str.zfill(3)
        return fhr_str

    filled_file_format = '/'
    format_opt_list = ['lead', 'lead_shift', 'valid', 'valid_shift',
                       'init', 'init_shift']
    if str_sub_dict:
        format_opt_list = format_opt_list + list(str_sub_dict.keys())
    for chunk in unfilled_file_format.split('/'):
        for opt in format_opt_list:
            token_head = '{'+opt+'?fmt='
            occurrences = chunk.count(token_head)
            for _ in range(occurrences):
                # Pull the fmt string (and the shift amount, when present)
                # out of the first remaining token for this option
                if opt in ('lead_shift', 'valid_shift', 'init_shift'):
                    shift = (chunk.partition('shift=')[2].partition('}')[0])
                    fmt = (chunk.partition(token_head)[2].rpartition('?')[0])
                else:
                    fmt = (chunk.partition(token_head)[2].partition('}')[0])
                if opt == 'valid':
                    filled_value = valid_time_dt.strftime(fmt)
                elif opt == 'lead':
                    filled_value = fill_lead(forecast_hour, fmt)
                elif opt == 'init':
                    filled_value = init_time_dt.strftime(fmt)
                elif opt == 'lead_shift':
                    shifted_fhr = str(int(forecast_hour) + int(shift))
                    filled_value = fill_lead(shifted_fhr, fmt)
                elif opt == 'init_shift':
                    shifted_dt = (init_time_dt
                                  + datetime.timedelta(hours=int(shift)))
                    filled_value = shifted_dt.strftime(fmt)
                elif opt == 'valid_shift':
                    shifted_dt = (valid_time_dt
                                  + datetime.timedelta(hours=int(shift)))
                    filled_value = shifted_dt.strftime(fmt)
                else:
                    # Custom key supplied by the caller
                    filled_value = str_sub_dict[opt]
                if opt in ('lead_shift', 'valid_shift', 'init_shift'):
                    old_token = token_head+fmt+'?shift='+shift+'}'
                else:
                    old_token = token_head+fmt+'}'
                chunk = chunk.replace(old_token, filled_value)
        filled_file_format = os.path.join(filled_file_format, chunk)
    return filled_file_format
def prep_prod_gfs_file(source_file, dest_file, forecast_hour, prep_method):
    """! Do prep work for GFS production files.

    Thins the source GRIB2 file down to the field/level combinations
    EVS verifies, then stages the result at dest_file.

    Args:
        source_file   - source file path (string)
        dest_file     - destination file (string)
        forecast_hour - forecast hour (string)
        prep_method   - name of prep method to do (string)

    Returns:
    """
    # Environment variables and executables
    WGRIB2 = os.environ['WGRIB2']
    EXECevs = os.environ['EXECevs']
    # Working file names
    prepped_file = os.path.join(os.getcwd(),
                                'atmos.'+dest_file.rpartition('/')[2])
    working_file1 = prepped_file+'.tmp1'
    # Prep file
    if prep_method == 'full':
        # NOTE(review): forecast_hour arrives as a string from callers seen
        # in this module, so this comparison with the integer 0 can never be
        # true — confirm whether any caller passes an int here.
        if forecast_hour == 0:
            wgrib_fhr = 'anl'
        else:
            wgrib_fhr = forecast_hour
        # Standard pressure levels shared by several fields, listed in the
        # exact order the thinned output should carry them
        pres_lvls = ['1000', '925', '850', '700', '500', '400', '300',
                     '250', '200', '150', '100', '50', '20', '10', '5', '1']
        o3mr_lvls = ['925', '100', '70', '50', '30', '20', '10', '5', '1']
        thin_var_level_list = (
            ['CAPE:surface',
             'CAPE:90-0 mb above ground',
             'CWAT:entire atmosphere (considered as a single layer)',
             'DPT:2 m above ground',
             'GUST:surface']
            + ['HGT:'+lvl+' mb' for lvl in pres_lvls]
            + ['HGT:cloud ceiling',
               'HGT:tropopause',
               'HPBL:surface',
               'ICEC:surface',
               'ICETK:surface',
               'LHTFL:surface']
            + ['O3MR:'+lvl+' mb' for lvl in o3mr_lvls]
            + ['PRES:surface', 'PRES:tropopause',
               'PRMSL:mean sea level',
               'PWAT:entire atmosphere (considered as a single layer)']
            + ['RH:'+lvl+' mb' for lvl in pres_lvls]
            + ['RH:2 m above ground',
               'SHTFL:surface',
               'SNOD:surface']
            + ['SPFH:'+lvl+' mb' for lvl in pres_lvls]
            + ['SPFH:2 m above ground',
               'SOILW:0-0.1 m below ground',
               'TCDC:entire atmosphere:'+wgrib_fhr]
            + ['TMP:'+lvl+' mb' for lvl in pres_lvls]
            + ['TMP:2 m above ground', 'TMP:surface', 'TMP:tropopause',
               'TOZNE:entire atmosphere (considered as a single layer)',
               'TSOIL:0-0.1 m below ground']
            + ['UGRD:'+lvl+' mb' for lvl in pres_lvls]
            + ['UGRD:10 m above ground']
            + ['VGRD:'+lvl+' mb' for lvl in pres_lvls]
            + ['VGRD:10 m above ground',
               'VIS:surface',
               'WEASD:surface']
        )
        # Missing in GFS files: Sea Ice Drift (Velocity) - SICED??
        #                       Sea Ice Extent - Derived from ICEC?
        #                       Sea Ice Volume
        if check_file_exists_size(source_file):
            # Truncate the output file, then append one extracted message
            # per field/level
            run_shell_command(['>', prepped_file])
            for thin_var_level in thin_var_level_list:
                run_shell_command([WGRIB2, '-match', '"'+thin_var_level+'"',
                                   source_file+'|'+WGRIB2, '-i', source_file,
                                   '-grib', working_file1])
                run_shell_command(['cat', working_file1, '>>', prepped_file])
                os.remove(working_file1)
    elif 'precip' in prep_method:
        # GFS accumulations divisible by 24 hours are labeled in days
        if int(forecast_hour) % 24 == 0:
            thin_var_level = ('APCP:surface:0-'
                              +str(int(int(forecast_hour)/24)))
        else:
            thin_var_level = ('APCP:surface:0-'+forecast_hour)
        if check_file_exists_size(source_file):
            run_shell_command([WGRIB2, '-match', '"'+thin_var_level+'"',
                               source_file+'|'+WGRIB2, '-i', source_file,
                               '-grib', prepped_file])
    copy_file(prepped_file, dest_file)

def prep_prod_fnmoc_file(source_file, dest_file, forecast_hour,
                         prep_method):
    """! Do prep work for FNMOC production files.

    FNMOC GRIB2 files only need a cnvgrib re-encode before staging.

    Args:
        source_file   - source file (string)
        dest_file     - destination file (string)
        forecast_hour - forecast hour (string)
        prep_method   - name of prep method to do (string)

    Returns:
    """
    # Working file names
    prepped_file = os.path.join(os.getcwd(),
                                'atmos.'+dest_file.rpartition('/')[2])
    # Prep file
    if check_file_exists_size(source_file):
        convert_grib2_grib2(source_file, prepped_file)
    copy_file(prepped_file, dest_file)
def prep_prod_jma_file(source_file_format, dest_file, forecast_hour,
                       prep_method):
    """! Do prep work for JMA production files.

    JMA delivers separate northern/southern hemisphere files that are
    extracted with wgrib and merged with the jma_merge executable.

    Args:
        source_file_format - source file format (string)
        dest_file          - destination file (string)
        forecast_hour      - forecast hour (string)
        prep_method        - name of prep method to do (string)

    Returns:
    """
    # Environment variables and executables
    WGRIB = os.environ['WGRIB']
    EXECevs = os.environ['EXECevs']
    JMAMERGE = os.path.join(EXECevs, 'jma_merge')
    # Working file names
    prepped_file = os.path.join(os.getcwd(),
                                'atmos.'+dest_file.rpartition('/')[2])
    working_file1 = prepped_file+'.tmp1'
    working_file2 = prepped_file+'.tmp2'
    # Prep file
    if prep_method == 'full':
        # Hour 0 (or 'anl') records are tagged ':anl' in the inventory
        if forecast_hour == 'anl' or int(forecast_hour) == 0:
            wgrib_fhr = ':anl'
        else:
            wgrib_fhr = ':'+forecast_hour+'hr'
        for hem in ['n', 's']:
            hem_source_file = source_file_format.replace('{hem?fmt=str}', hem)
            working_file = working_file1 if hem == 'n' else working_file2
            if check_file_exists_size(hem_source_file):
                run_shell_command(
                    [WGRIB+' '+hem_source_file+' | grep "'+wgrib_fhr+'" | '
                     +WGRIB+' '+hem_source_file+' -i -grib -o '
                     +working_file]
                )
        if check_file_exists_size(working_file1) \
                and check_file_exists_size(working_file2):
            run_shell_command(
                [JMAMERGE, working_file1, working_file2, prepped_file]
            )
    elif 'precip' in prep_method:
        source_file = source_file_format
        if check_file_exists_size(source_file):
            # Pull the 0-to-forecast_hour accumulation record
            run_shell_command(
                [WGRIB+' '+source_file+' | grep "0-'
                 +forecast_hour+'hr" | '+WGRIB+' '+source_file
                 +' -i -grib -o '+prepped_file]
            )
    copy_file(prepped_file, dest_file)
def prep_prod_ecmwf_file(source_file, dest_file, forecast_hour, prep_method):
    """! Do prep work for ECMWF production files.

    ECMWF data is restricted (rstprod group); every staged file is
    chmod 750 / chgrp rstprod before being copied out.

    Args:
        source_file   - source file (string)
        dest_file     - destination file (string)
        forecast_hour - forecast hour (string)
        prep_method   - name of prep method to do (string)

    Returns:
    """
    # Environment variables and executables
    EXECevs = os.environ['EXECevs']
    ECMGFSLOOKALIKENEW = os.path.join(EXECevs, 'ecm_gfs_look_alike_new')
    PCPCONFORM = os.path.join(EXECevs, 'pcpconform')
    WGRIB = os.environ['WGRIB']
    # Working file names
    prepped_file = os.path.join(os.getcwd(),
                                'atmos.'+dest_file.rpartition('/')[2])
    working_file1 = prepped_file+'.tmp1'
    # Prep file
    if prep_method == 'full':
        # Hour 0 (or 'anl') records are tagged ':anl' in the inventory
        if forecast_hour == 'anl' or int(forecast_hour) == 0:
            wgrib_fhr = ':anl'
        else:
            wgrib_fhr = ':'+forecast_hour+'hr'
        if check_file_exists_size(source_file):
            run_shell_command(
                [WGRIB+' '+source_file+' | grep "'+wgrib_fhr+'" | '
                 +WGRIB+' '+source_file+' -i -grib -o '
                 +working_file1]
            )
        if check_file_exists_size(working_file1):
            # Restrict access before converting to a GFS-like layout
            run_shell_command(['chmod', '750', working_file1])
            run_shell_command(['chgrp', 'rstprod', working_file1])
            run_shell_command(
                [ECMGFSLOOKALIKENEW, working_file1, prepped_file]
            )
    elif 'precip' in prep_method:
        if check_file_exists_size(source_file):
            run_shell_command(
                [PCPCONFORM, 'ecmwf', source_file, prepped_file]
            )
    if os.path.exists(prepped_file):
        run_shell_command(['chmod', '750', prepped_file])
        run_shell_command(['chgrp', 'rstprod', prepped_file])
    copy_file(prepped_file, dest_file)
def prep_prod_ukmet_file(source_file_format, dest_file, forecast_hour,
                         prep_method):
    """! Do prep work for UKMET production files.

    UKMET hires files are split by forecast-hour letter codes; the
    matching file is extracted with wgrib and run through the
    ukm_hires_merge executable.

    Args:
        source_file_format - source file format (string)
        dest_file          - destination file (string)
        forecast_hour      - forecast hour (string)
        prep_method        - name of prep method to do (string)

    Returns:
    """
    # Environment variables and executables
    EXECevs = os.environ['EXECevs']
    WGRIB = os.environ['WGRIB']
    WGRIB2 = os.environ['WGRIB2']
    UKMHIRESMERGE = os.path.join(EXECevs, 'ukm_hires_merge')
    # Working file names
    prepped_file = os.path.join(os.getcwd(),
                                'atmos.'+dest_file.rpartition('/')[2])
    working_file1 = prepped_file+'.tmp1'
    working_file2 = prepped_file+'.tmp2'
    # Prep file
    if prep_method == 'full':
        # Production letter code for each forecast hour's source file
        ukmet_fhr_id_dict = {
            'anl': 'AAT',
            '0': 'AAT',
            '6': 'BBT',
            '12': 'CCT',
            '18': 'DDT',
            '24': 'EET',
            '30': 'FFT',
            '36': 'GGT',
            '42': 'HHT',
            '48': 'IIT',
            '54': 'JJT',
            '60': 'JJT',
            '66': 'KKT',
            '72': 'KKT',
            '78': 'QQT',
            '84': 'LLT',
            '90': 'TTT',
            '96': 'MMT',
            '102': 'UUT',
            '108': 'NNT',
            '114': 'VVT',
            '120': 'OOT',
            '126': '11T',
            '132': 'PPA',
            '138': '22T',
            '144': 'PPA'
        }
        if forecast_hour in ukmet_fhr_id_dict:
            fhr_id = ukmet_fhr_id_dict[forecast_hour]
            if forecast_hour == 'anl':
                fhr_str = '0'
                wgrib_fhr = 'anl'
            else:
                fhr_str = forecast_hour
                wgrib_fhr = 'anl' if forecast_hour == '0' \
                    else forecast_hour+'hr'
            source_file = source_file_format.replace('{letter?fmt=str}',
                                                     fhr_id)
            if check_file_exists_size(source_file):
                run_shell_command(
                    [WGRIB+' '+source_file+' | grep "'+wgrib_fhr
                     +'" | '+WGRIB+' '+source_file+' -i -grib -o '
                     +working_file1]
                )
            if check_file_exists_size(working_file1):
                run_shell_command([UKMHIRESMERGE, working_file1,
                                   prepped_file, fhr_str])
    elif 'precip' in prep_method:
        source_file = source_file_format
        source_file_accum = 12
        if check_file_exists_size(source_file):
            # Relabel total water path as APCP before converting
            run_shell_command(
                [WGRIB2+' '+source_file+' -if ":TWATP:" -set_var "APCP" '
                 +'-fi -grib '+working_file1]
            )
        if check_file_exists_size(working_file1):
            convert_grib2_grib1(working_file1, working_file2)
        if check_file_exists_size(working_file2):
            source_file_accum_fhr_start = (
                int(forecast_hour) - source_file_accum
            )
            run_shell_command(
                [WGRIB+' '+working_file2+' | grep "'
                 +str(source_file_accum_fhr_start)+'-'
                 +forecast_hour+'hr" | '+WGRIB+' '+working_file2
                 +' -i -grib -o '+prepped_file]
            )
    copy_file(prepped_file, dest_file)

def prep_prod_dwd_file(source_file, dest_file, forecast_hour, prep_method):
    """! Do prep work for DWD production files.

    Args:
        source_file   - source file (string)
        dest_file     - destination file (string)
        forecast_hour - forecast hour (string)
        prep_method   - name of prep method to do (string)

    Returns:
    """
    # Environment variables and executables
    EXECevs = os.environ['EXECevs']
    PCPCONFORM = os.path.join(EXECevs, 'pcpconform')
    # Working file names
    prepped_file = os.path.join(os.getcwd(),
                                'atmos.'+dest_file.rpartition('/')[2])
    #### For DWD to run through pcpconform, file name must be
    #### dwd_YYYYMMDDHH_(hhh)_(hhh).tmp
    working_file1 = os.path.join(os.getcwd(),
                                 source_file.rpartition('/')[2]+'.tmp')
    # Prep file
    if 'precip' in prep_method:
        if check_file_exists_size(source_file):
            convert_grib2_grib1(source_file, working_file1)
        if check_file_exists_size(working_file1):
            run_shell_command(
                [PCPCONFORM, 'dwd', working_file1,
                 prepped_file]
            )
    copy_file(prepped_file, dest_file)
def prep_prod_metfra_file(source_file, dest_file, forecast_hour, prep_method):
    """! Do prep work for METFRA production files

         Extracts the 24-hour precipitation accumulation record ending at
         forecast_hour and stages it at dest_file.

         Args:
             source_file   - source file (string)
             dest_file     - destination file (string)
             forecast_hour - forecast hour (string)
             prep_method   - name of prep method to do
                             (string)

         Returns:
    """
    # Environment variables and executables
    EXECevs = os.environ['EXECevs']
    WGRIB = os.environ['WGRIB']
    # Temporary file names
    prepped_file = os.path.join(os.getcwd(),
                                'atmos.'+dest_file.rpartition('/')[2])
    # Prep file
    if 'precip' in prep_method:
        # METFRA carries fixed 24-hour accumulations; select the
        # "<start>-<end>hr" record from the wgrib inventory
        file_accum = 24
        fhr_accum_start = int(forecast_hour)-file_accum
        if check_file_exists_size(source_file):
            run_shell_command(
                [WGRIB+' '+source_file+' | grep "'
                 +str(fhr_accum_start)+'-'
                 +forecast_hour+'hr" | '+WGRIB+' '+source_file
                 +' -i -grib -o '+prepped_file]
            )
    copy_file(prepped_file, dest_file)

def prep_prod_osi_saf_file(daily_source_file_format, daily_dest_file,
                           weekly_source_file_list, weekly_dest_file,
                           weekly_dates):
    """! Do prep work for OSI-SAF production files

         Regrids each hemisphere's daily file to the global G004 grid
         with cdo, then merges the two regridded hemispheres into a
         single global NETCDF3 file.

         NOTE(review): weekly_source_file_list, weekly_dest_file, and
         weekly_dates are only used to build weekly_prepped_file below,
         which is never written — the weekly-average path appears
         unimplemented here; confirm whether it is handled elsewhere.

         Args:
             daily_source_file_format - daily source file format (string)
             daily_dest_file          - daily destination file (string)
             weekly_source_file_list  - list of daily files to make up
                                        weekly average file
             weekly_dest_file         - weekly destination file (string)
             weekly_dates             - date span for weekly dates (tuple
                                        of datetimes)
         Returns:
    """
    # Environment variables and executables
    FIXevs = os.environ['FIXevs']
    CDO_ROOT = os.environ['CDO_ROOT']
    # Temporary file names
    daily_prepped_file = os.path.join(os.getcwd(), 'atmos.'
                                      +daily_dest_file.rpartition('/')[2])
    weekly_prepped_file = os.path.join(os.getcwd(), 'atmos.'
                                       +weekly_dest_file.rpartition('/')[2])
    # Prep daily file: bilinearly regrid each hemisphere to G004
    for hem in ['nh', 'sh']:
        hem_source_file = daily_source_file_format.replace('{hem?fmt=str}',
                                                           hem)
        hem_dest_file = daily_dest_file.replace('multi.', 'multi.'+hem+'.')
        hem_prepped_file = os.path.join(os.getcwd(), 'atmos.'
                                        +hem_dest_file.rpartition('/')[2])
        if check_file_exists_size(hem_source_file):
            run_shell_command(
                [os.path.join(CDO_ROOT, 'bin', 'cdo'),
                 'remapbil,'
                 +os.path.join(FIXevs, 'cdo_grids', 'G004.grid'),
                 hem_source_file, hem_prepped_file]
            )
        if hem == 'nh':
            nh_prepped_file = hem_prepped_file
        elif hem == 'sh':
            sh_prepped_file = hem_prepped_file
    if check_file_exists_size(nh_prepped_file) \
            and check_file_exists_size(sh_prepped_file):
        # Merge the two hemispheric files into one global dataset,
        # copying attributes from the northern-hemisphere file and
        # adjusting the ones that describe hemispheric coverage
        nh_data = netcdf.Dataset(nh_prepped_file)
        sh_data = netcdf.Dataset(sh_prepped_file)
        merged_data = netcdf.Dataset(daily_prepped_file, 'w',
                                     format='NETCDF3_CLASSIC')
        for attr in nh_data.ncattrs():
            if attr == 'history':
                merged_data.setncattr(
                    attr, nh_data.getncattr(attr)+' '
                    +sh_data.getncattr(attr)
                )
            elif attr == 'southernmost_latitude':
                merged_data.setncattr(attr, '-90')
            elif attr == 'area':
                merged_data.setncattr(attr, 'Global')
            else:
                merged_data.setncattr(attr, nh_data.getncattr(attr))
        for dim in list(nh_data.dimensions.keys()):
            merged_data.createDimension(dim, len(nh_data.dimensions[dim]))
        # Coordinate variables: copy from the NH file; shift time forward
        # 12 hours (43200 s) so the stamp sits mid-day
        for var in ['time', 'time_bnds', 'lat', 'lon']:
            merged_var = merged_data.createVariable(
                var, nh_data.variables[var].datatype,
                nh_data.variables[var].dimensions
            )
            for k in nh_data.variables[var].ncattrs():
                merged_var.setncatts(
                    {k: nh_data.variables[var].getncattr(k)}
                )
            if var == 'time':
                merged_var[:] = nh_data.variables[var][:] + 43200
            else:
                merged_var[:] = nh_data.variables[var][:]
        # Data variables: southern hemisphere fills rows below 0 deg
        # (first 180 rows of the G004 grid), northern fills the rest
        for var in ['ice_conc', 'ice_conc_unfiltered', 'masks',
                    'confidence_level', 'status_flag', 'total_uncertainty',
                    'smearing_uncertainty', 'algorithm_uncertainty']:
            merged_var = merged_data.createVariable(
                var, nh_data.variables[var].datatype,
                ('lat', 'lon')
            )
            for k in nh_data.variables[var].ncattrs():
                if k == 'long_name':
                    merged_var.setncatts(
                        {k: nh_data.variables[var].getncattr(k)\
                            .replace('northern hemisphere', 'globe')}
                    )
                else:
                    merged_var.setncatts(
                        {k: nh_data.variables[var].getncattr(k)}
                    )
            merged_var_vals = np.ma.masked_equal(
                np.vstack((sh_data.variables[var][0,:180,:],
                           nh_data.variables[var][0,180:,:]))
                ,nh_data.variables[var]._FillValue)
            merged_var[:] = merged_var_vals
        merged_data.close()
    copy_file(daily_prepped_file, daily_dest_file)

def prep_prod_ghrsst_ospo_file(source_file, dest_file, date_dt):
    """! Do prep work for GHRSST OSPO production files

         Copies the file and shifts its time coordinate forward 12 hours
         (43200 s) so the stamp sits mid-day.

         Args:
             source_file - source file (string)
             dest_file   - destination file (string)
             date_dt     - date (datetime object)
         Returns:
    """
    # Environment variables and executables
    # Temporary file names
    prepped_file = os.path.join(os.getcwd(), 'atmos.'
                                +source_file.rpartition('/')[2])
    # Prep file
    copy_file(source_file, prepped_file)
    if check_file_exists_size(prepped_file):
        prepped_data = netcdf.Dataset(prepped_file, 'a',
                                      format='NETCDF3_CLASSIC')
        # NOTE(review): this epoch value is computed but never used —
        # presumably documents the GHRSST 'seconds since 1981-01-01'
        # time units; confirm and consider removing.
        ghrsst_ospo_date_since_dt = datetime.datetime.strptime(
            '1981-01-01 00:00:00','%Y-%m-%d %H:%M:%S'
        )
        # Set every time value to the first stamp plus 12 hours
        prepped_data['time'][:] = prepped_data['time'][:][0] + 43200
        prepped_data.close()
    copy_file(prepped_file, dest_file)
def get_model_file(valid_time_dt, init_time_dt, forecast_hour,
                   source_file_format, dest_file_format):
    """! Get a model file and save it in the specified destination.

    Production sources that need special handling are routed to the
    matching prep routine; anything else is symlinked into place.

    Args:
        valid_time_dt      - valid time (datetime)
        init_time_dt       - initialization time (datetime)
        forecast_hour      - forecast hour (string)
        source_file_format - source file format (string)
        dest_file_format   - destination file format (string)

    Returns:
    """
    dest_file = format_filler(dest_file_format, valid_time_dt,
                              init_time_dt, forecast_hour, {})
    if os.path.exists(dest_file):
        return
    source_file = format_filler(source_file_format, valid_time_dt,
                                init_time_dt, forecast_hour, {})
    # Ordered dispatch: the first path fragment found in the source path
    # selects the production-specific prep routine and its method
    prep_dispatch = [
        ('dcom/navgem', prep_prod_fnmoc_file, 'full'),
        ('wgrbbul/jma_', prep_prod_jma_file, 'full'),
        ('wgrbbul/ecmwf', prep_prod_ecmwf_file, 'full'),
        ('wgrbbul/ukmet_hires', prep_prod_ukmet_file, 'full'),
        ('qpf_verif/jma', prep_prod_jma_file, 'precip'),
        ('qpf_verif/UWD', prep_prod_ecmwf_file, 'precip'),
        ('qpf_verif/ukmo', prep_prod_ukmet_file, 'precip'),
        ('qpf_verif/dwd', prep_prod_dwd_file, 'precip'),
        ('qpf_verif/METFRA', prep_prod_metfra_file, 'precip'),
    ]
    for path_fragment, prep_func, prep_method in prep_dispatch:
        if path_fragment in source_file:
            prep_func(source_file, dest_file, forecast_hour, prep_method)
            return
    # No special handling required: link the source into place
    if os.path.exists(source_file):
        print("Linking "+source_file+" to "+dest_file)
        os.symlink(source_file, dest_file)
    else:
        print("WARNING: "+source_file+" DOES NOT EXIST")
def get_truth_file(valid_time_dt, source_file_format, dest_file_format):
    """! Get a truth (observation/analysis) file and save it in the
         specified destination.

    Truth files verify at their own valid time, so the valid time is
    passed as both the valid and initialization time to format_filler.

    Args:
        valid_time_dt      - valid time (datetime)
        source_file_format - source file format (string)
        dest_file_format   - destination file format (string)

    Returns:
    """
    # Bug fix: the forecast-hour argument was previously passed as the
    # list ['anl'] instead of the string 'anl' that format_filler's
    # lead handling expects (compare get_model_file, which passes a
    # string). Harmless for templates without lead tokens, wrong
    # otherwise.
    dest_file = format_filler(dest_file_format, valid_time_dt,
                              valid_time_dt, 'anl', {})
    if not os.path.exists(dest_file):
        source_file = format_filler(source_file_format, valid_time_dt,
                                    valid_time_dt, 'anl', {})
        if os.path.exists(source_file):
            print("Linking "+source_file+" to "+dest_file)
            os.symlink(source_file, dest_file)
        else:
            print("WARNING: "+source_file+" DOES NOT EXIST")
- +'f{lead?fmt=%3H}') - if job_dict['VERIF_CASE'] == 'grid2grid': - if job_dict['VERIF_TYPE'] == 'pres_levs' \ - and job_dict['job_name'] == 'GeoHeightAnom': - if init_date_dt.strftime('%H') in ['00', '12'] \ - and fhr % 24 == 0: - fhr_check_dict[str(fhr)]['file1'] = { - 'valid_date': valid_date_dt, - 'init_date': init_date_dt, - 'forecast_hour': str(fhr) - } - fhr_check_dict[str(fhr)]['file2'] = { - 'valid_date': valid_date_dt, - 'init_date': (valid_date_dt - -datetime.timedelta(hours=fhr-12)), - 'forecast_hour': str(fhr-12) - } - elif job_dict['VERIF_TYPE'] in ['sea_ice', 'sst']: - fhr_avg_end = fhr - fhr_avg_start = fhr-24 - fhr_in_avg = fhr_avg_start - nf = 0 - while fhr_in_avg <= fhr_avg_end: - fhr_check_dict[str(fhr)]['file'+str(nf+1)] = { - 'valid_date': valid_date_dt, - 'init_date': valid_date_dt-datetime.timedelta( - hours=fhr_in_avg - ), - 'forecast_hour': str(fhr_in_avg) - } - nf+=1 - fhr_in_avg+=int(job_dict['fhr_inc']) - else: - fhr_check_dict[str(fhr)]['file1'] = { - 'valid_date': valid_date_dt, - 'init_date': init_date_dt, - 'forecast_hour': str(fhr) - } - if job_dict['VERIF_CASE'] == 'grid2obs': - if job_dict['VERIF_TYPE'] == 'ptype': - fhr_check_dict[str(fhr)]['file1'] = { - 'valid_date': valid_date_dt, - 'init_date': init_date_dt, - 'forecast_hour': str(fhr) - } - elif job_dict['JOB_GROUP'] == 'assemble_data': - if job_dict['VERIF_CASE'] == 'grid2grid': - if job_dict['VERIF_TYPE'] in ['precip_accum24hr', - 'precip_accum3hr']: - model_file_format = os.path.join(verif_case_dir, 'data', - model, model+'.precip.' - +'{init?fmt=%Y%m%d%H}.' - +'f{lead?fmt=%3H}') - elif job_dict['VERIF_TYPE'] == 'pres_levs' \ - and job_dict['job_name'] == 'DailyAvg_GeoHeightAnom': - model_file_format = os.path.join(verif_case_dir, - 'METplus_output', - job_dict['RUN']+'.' 
- +'{valid?fmt=%Y%m%d}', - job_dict['MODEL'], - job_dict['VERIF_CASE'], - 'anomaly_' - +job_dict['VERIF_TYPE']+'_' - +job_dict['job_name']\ - .replace('DailyAvg_', '') - +'_init' - +'{init?fmt=%Y%m%d%H}_' - +'fhr{lead?fmt=%3H}.nc') - elif job_dict['VERIF_TYPE'] in ['sea_ice', 'sst']: - model_file_format = os.path.join(verif_case_dir, - 'METplus_output', - job_dict['RUN']+'.' - +'{valid?fmt=%Y%m%d}', - job_dict['MODEL'], - job_dict['VERIF_CASE'], - 'grid_stat_' - +job_dict['VERIF_TYPE']+'_' - +job_dict['job_name']\ - .replace('DailyAvg_', '') - +'_{lead?fmt=%2H}0000L_' - +'{valid?fmt=%Y%m%d}_' - +'{valid?fmt=%H}0000V_' - +'pairs.nc') - else: - model_file_format = os.path.join(verif_case_dir, 'data', - model, model - +'.{init?fmt=%Y%m%d%H}.' - +'f{lead?fmt=%3H}') - if job_dict['VERIF_TYPE'] in ['precip_accum24hr', - 'precip_accum3hr']: - precip_accum = int( - job_dict['VERIF_TYPE'].replace('precip_accum','')\ - .replace('hr','') - ) - fhr_in_accum_list = [str(fhr)] - if job_dict['MODEL_accum'][0] == '{': #continuous - if fhr-precip_accum > 0: - fhr_in_accum_list.append(str(fhr-precip_accum)) - elif int(job_dict['MODEL_accum']) < precip_accum: - nfiles_in_accum = int( - precip_accum/int(job_dict['MODEL_accum']) - ) - nf = 1 - while nf <= nfiles_in_accum: - fhr_nf = fhr - ((nf-1)*int(job_dict['MODEL_accum'])) - if fhr_nf > 0: - fhr_in_accum_list.append(str(fhr_nf)) - nf+=1 - for fhr_in_accum in fhr_in_accum_list: - file_num = fhr_in_accum_list.index(fhr_in_accum)+1 - fhr_check_dict[str(fhr)]['file'+str(file_num)] = { - 'valid_date': valid_date_dt, - 'init_date': init_date_dt, - 'forecast_hour': str(fhr_in_accum) - } - elif job_dict['VERIF_TYPE'] == 'snow': - fhr_check_dict[str(fhr)]['file1'] = { - 'valid_date': valid_date_dt, - 'init_date': init_date_dt, - 'forecast_hour': str(fhr) - } - fhr_check_dict[str(fhr)]['file2'] = { - 'valid_date': valid_date_dt, - 'init_date': init_date_dt, - 'forecast_hour': str(fhr-24) - } - else: - fhr_check_dict[str(fhr)]['file1'] = { - 
'valid_date': valid_date_dt, - 'init_date': init_date_dt, - 'forecast_hour': str(fhr) - } - elif job_dict['VERIF_CASE'] == 'grid2obs': - model_file_format = os.path.join(verif_case_dir, 'data', - model, model - +'.{init?fmt=%Y%m%d%H}.' - +'f{lead?fmt=%3H}') - if job_dict['VERIF_TYPE'] == 'sfc' \ - and job_dict['job_name'] == 'TempAnom2m': - fhr_check_dict[str(fhr)]['file1'] = { - 'valid_date': valid_date_dt, - 'init_date': init_date_dt, - 'forecast_hour': str(fhr) - } - fhr_check_dict[str(fhr)]['file2'] = { - 'valid_date': valid_date_dt, - 'init_date': (valid_date_dt - -datetime.timedelta(hours=fhr-12)), - 'forecast_hour': str(fhr-12) - } - elif job_dict['VERIF_TYPE'] == 'ptype': - fhr_check_dict[str(fhr)]['file1'] = { - 'valid_date': valid_date_dt, - 'init_date': init_date_dt, - 'forecast_hour': str(fhr) - } - elif job_dict['JOB_GROUP'] == 'generate_stats': - if job_dict['VERIF_CASE'] == 'grid2grid': - if job_dict['VERIF_TYPE'] == 'pres_levs' \ - and job_dict['job_name'] == 'DailyAvg_GeoHeightAnom': - model_file_format = os.path.join( - verif_case_dir, 'METplus_output', - job_dict['RUN']+'.{valid?fmt=%Y%m%d}', - model, job_dict['VERIF_CASE'], 'daily_avg_' - +job_dict['VERIF_TYPE']+'_'+job_dict['job_name'] - +'_init{init?fmt=%Y%m%d%H}_' - +'valid{valid_shift?fmt=%Y%m%d%H?shift=-12}' - +'to{valid?fmt=%Y%m%d%H}.nc' - ) - elif job_dict['VERIF_TYPE'] == 'pres_levs' \ - and job_dict['job_name'] == 'WindShear': - model_file_format = os.path.join( - verif_case_dir, 'METplus_output', - job_dict['RUN']+'.{valid?fmt=%Y%m%d}', - model, job_dict['VERIF_CASE'], 'wind_shear_' - +job_dict['VERIF_TYPE']+'_'+job_dict['job_name'] - +'_init{init?fmt=%Y%m%d%H}_fhr{lead?fmt=%3H}.nc' - ) - elif job_dict['VERIF_TYPE'] in ['precip_accum24hr', - 'precip_accum3hr']: - precip_accum = (job_dict['VERIF_TYPE']\ - .replace('precip_accum','')) - model_file_format = os.path.join( - verif_case_dir, 'METplus_output', - job_dict['RUN']+'.{valid?fmt=%Y%m%d}', - model, job_dict['VERIF_CASE'], 
'pcp_combine_' - +job_dict['VERIF_TYPE']+'_'+precip_accum+'Accum_init' - +'{init?fmt=%Y%m%d%H}_fhr{lead?fmt=%3H}.nc' - ) - elif job_dict['VERIF_TYPE'] == 'sea_ice': - model_file_format = os.path.join( - verif_case_dir, 'METplus_output', - job_dict['RUN']+'.{valid?fmt=%Y%m%d}', - model, job_dict['VERIF_CASE'], 'daily_avg_' - +job_dict['VERIF_TYPE']+'_'+job_dict['job_name'] - +'_init{init?fmt=%Y%m%d%H}_' - +'valid{valid_shift?fmt=%Y%m%d%H?shift=-24}' - +'to{valid?fmt=%Y%m%d%H}.nc' - ) - elif job_dict['VERIF_TYPE'] == 'snow': - model_file_format = os.path.join( - verif_case_dir, 'METplus_output', - job_dict['RUN']+'.{valid?fmt=%Y%m%d}', - model, job_dict['VERIF_CASE'], 'pcp_combine_' - +job_dict['VERIF_TYPE']+'_24hrAccum_' - +job_dict['file_name_var']+'_init' - +'{init?fmt=%Y%m%d%H}_fhr{lead?fmt=%3H}.nc' - ) - elif job_dict['VERIF_TYPE'] == 'sst': - model_file_format = os.path.join( - verif_case_dir, 'METplus_output', - job_dict['RUN']+'.{valid?fmt=%Y%m%d}', - model, job_dict['VERIF_CASE'], 'daily_avg_' - +job_dict['VERIF_TYPE']+'_'+job_dict['job_name'] - +'_init{init?fmt=%Y%m%d%H}_' - +'valid{valid_shift?fmt=%Y%m%d%H?shift=-24}' - +'to{valid?fmt=%Y%m%d%H}.nc' - ) - else: - model_file_format = os.path.join( - verif_case_dir, 'data', model, - model+'.{init?fmt=%Y%m%d%H}.f{lead?fmt=%3H}' - ) - elif job_dict['VERIF_CASE'] == 'grid2obs': - if job_dict['VERIF_TYPE'] == 'ptype' \ - and job_dict['job_name'] == 'Ptype': - model_file_format = os.path.join(verif_case_dir, - 'METplus_output', - job_dict['RUN']+'.' - +'{valid?fmt=%Y%m%d}', - job_dict['MODEL'], - job_dict['VERIF_CASE'], - 'merged_ptype_' - +job_dict['VERIF_TYPE']+'_' - +job_dict['job_name']+'_' - +'init{init?fmt=%Y%m%d%H}_' - +'fhr{lead?fmt=%3H}.nc') - elif job_dict['VERIF_TYPE'] == 'sfc' \ - and job_dict['job_name'] == 'DailyAvg_TempAnom2m': - model_file_format = os.path.join(verif_case_dir, - 'METplus_output', - job_dict['RUN']+'.' 
- +'{valid?fmt=%Y%m%d}', - job_dict['MODEL'], - job_dict['VERIF_CASE'], - 'anomaly_' - +job_dict['VERIF_TYPE']+'_' - +job_dict['job_name']\ - .replace('DailyAvg_', '') - +'_init' - +'{init?fmt=%Y%m%d%H}_' - +'fhr{lead?fmt=%3H}.stat') - else: - model_file_format = os.path.join( - verif_case_dir, 'data', model, - model+'.{init?fmt=%Y%m%d%H}.f{lead?fmt=%3H}' - ) - fhr_check_dict[str(fhr)]['file1'] = { - 'valid_date': valid_date_dt, - 'init_date': init_date_dt, - 'forecast_hour': str(fhr) - } - fhr+=fhr_inc - for fhr_key in list(fhr_check_dict.keys()): - fhr_key_files_exist_list = [] - for fhr_fileN_key in list(fhr_check_dict[fhr_key].keys()): - fhr_fileN = format_filler( - model_file_format, - fhr_check_dict[fhr_key][fhr_fileN_key]['valid_date'], - fhr_check_dict[fhr_key][fhr_fileN_key]['init_date'], - fhr_check_dict[fhr_key][fhr_fileN_key]['forecast_hour'], - {} - ) - if os.path.exists(fhr_fileN): - fhr_key_files_exist_list.append(True) - if job_dict['JOB_GROUP'] == 'reformat_data' \ - and job_dict['job_name'] in ['GeoHeightAnom', - 'Concentration', - 'SST']: - fhr_list.append( - fhr_check_dict[fhr_key][fhr_fileN_key]\ - ['forecast_hour'] - ) - elif job_dict['JOB_GROUP'] == 'assemble_data' \ - and job_dict['job_name'] in ['TempAnom2m']: - fhr_list.append( - fhr_check_dict[fhr_key][fhr_fileN_key]\ - ['forecast_hour'] - ) - else: - fhr_key_files_exist_list.append(False) - if all(x == True for x in fhr_key_files_exist_list) \ - and len(fhr_key_files_exist_list) > 0: - fhr_list.append(fhr_key) - fhr_list = list( - np.asarray(np.unique(np.asarray(fhr_list, dtype=int)),dtype=str) - ) - # UKMET data doesn't have RH for fhr 132 or 144 - if job_dict['MODEL'] == 'ukmet' \ - and job_dict['VERIF_CASE'] == 'grid2obs' \ - and job_dict['VERIF_TYPE'] == 'pres_levs' \ - and job_dict['job_name'] == 'RelHum': - for fhr_rm in ['132', '144']: - if fhr_rm in fhr_list: - fhr_list.remove(fhr_rm) - if len(fhr_list) != 0: - model_files_exist = True - else: - model_files_exist = False - return 
def check_truth_files(job_dict):
    """! Check that the observation/truth files a job needs exist.

         Builds the list of truth files implied by the job's
         JOB_GROUP/VERIF_CASE/VERIF_TYPE settings and checks each
         for existence on disk.

         Args:
             job_dict - dictionary containing settings
                        job is running with (strings)

         Returns:
             all_truth_file_exist - if all needed truth files
                                    exist or not (boolean); False when
                                    no truth files apply to this job
    """
    valid_date_dt = datetime.datetime.strptime(
        job_dict['DATE']+job_dict['valid_hr_start'],
        '%Y%m%d%H'
    )
    verif_case_dir = os.path.join(
        job_dict['DATA'], job_dict['VERIF_CASE']+'_'+job_dict['STEP']
    )
    truth_file_list = []
    if job_dict['JOB_GROUP'] == 'reformat_data':
        if job_dict['VERIF_CASE'] == 'grid2grid':
            if job_dict['VERIF_TYPE'] == 'pres_levs':
                # Model analysis used as its own truth
                model_truth_file = os.path.join(
                    verif_case_dir, 'data', job_dict['MODEL'],
                    job_dict['MODEL']+'.'+valid_date_dt.strftime('%Y%m%d%H')
                    +'.truth'
                )
                truth_file_list.append(model_truth_file)
        elif job_dict['VERIF_CASE'] == 'grid2obs':
            if job_dict['VERIF_TYPE'] in ['pres_levs', 'sfc', 'ptype'] \
                    and 'Prepbufr' in job_dict['job_name']:
                # e.g. job_name 'PrepbufrGFS' -> prepbufr type 'gfs'
                prepbufr_name = (job_dict['job_name'].replace('Prepbufr', '')\
                                 .lower())
                prepbufr_file = os.path.join(
                    verif_case_dir, 'data', 'prepbufr_'+prepbufr_name,
                    'prepbufr.'+prepbufr_name+'.'
                    +valid_date_dt.strftime('%Y%m%d%H')
                )
                truth_file_list.append(prepbufr_file)
    elif job_dict['JOB_GROUP'] == 'assemble_data':
        if job_dict['VERIF_CASE'] == 'grid2grid':
            if job_dict['VERIF_TYPE'] == 'precip_accum24hr' \
                    and job_dict['job_name'] == '24hrCCPA':
                # 24hr accumulation is assembled from four 6-hourly CCPA
                # files ending at the valid time
                nccpa_files = 4
                n = 1
                while n <= nccpa_files:
                    nccpa_file = os.path.join(
                        verif_case_dir, 'data', 'ccpa', 'ccpa.6H.'
                        +(valid_date_dt-datetime.timedelta(hours=(n-1)*6))\
                        .strftime('%Y%m%d%H')
                    )
                    truth_file_list.append(nccpa_file)
                    n+=1
        elif job_dict['VERIF_CASE'] == 'grid2obs':
            if job_dict['VERIF_TYPE'] in ['pres_levs', 'sfc', 'ptype']:
                pb2nc_file = os.path.join(
                    verif_case_dir, 'METplus_output',
                    job_dict['RUN']+'.'+valid_date_dt.strftime('%Y%m%d'),
                    'prepbufr', job_dict['VERIF_CASE'], 'pb2nc_'
                    +job_dict['VERIF_TYPE']+'_'+job_dict['prepbufr']+'_valid'
                    +valid_date_dt.strftime('%Y%m%d%H')+'.nc'
                )
                truth_file_list.append(pb2nc_file)
    elif job_dict['JOB_GROUP'] == 'generate_stats':
        if job_dict['VERIF_CASE'] == 'grid2grid':
            if job_dict['VERIF_TYPE'] == 'pres_levs':
                model_truth_file = os.path.join(
                    verif_case_dir, 'data', job_dict['MODEL'],
                    job_dict['MODEL']+'.'+valid_date_dt.strftime('%Y%m%d%H')
                    +'.truth'
                )
                truth_file_list.append(model_truth_file)
            elif job_dict['VERIF_TYPE'] == 'precip_accum24hr':
                ccpa_file = os.path.join(
                    verif_case_dir, 'METplus_output',
                    job_dict['RUN']+'.'+valid_date_dt.strftime('%Y%m%d'),
                    'ccpa', job_dict['VERIF_CASE'], 'pcp_combine_'
                    +job_dict['VERIF_TYPE']+'_24hrCCPA_valid'
                    +valid_date_dt.strftime('%Y%m%d%H')+'.nc'
                )
                truth_file_list.append(ccpa_file)
            elif job_dict['VERIF_TYPE'] == 'precip_accum3hr':
                ccpa_file = os.path.join(
                    verif_case_dir, 'data', 'ccpa', 'ccpa.3H.'
                    +valid_date_dt.strftime('%Y%m%d%H')
                )
                truth_file_list.append(ccpa_file)
            elif job_dict['VERIF_TYPE'] == 'sea_ice':
                # 24 hour window ending at the valid time
                osi_saf_file = os.path.join(
                    verif_case_dir, 'data', 'osi_saf',
                    'osi_saf.multi.'
                    +(valid_date_dt-datetime.timedelta(hours=24))\
                    .strftime('%Y%m%d%H')
                    +'to'+valid_date_dt.strftime('%Y%m%d%H')+'_G004.nc'
                )
                truth_file_list.append(osi_saf_file)
            elif job_dict['VERIF_TYPE'] == 'snow':
                nohrsc_file = os.path.join(
                    verif_case_dir, 'data', 'nohrsc',
                    'nohrsc.24H.'+valid_date_dt.strftime('%Y%m%d%H')
                )
                truth_file_list.append(nohrsc_file)
            elif job_dict['VERIF_TYPE'] == 'sst':
                ghrsst_ospo_file = os.path.join(
                    verif_case_dir, 'data', 'ghrsst_ospo',
                    'ghrsst_ospo.'
                    +(valid_date_dt-datetime.timedelta(hours=24))\
                    .strftime('%Y%m%d%H')
                    +'to'+valid_date_dt.strftime('%Y%m%d%H')+'.nc'
                )
                truth_file_list.append(ghrsst_ospo_file)
        elif job_dict['VERIF_CASE'] == 'grid2obs':
            if job_dict['VERIF_TYPE'] in ['pres_levs', 'sfc', 'ptype']:
                pb2nc_file = os.path.join(
                    verif_case_dir, 'METplus_output',
                    job_dict['RUN']+'.'+valid_date_dt.strftime('%Y%m%d'),
                    'prepbufr', job_dict['VERIF_CASE'], 'pb2nc_'
                    +job_dict['VERIF_TYPE']+'_'+job_dict['prepbufr']+'_valid'
                    +valid_date_dt.strftime('%Y%m%d%H')+'.nc'
                )
                truth_file_list.append(pb2nc_file)
    truth_files_exist_list = []
    for truth_file in truth_file_list:
        truth_files_exist_list.append(os.path.exists(truth_file))
    # An empty list means no truth files applied: report False, matching
    # the original explicit len() > 0 guard
    all_truth_file_exist = (
        len(truth_files_exist_list) > 0
        and all(truth_files_exist_list)
    )
    return all_truth_file_exist
def check_stat_files(job_dict):
    """! Check for MET .stat files

         Args:
             job_dict - dictionary containing settings
                        job is running with (strings)

         Returns:
             stat_files_exist - if .stat files
                                exist or not (boolean)
    """
    # Directory where MET wrote this model's daily .stat output
    search_dir = os.path.join(
        job_dict['DATA'], job_dict['VERIF_CASE']+'_'+job_dict['STEP'],
        'METplus_output', job_dict['RUN']+'.'+job_dict['DATE'],
        job_dict['MODEL'], job_dict['VERIF_CASE']
    )
    found_stat_files = glob.glob(os.path.join(search_dir, '*.stat'))
    stat_files_exist = len(found_stat_files) != 0
    return stat_files_exist

def get_obs_valid_hrs(obs):
    """! This returns the valid hour start, end, and increment
         information for a given observation

         Args:
             obs - observation name (string)

         Returns:
             valid_hr_start - starting valid hour (integer)
             valid_hr_end   - ending valid hour (integer)
             valid_hr_inc   - valid hour increment (integer)
    """
    # (start, end, increment) windows per supported observation source
    obs_hour_windows = {
        '24hrCCPA': (12, 12, 24),
        '3hrCCPA': (0, 21, 3),
        '24hrNOHRSC': (12, 12, 24),
        'OSI-SAF': (0, 0, 24),
        'GHRSST-MEDIAN': (0, 0, 24),
        'GET_D': (0, 0, 24),
    }
    if obs not in obs_hour_windows:
        print(f"FATAL ERROR: Cannot get {obs} valid hour information")
        sys.exit(1)
    valid_hr_start, valid_hr_end, valid_hr_inc = obs_hour_windows[obs]
    return valid_hr_start, valid_hr_end, valid_hr_inc
def get_off_machine_data(job_file, job_name, job_output, machine, user, queue,
                         account):
    """! This submits a job to the transfer queue
         to get data that does not reside on current machine,
         then polls the scheduler until the job leaves the queue
         or the walltime is exhausted.

         Args:
             job_file   - path to job submission file (string)
             job_name   - job submission name (string)
             job_output - path to write job output (string)
             machine    - machine name (string); 'WCOSS2' uses PBS (qsub),
                          'HERA'/'ORION'/'S4'/'JET' use Slurm (sbatch)
             user       - user name (string)
             queue      - submission queue name (string)
             account    - submission account name (string)

         Returns:
    """
    # Set up job wall time information: walltime_seconds for the polling
    # budget, then walltime rebound as a time object for HH:MM:SS formatting
    walltime = '60'
    walltime_seconds = (
        datetime.timedelta(minutes=int(walltime)).total_seconds()
    )
    walltime = (datetime.datetime.min
                + datetime.timedelta(minutes=int(walltime))).time()
    # Submit job
    print("Submitting "+job_file+" to "+queue)
    print("Output sent to "+job_output)
    # Job script must be executable for the scheduler to run it
    os.chmod(job_file, 0o755)
    if machine == 'WCOSS2':
        os.system('qsub -V -l walltime='+walltime.strftime('%H:%M:%S')+' '
                  +'-q '+queue+' -A '+account+' -o '+job_output+' '
                  +'-e '+job_output+' -N '+job_name+' '
                  +'-l select=1:ncpus=1 '+job_file)
        # Counts this user's queued/running jobs with this name
        job_check_cmd = ('qselect -s QR -u '+user+' '+'-N '
                         +job_name+' | wc -l')
    elif machine in ['HERA', 'ORION', 'S4', 'JET']:
        os.system('sbatch --ntasks=1 --time='
                  +walltime.strftime('%H:%M:%S')+' --partition='+queue+' '
                  +'--account='+account+' --output='+job_output+' '
                  +'--job-name='+job_name+' '+job_file)
        job_check_cmd = ('squeue -u '+user+' -n '+job_name+' '
                         +'-t R,PD -h | wc -l')
    # NOTE(review): for any other machine value, job_check_cmd is never
    # bound and the loop below raises NameError -- confirm callers only
    # pass the machines handled above
    # Poll every 10 seconds until the job count drops to zero or the
    # walltime budget is used up
    # NOTE(review): sleep is assumed to be imported at file top
    # (e.g. from time import sleep) -- not visible in this chunk
    sleep_counter, sleep_checker = 1, 10
    while (sleep_counter*sleep_checker) <= walltime_seconds:
        sleep(sleep_checker)
        print("Walltime checker: "+str(sleep_counter*sleep_checker)+" "
              +"out of "+str(int(walltime_seconds))+" seconds")
        check_job = subprocess.check_output(job_check_cmd, shell=True,
                                            encoding='UTF-8')
        # First character '0' means no matching jobs remain in the queue
        if check_job[0] == '0':
            break
        sleep_counter+=1
def initalize_job_env_dict(verif_type, group,
                           verif_case_step_abbrev_type, job):
    """! This initializes a dictionary of environment variables and their
         values to be set for the job pulling from environment variables
         already set previously

         Args:
             verif_type - string of the use case name
             group      - string of the group name
             verif_case_step_abbrev_type - string of reference name in config
                                           and environment variables
             job        - string of job name

         Returns:
             job_env_dict - dictionary of job settings
    """
    # Variables common to every job group
    job_env_var_list = [
        'machine', 'evs_ver', 'HOMEevs', 'FIXevs', 'USHevs', 'DATA', 'COMROOT',
        'NET', 'RUN', 'VERIF_CASE', 'STEP', 'COMPONENT', 'COMIN', 'evs_run_mode'
    ]
    if group in ['reformat_data', 'assemble_data', 'generate_stats',
                 'gather_stats']:
        # MET needs a scratch directory; create it before the job runs
        os.environ['MET_TMP_DIR'] = os.path.join(
            os.environ['DATA'],
            os.environ['VERIF_CASE']+'_'+os.environ['STEP'],
            'METplus_output', 'tmp'
        )
        if not os.path.exists(os.environ['MET_TMP_DIR']):
            os.makedirs(os.environ['MET_TMP_DIR'])
        # 'COMROOT' is already in the base list above, so it is not
        # repeated here (the duplicate was redundant)
        job_env_var_list.extend(
            ['METPLUS_PATH', 'MET_ROOT', 'MET_TMP_DIR']
        )
    elif group == 'plot':
        job_env_var_list.extend(['MET_ROOT', 'met_ver'])
    job_env_dict = {}
    for env_var in job_env_var_list:
        job_env_dict[env_var] = os.environ[env_var]
    job_env_dict['JOB_GROUP'] = group
    if group in ['reformat_data', 'assemble_data', 'generate_stats', 'plot']:
        job_env_dict['VERIF_TYPE'] = verif_type
        if group == 'plot':
            job_env_dict['job_var'] = job
        else:
            job_env_dict['job_name'] = job
        job_env_dict['fhr_start'] = os.environ[
            verif_case_step_abbrev_type+'_fhr_min'
        ]
        job_env_dict['fhr_end'] = os.environ[
            verif_case_step_abbrev_type+'_fhr_max'
        ]
        job_env_dict['fhr_inc'] = os.environ[
            verif_case_step_abbrev_type+'_fhr_inc'
        ]
        if verif_type in ['pres_levs', 'means', 'sfc', 'ptype']:
            # Valid hours come from the config; increment is the smallest
            # gap between consecutive configured hours
            verif_type_valid_hr_list = (
                os.environ[verif_case_step_abbrev_type+'_valid_hr_list']\
                .split(' ')
            )
            job_env_dict['valid_hr_start'] = (
                verif_type_valid_hr_list[0].zfill(2)
            )
            job_env_dict['valid_hr_end'] = (
                verif_type_valid_hr_list[-1].zfill(2)
            )
            if len(verif_type_valid_hr_list) > 1:
                verif_type_valid_hr_inc = np.min(
                    np.diff(np.array(verif_type_valid_hr_list, dtype=int))
                )
            else:
                verif_type_valid_hr_inc = 24
            job_env_dict['valid_hr_inc'] = str(verif_type_valid_hr_inc)
        else:
            # Observation-driven use cases take their valid-hour window
            # from the relevant truth data set
            if verif_type == 'precip_accum24hr':
                valid_hr_start, valid_hr_end, valid_hr_inc = (
                    get_obs_valid_hrs('24hrCCPA')
                )
            elif verif_type == 'precip_accum3hr':
                valid_hr_start, valid_hr_end, valid_hr_inc = (
                    get_obs_valid_hrs('3hrCCPA')
                )
            elif verif_type == 'snow':
                valid_hr_start, valid_hr_end, valid_hr_inc = (
                    get_obs_valid_hrs('24hrNOHRSC')
                )
            elif verif_type == 'sea_ice':
                valid_hr_start, valid_hr_end, valid_hr_inc = (
                    get_obs_valid_hrs('OSI-SAF')
                )
            elif verif_type == 'sst':
                valid_hr_start, valid_hr_end, valid_hr_inc = (
                    get_obs_valid_hrs('GHRSST-MEDIAN')
                )
            else:
                # NOTE(review): increment 23 looks unusual next to the
                # 24-hour windows above -- confirm this fallback is intended
                valid_hr_start, valid_hr_end, valid_hr_inc = 12, 12, 23
            job_env_dict['valid_hr_start'] = str(valid_hr_start).zfill(2)
            job_env_dict['valid_hr_end'] = str(valid_hr_end).zfill(2)
            job_env_dict['valid_hr_inc'] = str(valid_hr_inc)
        verif_type_init_hr_list = (
            os.environ[verif_case_step_abbrev_type+'_init_hr_list']\
            .split(' ')
        )
        job_env_dict['init_hr_start'] = (
            verif_type_init_hr_list[0].zfill(2)
        )
        job_env_dict['init_hr_end'] = (
            verif_type_init_hr_list[-1].zfill(2)
        )
        if len(verif_type_init_hr_list) > 1:
            verif_type_init_hr_inc = np.min(
                np.diff(np.array(verif_type_init_hr_list, dtype=int))
            )
        else:
            verif_type_init_hr_inc = 24
        job_env_dict['init_hr_inc'] = str(verif_type_init_hr_inc)
    return job_env_dict
def get_plot_dates(logger, date_type, start_date, end_date,
                   valid_hr_start, valid_hr_end, valid_hr_inc,
                   init_hr_start, init_hr_end, init_hr_inc,
                   forecast_hour):
    """! This builds the dates to include in plotting based on user
         configurations

         Args:
             logger         - logger object
             date_type      - type of date to plot (string: VALID or INIT)
             start_date     - plotting start date (string, format: YYYYmmdd)
             end_date       - plotting end date (string, format: YYYYmmdd)
             valid_hr_start - starting valid hour (string)
             valid_hr_end   - ending valid hour (string)
             valid_hr_inc   - valid hour increment (string)
             init_hr_start  - starting initialization hour (string)
             init_hr_end    - ending initialization hour (string)
             init_hr_inc    - initialization hour increment (string)
             forecast_hour  - forecast hour (string)

         Returns:
             valid_dates - array of valid dates (datetime)
             init_dates  - array of initialization dates (datetime)
    """
    # Anchor the date range on whichever date type drives the plot
    if date_type == 'VALID':
        range_start = datetime.datetime.strptime(
            start_date+valid_hr_start, '%Y%m%d%H'
        )
        range_end = datetime.datetime.strptime(
            end_date+valid_hr_end, '%Y%m%d%H'
        )
        range_step = datetime.timedelta(hours=int(valid_hr_inc))
    elif date_type == 'INIT':
        range_start = datetime.datetime.strptime(
            start_date+init_hr_start, '%Y%m%d%H'
        )
        range_end = datetime.datetime.strptime(
            end_date+init_hr_end, '%Y%m%d%H'
        )
        range_step = datetime.timedelta(hours=int(init_hr_inc))
    anchor_dates = (
        np.arange(range_start, range_end+range_step, range_step)\
        .astype(datetime.datetime)
    )
    # Derive the companion array by shifting by the forecast lead
    lead_offset = datetime.timedelta(hours=int(forecast_hour))
    if date_type == 'VALID':
        valid_dates = anchor_dates
        init_dates = valid_dates - lead_offset
    elif date_type == 'INIT':
        init_dates = anchor_dates
        valid_dates = init_dates + lead_offset
    # Remove entries whose valid hour was not requested
    requested_valid_hrs = [
        str(hr).zfill(2) for hr in range(int(valid_hr_start),
                                         int(valid_hr_end)+int(valid_hr_inc),
                                         int(valid_hr_inc))
    ]
    drop_idx_list = [
        idx for idx, date in enumerate(valid_dates)
        if date.strftime('%H') not in requested_valid_hrs
    ]
    valid_dates = np.delete(valid_dates, drop_idx_list)
    init_dates = np.delete(init_dates, drop_idx_list)
    # Remove entries whose initialization hour was not requested
    requested_init_hrs = [
        str(hr).zfill(2) for hr in range(int(init_hr_start),
                                         int(init_hr_end)+int(init_hr_inc),
                                         int(init_hr_inc))
    ]
    drop_idx_list = [
        idx for idx, date in enumerate(init_dates)
        if date.strftime('%H') not in requested_init_hrs
    ]
    valid_dates = np.delete(valid_dates, drop_idx_list)
    init_dates = np.delete(init_dates, drop_idx_list)
    return valid_dates, init_dates

def get_met_line_type_cols(logger, met_root, met_version, met_line_type):
    """! Get the MET columns for a specific line type and MET
         version

         Args:
             logger        - logger object
             met_root      - path to MET (string)
             met_version   - MET version number (string)
             met_line_type - MET line type (string)

         Returns:
             met_version_line_type_col_list - list of MET version
                                              line type columns (strings)
    """
    # Column tables are keyed by the major.minor version only
    if met_version.count('.') == 2:
        met_minor_version = met_version.rpartition('.')[0]
    elif met_version.count('.') == 1:
        met_minor_version = met_version
    met_minor_version_col_file = os.path.join(
        met_root, 'share', 'met', 'table_files',
        'met_header_columns_V'+met_minor_version+'.txt'
    )
    if not os.path.exists(met_minor_version_col_file):
        logger.error(f"FATAL ERROR: {met_minor_version_col_file} DOES NOT EXISTS, "
                     +"cannot determine MET data column structure")
        sys.exit(1)
    # First line mentioning the line type carries its column names after
    # the last ' : ' separator
    with open(met_minor_version_col_file) as col_fh:
        for table_line in col_fh:
            if met_line_type in table_line:
                line_type_cols = table_line.split(' : ')[-1]
                break
    met_version_line_type_col_list = (
        line_type_cols.replace('\n', '').split(' ')
    )
    return met_version_line_type_col_list
def format_thresh(thresh):
    """! Format threshold with letter and symbol options

         Args:
             thresh - the threshold (string), in either letter
                      (e.g. 'ge10') or symbol (e.g. '>=10') form

         Return:
             thresh_symbol - threshold with symbols (string)
             thresh_letter - threshold with letters (string)
    """
    # Letter codes -> comparison symbols (a pure-symbol input is unchanged)
    thresh_symbol = (
        thresh.replace('ge', '>=').replace('gt', '>')\
        .replace('eq', '==').replace('ne', '!=')\
        .replace('le', '<=').replace('lt', '<')
    )
    # Comparison symbols -> letter codes; two-character operators are
    # replaced before their one-character prefixes ('>=' before '>')
    thresh_letter = (
        thresh.replace('>=', 'ge').replace('>', 'gt')\
        .replace('==', 'eq').replace('!=', 'ne')\
        .replace('<=', 'le').replace('<', 'lt')
    )
    return thresh_symbol, thresh_letter

def condense_model_stat_files(logger, input_dir, output_file, model, obs,
                              grid, vx_mask, fcst_var_name, obs_var_name,
                              line_type):
    """! Condense the individual date model stat file and
         thin out unneeded data

         Args:
             logger        - logger object
             input_dir     - path to input directory (string)
             output_file   - path to output file (string)
             model         - model name (string)
             obs           - observation name (string)
             grid          - verification grid (string)
             vx_mask       - verification masking region (string)
             fcst_var_name - forecast variable name (string)
             obs_var_name  - observation variable name (string)
             line_type     - MET line type (string)

         Returns:
    """
    model_stat_files_wildcard = os.path.join(input_dir, model, model+'_*.stat')
    model_stat_files = glob.glob(model_stat_files_wildcard, recursive=True)
    if len(model_stat_files) == 0:
        logger.warning(f"NO STAT FILES IN MATCHING "
                       +f"{model_stat_files_wildcard}")
    else:
        # Only build the condensed file once; later calls reuse it
        if not os.path.exists(output_file):
            logger.debug(f"Condensing down stat files matching "
                         +f"{model_stat_files_wildcard}")
            # Reuse the header line from the first stat file as the
            # header of the condensed output
            with open(model_stat_files[0]) as msf:
                met_header_cols = msf.readline()
            all_grep_output = ''
            # Chain of greps keeps only rows matching every requested
            # field; the trailing space anchors whole-token matches
            grep_opts = (
                ' | grep "'+obs+' "'
                +' | grep "'+grid+' "'
                +' | grep "'+vx_mask+' "'
                +' | grep "'+fcst_var_name+' "'
                +' | grep "'+obs_var_name+' "'
                +' | grep "'+line_type+' "'
            )
            for model_stat_file in model_stat_files:
                logger.debug(f"Getting data from {model_stat_file}")
                # NOTE(review): command is built by string concatenation and
                # run with shell=True; arguments are assumed to be trusted
                # internal names, not user input -- confirm
                ps = subprocess.Popen(
                    'grep -R "'+model+' " '+model_stat_file+grep_opts,
                    shell=True, stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT, encoding='UTF-8'
                )
                logger.debug(f"Ran {ps.args}")
                all_grep_output = all_grep_output+ps.communicate()[0]
            logger.debug(f"Condensed {model} .stat file at "
                         +f"{output_file}")
            with open(output_file, 'w') as f:
                f.write(met_header_cols+all_grep_output)
def build_df(logger, input_dir, output_dir, model_info_dict,
             met_info_dict, fcst_var_name, fcst_var_level, fcst_var_thresh,
             obs_var_name, obs_var_level, obs_var_thresh, line_type,
             grid, vx_mask, interp_method, interp_points, date_type, dates,
             met_format_valid_dates, fhr):
    """! Build the data frame for all model stats.
         Read the model parse file; if it doesn't exist,
         parse the model file for needed information and write the file.

         Args:
             logger          - logger object
             input_dir       - path to input directory (string)
             output_dir      - path to output directory (string)
             model_info_dict - model information dictionary (strings)
             met_info_dict   - MET information dictionary (strings)
             fcst_var_name   - forecast variable name (string)
             fcst_var_level  - forecast variable level (string)
             fcst_var_thresh - forecast variable threshold (string)
             obs_var_name    - observation variable name (string)
             obs_var_level   - observation variable level (string)
             obs_var_thresh  - observation variable threshold (string)
             line_type       - MET line type (string)
             grid            - verification grid (string)
             vx_mask         - verification masking region (string)
             interp_method   - interpolation method (string)
             interp_points   - interpolation points (string)
             date_type       - type of date (string, VALID or INIT)
             dates           - array of dates (datetime)
             met_format_valid_dates - list of valid dates formatted
                                      like they are in MET stat files
             fhr             - forecast hour (string)

         Returns:
             all_model_df - dataframe indexed by (model, valid date) with
                            one row per requested valid date (NaN-filled
                            where no stat line matched)
    """
    met_version_line_type_col_list = get_met_line_type_cols(
        logger, met_info_dict['root'], met_info_dict['version'], line_type
    )
    for model_num in list(model_info_dict.keys()):
        model_num_name = (
            model_num+'/'+model_info_dict[model_num]['name']
            +'/'+model_info_dict[model_num]['plot_name']
        )
        model_num_df_index = pd.MultiIndex.from_product(
            [[model_num_name], met_format_valid_dates],
            names=['model', 'valid_dates']
        )
        model_dict = model_info_dict[model_num]
        condensed_model_file = os.path.join(
            input_dir, model_num+'_'+model_dict['name']+'.stat'
        )
        if len(dates) != 0:
            # Condense once per model; reuse if already present
            if not os.path.exists(condensed_model_file):
                write_condensed_stat_file = True
            else:
                write_condensed_stat_file = False
            if write_condensed_stat_file:
                condense_model_stat_files(
                    logger, input_dir, condensed_model_file, model_dict['name'],
                    model_dict['obs_name'], grid, vx_mask,
                    fcst_var_name, obs_var_name, line_type
                )
            # File name encodes every filter so it is unique per request
            parsed_model_stat_file = os.path.join(
                output_dir,
                'fcst'+model_dict['name']+'_'
                +fcst_var_name+fcst_var_level+fcst_var_thresh+'_'
                +'obs'+model_dict['obs_name']+'_'
                +obs_var_name+obs_var_level+obs_var_thresh+'_'
                +'linetype'+line_type+'_'
                +'grid'+grid+'_'+'vxmask'+vx_mask+'_'
                +'interp'+interp_method+interp_points+'_'
                +date_type.lower()
                +dates[0].strftime('%Y%m%d%H%M%S')+'to'
                +dates[-1].strftime('%Y%m%d%H%M%S')+'_'
                +'fhr'+fhr.zfill(3)
                +'.stat'
            )
            if not os.path.exists(parsed_model_stat_file):
                write_parse_stat_file = True
                read_parse_stat_file = True
            else:
                write_parse_stat_file = False
                read_parse_stat_file = True
        else:
            write_parse_stat_file = False
            read_parse_stat_file = False
        if os.path.exists(condensed_model_file) and line_type == 'MCTC':
            # MCTC has a variable number of contingency-table columns:
            # expand the F[0-9]*_O[0-9]* placeholder using N_CAT (column 25)
            tmp_df = pd.read_csv(
                condensed_model_file, sep=" ", skiprows=1,
                skipinitialspace=True,
                keep_default_na=False, dtype='str', header=None
            )
            if len(tmp_df) > 0:
                ncat = int(tmp_df[25][0])
                new_met_version_line_type_col_list = []
                for col in met_version_line_type_col_list:
                    if col == '(N_CAT)':
                        new_met_version_line_type_col_list.append('N_CAT')
                    elif col == 'F[0-9]*_O[0-9]*':
                        # Emit F1_O1 ... Fncat_Oncat in row-major order
                        fcount = 1
                        ocount = 1
                        totcount = 1
                        while totcount <= ncat*ncat:
                            new_met_version_line_type_col_list.append(
                                'F'+str(fcount)+'_'+'O'+str(ocount)
                            )
                            if ocount < ncat:
                                ocount+=1
                            elif ocount == ncat:
                                ocount = 1
                                fcount+=1
                            totcount+=1
                    else:
                        new_met_version_line_type_col_list.append(col)
                met_version_line_type_col_list = (
                    new_met_version_line_type_col_list
                )
        if write_parse_stat_file:
            if fcst_var_thresh != 'NA':
                fcst_var_thresh_symbol, fcst_var_thresh_letter = (
                    format_thresh(fcst_var_thresh)
                )
            else:
                fcst_var_thresh_symbol = fcst_var_thresh
                # Fixed typo: was fcst_vat_thresh_letter, leaving
                # fcst_var_thresh_letter unbound on this path
                fcst_var_thresh_letter = fcst_var_thresh
            if obs_var_thresh != 'NA':
                obs_var_thresh_symbol, obs_var_thresh_letter = (
                    format_thresh(obs_var_thresh)
                )
            else:
                obs_var_thresh_symbol = obs_var_thresh
                # Fixed typo: was obs_vat_thresh_letter
                obs_var_thresh_letter = obs_var_thresh
            if os.path.exists(condensed_model_file):
                logger.debug(f"Parsing file {condensed_model_file}")
                condensed_model_df = pd.read_csv(
                    condensed_model_file, sep=" ", skiprows=1,
                    skipinitialspace=True, names=met_version_line_type_col_list,
                    keep_default_na=False, dtype='str', header=None
                )
                # Keep only rows matching every requested verification field
                parsed_model_df = condensed_model_df[
                    (condensed_model_df['MODEL'] == model_dict['name'])
                    & (condensed_model_df['DESC'] == grid)
                    & (condensed_model_df['FCST_LEAD'] \
                       == fhr.zfill(2)+'0000')
                    & (condensed_model_df['FCST_VAR'] \
                       == fcst_var_name)
                    & (condensed_model_df['FCST_LEV'] \
                       == fcst_var_level)
                    & (condensed_model_df['OBS_VAR'] \
                       == obs_var_name)
                    & (condensed_model_df['OBS_LEV'] \
                       == obs_var_level)
                    & (condensed_model_df['OBTYPE'] == model_dict['obs_name'])
                    & (condensed_model_df['VX_MASK'] \
                       == vx_mask)
                    & (condensed_model_df['INTERP_MTHD'] \
                       == interp_method)
                    & (condensed_model_df['INTERP_PNTS'] \
                       == interp_points)
                    & (condensed_model_df['FCST_THRESH'] \
                       == fcst_var_thresh_symbol)
                    & (condensed_model_df['OBS_THRESH'] \
                       == obs_var_thresh_symbol)
                    & (condensed_model_df['LINE_TYPE'] \
                       == line_type)
                ]
                parsed_model_df = parsed_model_df[
                    parsed_model_df['FCST_VALID_BEG'].isin(met_format_valid_dates)
                ]
                # Sort chronologically, then restore the MET string format
                parsed_model_df['FCST_VALID_BEG'] = pd.to_datetime(
                    parsed_model_df['FCST_VALID_BEG'], format='%Y%m%d_%H%M%S'
                )
                parsed_model_df = parsed_model_df.sort_values(by='FCST_VALID_BEG')
                parsed_model_df['FCST_VALID_BEG'] = (
                    parsed_model_df['FCST_VALID_BEG'].dt.strftime('%Y%m%d_%H%M%S')
                )
                parsed_model_df.to_csv(
                    parsed_model_stat_file, header=met_version_line_type_col_list,
                    index=None, sep=' ', mode='w'
                )
            if os.path.exists(parsed_model_stat_file):
                logger.debug(f"Parsed {model_dict['name']} file "
                             +f"at {parsed_model_stat_file}")
            else:
                logger.debug(f"Could not create {parsed_model_stat_file}")
        # Start from an all-NaN frame; fill in whatever dates have data
        model_num_df = pd.DataFrame(np.nan, index=model_num_df_index,
                                    columns=met_version_line_type_col_list)
        if read_parse_stat_file:
            if os.path.exists(parsed_model_stat_file):
                logger.debug(f"Reading {parsed_model_stat_file} for "
                             +f"{model_dict['name']}")
                model_stat_file_df = pd.read_csv(
                    parsed_model_stat_file, sep=" ", skiprows=1,
                    skipinitialspace=True, names=met_version_line_type_col_list,
                    na_values=['NA'], header=None
                )
                # Columns before TOTAL are header strings; TOTAL onward
                # are numeric statistics
                df_dtype_dict = {}
                float_idx = met_version_line_type_col_list.index('TOTAL')
                for col in met_version_line_type_col_list:
                    col_idx = met_version_line_type_col_list.index(col)
                    if col_idx < float_idx:
                        df_dtype_dict[col] = str
                    else:
                        df_dtype_dict[col] = np.float64
                model_stat_file_df = model_stat_file_df.astype(df_dtype_dict)
                for valid_date in met_format_valid_dates:
                    model_stat_file_df_valid_date_idx_list = (
                        model_stat_file_df.index[
                            model_stat_file_df['FCST_VALID_BEG'] == valid_date
                        ]
                    ).tolist()
                    if len(model_stat_file_df_valid_date_idx_list) == 0:
                        logger.debug("No data matching valid date "
                                     +f"{valid_date} in"
                                     +f"{parsed_model_stat_file}")
                        continue
                    elif len(model_stat_file_df_valid_date_idx_list) > 1:
                        logger.debug(f"Multiple lines matching valid date "
                                     +f"{valid_date} in "
                                     +f"{parsed_model_stat_file} "
                                     +f"using first one")
                    else:
                        logger.debug(f"One line matching valid date "
                                     +f"{valid_date} in "
                                     +f"{parsed_model_stat_file}")
                    model_num_df.loc[(model_num_name, valid_date)] = (
                        model_stat_file_df.loc\
                        [model_stat_file_df_valid_date_idx_list[0]]\
                        [:]
                    )
            else:
                logger.warning(f"{parsed_model_stat_file} does not exist")
        if model_num == 'model1':
            all_model_df = model_num_df
        else:
            all_model_df = pd.concat([all_model_df, model_num_df])
    return all_model_df
data_df.loc[:]['OSTDEV'] - OSTDEV_NCL = data_df.loc[:]['OSTDEV_NCL'] - OSTDEV_NCU = data_df.loc[:]['OSTDEV_NCU'] - OSTDEV_BCL = data_df.loc[:]['OSTDEV_BCL'] - OSTDEV_BCU = data_df.loc[:]['OSTDEV_BCU'] - PR_CORR = data_df.loc[:]['PR_CORR'] - PR_CORR_NCL = data_df.loc[:]['PR_CORR_NCL'] - PR_CORR_NCU = data_df.loc[:]['PR_CORR_NCU'] - PR_CORR_BCL = data_df.loc[:]['PR_CORR_BCL'] - PR_CORR_BCU = data_df.loc[:]['PR_CORR_BCU'] - SP_CORR = data_df.loc[:]['SP_CORR'] - KT_CORR = data_df.loc[:]['KT_CORR'] - RANKS = data_df.loc[:]['RANKS'] - FRANKS_TIES = data_df.loc[:]['FRANKS_TIES'] - ORANKS_TIES = data_df.loc[:]['ORANKS_TIES'] - ME = data_df.loc[:]['ME'] - ME_NCL = data_df.loc[:]['ME_NCL'] - ME_NCU = data_df.loc[:]['ME_NCU'] - ME_BCL = data_df.loc[:]['ME_BCL'] - ME_BCU = data_df.loc[:]['ME_BCU'] - ESTDEV = data_df.loc[:]['ESTDEV'] - ESTDEV_NCL = data_df.loc[:]['ESTDEV_NCL'] - ESTDEV_NCU = data_df.loc[:]['ESTDEV_NCU'] - ESTDEV_BCL = data_df.loc[:]['ESTDEV_BCL'] - ESTDEV_BCU = data_df.loc[:]['ESTDEV_BCU'] - MBIAS = data_df.loc[:]['MBIAS'] - MBIAS_BCL = data_df.loc[:]['MBIAS_BCL'] - MBIAS_BCU = data_df.loc[:]['MBIAS_BCU'] - MAE = data_df.loc[:]['MAE'] - MAE_BCL = data_df.loc[:]['MAE_BCL'] - MAE_BCU = data_df.loc[:]['MAE_BCU'] - MSE = data_df.loc[:]['MSE'] - MSE_BCL = data_df.loc[:]['MSE_BCL'] - MSE_BCU = data_df.loc[:]['MSE_BCU'] - BCRMSE = data_df.loc[:]['BCRMSE'] - BCRMSE_BCL = data_df.loc[:]['BCRMSE_BCL'] - BCRMSE_BCU = data_df.loc[:]['BCRMSE_BCU'] - RMSE = data_df.loc[:]['RMSE'] - RMSE_BCL = data_df.loc[:]['RMSE_BCL'] - RMSE_BCU = data_df.loc[:]['RMSE_BCU'] - E10 = data_df.loc[:]['E10'] - E10_BCL = data_df.loc[:]['E10_BCL'] - E10_BCU = data_df.loc[:]['E10_BCU'] - E25 = data_df.loc[:]['E25'] - E25_BCL = data_df.loc[:]['E25_BCL'] - E25_BCU = data_df.loc[:]['E25_BCU'] - E50 = data_df.loc[:]['E50'] - E50_BCL = data_df.loc[:]['E50_BCL'] - E50_BCU = data_df.loc[:]['E50_BCU'] - E75 = data_df.loc[:]['E75'] - E75_BCL = data_df.loc[:]['E75_BCL'] - E75_BCU = data_df.loc[:]['E75_BCU'] 
- E90 = data_df.loc[:]['E90'] - E90_BCL = data_df.loc[:]['E90_BCL'] - E90_BCU = data_df.loc[:]['E90_BCU'] - IQR = data_df.loc[:]['IQR'] - IQR_BCL = data_df.loc[:]['IQR_BCL'] - IQR_BCU = data_df.loc[:]['IQR_BCU'] - MAD = data_df.loc[:]['MAD'] - MAD_BCL = data_df.loc[:]['MAD_BCL'] - MAD_BCU = data_df.loc[:]['MAD_BCU'] - ANOM_CORR_NCL = data_df.loc[:]['ANOM_CORR_NCL'] - ANOM_CORR_NCU = data_df.loc[:]['ANOM_CORR_NCU'] - ANOM_CORR_BCL = data_df.loc[:]['ANOM_CORR_BCL'] - ANOM_CORR_BCU = data_df.loc[:]['ANOM_CORR_BCU'] - ME2 = data_df.loc[:]['ME2'] - ME2_BCL = data_df.loc[:]['ME2_BCL'] - ME2_BCU = data_df.loc[:]['ME2_BCU'] - MSESS = data_df.loc[:]['MSESS'] - MSESS_BCL = data_df.loc[:]['MSESS_BCL'] - MSESS_BCU = data_df.loc[:]['MSESS_BCU'] - RMSFA = data_df.loc[:]['RMSFA'] - RMSFA_BCL = data_df.loc[:]['RMSFA_BCL'] - RMSFA_BCU = data_df.loc[:]['RMSFA_BCU'] - RMSOA = data_df.loc[:]['RMSOA'] - RMSOA_BCL = data_df.loc[:]['RMSOA_BCL'] - RMSOA_BCU = data_df.loc[:]['RMSOA_BCU'] - ANOM_CORR_UNCNTR = data_df.loc[:]['ANOM_CORR_UNCNTR'] - ANOM_CORR_UNCNTR_BCL = data_df.loc[:]['ANOM_CORR_UNCNTR_BCL'] - ANOM_CORR_UNCNTR_BCU = data_df.loc[:]['ANOM_CORR_UNCNTR_BCU'] - SI = data_df.loc[:]['SI'] - SI_BCL = data_df.loc[:]['SI_BCL'] - SI_BCU = data_df.loc[:]['SI_BCU'] - elif line_type == 'GRAD': - FGBAR = data_df.loc[:]['FGBAR'] - OGBAR = data_df.loc[:]['OGBAR'] - MGBAR = data_df.loc[:]['MGBAR'] - EGBAR = data_df.loc[:]['EGBAR'] - S1 = data_df.loc[:]['S1'] - S1_OG = data_df.loc[:]['S1_OG'] - FGOG_RATIO = data_df.loc[:]['FGOG_RATIO'] - DX = data_df.loc[:]['DX'] - DY = data_df.loc[:]['DY'] - elif line_type == 'FHO': - F_RATE = data_df.loc[:]['F_RATE'] - H_RATE = data_df.loc[:]['H_RATE'] - O_RATE = data_df.loc[:]['O_RATE'] - elif line_type in ['CTC', 'NBRCTC']: - FY_OY = data_df.loc[:]['FY_OY'] - FY_ON = data_df.loc[:]['FY_ON'] - FN_OY = data_df.loc[:]['FN_OY'] - FN_ON = data_df.loc[:]['FN_ON'] - if line_type == 'CTC': - EC_VALUE = data_df.loc[:]['EC_VALUE'] - elif line_type in ['CTS', 
'NBRCTS']: - BASER = data_df.loc[:]['BASER'] - BASER_NCL = data_df.loc[:]['BASER_NCL'] - BASER_NCU = data_df.loc[:]['BASER_NCU'] - BASER_BCL = data_df.loc[:]['BASER_BCL'] - BASER_BCU = data_df.loc[:]['BASER_BCU'] - FMEAN = data_df.loc[:]['FMEAN'] - FMEAN_NCL = data_df.loc[:]['FMEAN_NCL'] - FMEAN_NCU = data_df.loc[:]['FMEAN_NCU'] - FMEAN_BCL = data_df.loc[:]['FMEAN_BCL'] - FMEAN_BCU = data_df.loc[:]['FMEAN_BCU'] - ACC = data_df.loc[:]['ACC'] - ACC_NCL = data_df.loc[:]['ACC_NCL'] - ACC_NCU = data_df.loc[:]['ACC_NCU'] - ACC_BCL = data_df.loc[:]['ACC_BCL'] - ACC_BCU = data_df.loc[:]['ACC_BCU'] - FBIAS = data_df.loc[:]['FBIAS'] - FBIAS_BCL = data_df.loc[:]['FBIAS_BCL'] - FBIAS_BCU = data_df.loc[:]['FBIAS_BCU'] - PODY = data_df.loc[:]['PODY'] - PODY_NCL = data_df.loc[:]['PODY_NCL'] - PODY_NCU = data_df.loc[:]['PODY_NCU'] - PODY_BCL = data_df.loc[:]['PODY_BCL'] - PODY_BCU = data_df.loc[:]['PODY_BCU'] - PODN = data_df.loc[:]['PODN'] - PODN_NCL = data_df.loc[:]['PODN_NCL'] - PODN_NCU = data_df.loc[:]['PODN_NCU'] - PODN_BCL = data_df.loc[:]['PODN_BCL'] - PODN_BCU = data_df.loc[:]['PODN_BCU'] - POFD = data_df.loc[:]['POFD'] - POFD_NCL = data_df.loc[:]['POFD_NCL'] - POFD_NCU = data_df.loc[:]['POFD_NCU'] - POFD_BCL = data_df.loc[:]['POFD_BCL'] - POFD_BCU = data_df.loc[:]['POFD_BCU'] - FAR = data_df.loc[:]['FAR'] - FAR_NCL = data_df.loc[:]['FAR_NCL'] - FAR_NCU = data_df.loc[:]['FAR_NCU'] - FAR_BCL = data_df.loc[:]['FAR_BCL'] - FAR_BCU = data_df.loc[:]['FAR_BCU'] - CSI = data_df.loc[:]['CSI'] - CSI_NCL = data_df.loc[:]['CSI_NCL'] - CSI_NCU = data_df.loc[:]['CSI_NCU'] - CSI_BCL = data_df.loc[:]['CSI_BCL'] - CSI_BCU = data_df.loc[:]['CSI_BCU'] - GSS = data_df.loc[:]['GSS'] - GSS_BCL = data_df.loc[:]['GSS_BCL'] - GSS_BCU = data_df.loc[:]['GSS_BCU'] - HK = data_df.loc[:]['HK'] - HK_NCL = data_df.loc[:]['HK_NCL'] - HK_NCU = data_df.loc[:]['HK_NCU'] - HK_BCL = data_df.loc[:]['HK_BCL'] - HK_BCU = data_df.loc[:]['HK_BCU'] - HSS = data_df.loc[:]['HSS'] - HSS_BCL = 
data_df.loc[:]['HSS_BCL'] - HSS_BCU = data_df.loc[:]['HSS_BCU'] - ODDS = data_df.loc[:]['ODDS'] - ODDS_NCL = data_df.loc[:]['ODDS_NCL'] - ODDS_NCU = data_df.loc[:]['ODDS_NCU'] - ODDS_BCL = data_df.loc[:]['ODDS_BCL'] - ODDS_BCU = data_df.loc[:]['ODDS_BCU'] - LODDS = data_df.loc[:]['LODDS'] - LODDS_NCL = data_df.loc[:]['LODDS_NCL'] - LODDS_NCU = data_df.loc[:]['LODDS_NCU'] - LODDS_BCL = data_df.loc[:]['LODDS_BCL'] - LODDS_BCU = data_df.loc[:]['LODDS_BCU'] - ORSS = data_df.loc[:]['ORSS'] - ORSS_NCL = data_df.loc[:]['ORSS_NCL'] - ORSS_NCU = data_df.loc[:]['ORSS_NCU'] - ORSS_BCL = data_df.loc[:]['ORSS_BCL'] - ORSS_BCU = data_df.loc[:]['ORSS_BCU'] - EDS = data_df.loc[:]['EDS'] - EDS_NCL = data_df.loc[:]['EDS_NCL'] - EDS_NCU = data_df.loc[:]['EDS_NCU'] - EDS_BCL = data_df.loc[:]['EDS_BCL'] - EDS_BCU = data_df.loc[:]['EDS_BCU'] - SEDS = data_df.loc[:]['SEDS'] - SEDS_NCL = data_df.loc[:]['SEDS_NCL'] - SEDS_NCU = data_df.loc[:]['SEDS_NCU'] - SEDS_BCL = data_df.loc[:]['SEDS_BCL'] - SEDS_BCU = data_df.loc[:]['SEDS_BCU'] - EDI = data_df.loc[:]['EDI'] - EDI_NCL = data_df.loc[:]['EDI_NCL'] - EDI_NCU = data_df.loc[:]['EDI_NCU'] - EDI_BCL = data_df.loc[:]['EDI_BCL'] - EDI_BCU = data_df.loc[:]['EDI_BCU'] - SEDI = data_df.loc[:]['SEDI'] - SEDI_NCL = data_df.loc[:]['SEDI_NCL'] - SEDI_NCU = data_df.loc[:]['SEDI_NCU'] - SEDI_BCL = data_df.loc[:]['SEDI_BCL'] - SEDI_BCU = data_df.loc[:]['SEDI_BCU'] - BAGSS = data_df.loc[:]['BAGSS'] - BAGSS_BCL = data_df.loc[:]['BAGSS_BCL'] - BAGSS_BCU = data_df.loc[:]['BAGSS_BCU'] - if line_type == 'CTS': - EC_VALUE = data_df.loc[:]['EC_VALUE'] - elif line_type == 'MCTC': - F1_O1 = data_df.loc[:]['F1_O1'] - elif line_type == 'NBRCNT': - FBS = data_df.loc[:]['FBS'] - FBS_BCL = data_df.loc[:]['FBS_BCL'] - FBS_BCU = data_df.loc[:]['FBS_BCU'] - FSS = data_df.loc[:]['FSS'] - FSS_BCL = data_df.loc[:]['FSS_BCL'] - FSS_BCU = data_df.loc[:]['FSS_BCU'] - AFSS = data_df.loc[:]['AFSS'] - AFSS_BCL = data_df.loc[:]['AFSS_BCL'] - AFSS_BCU = data_df.loc[:]['AFSS_BCU'] - 
UFSS = data_df.loc[:]['UFSS'] - UFSS_BCL = data_df.loc[:]['UFSS_BCL'] - UFSS_BCU = data_df.loc[:]['UFSS_BCU'] - F_RATE = data_df.loc[:]['F_RATE'] - F_RATE_BCL = data_df.loc[:]['F_RATE_BCL'] - F_RATE_BCU = data_df.loc[:]['F_RATE_BCU'] - O_RATE = data_df.loc[:]['O_RATE'] - O_RATE_BCL = data_df.loc[:]['O_RATE_BCL'] - O_RATE_BCU = data_df.loc[:]['O_RATE_BCU'] - elif line_type == 'VL1L2': - UFBAR = data_df.loc[:]['UFBAR'] - VFBAR = data_df.loc[:]['VFBAR'] - UOBAR = data_df.loc[:]['UOBAR'] - VOBAR = data_df.loc[:]['VOBAR'] - UVFOBAR = data_df.loc[:]['UVFOBAR'] - UVFFBAR = data_df.loc[:]['UVFFBAR'] - UVOOBAR = data_df.loc[:]['UVOOBAR'] - elif line_type == 'VAL1L2': - UFABAR = data_df.loc[:]['UFABAR'] - VFABAR = data_df.loc[:]['VFABAR'] - UOABAR = data_df.loc[:]['UOABAR'] - VOABAR = data_df.loc[:]['VOABAR'] - UVFOABAR = data_df.loc[:]['UVFOABAR'] - UVFFABAR = data_df.loc[:]['UVFFABAR'] - UVOOABAR = data_df.loc[:]['UVOOABAR'] - FA_SPEED_BAR = data_df.loc[:]['FA_SPEED_BAR'] - OA_SPEED_BAR = data_df.loc[:]['OA_SPEED_BAR'] - elif line_type == 'VCNT': - FBAR = data_df.loc[:]['FBAR'] - OBAR = data_df.loc[:]['OBAR'] - FS_RMS = data_df.loc[:]['FS_RMS'] - OS_RMS = data_df.loc[:]['OS_RMS'] - MSVE = data_df.loc[:]['MSVE'] - RMSVE = data_df.loc[:]['RMSVE'] - FSTDEV = data_df.loc[:]['FSTDEV'] - OSTDEV = data_df.loc[:]['OSTDEV'] - FDIR = data_df.loc[:]['FDIR'] - ORDIR = data_df.loc[:]['ODIR'] - FBAR_SPEED = data_df.loc[:]['FBAR_SPEED'] - OBAR_SPEED = data_df.loc[:]['OBAR_SPEED'] - VDIFF_SPEED = data_df.loc[:]['VDIFF_SPEED'] - VDIFF_DIR = data_df.loc[:]['VDIFF_DIR'] - SPEED_ERR = data_df.loc[:]['SPEED_ERR'] - SPEED_ABSERR = data_df.loc[:]['SPEED_ABSERR'] - DIR_ERR = data_df.loc[:]['DIR_ERR'] - DIR_ABSERR = data_df.loc[:]['DIR_ABSERR'] - ANOM_CORR = data_df.loc[:]['ANOM_CORR'] - ANOM_CORR_NCL = data_df.loc[:]['ANOM_CORR_NCL'] - ANOM_CORR_NCU = data_df.loc[:]['ANOM_CORR_NCU'] - ANOM_CORR_BCL = data_df.loc[:]['ANOM_CORR_BCL'] - ANOM_CORR_BCU = data_df.loc[:]['ANOM_CORR_BCU'] - 
ANOM_CORR_UNCNTR = data_df.loc[:]['ANOM_CORR_UNCNTR'] - ANOM_CORR_UNCNTR_BCL = data_df.loc[:]['ANOM_CORR_UNCNTR_BCL'] - ANOM_CORR_UNCNTR_BCU = data_df.loc[:]['ANOM_CORR_UNCNTR_BCU'] - if stat == 'ACC': # Anomaly Correlation Coefficient - if line_type == 'SAL1L2': - stat_df = (FOABAR - FABAR*OABAR) \ - /np.sqrt((FFABAR - FABAR*FABAR)* - (OOABAR - OABAR*OABAR)) - elif line_type in ['CNT', 'VCNT']: - stat_df = ANOM_CORR - elif line_type == 'VAL1L2': - stat_df = UVFOABAR/np.sqrt(UVFFABAR*UVOOABAR) - elif stat in ['BIAS', 'ME']: # Bias/Mean Error - if line_type == 'SL1L2': - stat_df = FBAR - OBAR - elif line_type == 'CNT': - stat_df = ME - elif line_type == 'VL1L2': - stat_df = np.sqrt(UVFFBAR) - np.sqrt(UVOOBAR) - elif stat == 'CSI': # Critical Success Index' - if line_type == 'CTC': - stat_df = FY_OY/(FY_OY + FY_ON + FN_OY) - elif stat == 'F1_O1': # Count of forecast category 1 and observation category 1 - if line_type == 'MCTC': - stat_df = F1_O1 - elif stat in ['ETS', 'GSS']: # Equitable Threat Score/Gilbert Skill Score - if line_type == 'CTC': - TOTAL = FY_OY + FY_ON + FN_OY + FN_ON - C = ((FY_OY + FY_ON)*(FY_OY + FN_OY))/TOTAL - stat_df = (FY_OY - C)/(FY_OY + FY_ON + FN_OY - C) - elif line_type == 'CTS': - stat_df = GSS - elif stat == 'FBAR': # Forecast Mean - if line_type == 'SL1L2': - stat_df = FBAR - elif stat == 'FBIAS': # Frequency Bias - if line_type == 'CTC': - stat_df = (FY_OY + FY_ON)/(FY_OY + FN_OY) - elif line_type == 'CTS': - stat_df = FBIAS - elif stat == 'FSS': # Fraction Skill Score - if line_type == 'NBRCNT': - stat_df = FSS - elif stat == 'FY_OY': # Forecast Yes/Obs Yes - if line_type == 'CTC': - stat_df = FY_OY - elif stat == 'HSS': # Heidke Skill Score - if line_type == 'CTC': - TOTAL = FY_OY + FY_ON + FN_OY + FN_ON - CA = (FY_OY+FY_ON)*(FY_OY+FN_OY) - CB = (FN_OY+FN_ON)*(FY_ON+FN_ON) - C = (CA + CB)/TOTAL - stat_df = (FY_OY + FN_ON - C)/(TOTAL - C) - elif stat == 'OBAR': # Observation Mean - if line_type == 'SL1L2': - stat_df = OBAR - elif stat 
== 'POD': # Probability of Detection - if line_type == 'CTC': - stat_df = FY_OY/(FY_OY + FN_OY) - elif stat == 'RMSE': # Root Mean Square Error - if line_type == 'SL1L2': - stat_df = np.sqrt(FFBAR + OOBAR - 2*FOBAR) - elif line_type == 'CNT': - stat_df = RMSE - elif line_type == 'VL1L2': - stat_df = np.sqrt(UVFFBAR + UVOOBAR - 2*UVFOBAR) - elif stat == 'S1': # S1 - if line_type == 'GRAD': - stat_df = S1 - elif stat == 'SRATIO': # Success Ratio - if line_type == 'CTC': - stat_df = 1 - (FY_ON/(FY_ON + FY_OY)) - else: - logger.error("FATAL ERROR: "+stat+" IS NOT AN OPTION") - sys.exit(1) - idx = 0 - idx_dict = {} - while idx < stat_df.index.nlevels: - idx_dict['index'+str(idx)] = len( - stat_df.index.get_level_values(idx).unique() - ) - idx+=1 - if stat_df.index.nlevels == 1: - stat_array = stat_df.values.reshape( - idx_dict['index0'] - ) - elif stat_df.index.nlevels == 2: - stat_array = stat_df.values.reshape( - idx_dict['index0'], idx_dict['index1'] - ) - return stat_df, stat_array - -def calculate_average(logger, average_method, line_type, stat, df): - """! Calculate average of dataset - - Args: - logger - logger object - average_method - method to use to - calculate the - average (string: - mean, aggregation) - line_type - line type to calculate - stat from - stat - statistic to calculate - (string) - df - dataframe of values - Returns: - """ - average_value = np.nan - if average_method == 'mean': - average_value = np.ma.masked_invalid(df).mean() - elif average_method == 'aggregation': - if not df.isnull().values.all(): - ndays = ( - len(df.loc[:,'TOTAL']) - -np.ma.count_masked(np.ma.masked_invalid(df.loc[:,'TOTAL'])) - ) - avg_df, avg_array = calculate_stat( - logger, df.loc[:,'TOTAL':].agg(['sum'])/ndays, - line_type, stat - ) - average_value = avg_array[0] - else: - logger.warning(f"{average_method} not recongnized..." 
- +"use mean, or aggregation...returning NaN") - return average_value diff --git a/ush/cam/ush_href_plot_py/lead_average.py b/ush/cam/ush_href_plot_py/lead_average.py index 7946907dd4..7a6ee69005 100755 --- a/ush/cam/ush_href_plot_py/lead_average.py +++ b/ush/cam/ush_href_plot_py/lead_average.py @@ -657,6 +657,7 @@ def plot_lead_average(df: pd.DataFrame, logger: logging.Logger, connect_points = True else: connect_points = False + n_mods = 0 for m in range(len(mod_setting_dicts)): if model_list[m] in model_colors.model_alias: model_plot_name = ( @@ -699,9 +700,10 @@ def plot_lead_average(df: pd.DataFrame, logger: logging.Logger, else: y_vals_metric_min = np.nanmin(y_vals_metric1) y_vals_metric_max = np.nanmax(y_vals_metric1) - if m == 0: + if n_mods == 0: y_mod_min = y_vals_metric_min y_mod_max = y_vals_metric_max + n_mods+=1 else: if math.isinf(y_mod_min): y_mod_min = y_vals_metric_min diff --git a/ush/cam/ush_href_plot_py/lead_average_valid.py b/ush/cam/ush_href_plot_py/lead_average_valid.py index 29543018ce..7d13222994 100755 --- a/ush/cam/ush_href_plot_py/lead_average_valid.py +++ b/ush/cam/ush_href_plot_py/lead_average_valid.py @@ -657,6 +657,7 @@ def plot_lead_average(df: pd.DataFrame, logger: logging.Logger, connect_points = True else: connect_points = False + n_mods = 0 for m in range(len(mod_setting_dicts)): if model_list[m] in model_colors.model_alias: model_plot_name = ( @@ -699,9 +700,10 @@ def plot_lead_average(df: pd.DataFrame, logger: logging.Logger, else: y_vals_metric_min = np.nanmin(y_vals_metric1) y_vals_metric_max = np.nanmax(y_vals_metric1) - if m == 0: + if n_mods == 0: y_mod_min = y_vals_metric_min y_mod_max = y_vals_metric_max + n_mods+=1 else: if math.isinf(y_mod_min): y_mod_min = y_vals_metric_min diff --git a/ush/cam/ush_href_plot_py/prune_stat_files.py b/ush/cam/ush_href_plot_py/prune_stat_files.py index 882ba852a8..7133cb47af 100755 --- a/ush/cam/ush_href_plot_py/prune_stat_files.py +++ b/ush/cam/ush_href_plot_py/prune_stat_files.py @@ 
-100,10 +100,10 @@ def prune_data(data_dir, prune_dir, tmp_dir, output_base_template, valid_range, ) # Prune the MET .stat files and write to new file for met_stat_file in met_stat_files: - ps = subprocess.Popen('grep -R "'+model+'" '+met_stat_file+filter_cmd, - shell=True, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, encoding='UTF-8') - grep_output = ps.communicate()[0] + grep = subprocess.run('grep -R "'+model+'" '+met_stat_file+filter_cmd, + shell=True, capture_output=True, encoding="utf8") + grep_output = grep.stdout + all_grep_output = all_grep_output+grep_output pruned_met_stat_file = os.path.join(pruned_data_dir, model+'.stat')