diff --git a/.gitignore b/.gitignore
index d8acb1db705..2b6c8350458 100644
--- a/.gitignore
+++ b/.gitignore
@@ -156,20 +156,8 @@ sorc/ocnicepost.fd
 # Ignore scripts from externals
 #------------------------------
 # jobs symlinks
-jobs/JGFS_ATMOS_WAFS
-jobs/JGFS_ATMOS_WAFS_BLENDING
-jobs/JGFS_ATMOS_WAFS_BLENDING_0P25
-jobs/JGFS_ATMOS_WAFS_GCIP
-jobs/JGFS_ATMOS_WAFS_GRIB2
-jobs/JGFS_ATMOS_WAFS_GRIB2_0P25
 # scripts symlinks
 scripts/exemcsfc_global_sfc_prep.sh
-scripts/exgfs_atmos_wafs_blending.sh
-scripts/exgfs_atmos_wafs_blending_0p25.sh
-scripts/exgfs_atmos_wafs_gcip.sh
-scripts/exgfs_atmos_wafs_grib.sh
-scripts/exgfs_atmos_wafs_grib2.sh
-scripts/exgfs_atmos_wafs_grib2_0p25.sh
 # ush symlinks
 ush/chgres_cube.sh
 ush/emcsfc_ice_blend.sh
@@ -185,11 +173,7 @@ ush/global_chgres_driver.sh
 ush/global_cycle.sh
 ush/global_cycle_driver.sh
 ush/jediinc2fv3.py
-ush/mkwfsgbl.sh
 ush/ufsda
-ush/wafs_blending.sh
-ush/wafs_grib2.regrid.sh
-ush/wafs_intdsk.sh
 ush/finddate.sh
 ush/make_NTC_file.pl
 ush/make_ntc_bull.pl
diff --git a/.gitmodules b/.gitmodules
index 5c9e5692434..50e82ac77e1 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -26,3 +26,6 @@
 [submodule "sorc/gsi_monitor.fd"]
 	path = sorc/gsi_monitor.fd
 	url = https://github.com/NOAA-EMC/GSI-Monitor.git
+[submodule "sorc/upp.fd"]
+	path = sorc/upp.fd
+	url = https://github.com/NOAA-EMC/UPP.git
diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile
index 34535ed6083..573dc6c7349 100644
--- a/ci/Jenkinsfile
+++ b/ci/Jenkinsfile
@@ -2,6 +2,7 @@ def Machine = 'none'
 def machine = 'none'
 def HOME = 'none'
 def caseList = ''
+// Location of the custom workspaces for each machine in the CI system. They are persistent for each iteration of the PR.
 def custom_workspace = [hera: '/scratch1/NCEPDEV/global/CI', orion: '/work2/noaa/stmp/CI/ORION', hercules: '/work2/noaa/stmp/CI/HERCULES']
 
 pipeline {
@@ -82,13 +83,42 @@ pipeline {
           }
         } else {
           checkout scm
+          def error_logs = ""
+          def error_logs_message = ""
           def builds_file = readYaml file: 'ci/cases/yamls/build.yaml'
           def build_args_list = builds_file['builds']
           def build_args = build_args_list[system].join(' ').trim().replaceAll('null', '')
           dir("${HOMEgfs}/sorc") {
-            sh(script: "${build_args}")
+            try {
+              sh(script: "${build_args}")
+            } catch (Exception error_build) {
+              echo "Failed to build system: ${error_build.getMessage()}"
+              if ( fileExists("logs/error_log.logs") ) {
+                def fileContent = readFile 'logs/error_log.logs'
+                def lines = fileContent.readLines()
+                for (line in lines) {
+                  echo "archiving: ${line}"
+                  if (fileExists("sorc/logs/${line}") && readFile("sorc/logs/${line}").length() > 0 ) {
+                    try {
+                      archiveArtifacts artifacts: "${line}", fingerprint: true
+                      error_logs = error_logs + "${HOMEgfs}/sorc/logs/${line} "
+                      error_logs_message = error_logs_message + "${HOMEgfs}/sorc/logs/${line}\n"
+                    }
+                    catch (Exception error_arch) { echo "Failed to archive error log ${line}: ${error_arch.getMessage()}" }
+                  }
+                }
+                repo_url=sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_BUILD_${env.CHANGE_ID}", returnStdout: true).trim()
+                gist_url=sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_BUILD_${env.CHANGE_ID}", returnStdout: true).trim()
+              }
+              try {
+                pullRequest.comment("Build failed on **${Machine}** with error logs:\n\n```${error_logs_message}```\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})")
+              } catch (Exception error_comment) {
+                echo "Failed to comment on PR: ${error_comment.getMessage()}"
+              }
+              error("Failed to build system on ${Machine}")
+            }
             sh(script: './link_workflow.sh')
-            sh(script: "echo ${HOMEgfs} > BUILT_semaphor")
+            // sh(script: "echo ${HOMEgfs} > BUILT_semaphor")
           }
         }
         if (env.CHANGE_ID && system == 'gfs') {
@@ -119,7 +149,7 @@ pipeline {
         axis {
           name 'Case'
           // TODO add dynamic list of cases from env vars (needs addtional plugins)
-          values 'C48C48_ufs_hybatmDA', 'C48_ATM', 'C48_S2SW', 'C48_S2SWA_gefs', 'C48mx500_3DVarAOWCDA', 'C96C48_hybatmDA', 'C96_atm3DVar', 'C96_atmsnowDA'
+          values 'C48C48_ufs_hybatmDA', 'C48_ATM', 'C48_S2SW', 'C48_S2SWA_gefs', 'C48mx500_3DVarAOWCDA', 'C96C48_hybatmDA', 'C96_atm3DVar', 'C96_atmaerosnowDA'
         }
       }
       stages {
@@ -147,21 +177,38 @@ pipeline {
         steps {
           script {
             HOMEgfs = "${HOME}/gfs" // common HOMEgfs is used to launch the scripts that run the experiments
-            ws(HOMEgfs) {
-              pslot = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh get_pslot ${HOME}/RUNTESTS ${Case}", returnStdout: true).trim()
-              try {
-                sh(script: "${HOMEgfs}/ci/scripts/run-check_ci.sh ${HOME} ${pslot}")
-              } catch (Exception e) {
-                sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cancel_all_batch_jobs ${HOME}/RUNTESTS")
-                ws(HOME) {
-                  if (fileExists('RUNTESTS/error.logs')) {
-                    def fileContent = readFile 'RUNTESTS/error.logs'
-                    def lines = fileContent.readLines()
-                    for (line in lines) {
-                      echo "archiving: ${line}"
-                      archiveArtifacts artifacts: "${line}", fingerprint: true
-                    }
+            pslot = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh get_pslot ${HOME}/RUNTESTS ${Case}", returnStdout: true).trim()
+            try {
+              sh(script: "${HOMEgfs}/ci/scripts/run-check_ci.sh ${HOME} ${pslot}")
+            } catch (Exception error_experiment) {
+              sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cancel_all_batch_jobs ${HOME}/RUNTESTS")
+              ws(HOME) {
+                def error_logs = ""
+                def error_logs_message = ""
+                if (fileExists("RUNTESTS/error.logs")) {
+                  def fileContent = readFile 'RUNTESTS/error.logs'
+                  def lines = fileContent.readLines()
+                  for (line in lines) {
+                    echo "archiving: ${line}"
+                    if (fileExists("${HOME}/${line}") && readFile("${HOME}/${line}").length() > 0) {
+                      try {
+                        archiveArtifacts artifacts: "${line}", fingerprint: true
+                        error_logs = error_logs + "${HOME}/${line} "
+                        error_logs_message = error_logs_message + "${HOME}/${line}\n"
+                      } catch (Exception error_arch) {
+                        echo "Failed to archive error log ${line}: ${error_arch.getMessage()}"
+                      }
+                    }
+                  }
+                  repo_url = sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_${env.CHANGE_ID}", returnStdout: true).trim()
+                  gist_url = sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_${env.CHANGE_ID}", returnStdout: true).trim()
+                  try {
+                    pullRequest.comment("Experiment ${Case} failed on ${Machine} with error logs: ${error_logs_message}\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})")
+                  } catch (Exception error_comment) {
+                    echo "Failed to comment on PR: ${error_comment.getMessage()}"
                   }
+                } else {
+                  echo "No error logs found for failed cases in $HOME/RUNTESTS/error.logs"
                 }
                 error("Failed to run experiments ${Case} on ${Machine}")
               }
@@ -169,12 +216,11 @@ pipeline {
             }
           }
         }
-      }
       }
     }
   }
-
+
   post {
     always {
       script {
diff --git a/ci/cases/pr/C48_S2SWA_gefs.yaml b/ci/cases/pr/C48_S2SWA_gefs.yaml
index d42f4cd15bf..00bf3a333e3 100644
--- a/ci/cases/pr/C48_S2SWA_gefs.yaml
+++ b/ci/cases/pr/C48_S2SWA_gefs.yaml
@@ -16,3 +16,6 @@ arguments:
   idate: 2021032312
   edate: 2021032312
   yaml: {{ HOMEgfs }}/ci/cases/yamls/gefs_ci_defaults.yaml
+
+skip_ci_on_hosts:
+  - hera
diff --git a/ci/cases/pr/C96_atmsnowDA.yaml b/ci/cases/pr/C96_atmaerosnowDA.yaml
similarity index 81%
rename from ci/cases/pr/C96_atmsnowDA.yaml
rename to ci/cases/pr/C96_atmaerosnowDA.yaml
index 35fcc10fb21..7e22955a370 100644
--- a/ci/cases/pr/C96_atmsnowDA.yaml
+++ b/ci/cases/pr/C96_atmaerosnowDA.yaml
@@ -4,7 +4,7 @@ experiment:
 
 arguments:
   pslot: {{ 'pslot' | getenv }}
-  app: ATM
+  app: ATMA
   resdetatmos: 96
   comroot: {{ 'RUNTESTS' | getenv }}/COMROOT
   expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR
@@ -14,7 +14,7 @@ arguments:
   nens: 0
   gfs_cyc: 1
   start: cold
-  yaml: {{ HOMEgfs }}/ci/cases/yamls/atmsnowDA_defaults_ci.yaml
+  yaml: {{ HOMEgfs }}/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml
 
 skip_ci_on_hosts:
   - orion
diff --git a/ci/cases/yamls/atmsnowDA_defaults_ci.yaml b/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml
similarity index 100%
rename from ci/cases/yamls/atmsnowDA_defaults_ci.yaml
rename to ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml
diff --git a/ci/cases/yamls/build.yaml b/ci/cases/yamls/build.yaml
index 2ff008d372a..87fae425846 100644
--- a/ci/cases/yamls/build.yaml
+++ b/ci/cases/yamls/build.yaml
@@ -1,3 +1,3 @@
 builds:
-  - gefs: './build_all.sh'
-  - gfs: './build_all.sh -wgu'
+  - gefs: './build_all.sh -k'
+  - gfs: './build_all.sh -kwgu'
diff --git a/ci/scripts/utils/githubpr.py b/ci/scripts/utils/githubpr.py
new file mode 100755
index 00000000000..5fe0b643eaf
--- /dev/null
+++ b/ci/scripts/utils/githubpr.py
@@ -0,0 +1,124 @@
+#!/usr/bin/env python3
+
+import os
+import re
+
+from github import Github, GithubException, InputFileContent, UnknownObjectException
+from wxflow import which
+
+
+class GitHubDBError(Exception):
+    """
+    Base class for GitHubDB exceptions.
+    """
+    UnknownObjectException = UnknownObjectException
+    GithubException = GithubException
+
+
+class GitHubPR(Github):
+    """
+    GitHubPR is an inherited class from GitHub in pyGitHub for interacting with GitHub pull requests.
+
+    Attributes
+    ----------
+    repo : github.Repository.Repository
+        The GitHub repository to interact with.
+    pulls : github.PaginatedList.PaginatedList of github.PullRequest.PullRequest
+        The list of open pull requests in the repository, sorted by last updated.
+    user : github.AuthenticatedUser.AuthenticatedUser
+        The authenticated user.
+    InputFileContent : github.InputFileContent.InputFileContent
+        The class used to create file content for gists.
+
+    Methods
+    -------
+    __init__(self, repo_url=None, TOKEN=None)
+        Initialize a new GitHubPR instance.
+    get_repo_url(self, repo_url=None)
+        Set the repository for the GitHubPR instance
+        using a URL directly or from the 'REPO_URL' environment variable.
+    get_pr_list(self)
+        Get the numerical list of all pull requests.
+    get_ci_pr_list(self, state='Ready', host=None)
+        Get the numerical list of all pull requests with a specific state from labels.
+        For example, if a PR has a label 'CI-Ready-Hera' (of the form CI-[state]-[host]),
+        its corresponding PR number will be included in the list.
+    """
+
+    def __init__(self, repo_url=None, TOKEN=None):
+        """
+        __init__ Initialize a new GitHubPR instance.
+
+        This method authenticates with the GitHub API using the 'gh' CLI tool
+        when the TOKEN is not provided. The repository comes from the 'REPO_URL'
+        environment variable when repo_url is not provided.
+        """
+        if TOKEN is None:
+            gh_cli = which('gh')
+            gh_cli.add_default_arg(['auth', 'status', '--show-token'])
+            TOKEN = gh_cli(output=str, error=str).split('\n')[3].split(': ')[1]
+        super().__init__(TOKEN)
+
+        self.repo = self.get_repo_url(repo_url)
+        self.pulls = self.repo.get_pulls(state='open', sort='updated', direction='desc')
+        self.user = self.get_user()
+
+        self.InputFileContent = InputFileContent
+
+    def get_repo_url(self, repo_url=None):
+        """
+        get_repo_url Set the repository for the GitHubPR instance.
+
+        Parameters
+        ----------
+        repo_url : str, optional
+            The GitHub repository URL.
+        """
+        if repo_url is None:
+            repo_url = os.environ.get("REPO_URL")
+        match = re.search(r"github\.com/(.+)", repo_url)
+        repo_identifier = match.group(1)[:-4]
+        return self.get_repo(repo_identifier)
+
+    def get_pr_list(self):
+        """
+        get_pr_list Get the numerical list of all pull requests.
+
+        Returns
+        -------
+        list
+            A list of all pull request numbers.
+        """
+        return [pull.number for pull in self.pulls]
+
+    def get_ci_pr_list(self, state='Ready', host=None):
+        """
+        get_ci_pr_list Get a list of pull requests that match a specified state and host.
+
+        Parameters
+        ----------
+        state : str, optional
+            The state of the pull requests to get (default is 'Ready').
+        host : str, optional
+            The host of the pull requests to get. If None, all hosts are included (default is None).
+
+        Returns
+        -------
+        list
+            A list of pull request numbers that match the specified state and host.
+        """
+        pr_list = []
+        for pull in self.pulls:
+            labels = pull.get_labels()
+            ci_labels = [s for s in labels if 'CI' in s.name]
+            for label in ci_labels:
+                if state in label.name:
+                    if host is not None:
+                        if host.lower() in label.name.lower():
+                            pr_list.append(pull.number)
+                            break
+                    else:
+                        pr_list.append(pull.number)
+                        break
+
+        return pr_list
+ """ + + gist_files = {} + for file in args.file: + file_content = file.read() + gist_files[os.path.basename(file.name)] = emcbot_gh.InputFileContent(file_content) + + gist = emcbot_gh.user.create_gist(public=True, files=gist_files, description=f"error log file from CI run {args.gist[0]}") + print(gist.html_url) + + +def upload_logs_to_repo(args, emcbot_gh, emcbot_ci_url): + """ + Upload log files to a repository. + + Parameters + ---------- + args : Namespace + The arguments parsed from the command line. + emcbot_gh : GitHubPR + The GitHubPR object to interact with GitHub. + emcbot_ci_url : str + The URL of the repository to upload the logs to. + + Prints + ------ + The URL of the uploaded file in the repository. + """ + + path_header = args.repo[0] + repo_branch = "error_logs" + repo_path = "ci/error_logs" + extra = 0 + while True: + try: + extra += 1 + file_path_in_repo = f"{repo_path}/{path_header}/" + str(os.path.basename(args.file[0].name)) + content = emcbot_gh.repo.get_contents(file_path_in_repo, ref='error_logs') + path_header = f'{args.repo[0]}_{str(extra)}' + except GitHubDBError.GithubException as e: + break + + for file in args.file: + file_content = file.read() + file_path_in_repo = f"{repo_path}/{path_header}/" + str(os.path.basename(file.name)) + emcbot_gh.repo.create_file(file_path_in_repo, "Adding error log file", file_content, branch="error_logs") + + file_url = f"{emcbot_ci_url.rsplit('.',1)[0]}/tree/{repo_branch}/{repo_path}/{path_header}" + print(file_url) + + +if __name__ == '__main__': + + args = parse_args() + emcbot_ci_url = "https://github.com/emcbot/ci-global-workflows.git" + emcbot_gh = GitHubPR(repo_url=emcbot_ci_url) + + if args.gist: # Add error logs to a gist in GitHub emcbot's account + add_logs_to_gist(args, emcbot_gh) + + if args.repo: # Upload error logs to emcbot's ci-global-workflows error_logs branch + upload_logs_to_repo(args, emcbot_gh, emcbot_ci_url) diff --git a/docs/source/components.rst b/docs/source/components.rst index 98e76b467ba..869ef89babe 100644 --- a/docs/source/components.rst +++ b/docs/source/components.rst @@ -28,7 +28,7 @@ Components included as submodules: * **GSI Monitor** (https://github.com/NOAA-EMC/GSI-Monitor): These tools monitor the GSI package's data assimilation, detecting and reporting missing data sources, low observation counts, and high penalty values * **GDAS** (https://github.com/NOAA-EMC/GDASApp): Jedi based Data Assimilation system. This system is currently being developed for marine Data Assimilation and in time will replace GSI for atmospheric data assimilation as well * **UFS UTILS** (https://github.com/ufs-community/UFS_UTILS): Utility codes needed for UFS-weather-model -* **wxflow** Collection of python utilities for weather workflows (https://github.com/NOAA-EMC/wxflow) +* **wxflow** (https://github.com/NOAA-EMC/wxflow): Collection of python utilities for weather workflows * **Verif global** (https://github.com/NOAA-EMC/EMC_verif-global): Verification package to evaluate GFS parallels. It uses MET and METplus. At this moment the verification package is limited to providing atmospheric metrics only .. note:: @@ -57,19 +57,20 @@ Data Observation data, also known as dump data, is prepared in production and then archived in a global dump archive (GDA) for use by users when running cycled experiments. The GDA (identified as ``$DMPDIR`` in the workflow) is available on supported platforms and the workflow system knows where to find the data. 
diff --git a/docs/source/components.rst b/docs/source/components.rst
index 98e76b467ba..869ef89babe 100644
--- a/docs/source/components.rst
+++ b/docs/source/components.rst
@@ -28,7 +28,7 @@ Components included as submodules:
 * **GSI Monitor** (https://github.com/NOAA-EMC/GSI-Monitor): These tools monitor the GSI package's data assimilation, detecting and reporting missing data sources, low observation counts, and high penalty values
 * **GDAS** (https://github.com/NOAA-EMC/GDASApp): Jedi based Data Assimilation system. This system is currently being developed for marine Data Assimilation and in time will replace GSI for atmospheric data assimilation as well
 * **UFS UTILS** (https://github.com/ufs-community/UFS_UTILS): Utility codes needed for UFS-weather-model
-* **wxflow** Collection of python utilities for weather workflows (https://github.com/NOAA-EMC/wxflow)
+* **wxflow** (https://github.com/NOAA-EMC/wxflow): Collection of python utilities for weather workflows
 * **Verif global** (https://github.com/NOAA-EMC/EMC_verif-global): Verification package to evaluate GFS parallels. It uses MET and METplus. At this moment the verification package is limited to providing atmospheric metrics only
 
 .. note::
@@ -57,19 +57,20 @@ Data
 
 Observation data, also known as dump data, is prepared in production and then archived in a global dump archive (GDA) for use by users when running cycled experiments. The GDA (identified as ``$DMPDIR`` in the workflow) is available on supported platforms and the workflow system knows where to find the data.
 
-* Hera: /scratch1/NCEPDEV/global/glopara/dump
-* Orion/Hercules: /work/noaa/rstprod/dump
-* Jet: /mnt/lfs4/HFIP/hfv3gfs/glopara/dump
-* WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/dump
-* S4: /data/prod/glopara/dump
+* Hera: ``/scratch1/NCEPDEV/global/glopara/dump``
+* Orion/Hercules: ``/work/noaa/rstprod/dump``
+* Jet: ``/mnt/lfs4/HFIP/hfv3gfs/glopara/dump``
+* WCOSS2: ``/lfs/h2/emc/global/noscrub/emc.global/dump``
+* S4: ``/data/prod/glopara/dump``
 
 -----------------------------
 Global Dump Archive Structure
 -----------------------------
 
-The global dump archive (GDA) mimics the structure of its production source: ``DMPDIR/CDUMP.PDY/[CC/atmos/]FILES``
+The global dump archive (GDA) mimics the structure of its production source:
 
-The ``CDUMP`` is either gdas, gfs, or rtofs. All three contain production output for each day (``PDY``). The gdas and gfs folders are further broken into cycle (``CC``) and component (``atmos``).
+* GDAS/GFS: ``DMPDIR/gdas[gfs].PDY/CC/atmos/FILES``
+* RTOFS: ``DMPDIR/rtofs.PDY/FILES``
 
 The GDA also contains special versions of some datasets and experimental data that is being evaluated ahead of implementation into production. The following subfolder suffixes exist:
 
@@ -81,6 +82,7 @@ The GDA also contains special versions of some datasets and experimental data th
 +--------+------------------------------------------------------------------------------------------------------+
 | ur     | Un-restricted versions of restricted files in production. Produced and archived on a 48hrs delay.   |
 |        | Some restricted datasets are unrestricted. Data amounts: restricted > un-restricted > non-restricted |
+|        | Limited availability. Production discontinued mid-2023.                                             |
 +--------+------------------------------------------------------------------------------------------------------+
 | x      | Experimental global datasets being evaluated for production. Dates and types vary depending on      |
 |        | upcoming global upgrades.                                                                            |
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 89526d9f69c..81f231f6b06 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -13,13 +13,14 @@
 import os
 import sys
 sys.path.insert(0, os.path.abspath('.'))
-
+from datetime import datetime
 
 # -- Project information -----------------------------------------------------
 
 project = 'Global-workflow'
-copyright = '2023, Kate Friedman, Walter Kolczynski, Rahul Mahajan, Lin Gan, Arun Chawla'
-author = 'Kate Friedman, Walter Kolczynski, Rahul Mahajan, Lin Gan, Arun Chawla'
+year = datetime.now().year
+copyright = f"2015-{year} NOAA/NWS/NCEP/EMC"
+author = 'Kate Friedman, Walter Kolczynski, Rahul Mahajan, Lin Gan, and numerous collaborators and contributors'
 
 # The full version, including alpha/beta/rc tags
 release = '0.1'
diff --git a/docs/source/hpc.rst b/docs/source/hpc.rst
index 3ce6a889d97..508597781de 100644
--- a/docs/source/hpc.rst
+++ b/docs/source/hpc.rst
@@ -72,23 +72,23 @@ Version
 It is advised to use Git v2+ when available. At the time of writing this documentation the default Git clients on the different machines were as noted in the table below. It is recommended that you check the default modules before loading recommended ones:
 
-+---------+----------+---------------------------------------+
++----------+----------+---------------------------------------+
 | Machine  | Default  | Recommended                           |
-+---------+----------+---------------------------------------+
++----------+----------+---------------------------------------+
 | Hera     | v2.18.0  | default                               |
-+---------+----------+---------------------------------------+
++----------+----------+---------------------------------------+
 | Hercules | v2.31.1  | default                               |
-+---------+----------+---------------------------------------+
++----------+----------+---------------------------------------+
 | Orion    | v1.8.3.1 | **module load git/2.28.0**            |
-+---------+----------+---------------------------------------+
++----------+----------+---------------------------------------+
 | Jet      | v2.18.0  | default                               |
-+---------+----------+---------------------------------------+
-| WCOSS2  | v2.26.2  | default or **module load git/2.29.0** |
-+---------+----------+---------------------------------------+
++----------+----------+---------------------------------------+
+| WCOSS2   | v2.35.3  | default                               |
++----------+----------+---------------------------------------+
 | S4       | v1.8.3.1 | **module load git/2.30.0**            |
-+---------+----------+---------------------------------------+
-| AWS PW  | v1.8.3.1 | default
-+---------+----------+---------------------------------------+
++----------+----------+---------------------------------------+
+| AWS PW   | v1.8.3.1 | default                               |
++----------+----------+---------------------------------------+
 
 ^^^^^^^^^^^^^
 Output format
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 43814880785..a5161789b38 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -27,6 +27,10 @@ General updates: NOAA employees and affiliates can join the gfs-announce distrib
 
 GitHub updates: Users should adjust their "Watch" settings for this repo so they receive notifications as they'd like to. Find the "Watch" or "Unwatch" button towards the top right of the `authoritative global-workflow repository page `_ and click it to adjust how you watch the repo.
 
+=================
+Table of Contents
+=================
+
 .. toctree::
    :numbered:
    :maxdepth: 3
diff --git a/docs/source/init.rst b/docs/source/init.rst
index 1b28d755843..f945494af8e 100644
--- a/docs/source/init.rst
+++ b/docs/source/init.rst
@@ -51,6 +51,7 @@ Cold-start atmosphere-only cycled C96 deterministic C48 enkf (80 members) ICs ar
     Hera: /scratch1/NCEPDEV/global/glopara/data/ICSDIR/C96C48
     Orion/Hercules: /work/noaa/global/glopara/data/ICSDIR/C96C48
     WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR/C96C48
+    AWS: https://noaa-nws-global-pds.s3.amazonaws.com/index.html#data/ICSDIR/C96C48
 
 Start date = 2021122018
 
@@ -111,6 +112,7 @@ Warm-start cycled w/ coupled (S2S) model C48 atmosphere C48 enkf (80 members) 5
     Orion/Hercules: /work/noaa/global/glopara/data/ICSDIR/C48C48mx500
     WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR/C48C48mx500
     Jet: /lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/C48C48mx500
+    AWS: https://noaa-nws-global-pds.s3.amazonaws.com/index.html#data/ICSDIR/C48C48mx500
 
 Start date = 2021032312
 
@@ -246,7 +248,7 @@ Automated Generation
 Cycled mode
 -----------
 
-Not yet supported.
+Not yet supported. See the UFS_UTILS documentation on the gdas_init utility to generate your own ICs for cycled or forecast-only mode: https://noaa-emcufs-utils.readthedocs.io/en/latest/ufs_utils.html#gdas-init
 
 .. _forecastonly-coupled:
@@ -319,14 +321,14 @@ Manual Generation
 
 The following information is for users needing to generate cold-start initial conditions for a cycled experiment that will run at a different resolution or layer amount than the operational GFS (C768C384L127).
 
-The ``chgres_cube`` code is available from the `UFS_UTILS repository `_ on GitHub and can be used to convert GFS ICs to a different resolution or number of layers. Users should see the documentation to generation initial conditions in the UFS_UTILS repository. The ``chgres_cube`` code/scripts currently support the following GFS inputs:
+The ``chgres_cube`` code is available from the `UFS_UTILS repository `_ on GitHub and can be used to convert GFS ICs to a different resolution or number of layers. Users should see the `documentation on generating initial conditions in the UFS_UTILS repository `_. The ``chgres_cube`` code/scripts currently support the following GFS inputs:
 
 * pre-GFSv14
 * GFSv14
 * GFSv15
 * GFSv16
 
-See instructions in UFS_UTILS to clone, build and generate initial conditions.
+See instructions in UFS_UTILS to clone, build and generate initial conditions: https://noaa-emcufs-utils.readthedocs.io/en/latest/ufs_utils.html#gdas-init
 
 .. _warmstarts-prod:
diff --git a/docs/source/setup.rst b/docs/source/setup.rst
index 0e87ade9a57..de5cfa099af 100644
--- a/docs/source/setup.rst
+++ b/docs/source/setup.rst
@@ -6,9 +6,13 @@ Experiment Setup
 
 ::
 
-   # Note: this will wipe your existing lmod environment
    source workflow/gw_setup.sh
 
+.. warning::
+   Sourcing gw_setup.sh will wipe your existing lmod environment
+
+.. note::
+   Bash shell is required to source gw_setup.sh
+
 ^^^^^^^^^^^^^^^^^^^^^^^^
 Forecast-only experiment
 ^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP
index 2e49a9f14d0..bad646bf2dd 100755
--- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP
+++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP
@@ -34,7 +34,8 @@ RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
 
 # Add UFSDA to PYTHONPATH
 ufsdaPATH="${HOMEgfs}/sorc/gdas.cd/ush/"
-pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python3.7/"
+# shellcheck disable=SC2311
+pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python$(detect_py_ver)/"
 PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${ufsdaPATH}:${pyiodaPATH}"
 export PYTHONPATH
diff --git a/jobs/JGFS_ATMOS_WAFS b/jobs/JGFS_ATMOS_WAFS
new file mode 100755
index 00000000000..35a916bf1ac
--- /dev/null
+++ b/jobs/JGFS_ATMOS_WAFS
@@ -0,0 +1,96 @@
+#!/bin/sh
+
+########################################
+# GFS AWIPS PRODUCT GENERATION
+########################################
+date
+export PS4='$SECONDS + '
+set -xa
+
+export KEEPDATA=${KEEPDATA:-NO}
+
+############################################
+# Working Directory
+############################################
+export DATA=${DATA:-${DATAROOT}/${jobid:?}}
+mkdir -p $DATA
+cd $DATA
+
+############################################
+# Output for executables
+############################################
+export pgmout=OUTPUT.$$
+
+############################################
+# Load the UTILITIES module
+############################################
+#### module load prod_util
+#### module load grib_util
+
+###########################################
+# Run setpdy and initialize PDY variables
+###########################################
+export cycle=t${cyc}z
+setpdy.sh
+. ./PDY
+
+export RERUN=${RERUN:-NO}
+
+############################################
+# Set up the NET and RUN
+############################################
+export NET=${NET:-gfs}
+export RUN=${RUN:-gfs}
+export COMPONENT=${COMPONENT:-atmos}
+
+############################################
+# Specify HOME Directory
+############################################
+export gfs_ver=${gfs_ver:-v16.3.0}
+export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}}
+export EXECgfs=$HOMEgfs/exec
+export FIXgfs=$HOMEgfs/fix/wafs
+export PARMgfs=$HOMEgfs/parm/wafs
+export USHgfs=$HOMEgfs/ush
+export SCRIPTSgfs=$HOMEgfs/scripts
+
+################################################
+# Set up the input/output directory
+################################################
+export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT}
+export COMOUT=${COMOUT:-$(compath.py -o $NET/$gfs_ver)/$RUN.$PDY/$cyc/$COMPONENT}
+export PCOM=${PCOM:-$COMOUT/wmo}
+
+if [ $SENDCOM = YES ] ; then
+  mkdir -p $COMOUT $PCOM
+fi
+
+############################################
+# print current environment
+############################################
+env
+
+############################################
+# Execute the script.
+############################################
+
+${SCRIPTSgfs}/exgfs_atmos_wafs_grib.sh $fcsthrs
+export err=$?; err_chk
+
+echo "JOB $job HAS COMPLETED NORMALLY!"
+
+############################################
+# print exec output
+############################################
+if [ -e "$pgmout" ] ; then
+  cat $pgmout
+fi
+
+############################################
+# remove temporary working directory
+############################################
+if [ $KEEPDATA != YES ] ; then
+  rm -rf $DATA
+fi
+
+date
diff --git a/jobs/JGFS_ATMOS_WAFS_BLENDING_0P25 b/jobs/JGFS_ATMOS_WAFS_BLENDING_0P25
new file mode 100755
index 00000000000..7367ce5a2c2
--- /dev/null
+++ b/jobs/JGFS_ATMOS_WAFS_BLENDING_0P25
@@ -0,0 +1,153 @@
+#!/bin/sh
+########################################################
+# This job runs the code to blend US's and UK's WAFS products at 0.25 deg
+########################################################
+
+date
+export PS4='$SECONDS + '
+set -x
+
+# keep the working directory or not
+export KEEPDATA=${KEEPDATA:-NO}
+
+############################################
+# Working Directory
+############################################
+export DATA=${DATA:-${DATAROOT}/${jobid:?}}
+mkdir -p $DATA
+cd $DATA
+
+############################################
+# Output for executables
+############################################
+export pgmout=OUTPUT.$$
+
+###########################################
+# Run setpdy and initialize PDY variables
+###########################################
+export cycle=t${cyc}z
+setpdy.sh
+. ./PDY
+
+export RERUN=${RERUN:-NO}
+
+############################################
+# Set up the NET and RUN
+############################################
+export NET=${NET:-gfs}
+export RUN=${RUN:-gfs}
+export COMPONENT=${COMPONENT:-atmos}
+
+############################################
+# Specify HOME Directory
+############################################
+export gfs_ver=${gfs_ver:-v16.3.0}
+export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}}
+export EXECgfs=$HOMEgfs/exec
+export FIXgfs=$HOMEgfs/fix/wafs
+export PARMgfs=$HOMEgfs/parm/wafs
+export USHgfs=$HOMEgfs/ush
+export SCRIPTSgfs=$HOMEgfs/scripts
+
+################################################
+# Set up the INPUT and OUTPUT directories
+################################################
+export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT}
+export COMOUT=${COMOUT:-$(compath.py -o $NET/$gfs_ver)/$RUN.$PDY/$cyc/$COMPONENT}
+export PCOM=${PCOM:-$COMOUT/wmo}
+
+if [ $SENDCOM = YES ] ; then
+  mkdir -p $COMOUT $PCOM
+fi
+
+export COMINus=${COMINus:-$COMIN}
+export COMINuk=${COMINuk:-$DCOMROOT/$PDY/wgrbbul/ukmet_wafs}
+
+############################################
+# print current environment
+############################################
+env
+
+##############################################
+# Set up the forecast hours
+##############################################
+export SHOUR=${SHOUR:-06}
+export EHOUR=${EHOUR:-48}
+export FHOUT_GFS=${FHOUT_GFS:-1}
+
+###############################################
+# Specify Timeout Behavior of WAFS blending
+#
+# SLEEP_TIME - Amount of time to wait for
+#              an input file before exiting
+# SLEEP_INT  - Amount of time to wait between
+#              checking for input files
+###############################################
+# export SLEEP_TIME=300 # changed to 60 to avoid hitting wall_clock when ukmet wafs files are missing ...
+# JY -0129: export SLEEP_TIME=600
+export SLEEP_TIME=900
+export SLEEP_INT=10
+
+####################################
+# Check if this is a restart
+####################################
+if test -f $COMOUT/$RUN.t${cyc}z.control.wafsblending_0p25
+then
+  modelrecvy=`cat < $COMOUT/${RUN}.t${cyc}z.control.wafsblending_0p25`
+  recvy_pdy=`echo $modelrecvy | cut -c1-8`
+  recvy_cyc=`echo $modelrecvy | cut -c9-10`
+  recvy_shour=`echo $modelrecvy | cut -c11-`
+
+  if [ $FHOUT_GFS -eq 3 ] ; then
+    FHINC=03
+  else
+    if [ $recvy_shour -lt 24 ] ; then
+      FHINC=01
+    else
+      FHINC=03
+    fi
+  fi
+
+  if test $RERUN = "NO"
+  then
+    if [ $recvy_shour -lt $EHOUR ]
+    then
+      new_shour=`expr $recvy_shour + $FHINC`
+    fi
+    if test $new_shour -ge $SHOUR
+    then
+      export SHOUR=$new_shour
+      if [ $SHOUR -lt 10 ]; then SHOUR=0$SHOUR; fi
+    fi
+    if test $recvy_shour -ge $EHOUR
+    then
+      echo "WAFS blending Already Completed to $EHOUR"
+    else
+      echo "Starting: PDY=$PDY cycle=t${recvy_cyc}z SHOUR=$SHOUR ."
+    fi
+  fi
+fi
+
+############################################
+# Execute the script.
+############################################
+${SCRIPTSgfs}/exgfs_atmos_wafs_blending_0p25.sh
+export err=$?; err_chk
+
+echo "JOB $job HAS COMPLETED NORMALLY."
+
+############################################
+# print exec output
+############################################
+if [ -e "$pgmout" ] ; then
+  cat $pgmout
+fi
+
+############################################
+# remove temporary working directory
+############################################
+if [ $KEEPDATA != YES ] ; then
+  rm -rf $DATA
+fi
+
+date
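The restart block in the blending job above derives the next start hour from the control file: hourly increments out to f24 when FHOUT_GFS is 1, 3-hourly otherwise, resuming no earlier than the configured SHOUR. A Python sketch of that bookkeeping, under my reading of the shell (values are illustrative)::

    # Sketch: `recvy_shour` is the last completed hour recorded in the
    # control file; return the hour the restarted job should resume from.
    def next_start_hour(recvy_shour: int, shour: int, ehour: int, fhout_gfs: int) -> int:
        if recvy_shour >= ehour:
            return ehour                           # "Already Completed to $EHOUR"
        if fhout_gfs == 3:
            fhinc = 3
        else:
            fhinc = 1 if recvy_shour < 24 else 3   # hourly to f24, then 3-hourly
        return max(shour, recvy_shour + fhinc)

    assert next_start_hour(recvy_shour=23, shour=6, ehour=48, fhout_gfs=1) == 24
    assert next_start_hour(recvy_shour=48, shour=6, ehour=48, fhout_gfs=1) == 48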
diff --git a/jobs/JGFS_ATMOS_WAFS_GCIP b/jobs/JGFS_ATMOS_WAFS_GCIP
new file mode 100755
index 00000000000..d4e1a4529f4
--- /dev/null
+++ b/jobs/JGFS_ATMOS_WAFS_GCIP
@@ -0,0 +1,140 @@
+#!/bin/sh
+
+############################################
+# GFS GCIP PRODUCT GENERATION
+############################################
+
+date
+export PS4='$SECONDS + '
+set -xa
+
+# keep the working directory or not
+export KEEPDATA=${KEEPDATA:-NO}
+
+############################################
+# Working Directory
+############################################
+export DATA=${DATA:-${DATAROOT}/${jobid:?}}
+mkdir -p $DATA
+cd $DATA
+
+############################################
+# Output for executables
+############################################
+export pgmout=OUTPUT.$$
+
+############################################
+# Load the UTILITIES module
+############################################
+#### module load prod_util
+#### module load grib_util
+
+############################################
+# Run setpdy and initialize PDY variables
+############################################
+export cycle=t${cyc}z
+setpdy.sh
+. ./PDY
+
+############################################
+# Set up the NET and RUN
+############################################
+export NET=${NET:-gfs}
+export RUN=${RUN:-gfs}
+export COMPONENT=${COMPONENT:-atmos}
+
+############################################
+# Specify HOME Directory
+############################################
+export gfs_ver=${gfs_ver:-v16.3.0}
+export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}}
+export EXECgfs=$HOMEgfs/exec
+export FIXgfs=$HOMEgfs/fix/wafs
+export PARMgfs=$HOMEgfs/parm/wafs
+export USHgfs=$HOMEgfs/ush
+export SCRIPTSgfs=$HOMEgfs/scripts
+
+# For BUFR dump, TMPDIR must be defined
+export TMPDIR=$DATA # will be overwritten in exgfs script for parallel runs on ffhr
+# For BUFR dump, these two environment variables are defined by module load
+# HOMEobsproc_shared_bufr_dumplist <= module load bufr_dumplist/1.5.0
+# HOMEobsproc_dump <= module load dumpjb/4.0.0
+
+
+################################################
+# Set up the input/output directory
+################################################
+# model data
+export COMINgfs=${COMINgfs:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT}
+
+# satellite data
+#ftp://satepsanone.nesdis.noaa.gov/2day/gmosaic/
+# Have to change IP address to digital ones, which BSUB can identify
+#export COMINsat=${COMINsat:-ftp://140.90.213.161/2day/gmosaic}
+export COMINsat=${COMINsat:-$DCOMROOT/$PDY/mcidas}
+
+# radar data
+export radarl2_ver=${radarl2_ver:-v1.2}
+export COMINradar=${COMINradar:-$(compath.py ${envir}/radarl2/$radarl2_ver)/radar.$PDY}
+
+# metar/ships/lightning/pireps
+# data are dumped by $USHobsproc_dump/dumpjb
+#
+
+# COMOUT
+export COMOUT=${COMOUT:-$(compath.py -o $NET/$gfs_ver)/$RUN.$PDY/$cyc/$COMPONENT}
+
+mkdir -p $COMOUT
+
+###############################################
+# Specify Timeout Behavior of WAFS GCIP
+#
+# SLEEP_TIME - how long to wait for inputs before exiting
+# SLEEP_INT  - time interval for checking for inputs
+###############################################
+# JY export SLEEP_TIME=300
+export SLEEP_TIME=600
+export SLEEP_INT=10
+
+############################################
+# Execute the script, parallel run for 000 003
+############################################
+export MPIRUN=${MPIRUN:-"mpiexec -l -np 2 --cpu-bind verbose,core cfp"}
+
+# GCIP runs f000 f003 for each cycle, 4 times/day,
+# to make the output valid every 3 hours
+if [ `echo $MPIRUN | cut -d " " -f1` = 'srun' ] ; then
+  echo 0 ${SCRIPTSgfs}/exgfs_atmos_wafs_gcip.sh 000 >> gcip.cmdfile
+  echo 1 ${SCRIPTSgfs}/exgfs_atmos_wafs_gcip.sh 003 >> gcip.cmdfile
+else
+  echo ${SCRIPTSgfs}/exgfs_atmos_wafs_gcip.sh 000 >> gcip.cmdfile
+  echo ${SCRIPTSgfs}/exgfs_atmos_wafs_gcip.sh 003 >> gcip.cmdfile
+  export MP_PGMMODEL=mpmd
+fi
+
+$MPIRUN gcip.cmdfile
+
+export err=$?
+if [ $err -eq 0 ] ; then
+  echo "JOB $job HAS COMPLETED NORMALLY!"
+elif [ $err -eq 1 ] ; then
+  echo "WARNING!!! JOB $job incomplete. Missing satellite data."
+else
+  echo "JOB $job FAILED!!!!"
+fi
+
+############################################
+# print exec output
+############################################
+if [ -e "$pgmout" ] ; then
+  cat $pgmout
+fi
+
+############################################
+# remove temporary working directory
+############################################
+if [ $KEEPDATA != YES ] ; then
+  rm -rf $DATA
+fi
+
+date
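The GCIP job above, and the two GRIB2 jobs that follow, build an MPMD command file whose lines need an explicit rank index under srun but not under cfp/mpiexec. A Python sketch of that construction (script name and hours are illustrative)::

    # Sketch: one command per forecast hour; srun's multi-prog format
    # wants a leading rank index on each line, cfp does not.
    def build_cmdfile(script: str, hours, launcher: str) -> str:
        lines = []
        for rank, fhr in enumerate(hours):
            prefix = f"{rank} " if launcher == "srun" else ""
            lines.append(f"{prefix}{script} {fhr}")
        return "\n".join(lines) + "\n"

    print(build_cmdfile("exgfs_atmos_wafs_gcip.sh", ["000", "003"], "srun"), end="")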
diff --git a/jobs/JGFS_ATMOS_WAFS_GRIB2 b/jobs/JGFS_ATMOS_WAFS_GRIB2
new file mode 100755
index 00000000000..ed4c92979ef
--- /dev/null
+++ b/jobs/JGFS_ATMOS_WAFS_GRIB2
@@ -0,0 +1,124 @@
+#!/bin/sh
+
+########################################
+# GFS AWIPS PRODUCT GENERATION
+########################################
+
+date
+export PS4='$SECONDS + '
+set -x
+
+# keep the working directory or not
+export KEEPDATA=${KEEPDATA:-NO}
+
+############################################
+# Working Directory
+############################################
+export DATA=${DATA:-${DATAROOT}/${jobid:?}}
+mkdir -p $DATA
+cd $DATA
+
+############################################
+# Output for executables
+############################################
+export pgmout=OUTPUT.$$
+
+############################################
+# Load the UTILITIES module
+############################################
+#### module load prod_util
+#### module load grib_util
+
+###########################################
+# Run setpdy and initialize PDY variables
+###########################################
+export cycle=t${cyc}z
+setpdy.sh
+. ./PDY
+
+############################################
+# Set up the NET and RUN
+############################################
+export NET=${NET:-gfs}
+export RUN=${RUN:-gfs}
+export COMPONENT=${COMPONENT:-atmos}
+
+############################################
+# Specify HOME Directory
+############################################
+export gfs_ver=${gfs_ver:-v16.3.0}
+export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}}
+export EXECgfs=$HOMEgfs/exec
+export FIXgfs=$HOMEgfs/fix/wafs
+export PARMgfs=$HOMEgfs/parm/wafs
+export USHgfs=$HOMEgfs/ush
+export SCRIPTSgfs=$HOMEgfs/scripts
+
+################################################
+# Set up the input/output directory
+################################################
+#### if [ $envir = "prod" ] || [ $envir = "para" ] ; then
####   export COMIN=${COMIN:-$COMROOT/${NET}/${envir}/$RUN.$PDY}
#### else
####   export COMIN=${COMIN:-$COMROOT/${NET}/prod/$RUN.$PDY}
#### fi
+
+export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT}
+export COMOUT=${COMOUT:-$(compath.py -o $NET/$gfs_ver)/$RUN.$PDY/$cyc/$COMPONENT}
+export PCOM=${PCOM:-$COMOUT/wmo}
+
+if [ $SENDCOM = YES ] ; then
+  mkdir -p $COMOUT $PCOM
+fi
+
+############################################
+# print current environment
+############################################
+env
+
+##############################################
+# Set up the forecast hours
+##############################################
+export FHOURS=${FHOURS:-"00 06 09 12 15 18 21 24 27 30 33 36 42 48 54 60 66 72"}
+
+############################################
+# Execute the script.
+############################################
+
+NP=`echo $FHOURS | wc -w`
+export MPIRUN=${MPIRUN:-"mpiexec -np $NP -cpu-bind verbose,core cfp"}
+
+rm wafsgrib2.cmdfile
+ic=0
+for fcsthrs in $FHOURS ; do
+  if [ `echo $MPIRUN | cut -d " " -f1` = 'srun' ] ; then
+    echo $ic ${SCRIPTSgfs}/exgfs_atmos_wafs_grib2.sh $fcsthrs >> wafsgrib2.cmdfile
+  else
+    echo ${SCRIPTSgfs}/exgfs_atmos_wafs_grib2.sh $fcsthrs >> wafsgrib2.cmdfile
+    export MP_PGMMODEL=mpmd
+  fi
+  ic=`expr $ic + 1`
+done
+
+$MPIRUN wafsgrib2.cmdfile
+
+export err=$?; err_chk
+
+echo "JOB $job HAS COMPLETED NORMALLY!"
+
+############################################
+# print exec output
+############################################
+if [ -e "$pgmout" ] ; then
+  cat $pgmout
+fi
+
+############################################
+# remove temporary working directory
+############################################
+if [ $KEEPDATA != YES ] ; then
+  rm -rf $DATA
+fi
+
+date
+
diff --git a/jobs/JGFS_ATMOS_WAFS_GRIB2_0P25 b/jobs/JGFS_ATMOS_WAFS_GRIB2_0P25
new file mode 100755
index 00000000000..64570bbf5d2
--- /dev/null
+++ b/jobs/JGFS_ATMOS_WAFS_GRIB2_0P25
@@ -0,0 +1,133 @@
+#!/bin/sh
+
+########################################
+# GFS WAFS GRIB 0P25 PRODUCT GENERATION
+########################################
+
+date
+export PS4='$SECONDS + '
+set -x
+
+# keep the working directory or not
+export KEEPDATA=${KEEPDATA:-NO}
+
+############################################
+# Working Directory
+############################################
+export DATA=${DATA:-${DATAROOT}/${jobid:?}}
+mkdir -p $DATA
+cd $DATA
+
+############################################
+# Output for executables
+############################################
+export pgmout=OUTPUT.$$
+
+###########################################
+# Run setpdy and initialize PDY variables
+###########################################
+export cycle=t${cyc}z
+setpdy.sh
+. ./PDY
+
+############################################
+# Set up the NET and RUN
+############################################
+export NET=${NET:-gfs}
+export RUN=${RUN:-gfs}
+export COMPONENT=${COMPONENT:-atmos}
+
+############################################
+# Specify HOME Directory
+############################################
+export gfs_ver=${gfs_ver:-v16.3.0}
+export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}}
+export EXECgfs=$HOMEgfs/exec
+export FIXgfs=$HOMEgfs/fix/wafs
+export PARMgfs=$HOMEgfs/parm/wafs
+export USHgfs=$HOMEgfs/ush
+export SCRIPTSgfs=$HOMEgfs/scripts
+
+################################################
+# Set up the input/output directory
+################################################
+export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT}
+export COMOUT=${COMOUT:-$(compath.py -o $NET/$gfs_ver)/$RUN.$PDY/$cyc/$COMPONENT}
+export PCOM=${PCOM:-$COMOUT/wmo}
+
+if [ $SENDCOM = YES ] ; then
+  mkdir -p $COMOUT $PCOM
+fi
+
+############################################
+# print current environment
+############################################
+env
+
+##############################################
+# Set up the forecast hours
+##############################################
+#export SHOUR=${SHOUR:-06}
+# Will change to 120 for 2023 ICAO standard
+#export EHOUR=${EHOUR:-120}
+#export EHOUR=${EHOUR:-36}
+
+export FHOUT_GFS=${FHOUT_GFS:-1}
+if [ $FHOUT_GFS -eq 3 ] ; then #27
+  export FHOURS=${FHOURS:-"6 9 12 15 18 21 24 27 30 33 36 39 42 45 48 54 60 66 72 78 84 90 96 102 108 114 120"}
+else #39
+  export FHOURS=${FHOURS:-"6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 27 30 33 36 39 42 45 48 54 60 66 72 78 84 90 96 102 108 114 120"}
+fi
+
+###############################################
+# Specify Timeout Behavior of WAFS blending
+#
+# SLEEP_TIME - Amount of time to wait for
+#              an input file before exiting
+# SLEEP_INT  - Amount of time to wait between
+#              checking for input files
+###############################################
+# export SLEEP_TIME=300 # changed to 60 to avoid hitting wall_clock when ukmet wafs files are missing ...
+export SLEEP_TIME=600
+export SLEEP_INT=10
+
+############################################
+# Execute the script.
+############################################
+NP=`echo $FHOURS | wc -w`
+export MPIRUN=${MPIRUN:-"mpiexec -np $NP -cpu-bind verbose,core cfp"}
+
+rm wafsgrib2_0p25.cmdfile
+ic=0
+for fcsthrs in $FHOURS ; do
+  if [ `echo $MPIRUN | cut -d " " -f1` = 'srun' ] ; then
+    echo $ic ${SCRIPTSgfs}/exgfs_atmos_wafs_grib2_0p25.sh $fcsthrs >> wafsgrib2_0p25.cmdfile
+  else
+    echo ${SCRIPTSgfs}/exgfs_atmos_wafs_grib2_0p25.sh $fcsthrs >> wafsgrib2_0p25.cmdfile
+    export MP_PGMMODEL=mpmd
+  fi
+  ic=`expr $ic + 1`
+done
+
+$MPIRUN wafsgrib2_0p25.cmdfile
+
+export err=$?; err_chk
+
+echo "JOB $job HAS COMPLETED NORMALLY!"
+
+############################################
+# print exec output
+############################################
+if [ -e "$pgmout" ] ; then
+  cat $pgmout
+fi
+
+############################################
+# remove temporary working directory
+############################################
+if [ $KEEPDATA != YES ] ; then
+  rm -rf $DATA
+fi
+
+date
+
diff --git a/jobs/JGLOBAL_FORECAST b/jobs/JGLOBAL_FORECAST
index bfdc7e3688c..d99712d4213 100755
--- a/jobs/JGLOBAL_FORECAST
+++ b/jobs/JGLOBAL_FORECAST
@@ -13,7 +13,7 @@ fi
 
 # Restart conditions for GFS cycle come from GDAS
 rCDUMP=${RUN}
-[[ ${RUN} == "gfs" ]] && export rCDUMP="gdas"
+export rCDUMP="${RUN/gfs/gdas}"
 
 # Ignore possible spelling error (nothing is misspelled)
 # shellcheck disable=SC2153
diff --git a/jobs/rocoto/prepatmiodaobs.sh b/jobs/rocoto/prepatmiodaobs.sh
index d424df92615..0e69eda5c9d 100755
--- a/jobs/rocoto/prepatmiodaobs.sh
+++ b/jobs/rocoto/prepatmiodaobs.sh
@@ -14,8 +14,9 @@ export jobid="${job}.$$"
 ###############################################################
 # setup python path for workflow and ioda utilities
 wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYIODALIB="${HOMEgfs}/sorc/gdas.cd/build/lib/python3.7"
-PYTHONPATH="${PYIODALIB}:${wxflowPATH}:${PYTHONPATH}"
+# shellcheck disable=SC2311
+pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python$(detect_py_ver)/"
+PYTHONPATH="${pyiodaPATH}:${wxflowPATH}:${PYTHONPATH}"
 export PYTHONPATH
 
 ###############################################################
diff --git a/jobs/rocoto/prepsnowobs.sh b/jobs/rocoto/prepsnowobs.sh
index dae11129ebc..cff082bab2c 100755
--- a/jobs/rocoto/prepsnowobs.sh
+++ b/jobs/rocoto/prepsnowobs.sh
@@ -14,7 +14,9 @@ export jobid="${job}.$$"
 ###############################################################
 # setup python path for workflow utilities and tasks
 wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-gdasappPATH="${HOMEgfs}/sorc/gdas.cd/iodaconv/src:${HOMEgfs}/sorc/gdas.cd/build/lib/python3.7"
+# shellcheck disable=SC2311
+pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python$(detect_py_ver)/"
+gdasappPATH="${HOMEgfs}/sorc/gdas.cd/sorc/iodaconv/src:${pyiodaPATH}"
 PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}:${gdasappPATH}"
 export PYTHONPATH
diff --git a/modulefiles/module_base.hera.lua b/modulefiles/module_base.hera.lua
index 8703100e76d..1a94019d656 100644
--- a/modulefiles/module_base.hera.lua
+++ b/modulefiles/module_base.hera.lua
@@ -42,7 +42,7 @@ setenv("WGRIB2","wgrib2")
 setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
 
 --prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles"))
-prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles"))
+prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/dev-gfsv17/modulefiles"))
 load(pathJoin("prepobs", (os.getenv("prepobs_run_ver") or "None")))
 
 prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles"))
diff --git a/modulefiles/module_gwci.hera.lua b/modulefiles/module_gwci.hera.lua
index 3d4c413a44e..75b972b69ba 100644
--- a/modulefiles/module_gwci.hera.lua
+++ b/modulefiles/module_gwci.hera.lua
@@ -2,7 +2,7 @@ help([[
 Load environment to run GFS workflow setup scripts on Hera
 ]])
 
-prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/gsi-addon-dev/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/gsi-addon-dev-rocky8/install/modulefiles/Core")
 
 load(pathJoin("stack-intel", os.getenv("2021.5.0")))
 load(pathJoin("stack-intel-oneapi-mpi", os.getenv("2021.5.1")))
diff --git a/modulefiles/module_gwsetup.hera.lua b/modulefiles/module_gwsetup.hera.lua
index 3e8bf2d7f88..696f9577beb 100644
--- a/modulefiles/module_gwsetup.hera.lua
+++ b/modulefiles/module_gwsetup.hera.lua
@@ -4,7 +4,7 @@ Load environment to run GFS workflow setup scripts on Hera
 
 load(pathJoin("rocoto"))
 
-prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/gsi-addon-dev/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/gsi-addon-dev-rocky8/install/modulefiles/Core")
 
 local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0"
 local python_ver=os.getenv("python_ver") or "3.11.6"
diff --git a/parm/config/gefs/config.base b/parm/config/gefs/config.base
index da315a73e74..7ec3993f5f6 100644
--- a/parm/config/gefs/config.base
+++ b/parm/config/gefs/config.base
@@ -74,7 +74,7 @@ export NCP="/bin/cp -p"
 export NMV="/bin/mv"
 export NLN="/bin/ln -sf"
 export VERBOSE="YES"
-export KEEPDATA="NO"
+export KEEPDATA="@KEEPDATA@"
 export DEBUG_POSTSCRIPT="NO" # PBS only; sets debug=true
 export CHGRP_RSTPROD="@CHGRP_RSTPROD@"
 export CHGRP_CMD="@CHGRP_CMD@"
@@ -262,7 +262,7 @@ export MEMDIR="mem${ENSMEM}"
 
 # initialize ocean ensemble members with perturbations
 # if true, only occurs for members greater than zero
-export OCN_ENS_PERTURB_FILES=false
+export USE_OCN_PERTURB_FILES=@STAGE_OCN_PERTURB_FILES@
 
 export DOIAU="NO" # While we are not doing IAU, we may want to warm start w/ IAU in the future
 # Check if cycle is cold starting
diff --git a/parm/config/gefs/config.efcs b/parm/config/gefs/config.efcs
index a6f34818d79..e57584be17b 100644
--- a/parm/config/gefs/config.efcs
+++ b/parm/config/gefs/config.efcs
@@ -60,7 +60,27 @@ export SPPT_TAU=21600.
 export SPPT_LSCALE=500000.
 export SPPT_LOGIT=".true."
 export SPPT_SFCLIMIT=".true."
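A sketch of the new UFS_THREADS switch in config.resources below, as I read the shell logic: ESMF-managed threading requests one thread per task and leaves the per-component counts alone, while traditional threading promotes nthreads_ufs to the job level and pins each component to a single thread (component names here are illustrative)::

    # Sketch of resolve-threads behavior under the two threading modes.
    def resolve_threads(use_esmf_threading: bool, nthreads_ufs: int, component_threads: dict):
        """Return (UFS_THREADS, per-component thread counts)."""
        if use_esmf_threading:
            # ESMF manages threading; components keep their configured counts.
            return 1, dict(component_threads)
        # Traditional threading: job-level count from nthreads_ufs,
        # components pinned to one thread each.
        return nthreads_ufs, {name: 1 for name in component_threads}

    print(resolve_threads(False, 4, {"fv3": 4, "mom6": 2, "cice6": 2}))
    # -> (4, {'fv3': 1, 'mom6': 1, 'cice6': 1})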
+# OCN options
+export DO_OCN_SPPT="YES"
+export OCNSPPT="0.8,0.4,0.2,0.08,0.04"
+export OCNSPPT_TAU="2.16E4,2.592E5,2.592E6,7.776E6,3.1536E7"
+export OCNSPPT_LSCALE="500.E3,1000.E3,2000.E3,2000.E3,2000.E3"
+export DO_OCN_PERT_EPBL="YES"
+export EPBL="0.8,0.4,0.2,0.08,0.04"
+export EPBL_TAU="2.16E4,2.592E5,2.592E6,7.776E6,3.1536E7"
+export EPBL_LSCALE="500.E3,1000.E3,2000.E3,2000.E3,2000.E3"
+
+if [[ "${USE_OCN_PERTURB_FILES:-false}" == "true" ]]; then
+  export ODA_INCUPD="True"
+  export ODA_TEMPINC_VAR='t_pert'
+  export ODA_SALTINC_VAR='s_pert'
+  export ODA_THK_VAR='h_anl'
+  export ODA_UINC_VAR='u_pert'
+  export ODA_VINC_VAR='v_pert'
+  export ODA_INCUPD_NHOURS=0.0
+else
+  export ODA_INCUPD="False"
+fi
 
 export restart_interval="${restart_interval_gfs}"
 
 echo "END: config.efcs"
diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst
index 8c3de84357e..c3f41c5e7ea 100644
--- a/parm/config/gefs/config.fcst
+++ b/parm/config/gefs/config.fcst
@@ -5,6 +5,8 @@
 
 echo "BEGIN: config.fcst"
 
+export USE_ESMF_THREADING="YES" # Toggle to use ESMF-managed threading or traditional threading in UFSWM
+
 # Turn off waves if not used for this CDUMP
 case ${WAVE_CDUMP} in
   both | "${CDUMP/enkf}" ) ;; # Don't change
diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources
index 1f6485931f8..0be7e864a19 100644
--- a/parm/config/gefs/config.resources
+++ b/parm/config/gefs/config.resources
@@ -101,6 +101,20 @@ case ${step} in
     ntasks_fv3=${ntasks_fv3_gfs}
     ntasks_quilt=${ntasks_quilt_gfs}
     nthreads_fv3=${nthreads_fv3_gfs}
+    nthreads_ufs=${nthreads_ufs_gfs}
   fi
+
+  # Determine if using ESMF-managed threading or traditional threading
+  # If using traditional threading, set them to 1
+  if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then
+    export UFS_THREADS=1
+  else # traditional threading
+    export UFS_THREADS=${nthreads_ufs:-1}
+    nthreads_fv3=1
+    nthreads_mediator=1
+    [[ "${DO_WAVE}" == "YES" ]] && nthreads_ww3=1
+    [[ "${DO_OCN}" == "YES" ]] && nthreads_mom6=1
+    [[ "${DO_ICE}" == "YES" ]] && nthreads_cice6=1
+  fi
 
   # PETS for the atmosphere dycore
@@ -177,11 +191,11 @@ case ${step} in
   if [[ "${_CDUMP}" =~ "gfs" ]]; then
     declare -x "npe_${step}_gfs"="${NTASKS_TOT}"
-    declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model
+    declare -x "nth_${step}_gfs"="${UFS_THREADS}"
     declare -x "npe_node_${step}_gfs"="${npe_node_max}"
   else
     declare -x "npe_${step}"="${NTASKS_TOT}"
-    declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model
+    declare -x "nth_${step}"="${UFS_THREADS}"
     declare -x "npe_node_${step}"="${npe_node_max}"
   fi
diff --git a/parm/config/gefs/config.ufs b/parm/config/gefs/config.ufs
index 17d21573e40..b8695b6dbb4 100644
--- a/parm/config/gefs/config.ufs
+++ b/parm/config/gefs/config.ufs
@@ -78,6 +78,8 @@ case "${fv3_res}" in
     export layout_y_gfs=1
     export nthreads_fv3=1
     export nthreads_fv3_gfs=1
+    export nthreads_ufs=1
+    export nthreads_ufs_gfs=1
     export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling
    export cdmbgwd_gsl="40.0,1.77,1.0,1.0" # settings for GSL drag suite
     export knob_ugwp_tauamp=6.0e-3 # setting for UGWPv1 non-stationary GWD
@@ -94,6 +96,8 @@ case "${fv3_res}" in
     export layout_y_gfs=2
     export nthreads_fv3=1
     export nthreads_fv3_gfs=1
+    export nthreads_ufs=1
+    export nthreads_ufs_gfs=1
     export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite
     export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD
@@ -110,6 +114,8 @@ case "${fv3_res}" in
     export layout_y_gfs=6
     export nthreads_fv3=1
     export nthreads_fv3_gfs=2
+    export nthreads_ufs=1
+    export nthreads_ufs_gfs=2
     export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite
     export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD
@@ -126,6 +132,8 @@ case "${fv3_res}" in
     export layout_y_gfs=8
     export nthreads_fv3=1
     export nthreads_fv3_gfs=2
+    export nthreads_ufs=1
+    export nthreads_ufs_gfs=2
     export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite
     export knob_ugwp_tauamp=0.8e-3 # setting for UGWPv1 non-stationary GWD
@@ -142,6 +150,8 @@ case "${fv3_res}" in
     export layout_y_gfs=16
     export nthreads_fv3=4
     export nthreads_fv3_gfs=4
+    export nthreads_ufs=4
+    export nthreads_ufs_gfs=4
     export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite
     export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD
@@ -158,6 +168,8 @@ case "${fv3_res}" in
     export layout_y_gfs=16
     export nthreads_fv3=4
     export nthreads_fv3_gfs=4
+    export nthreads_ufs=4
+    export nthreads_ufs_gfs=4
     export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="1.67,8.8,1.0,1.0" # settings for GSL drag suite
     export knob_ugwp_tauamp=0.35e-3 # setting for UGWPv1 non-stationary GWD
@@ -174,6 +186,8 @@ case "${fv3_res}" in
     export layout_y_gfs=32
     export nthreads_fv3=4
     export nthreads_fv3_gfs=4
+    export nthreads_ufs=4
+    export nthreads_ufs_gfs=4
     export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="0.625,14.1,1.0,1.0" # settings for GSL drag suite
     export knob_ugwp_tauamp=0.13e-3 # setting for UGWPv1 non-stationary GWD
@@ -433,27 +447,30 @@ fi
 
 # Set the name of the UFS (previously nems) configure template to use
 # Default ufs.configure templates for supported model configurations
+if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then
+  tmpl_suffix="_esmf"
+fi
 case "${model_list}" in
   atm)
-    default_template="${PARMgfs}/ufs/ufs.configure.atm.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.atm${tmpl_suffix:-}.IN"
     ;;
   atm.aero)
-    default_template="${PARMgfs}/ufs/ufs.configure.atmaero.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.atmaero${tmpl_suffix:-}.IN"
     ;;
   atm.wave)
-    default_template="${PARMgfs}/ufs/ufs.configure.leapfrog_atm_wav.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.leapfrog_atm_wav${tmpl_suffix:-}.IN"
    ;;
   atm.ocean.ice)
-    default_template="${PARMgfs}/ufs/ufs.configure.s2s_esmf.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.s2s${tmpl_suffix:-}.IN"
     ;;
   atm.ocean.ice.aero)
-    default_template="${PARMgfs}/ufs/ufs.configure.s2sa_esmf.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.s2sa${tmpl_suffix:-}.IN"
     ;;
   atm.ocean.ice.wave)
-    default_template="${PARMgfs}/ufs/ufs.configure.s2sw_esmf.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.s2sw${tmpl_suffix:-}.IN"
     ;;
   atm.ocean.ice.wave.aero)
-    default_template="${PARMgfs}/ufs/ufs.configure.s2swa_esmf.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.s2swa${tmpl_suffix:-}.IN"
     ;;
   *)
     echo "FATAL ERROR: Unsupported UFSWM configuration for ${model_list}"
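The template selection above now appends an "_esmf" suffix when ESMF-managed threading is enabled. A sketch of that resolution logic; the mapping mirrors the case statement, and the path prefix is rendered as a placeholder::

    # Sketch: map model_list to a ufs.configure template name.
    TEMPLATES = {
        "atm": "atm",
        "atm.aero": "atmaero",
        "atm.wave": "leapfrog_atm_wav",
        "atm.ocean.ice": "s2s",
        "atm.ocean.ice.aero": "s2sa",
        "atm.ocean.ice.wave": "s2sw",
        "atm.ocean.ice.wave.aero": "s2swa",
    }

    def default_template(model_list: str, use_esmf_threading: bool) -> str:
        suffix = "_esmf" if use_esmf_threading else ""
        name = TEMPLATES[model_list]  # KeyError ~ the FATAL ERROR branch
        return f"${{PARMgfs}}/ufs/ufs.configure.{name}{suffix}.IN"

    print(default_template("atm.ocean.ice.wave.aero", True))
    # -> ${PARMgfs}/ufs/ufs.configure.s2swa_esmf.IN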
b/parm/config/gefs/yaml/defaults.yaml @@ -4,4 +4,9 @@ base: DO_JEDIOCNVAR: "NO" DO_JEDISNOWDA: "NO" DO_MERGENSST: "NO" + KEEPDATA: "NO" FHMAX_GFS: 120 + +stage_ic: + USE_OCN_PERTURB_FILES: "false" + diff --git a/parm/config/gfs/config.aeroanl b/parm/config/gfs/config.aeroanl index cf7981f8070..972f393feb5 100644 --- a/parm/config/gfs/config.aeroanl +++ b/parm/config/gfs/config.aeroanl @@ -6,24 +6,26 @@ echo "BEGIN: config.aeroanl" export CASE_ANL=${CASE} -export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ -export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export OBS_LIST="${PARMgfs}/gdas/aero/obs/lists/gdas_aero.yaml.j2" export STATICB_TYPE='identity' -export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml -export BERROR_DATA_DIR=${FIXgfs}/gdas/bump/aero/${CASE_ANL}/ +export BERROR_YAML="${PARMgfs}/gdas/aero/berror/staticb_${STATICB_TYPE}.yaml.j2" +export BERROR_DATA_DIR="${FIXgfs}/gdas/bump/aero/${CASE_ANL}/" export BERROR_DATE="20160630.000000" +export CRTM_FIX_YAML="${PARMgfs}/gdas/aero_crtm_coeff.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/aero_jedi_fix.yaml.j2" + export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ -export JEDIEXE=${EXECgfs}/fv3jedi_var.x +export JEDIEXE="${EXECgfs}/fv3jedi_var.x" if [[ "${DOIAU}" == "YES" ]]; then export aero_bkg_times="3,6,9" - export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_fgat_gfs_aero.yaml + export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_fgat_gfs_aero.yaml.j2" else export aero_bkg_times="6" - export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml + export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_gfs_aero.yaml.j2" fi echo "END: config.aeroanl" diff --git a/parm/config/gfs/config.aeroanlfinal b/parm/config/gfs/config.aeroanlfinal index 230ec5205a9..34e5d8f1164 100644 --- a/parm/config/gfs/config.aeroanlfinal +++ b/parm/config/gfs/config.aeroanlfinal @@ -6,5 +6,5 @@ echo "BEGIN: config.aeroanlfinal" # Get task specific resources -. $EXPDIR/config.resources aeroanlfinal +source "${EXPDIR}/config.resources" aeroanlfinal echo "END: config.aeroanlfinal" diff --git a/parm/config/gfs/config.aeroanlinit b/parm/config/gfs/config.aeroanlinit index 72175b8d0cc..7036d3d27b8 100644 --- a/parm/config/gfs/config.aeroanlinit +++ b/parm/config/gfs/config.aeroanlinit @@ -6,5 +6,5 @@ echo "BEGIN: config.aeroanlinit" # Get task specific resources -. $EXPDIR/config.resources aeroanlinit +source "${EXPDIR}/config.resources" aeroanlinit echo "END: config.aeroanlinit" diff --git a/parm/config/gfs/config.aeroanlrun b/parm/config/gfs/config.aeroanlrun index da13df28316..012e5b79f3b 100644 --- a/parm/config/gfs/config.aeroanlrun +++ b/parm/config/gfs/config.aeroanlrun @@ -6,6 +6,6 @@ echo "BEGIN: config.aeroanlrun" # Get task specific resources -. $EXPDIR/config.resources aeroanlrun +source "${EXPDIR}/config.resources" aeroanlrun echo "END: config.aeroanlrun" diff --git a/parm/config/gfs/config.anal b/parm/config/gfs/config.anal index 98d0e88cc28..09aaa15a98e 100644 --- a/parm/config/gfs/config.anal +++ b/parm/config/gfs/config.anal @@ -50,6 +50,13 @@ export OZINFO=${FIXgfs}/gsi/global_ozinfo.txt export SATINFO=${FIXgfs}/gsi/global_satinfo.txt export OBERROR=${FIXgfs}/gsi/prepobs_errtable.global +if [[ ${GSI_SOILANAL} = "YES" ]]; then + export hofx_2m_sfcfile=".true." + export reducedgrid=".false." # not possible for sfc analysis, Jeff Whitaker says it's not useful anyway + export paranc=".false." 
# temporary until sfc io coded for paranc (PR being prepared by T. Gichamo) + export CONVINFO=${FIXgfs}/gsi/global_convinfo_2mObs.txt + export ANAVINFO=${FIXgfs}/gsi/global_anavinfo_soilanal.l127.txt +fi # Use experimental dumps in EMC GFS v16 parallels if [[ ${RUN_ENVIR} == "emc" ]]; then diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl index 11358de8a84..7cfd0cb47fb 100644 --- a/parm/config/gfs/config.atmanl +++ b/parm/config/gfs/config.atmanl @@ -19,6 +19,9 @@ else export BERROR_YAML="${PARMgfs}/gdas/atm/berror/staticb_${STATICB_TYPE}.yaml.j2" fi +export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" + export layout_x_atmanl=@LAYOUT_X_ATMANL@ export layout_y_atmanl=@LAYOUT_Y_ATMANL@ diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl index 49b903e4c0a..8e824b22f61 100644 --- a/parm/config/gfs/config.atmensanl +++ b/parm/config/gfs/config.atmensanl @@ -9,6 +9,9 @@ export OBS_LIST="${PARMgfs}/gdas/atm/obs/lists/lgetkf_prototype.yaml.j2" export JEDIYAML="${PARMgfs}/gdas/atm/lgetkf/lgetkf.yaml.j2" export INTERP_METHOD='barycentric' +export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" + export layout_x_atmensanl=@LAYOUT_X_ATMENSANL@ export layout_y_atmensanl=@LAYOUT_Y_ATMENSANL@ diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base index d81ec601326..64926aed1ae 100644 --- a/parm/config/gfs/config.base +++ b/parm/config/gfs/config.base @@ -233,7 +233,7 @@ export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set export FHOUT_OCNICE=3 # Cycle to run EnKF (set to BOTH for both gfs and gdas) -export EUPD_CYC="gdas" +export EUPD_CYC="@EUPD_CYC@" # GFS cycle info export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. @@ -295,16 +295,20 @@ export DO_MERGENSST="@DO_MERGENSST@" # Hybrid related export DOHYBVAR="@DOHYBVAR@" export NMEM_ENS=@NMEM_ENS@ -export NMEM_ENS_GFS=@NMEM_ENS@ export SMOOTH_ENKF="NO" export l4densvar=".true." export lwrite4danl=".true." +# Early-cycle EnKF parameters +export NMEM_ENS_GFS=30 +export NMEM_ENS_GFS_OFFSET=20 +export DO_CALC_INCREMENT_ENKF_GFS="NO" + # EnKF output frequency if [[ ${DOHYBVAR} = "YES" ]]; then export FHMIN_ENKF=3 export FHMAX_ENKF=9 - export FHMAX_ENKF_GFS=120 + export FHMAX_ENKF_GFS=@FHMAX_ENKF_GFS@ export FHOUT_ENKF_GFS=3 if [[ ${l4densvar} = ".true." ]]; then export FHOUT=1 @@ -331,6 +335,8 @@ fi if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi +export GSI_SOILANAL=@GSI_SOILANAL@ + # turned on nsst in anal and/or fcst steps, and turn off rtgsst export DONST="YES" if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi @@ -347,9 +353,6 @@ export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" # Analysis increments to zero in CALCINCEXEC export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" -# Write analysis files for early cycle EnKF -export DO_CALC_INCREMENT_ENKF_GFS="YES" - # Stratospheric increments to zero export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" export INCVARS_EFOLD="5" @@ -381,11 +384,14 @@ export FITSARC="YES" export FHMAX_FITS=132 [[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} -# The monitor jobs are not yet supported for JEDIATMVAR +# The monitor jobs are not yet supported for JEDIATMVAR.
if [[ ${DO_JEDIATMVAR} = "YES" ]]; then export DO_VERFOZN="NO" # Ozone data assimilation monitoring export DO_VERFRAD="NO" # Radiance data assimilation monitoring export DO_VMINMON="NO" # GSI minimization monitoring +# The minimization monitor is not yet supported on RDHPCS Hera for Rocky-8 +elif [[ ${machine} = "HERA" ]]; then + export DO_VMINMON="NO" # GSI minimization monitoring fi echo "END: config.base" diff --git a/parm/config/gfs/config.esfc b/parm/config/gfs/config.esfc index 7c32313758a..684dea4ee37 100644 --- a/parm/config/gfs/config.esfc +++ b/parm/config/gfs/config.esfc @@ -12,7 +12,7 @@ echo "BEGIN: config.esfc" # Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at # center of analysis window. -if [ $DOIAU_ENKF = "YES" ]; then +if [[ ${DOIAU_ENKF} = "YES" ]]; then export DOSFCANL_ENKF="NO" fi @@ -21,4 +21,10 @@ if [[ "${DO_JEDIATMENS}" == "YES" ]]; then export DONST="NO" fi +# set up soil analysis +if [[ ${GSI_SOILANAL} = "YES" ]]; then + export DO_LNDINC=".true." + export LND_SOI_FILE="lnd_incr" +fi + echo "END: config.esfc" diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst index 461f1c5b182..a3e67f8cf9a 100644 --- a/parm/config/gfs/config.fcst +++ b/parm/config/gfs/config.fcst @@ -5,6 +5,8 @@ echo "BEGIN: config.fcst" +export USE_ESMF_THREADING="YES" # Toggle to use ESMF-managed threading or traditional threading in UFSWM + # Turn off waves if not used for this CDUMP case ${WAVE_CDUMP} in both | "${CDUMP/enkf}" ) ;; # Don't change diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 4227db8529f..3a1cb8aa6b7 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -570,6 +570,20 @@ case ${step} in ntasks_fv3=${ntasks_fv3_gfs} ntasks_quilt=${ntasks_quilt_gfs} nthreads_fv3=${nthreads_fv3_gfs} + nthreads_ufs=${nthreads_ufs_gfs} + fi + + # Determine if using ESMF-managed threading or traditional threading + # If using traditional threading, set them to 1 + if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then + export UFS_THREADS=1 + else # traditional threading + export UFS_THREADS=${nthreads_ufs:-1} + nthreads_fv3=1 + nthreads_mediator=1 + [[ "${DO_WAVE}" == "YES" ]] && nthreads_ww3=1 + [[ "${DO_OCN}" == "YES" ]] && nthreads_mom6=1 + [[ "${DO_ICE}" == "YES" ]] && nthreads_cice6=1 fi # PETS for the atmosphere dycore @@ -646,11 +660,11 @@ case ${step} in if [[ "${_CDUMP}" =~ "gfs" ]]; then declare -x "npe_${step}_gfs"="${NTASKS_TOT}" - declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "nth_${step}_gfs"="${UFS_THREADS}" declare -x "npe_node_${step}_gfs"="${npe_node_max}" else declare -x "npe_${step}"="${NTASKS_TOT}" - declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "nth_${step}"="${UFS_THREADS}" declare -x "npe_node_${step}"="${npe_node_max}" fi @@ -692,9 +706,14 @@ case ${step} in "C48" | "C96") export npe_upp=${CASE:1} ;; - "C192" | "C384" | "C768") + "C192" | "C384") + export npe_upp=120 + export memory_upp="96GB" + ;; + "C768") export npe_upp=120 export memory_upp="96GB" + if [[ ${machine} == "WCOSS2" ]]; then export memory_upp="480GB" ; fi ;; *) echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" diff --git a/parm/config/gfs/config.snowanl b/parm/config/gfs/config.snowanl index 30e6d9c07b3..7b3ffa47f3d 100644 --- a/parm/config/gfs/config.snowanl +++ b/parm/config/gfs/config.snowanl @@ -22,6 +22,8 @@ export BESTDDEV="30." # Background Error Std. Dev. 
for LETKFOI export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe" export APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/apply_incr_nml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/snow_jedi_fix.yaml.j2" + export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs index 68a8941122c..0db6f090a55 100644 --- a/parm/config/gfs/config.ufs +++ b/parm/config/gfs/config.ufs @@ -78,6 +78,8 @@ case "${fv3_res}" in export layout_y_gfs=1 export nthreads_fv3=1 export nthreads_fv3_gfs=1 + export nthreads_ufs=1 + export nthreads_ufs_gfs=1 export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="40.0,1.77,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=6.0e-3 # setting for UGWPv1 non-stationary GWD @@ -94,6 +96,8 @@ case "${fv3_res}" in export layout_y_gfs=2 export nthreads_fv3=1 export nthreads_fv3_gfs=1 + export nthreads_ufs=1 + export nthreads_ufs_gfs=1 export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD @@ -110,6 +114,8 @@ case "${fv3_res}" in export layout_y_gfs=6 export nthreads_fv3=1 export nthreads_fv3_gfs=2 + export nthreads_ufs=1 + export nthreads_ufs_gfs=2 export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD @@ -126,6 +132,8 @@ case "${fv3_res}" in export layout_y_gfs=8 export nthreads_fv3=2 export nthreads_fv3_gfs=2 + export nthreads_ufs=2 + export nthreads_ufs_gfs=2 export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.8e-3 # setting for UGWPv1 non-stationary GWD @@ -142,6 +150,8 @@ case "${fv3_res}" in export layout_y_gfs=16 export nthreads_fv3=4 export nthreads_fv3_gfs=4 + export nthreads_ufs=4 + export nthreads_ufs_gfs=4 export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD @@ -158,6 +168,8 @@ case "${fv3_res}" in export layout_y_gfs=16 export nthreads_fv3=4 export nthreads_fv3_gfs=4 + export nthreads_ufs=4 + export nthreads_ufs_gfs=4 export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="1.67,8.8,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.35e-3 # setting for UGWPv1 non-stationary GWD @@ -174,6 +186,8 @@ case "${fv3_res}" in export layout_y_gfs=32 export nthreads_fv3=4 export nthreads_fv3_gfs=4 + export nthreads_ufs=4 + export nthreads_ufs_gfs=4 export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="0.625,14.1,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.13e-3 # setting for UGWPv1 non-stationary GWD @@ -420,7 +434,7 @@ if [[ "${skip_ww3}" == "false" ]]; then ntasks_ww3=40 nthreads_ww3=1 ;; - "uglo_m1g16") + "uglo_m1g16") ntasks_ww3=1000 nthreads_ww3=1 ;; @@ -441,27 +455,30 @@ fi # Set the name of the UFS (previously nems) configure template to use # Default ufs.configure templates for supported model configurations +if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then + tmpl_suffix="_esmf" +fi case 
"${model_list}" in atm) - default_template="${PARMgfs}/ufs/ufs.configure.atm.IN" + default_template="${PARMgfs}/ufs/ufs.configure.atm${tmpl_suffix:-}.IN" ;; atm.aero) - default_template="${PARMgfs}/ufs/ufs.configure.atmaero.IN" + default_template="${PARMgfs}/ufs/ufs.configure.atmaero${tmpl_suffix:-}.IN" ;; atm.wave) - default_template="${PARMgfs}/ufs/ufs.configure.leapfrog_atm_wav.IN" + default_template="${PARMgfs}/ufs/ufs.configure.leapfrog_atm_wav${tmpl_suffix:-}.IN" ;; atm.ocean.ice) - default_template="${PARMgfs}/ufs/ufs.configure.s2s_esmf.IN" + default_template="${PARMgfs}/ufs/ufs.configure.s2s${tmpl_suffix:-}.IN" ;; atm.ocean.ice.aero) - default_template="${PARMgfs}/ufs/ufs.configure.s2sa_esmf.IN" + default_template="${PARMgfs}/ufs/ufs.configure.s2sa${tmpl_suffix:-}.IN" ;; atm.ocean.ice.wave) - default_template="${PARMgfs}/ufs/ufs.configure.s2sw_esmf.IN" + default_template="${PARMgfs}/ufs/ufs.configure.s2sw${tmpl_suffix:-}.IN" ;; atm.ocean.ice.wave.aero) - default_template="${PARMgfs}/ufs/ufs.configure.s2swa_esmf.IN" + default_template="${PARMgfs}/ufs/ufs.configure.s2swa${tmpl_suffix:-}.IN" ;; *) echo "FATAL ERROR: Unsupported UFSWM configuration for ${model_list}" diff --git a/parm/config/gfs/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml index 9c90255cdd7..521c7a03ba4 100644 --- a/parm/config/gfs/yaml/defaults.yaml +++ b/parm/config/gfs/yaml/defaults.yaml @@ -8,6 +8,9 @@ base: DO_GOES: "NO" FHMAX_GFS: 120 DO_VRFY_OCEANDA: "NO" + GSI_SOILANAL: "NO" + EUPD_CYC: "gdas" + FHMAX_ENKF_GFS: 12 atmanl: LAYOUT_X_ATMANL: 8 @@ -32,12 +35,12 @@ snowanl: ocnanal: SOCA_INPUT_FIX_DIR: "/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25/soca" # TODO: These need to go to glopara fix space. CASE_ANL: "C48" # TODO: Check in gdasapp if used anywhere for SOCA - SOCA_OBS_LIST: "{{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml" # TODO: This is also repeated in oceanprepobs + SOCA_OBS_LIST: "${PARMgfs}/gdas/soca/obs/obs_list.yaml" # TODO: This is also repeated in oceanprepobs SOCA_NINNER: 100 SABER_BLOCKS_YAML: "" NICAS_RESOL: 1 NICAS_GRID_SIZE: 15000 prepoceanobs: - SOCA_OBS_LIST: "{{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml" # TODO: This is also repeated in ocnanal - OBSPREP_YAML: "{{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obsprep/obsprep_config.yaml" + SOCA_OBS_LIST: "${PARMgfs}/gdas/soca/obs/obs_list.yaml" # TODO: This is also repeated in ocnanal + OBSPREP_YAML: "${PARMgfs}/gdas/soca/obsprep/obsprep_config.yaml" DMPDIR: "/scratch1/NCEPDEV/global/glopara/data/experimental_obs" diff --git a/parm/gdas/aero_crtm_coeff.yaml b/parm/gdas/aero_crtm_coeff.yaml deleted file mode 100644 index 75b54c37412..00000000000 --- a/parm/gdas/aero_crtm_coeff.yaml +++ /dev/null @@ -1,13 +0,0 @@ -mkdir: -- {{ DATA }}/crtm/ -copy: -- [{{ CRTM_FIX }}/AerosolCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/CloudCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/v.viirs-m_npp.SpcCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/v.viirs-m_npp.TauCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/v.viirs-m_j1.SpcCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/v.viirs-m_j1.TauCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/NPOESS.VISice.EmisCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/NPOESS.VISland.EmisCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/NPOESS.VISsnow.EmisCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/NPOESS.VISwater.EmisCoeff.bin, {{ DATA }}/crtm/] diff --git a/parm/gdas/aero_crtm_coeff.yaml.j2 b/parm/gdas/aero_crtm_coeff.yaml.j2 new file mode 100644 index 00000000000..b48d8ff2315 --- 
/dev/null +++ b/parm/gdas/aero_crtm_coeff.yaml.j2 @@ -0,0 +1,13 @@ +mkdir: +- '{{ DATA }}/crtm/' +copy: +- ['{{ CRTM_FIX }}/AerosolCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/CloudCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/v.viirs-m_npp.SpcCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/v.viirs-m_npp.TauCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/v.viirs-m_j1.SpcCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/v.viirs-m_j1.TauCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/NPOESS.VISice.EmisCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/NPOESS.VISland.EmisCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/NPOESS.VISsnow.EmisCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/NPOESS.VISwater.EmisCoeff.bin', '{{ DATA }}/crtm/'] diff --git a/parm/gdas/aero_jedi_fix.yaml b/parm/gdas/aero_jedi_fix.yaml deleted file mode 100644 index 16cbeac6e7b..00000000000 --- a/parm/gdas/aero_jedi_fix.yaml +++ /dev/null @@ -1,11 +0,0 @@ -mkdir: -- !ENV ${DATA}/fv3jedi -copy: -- - !ENV ${FIXgfs}/gdas/fv3jedi/fv3files/akbk$(npz).nc4 - - !ENV ${DATA}/fv3jedi/akbk.nc4 -- - !ENV ${FIXgfs}/gdas/fv3jedi/fv3files/fmsmpp.nml - - !ENV ${DATA}/fv3jedi/fmsmpp.nml -- - !ENV ${FIXgfs}/gdas/fv3jedi/fv3files/field_table_gfdl - - !ENV ${DATA}/fv3jedi/field_table -- - !ENV $(HOMEgfs)/sorc/gdas.cd/parm/io/fv3jedi_fieldmetadata_restart.yaml - - !ENV ${DATA}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml diff --git a/parm/gdas/atm_jedi_fix.yaml b/parm/gdas/aero_jedi_fix.yaml.j2 similarity index 100% rename from parm/gdas/atm_jedi_fix.yaml rename to parm/gdas/aero_jedi_fix.yaml.j2 diff --git a/parm/gdas/atm_crtm_coeff.yaml b/parm/gdas/atm_crtm_coeff.yaml.j2 similarity index 100% rename from parm/gdas/atm_crtm_coeff.yaml rename to parm/gdas/atm_crtm_coeff.yaml.j2 diff --git a/parm/gdas/atm_jedi_fix.yaml.j2 b/parm/gdas/atm_jedi_fix.yaml.j2 new file mode 100644 index 00000000000..69039baddf7 --- /dev/null +++ b/parm/gdas/atm_jedi_fix.yaml.j2 @@ -0,0 +1,7 @@ +mkdir: +- '{{ DATA }}/fv3jedi' +copy: +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/akbk{{ npz }}.nc4', '{{ DATA }}/fv3jedi/akbk.nc4'] +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/fmsmpp.nml', '{{ DATA }}/fv3jedi/fmsmpp.nml'] +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/field_table_gfdl', '{{ DATA }}/fv3jedi/field_table'] +- ['{{ PARMgfs }}/gdas/io/fv3jedi_fieldmetadata_restart.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml'] diff --git a/parm/gdas/snow_jedi_fix.yaml.j2 b/parm/gdas/snow_jedi_fix.yaml.j2 index 4d820a82ba5..69039baddf7 100644 --- a/parm/gdas/snow_jedi_fix.yaml.j2 +++ b/parm/gdas/snow_jedi_fix.yaml.j2 @@ -1,7 +1,7 @@ mkdir: - '{{ DATA }}/fv3jedi' copy: -- ['{{ HOMEgfs }}/fix/gdas/fv3jedi/fv3files/akbk{{ npz }}.nc4', '{{ DATA }}/fv3jedi/akbk.nc4'] -- ['{{ HOMEgfs }}/fix/gdas/fv3jedi/fv3files/fmsmpp.nml', '{{ DATA }}/fv3jedi/fmsmpp.nml'] -- ['{{ HOMEgfs }}/fix/gdas/fv3jedi/fv3files/field_table_gfdl', '{{ DATA }}/fv3jedi/field_table'] -- ['{{ HOMEgfs }}/sorc/gdas.cd/parm/io/fv3jedi_fieldmetadata_restart.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml'] +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/akbk{{ npz }}.nc4', '{{ DATA }}/fv3jedi/akbk.nc4'] +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/fmsmpp.nml', '{{ DATA }}/fv3jedi/fmsmpp.nml'] +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/field_table_gfdl', '{{ DATA }}/fv3jedi/field_table'] +- ['{{ PARMgfs }}/gdas/io/fv3jedi_fieldmetadata_restart.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml'] diff --git a/parm/ufs/fv3/diag_table_aod b/parm/ufs/fv3/diag_table_aod index 
0de51b66d85..fd8aee1791a 100644 --- a/parm/ufs/fv3/diag_table_aod +++ b/parm/ufs/fv3/diag_table_aod @@ -3,4 +3,4 @@ "gfs_phys", "SU_AOD_550", "su_aod550", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "BC_AOD_550", "bc_aod550", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "OC_AOD_550", "oc_aod550", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "SS_AOD_550", "ss_aod550", "fv3_history2d", "all", .false., "none", 2 \ No newline at end of file +"gfs_phys", "SS_AOD_550", "ss_aod550", "fv3_history2d", "all", .false., "none", 2 diff --git a/scripts/exgdas_enkf_ecen.sh b/scripts/exgdas_enkf_ecen.sh index ff25c78b7c5..8e87b3ccb19 100755 --- a/scripts/exgdas_enkf_ecen.sh +++ b/scripts/exgdas_enkf_ecen.sh @@ -51,7 +51,6 @@ GPREFIX=${GPREFIX:-""} GPREFIX_ENS=${GPREFIX_ENS:-$GPREFIX} # Variables -NMEM_ENS=${NMEM_ENS:-80} imp_physics=${imp_physics:-99} INCREMENTS_TO_ZERO=${INCREMENTS_TO_ZERO:-"'NONE'"} DOIAU=${DOIAU_ENKF:-"NO"} @@ -59,10 +58,15 @@ FHMIN=${FHMIN_ECEN:-3} FHMAX=${FHMAX_ECEN:-9} FHOUT=${FHOUT_ECEN:-3} FHSFC=${FHSFC_ECEN:-$FHMIN} -if [ $RUN = "enkfgfs" ]; then +if [ "${RUN}" = "enkfgfs" ]; then DO_CALC_INCREMENT=${DO_CALC_INCREMENT_ENKF_GFS:-"NO"} + NMEM_ENS=${NMEM_ENS_GFS:-30} + ec_offset=${NMEM_ENS_GFS_OFFSET:-20} + mem_offset=$((ec_offset * cyc/6)) else DO_CALC_INCREMENT=${DO_CALC_INCREMENT:-"NO"} + NMEM_ENS=${NMEM_ENS:-80} + mem_offset=0 fi # global_chgres stuff @@ -106,12 +110,17 @@ ENKF_SUFFIX="s" for FHR in $(seq $FHMIN $FHOUT $FHMAX); do for imem in $(seq 1 $NMEM_ENS); do + smem=$((imem + mem_offset)) + if (( smem > 80 )); then + smem=$((smem - 80)) + fi + gmemchar="mem"$(printf %03i $smem) memchar="mem"$(printf %03i $imem) MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com -x \ COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL - MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -x \ + MEMDIR=${gmemchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -x \ COM_ATMOS_HISTORY_MEM_PREV:COM_ATMOS_HISTORY_TMPL ${NLN} "${COM_ATMOS_HISTORY_MEM_PREV}/${GPREFIX_ENS}atmf00${FHR}${ENKF_SUFFIX}.nc" "./atmges_${memchar}" diff --git a/scripts/exgdas_enkf_post.sh b/scripts/exgdas_enkf_post.sh index 1819502c45b..6f600687471 100755 --- a/scripts/exgdas_enkf_post.sh +++ b/scripts/exgdas_enkf_post.sh @@ -46,10 +46,11 @@ FHMIN=${FHMIN_EPOS:-3} FHMAX=${FHMAX_EPOS:-9} FHOUT=${FHOUT_EPOS:-3} -if [[ $CDUMP == "gfs" ]]; then +if [[ "${RUN}" == "enkfgfs" ]]; then NMEM_ENS=${NMEM_ENS_GFS:-${NMEM_ENS:-30}} +else + NMEM_ENS=${NMEM_ENS:-80} fi -NMEM_ENS=${NMEM_ENS:-80} SMOOTH_ENKF=${SMOOTH_ENKF:-"NO"} ENKF_SPREAD=${ENKF_SPREAD:-"NO"} diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index 85d0b2187de..584c7932319 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -24,6 +24,7 @@ pwd=$(pwd) # Base variables DONST=${DONST:-"NO"} +GSI_SOILANAL=${GSI_SOILANAL:-"NO"} DOSFCANL_ENKF=${DOSFCANL_ENKF:-"YES"} export CASE=${CASE:-384} ntiles=${ntiles:-6} @@ -46,7 +47,14 @@ GPREFIX=${GPREFIX:-""} GPREFIX_ENS=${GPREFIX_ENS:-${GPREFIX}} # Variables -NMEM_ENS=${NMEM_ENS:-80} +if [ "${RUN}" = "enkfgfs" ]; then + NMEM_ENS=${NMEM_ENS_GFS:-30} + ec_offset=${NMEM_ENS_GFS_OFFSET:-20} + mem_offset=$((ec_offset * cyc/6)) +else + NMEM_ENS=${NMEM_ENS:-80} + mem_offset=0 +fi DOIAU=${DOIAU_ENKF:-"NO"} # Global_cycle stuff @@ -61,7 +69,6 @@ export DELTSFC=${DELTSFC:-6} APRUN_ESFC=${APRUN_ESFC:-${APRUN:-""}} NTHREADS_ESFC=${NTHREADS_ESFC:-${NTHREADS:-1}} - ################################################################################ # 
Preprocessing mkdata=NO @@ -132,18 +139,24 @@ if [ $DOIAU = "YES" ]; then export TILE_NUM=$n for imem in $(seq 1 $NMEM_ENS); do - + smem=$((imem + mem_offset)) + if (( smem > 80 )); then + smem=$((smem - 80)) + fi + gmemchar="mem"$(printf %03i "$smem") cmem=$(printf %03i $imem) memchar="mem$cmem" MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com \ COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL - MEMDIR=${memchar} RUN="enkfgdas" YMD=${gPDY} HH=${gcyc} generate_com \ + MEMDIR=${gmemchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com \ COM_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL - [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com \ + COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL + [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" ${NCP} "${COM_ATMOS_RESTART_MEM_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" ${NLN} "${COM_ATMOS_RESTART_MEM_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ @@ -153,7 +166,12 @@ if [ $DOIAU = "YES" ]; then ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" - done + if [[ ${GSI_SOILANAL} = "YES" ]]; then + FHR=6 + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}sfci00${FHR}.nc" \ + "${DATA}/lnd_incr.${cmem}" + fi + done # ensembles CDATE="${PDY}${cyc}" ${CYCLESH} export err=$?; err_chk @@ -175,7 +193,7 @@ if [ $DOSFCANL_ENKF = "YES" ]; then MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com \ COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL - RUN="${GDUMP_ENS}" MEMDIR=${memchar} YMD=${gPDY} HH=${gcyc} generate_com \ + RUN="${GDUMP_ENS}" MEMDIR=${gmemchar} YMD=${gPDY} HH=${gcyc} generate_com \ COM_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" diff --git a/scripts/exgdas_enkf_update.sh b/scripts/exgdas_enkf_update.sh index a23a892914a..d61c4986155 100755 --- a/scripts/exgdas_enkf_update.sh +++ b/scripts/exgdas_enkf_update.sh @@ -56,7 +56,6 @@ ENKFSTAT=${ENKFSTAT:-${APREFIX}enkfstat} # Namelist parameters USE_CORRELATED_OBERRS=${USE_CORRELATED_OBERRS:-"NO"} -NMEM_ENS=${NMEM_ENS:-80} NAM_ENKF=${NAM_ENKF:-""} SATOBS_ENKF=${SATOBS_ENKF:-""} OZOBS_ENKF=${OZOBS_ENKF:-""} @@ -81,12 +80,18 @@ cnvw_option=${cnvw_option:-".false."} netcdf_diag=${netcdf_diag:-".true."} modelspace_vloc=${modelspace_vloc:-".false."} # if true, 'vlocal_eig.dat' is needed IAUFHRS_ENKF=${IAUFHRS_ENKF:-6} -if [ $RUN = "enkfgfs" ]; then +if [ "${RUN}" = "enkfgfs" ]; then DO_CALC_INCREMENT=${DO_CALC_INCREMENT_ENKF_GFS:-"NO"} + NMEM_ENS=${NMEM_ENS_GFS:-30} + ec_offset=${NMEM_ENS_GFS_OFFSET:-20} + mem_offset=$((ec_offset * cyc/6)) else DO_CALC_INCREMENT=${DO_CALC_INCREMENT:-"NO"} + NMEM_ENS=${NMEM_ENS:-80} + mem_offset=0 fi INCREMENTS_TO_ZERO=${INCREMENTS_TO_ZERO:-"'NONE'"} +GSI_SOILANAL=${GSI_SOILANAL:-"NO"} ################################################################################ @@ -178,9 +183,14 @@ else fi nfhrs=$(echo $IAUFHRS_ENKF | sed 's/,/ /g') for imem in $(seq 1 $NMEM_ENS); do + smem=$((imem + mem_offset)) + if (( smem > 80 )); then + smem=$((smem - 80)) + fi + gmemchar="mem"$(printf %03i $smem) memchar="mem"$(printf %03i $imem) - MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -x \ + MEMDIR=${gmemchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -x \ COM_ATMOS_HISTORY_MEM_PREV:COM_ATMOS_HISTORY_TMPL 
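Aside on the member mapping introduced in the ecen, esfc, and eupd hunks here: the early-cycle (enkfgfs) ensemble reads a rotated slice of the 80-member enkfgdas ensemble, advancing by NMEM_ENS_GFS_OFFSET members per cycle and wrapping past member 80. A minimal bash sketch of the arithmetic, using the NMEM_ENS_GFS_OFFSET=20 default set in config.base and an assumed example cycle hour of 18:

cyc=18                                # assumed example cycle hour
ec_offset=20                          # NMEM_ENS_GFS_OFFSET default from config.base
mem_offset=$((ec_offset * cyc / 6))   # 00Z->0, 06Z->20, 12Z->40, 18Z->60
for imem in 1 25 79; do
  smem=$((imem + mem_offset))
  if (( smem > 80 )); then
    smem=$((smem - 80))               # wrap around the 80-member gdas ensemble
  fi
  printf 'enkfgfs mem%03d reads enkfgdas mem%03d\n' "${imem}" "${smem}"
done
# prints: mem001 -> mem061, mem025 -> mem005, mem079 -> mem059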
MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com -x \ @@ -203,6 +213,10 @@ for imem in $(seq 1 $NMEM_ENS); do for FHR in $nfhrs; do ${NLN} "${COM_ATMOS_HISTORY_MEM_PREV}/${GPREFIX}atmf00${FHR}${ENKF_SUFFIX}.nc" \ "sfg_${PDY}${cyc}_fhr0${FHR}_${memchar}" + if [ $GSI_SOILANAL = "YES" ]; then + ${NLN} "${COM_ATMOS_HISTORY_MEM_PREV}/${GPREFIX}sfcf00${FHR}${ENKF_SUFFIX}.nc" \ + "bfg_${PDY}${cyc}_fhr0${FHR}_${memchar}" + fi if [ $cnvw_option = ".true." ]; then ${NLN} "${COM_ATMOS_HISTORY_MEM_PREV}/${GPREFIX}sfcf00${FHR}.nc" \ "sfgsfc_${PDY}${cyc}_fhr0${FHR}_${memchar}" @@ -224,6 +238,10 @@ for imem in $(seq 1 $NMEM_ENS); do "incr_${PDY}${cyc}_fhr0${FHR}_${memchar}" fi fi + if [ $GSI_SOILANAL = "YES" ]; then + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX}sfci00${FHR}.nc" \ + "sfcincr_${PDY}${cyc}_fhr0${FHR}_${memchar}" + fi done done @@ -238,10 +256,10 @@ for FHR in $nfhrs; do fi done -if [ $USE_CFP = "YES" ]; then +if [[ $USE_CFP = "YES" ]]; then chmod 755 $DATA/mp_untar.sh ncmd=$(cat $DATA/mp_untar.sh | wc -l) - if [ $ncmd -gt 0 ]; then + if [[ $ncmd -gt 0 ]]; then ncmd_max=$((ncmd < npe_node_max ? ncmd : npe_node_max)) APRUNCFP=$(eval echo $APRUNCFP) $APRUNCFP $DATA/mp_untar.sh @@ -398,8 +416,8 @@ cat stdout stderr > "${COM_ATMOS_ANALYSIS_STAT}/${ENKFSTAT}" ################################################################################ # Postprocessing -cd $pwd -[[ $mkdata = "YES" ]] && rm -rf $DATA +cd "$pwd" +[[ $mkdata = "YES" ]] && rm -rf "${DATA}" -exit $err +exit ${err} diff --git a/scripts/exgfs_atmos_wafs_blending_0p25.sh b/scripts/exgfs_atmos_wafs_blending_0p25.sh new file mode 100755 index 00000000000..293325185ea --- /dev/null +++ b/scripts/exgfs_atmos_wafs_blending_0p25.sh @@ -0,0 +1,298 @@ +#!/bin/ksh +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgfs_atmos_wafs_blending_0p25.sh (copied from exgfs_atmos_wafs_blending.sh) +# Script description: This script looks for US and UK WAFS Grib2 products at 1/4 deg, +# waits for a specified period of time, and then runs $USHgfs/wafs_blending_0p25.sh +# if both WAFS data are available. Otherwise, the job aborts with an error message +# +# Author: Y Mao Org: EMC Date: 2020-04-02 +# +# +# Script history log: +# 2020-04-02 Y Mao +# Oct 2021 - Remove jlogfile +# 2022-05-25 | Y Mao | Add ICAO new milestone Nov 2023 + +set -x +echo "JOB $job HAS BEGUN" +export SEND_AWC_US_ALERT=NO +export SEND_AWC_UK_ALERT=NO +export SEND_US_WAFS=NO +export SEND_UK_WAFS=NO + +cd $DATA +export SLEEP_LOOP_MAX=`expr $SLEEP_TIME / $SLEEP_INT` + +echo "start blending US and UK WAFS products at 1/4 degree for " $cyc " z cycle" +export ffhr=$SHOUR + +export ic_uk=1 + +while test $ffhr -le $EHOUR +do + +########################## +# look for US WAFS data +########################## + + export ic=1 + while [ $ic -le $SLEEP_LOOP_MAX ] + do + if [ -s ${COMINus}/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2 ] ; then + break + fi + if [ $ic -eq $SLEEP_LOOP_MAX ] ; then + echo "US WAFS GRIB2 file $COMINus/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2 not found after waiting over $SLEEP_TIME seconds" + echo "US WAFS GRIB2 file " $COMINus/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2 "not found after waiting ",$SLEEP_TIME, "exiting" + SEND_UK_WAFS=YES + break + else + ic=`expr $ic + 1` + sleep $SLEEP_INT + fi + done + +########################## +# look for UK WAFS data.
+########################## + + SLEEP_LOOP_MAX_UK=$SLEEP_LOOP_MAX + + # export ic=1 + while [ $ic_uk -le $SLEEP_LOOP_MAX_UK ] + do + # Three(3) unblended UK files for each cycle+fhour: icing, turb, cb + ukfiles=`ls $COMINuk/EGRR_WAFS_0p25_*_unblended_${PDY}_${cyc}z_t${ffhr}.grib2 | wc -l` + if [ $ukfiles -ge 3 ] ; then + break + fi + + if [ $ic_uk -eq $SLEEP_LOOP_MAX_UK ] ; then + echo "UK WAFS GRIB2 file $COMINuk/EGRR_WAFS_0p25_*_unblended_${PDY}_${cyc}z_t${ffhr}.grib2 not found" + echo "UK WAFS GRIB2 file " $COMINuk/EGRR_WAFS_0p25_*_unblended_${PDY}_${cyc}z_t${ffhr}.grib2 " not found" + export SEND_US_WAFS=YES + break + else + ic_uk=`expr $ic_uk + 1` + sleep $SLEEP_INT + fi + done + +########################## +# If both UK and US data are missing. +########################## + + if [ $SEND_UK_WAFS = 'YES' -a $SEND_US_WAFS = 'YES' ] ; then + SEND_US_WAFS=NO + SEND_UK_WAFS=NO + echo "BOTH UK and US data are missing, no blending for $PDY$cyc$ffhr" + export err=1; err_chk + continue + fi + +########################## +# Blending or unblended +########################## + + if [ $SEND_US_WAFS = 'YES' ] ; then + echo "turning back on dbn alert for unblended US WAFS product" + elif [ $SEND_UK_WAFS = 'YES' ] ; then + echo "turning back on dbn alert for unblended UK WAFS product" + # retrieve UK products + # Three(3) unblended UK files for each cycle+fhour: icing, turb, cb + cat $COMINuk/EGRR_WAFS_0p25_*_unblended_${PDY}_${cyc}z_t${ffhr}.grib2 > EGRR_WAFS_0p25_unblended_${PDY}_${cyc}z_t${ffhr}.grib2 + else # elif [ $SEND_US_WAFS = "NO" -a $SEND_UK_WAFS = "NO" ] ; then + # retrieve UK products + # Three(3) unblended UK files for each cycle+fhour: icing, turb, cb + cat $COMINuk/EGRR_WAFS_0p25_*_unblended_${PDY}_${cyc}z_t${ffhr}.grib2 > EGRR_WAFS_0p25_unblended_${PDY}_${cyc}z_t${ffhr}.grib2 + + # pick up US data + cp ${COMINus}/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2 . + + # run blending code + export pgm=wafs_blending_0p25.x + . prep_step + + startmsg + $EXECgfs/$pgm gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2 \ + EGRR_WAFS_0p25_unblended_${PDY}_${cyc}z_t${ffhr}.grib2 \ + 0p25_blended_${PDY}${cyc}f${ffhr}.grib2 > f${ffhr}.out + + err1=$? + if test "$err1" -ne 0 + then + echo "WAFS blending 0p25 program failed at " ${PDY}${cyc}F${ffhr} " turning back on dbn alert for unblended US WAFS product" + SEND_US_WAFS=YES + fi + fi + +########################## +# Data dissemination +########################## + + if [ $SEND_US_WAFS = "YES" ] ; then + + ############################################################################################## + # + # checking if any US WAFS product was sent due to no UK WAFS GRIB2 file or WAFS blending program failure + # (Alert once for all forecast hours) + # + if [ $SEND_AWC_US_ALERT = "NO" ] ; then + echo "WARNING! No UK WAFS GRIB2 0P25 file for WAFS blending. Send alert message to AWC ......" + make_NTC_file.pl NOXX10 KKCI $PDY$cyc NONE $FIXgfs/wafs_blending_0p25_admin_msg $PCOM/wifs_0p25_admin_msg + make_NTC_file.pl NOXX10 KWBC $PDY$cyc NONE $FIXgfs/wafs_blending_0p25_admin_msg $PCOM/iscs_0p25_admin_msg + if [ $SENDDBN_NTC = "YES" ] ; then + $DBNROOT/bin/dbn_alert NTC_LOW WAFS $job $PCOM/wifs_0p25_admin_msg + $DBNROOT/bin/dbn_alert NTC_LOW WAFS $job $PCOM/iscs_0p25_admin_msg + fi + + if [ $envir != prod ]; then + export maillist='nco.spa@noaa.gov' + fi + export maillist=${maillist:-'nco.spa@noaa.gov,ncep.sos@noaa.gov'} + export subject="WARNING!
No UK WAFS GRIB2 0P25 file for WAFS blending, $PDY t${cyc}z $job" + echo "*************************************************************" > mailmsg + echo "*** WARNING! No UK WAFS GRIB2 0P25 file for WAFS blending ***" >> mailmsg + echo "*************************************************************" >> mailmsg + echo >> mailmsg + echo "Send alert message to AWC ...... " >> mailmsg + echo >> mailmsg + cat mailmsg > $COMOUT/${RUN}.t${cyc}z.wafs_blend_0p25_usonly.emailbody + cat $COMOUT/${RUN}.t${cyc}z.wafs_blend_0p25_usonly.emailbody | mail.py -s "$subject" $maillist -v + + export SEND_AWC_US_ALERT=YES + fi + ############################################################################################## + # + # Distribute US WAFS unblended data to NCEP FTP Server (WOC) and TOC + # + echo "alerting the unblended US WAFS products - $COMINus/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2 " + echo "and $COMINus/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2.idx " + + if [ $SENDDBN = "YES" ] ; then + $DBNROOT/bin/dbn_alert MODEL GFS_WAFS_0P25_UBL_GB2 $job $COMINus/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2 + $DBNROOT/bin/dbn_alert MODEL GFS_WAFS_0P25_UBL_GB2_WIDX $job $COMINus/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2.idx + fi + +# if [ $SENDDBN_NTC = "YES" ] ; then +# $DBNROOT/bin/dbn_alert NTC_LOW $NET $job $COMOUT/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2 +# fi + + + export SEND_US_WAFS=NO + + elif [ $SEND_UK_WAFS = "YES" ] ; then + ############################################################################################## + # + # checking if any UK WAFS product was sent due to no US WAFS GRIB2 file + # (Alert once for all forecast hours) + # + if [ $SEND_AWC_UK_ALERT = "NO" ] ; then + echo "WARNING: No US WAFS GRIB2 0P25 file for WAFS blending. Send alert message to AWC ......" + make_NTC_file.pl NOXX10 KKCI $PDY$cyc NONE $FIXgfs/wafs_blending_0p25_admin_msg $PCOM/wifs_0p25_admin_msg + make_NTC_file.pl NOXX10 KWBC $PDY$cyc NONE $FIXgfs/wafs_blending_0p25_admin_msg $PCOM/iscs_0p25_admin_msg + if [ $SENDDBN_NTC = "YES" ] ; then + $DBNROOT/bin/dbn_alert NTC_LOW WAFS $job $PCOM/wifs_0p25_admin_msg + $DBNROOT/bin/dbn_alert NTC_LOW WAFS $job $PCOM/iscs_0p25_admin_msg + fi + + if [ $envir != prod ]; then + export maillist='nco.spa@noaa.gov' + fi + export maillist=${maillist:-'nco.spa@noaa.gov,ncep.sos@noaa.gov'} + export subject="WARNING! No US WAFS GRIB2 0P25 file for WAFS blending, $PDY t${cyc}z $job" + echo "*************************************************************" > mailmsg + echo "*** WARNING! No US WAFS GRIB2 0P25 file for WAFS blending ***" >> mailmsg + echo "*************************************************************" >> mailmsg + echo >> mailmsg + echo "Send alert message to AWC ......
" >> mailmsg + echo >> mailmsg + cat mailmsg > $COMOUT/${RUN}.t${cyc}z.wafs_blend_0p25_ukonly.emailbody + cat $COMOUT/${RUN}.t${cyc}z.wafs_blend_0p25_ukonly.emailbody | mail.py -s "$subject" $maillist -v + + export SEND_AWC_UK_ALERT=YES + fi + ############################################################################################## + # + # Distribute UK WAFS unblend Data to NCEP FTP Server (WOC) and TOC + # + echo "altering the unblended UK WAFS products - EGRR_WAFS_0p25_unblended_${PDY}_${cyc}z_t${ffhr}.grib2" + + if [ $SENDDBN = "YES" ] ; then + $DBNROOT/bin/dbn_alert MODEL GFS_WAFS_UKMET_0P25_UBL_GB2 $job EGRR_WAFS_0p25_unblended_${PDY}_${cyc}z_t${ffhr}.grib2 + fi + +# if [ $SENDDBN_NTC = "YES" ] ; then +# $DBNROOT/bin/dbn_alert NTC_LOW $NET $job EGRR_WAFS_0p25_unblended_${PDY}_${cyc}z_t${ffhr}.grib2 +# fi + export SEND_UK_WAFS=NO + + + else + ############################################################################################## + # + # TOCGRIB2 Processing WAFS Blending GRIB2 (Icing, CB, GTG) + + # As in August 2020, no WMO header is needed for WAFS data at 1/4 deg + ## . prep_step + ## export pgm=$TOCGRIB2 + ## startmsg + + ## export FORT11=0p25_blended_${PDY}${cyc}f${ffhr}.grib2 + ## export FORT31=" " + ## export FORT51=grib2.t${cyc}z.WAFS_0p25_blended_f${ffhr} + + ## $TOCGRIB2 < $FIXgfs/grib2_blended_wafs_wifs_f${ffhr}.0p25 >> $pgmout 2> errfile + + ## err=$?;export err ;err_chk + ## echo " error from tocgrib=",$err + + ############################################################################################## + # + # Distribute US WAFS unblend Data to NCEP FTP Server (WOC) and TOC + # + if [ $SENDCOM = YES ]; then + cp 0p25_blended_${PDY}${cyc}f${ffhr}.grib2 $COMOUT/WAFS_0p25_blended_${PDY}${cyc}f${ffhr}.grib2 + ## cp grib2.t${cyc}z.WAFS_0p25_blended_f${ffhr} $PCOM/grib2.t${cyc}z.WAFS_0p25_blended_f${ffhr} + fi + + if [ $SENDDBN_NTC = "YES" ] ; then + # Distribute Data to NCEP FTP Server (WOC) and TOC + echo "No WMO header yet" + ## $DBNROOT/bin/dbn_alert NTC_LOW $NET $job $PCOM/grib2.t${cyc}z.WAFS_0p25_blended_f${ffhr} + fi + + if [ $SENDDBN = "YES" ] ; then + $DBNROOT/bin/dbn_alert MODEL GFS_WAFS_0P25_BL_GB2 $job $COMOUT/WAFS_0p25_blended_${PDY}${cyc}f${ffhr}.grib2 + fi + fi + +########################## +# Next loop +########################## + + echo "$PDY$cyc$ffhr" > $COMOUT/${RUN}.t${cyc}z.control.wafsblending_0p25 + + if [ $FHOUT_GFS -eq 3 ] ; then + FHINC=03 + else + if [ $ffhr -lt 24 ] ; then + FHINC=01 + else + FHINC=03 + fi + fi + + ffhr=`expr $ffhr + $FHINC` + if test $ffhr -lt 10 + then + ffhr=0${ffhr} + fi + +done +################################################################################ + +exit 0 +# diff --git a/scripts/exgfs_atmos_wafs_gcip.sh b/scripts/exgfs_atmos_wafs_gcip.sh new file mode 100755 index 00000000000..ad91c474204 --- /dev/null +++ b/scripts/exgfs_atmos_wafs_gcip.sh @@ -0,0 +1,242 @@ +#!/bin/ksh +###################################################################### +# UTILITY SCRIPT NAME : exgfs_atmos_wafs_gcip.sh +# DATE WRITTEN : 01/28/2015 +# +# Abstract: This utility script produces the WAFS GCIP. +# +# GCIP runs f00 f03 for each cycle, 4 times/day, +# to make the output valid every 3 hours +# +# History: 01/28/2015 +# - GFS post master file as first guess +# /com/prod/gfs.YYYYMMDD +# - Nesdis composite global satellite data +# /dcom (ftp?) 
+# - Metar/ships/lightning/pireps +# ksh /nwprod/ush/dumpjb YYYYMMDDHH hours output >/dev/null +# - Radar data over CONUS +# /com/hourly/prod/radar.YYYYMMDD/refd3d.tHHz.grbf00 +# - output of current icing potential +##################################################################### +echo "-----------------------------------------------------" +echo "JGFS_ATMOS_WAFS_GCIP at 00Z/06Z/12Z/18Z GFS postprocessing" +echo "-----------------------------------------------------" +echo "History: 2015 - First implementation of this new script." +echo "Oct 2021 - Remove jlogfile" +echo " " +##################################################################### + +set -xa + +# Set up working dir for parallel runs based on ffhr +ffhr=$1 +DATA=$DATA/$ffhr +mkdir -p $DATA +cd $DATA +# Overwrite TMPDIR for dumpjb +export TMPDIR=$DATA + +SLEEP_LOOP_MAX=`expr $SLEEP_TIME / $SLEEP_INT` + +configFile=gcip.config + +echo 'before preparing data' `date` + +# valid time. No worry, it won't cross over to another date +vhour=$(( $ffhr + $cyc )) +vhour="$(printf "%02d" $(( 10#$vhour )) )" + +######################################################## +# Preparing data + +if [ $RUN = "gfs" ] ; then + + # model data + masterFile=$COMINgfs/gfs.t${cyc}z.master.grb2f$ffhr + + # check the availability of model file + icnt=1 + while [ $icnt -lt $SLEEP_LOOP_MAX ] ; do + if [ -s $masterFile ] ; then + break + fi + sleep $SLEEP_INT + icnt=$((icnt + 1)) + if [ $icnt -ge $SLEEP_LOOP_MAX ] ; then + msg="ABORTING after $SLEEP_TIME seconds of waiting for gfs master file!" + err_exit $msg + fi + done + + cpreq $PARMgfs/wafs_gcip_gfs.cfg $configFile + + modelFile=modelfile.grb +# ln -sf $masterFile $modelFile + $WGRIB2 $masterFile | egrep ":HGT:|:VVEL:|:CLMR:|:TMP:|:SPFH:|:RWMR:|:SNMR:|:GRLE:|:ICMR:|:RH:" | egrep "00 mb:|25 mb:|50 mb:|75 mb:|:HGT:surface" | $WGRIB2 -i $masterFile -grib $modelFile + + # metar / ships / lightning / pireps + # dumped data files' suffix is ".ibm" + obsfiles="metar ships ltngsr pirep" + for obsfile in $obsfiles ; do +# ksh $USHobsproc_dump/dumpjb ${PDY}${vhour} 1.5 $obsfile >/dev/null + ksh $DUMPJB ${PDY}${vhour} 1.5 $obsfile + done + metarFile=metar.ibm + shipFile=ships.ibm + lightningFile=ltngsr.ibm + pirepFile=pirep.ibm + + satFiles="" + channels="VIS SIR LIR SSR" + # If one channel is missing, satFiles will be empty + for channel in $channels ; do + satFile=GLOBCOMP$channel.${PDY}${vhour} + if [[ $COMINsat == *ftp:* ]] ; then + curl -O $COMINsat/$satFile + else + + # check the availability of satellite data file + icnt=1 + while [ $icnt -lt $SLEEP_LOOP_MAX ] ; do + if [ -s $COMINsat/$satFile ] ; then + break + fi + sleep $SLEEP_INT + icnt=$((icnt + 1)) + if [ $icnt -ge $SLEEP_LOOP_MAX ] ; then + msg="GCIP at ${vhour}z ABORTING after $SLEEP_TIME seconds of waiting for satellite $channel file!" + echo "$msg" + rc=1 + echo $msg >> $COMOUT/${RUN}.gcip.log + + if [ $envir != prod ]; then + export maillist='nco.spa@noaa.gov' + fi + export maillist=${maillist:-'nco.spa@noaa.gov,ncep.sos@noaa.gov'} + + export subject="Missing GLOBCOMPVIS Satellite Data for $PDY t${cyc}z $job" + echo "*************************************************************" > mailmsg + echo "*** WARNING !!
COULD NOT FIND GLOBCOMPVIS Satellite Data *** " >> mailmsg + echo "*************************************************************" >> mailmsg + echo >> mailmsg + echo "One or more GLOBCOMPVIS Satellite Data files are missing, including " >> mailmsg + echo " $COMINsat/$satFile " >> mailmsg + echo >> mailmsg + echo "$job will gracefully exit" >> mailmsg + cat mailmsg > $COMOUT/${RUN}.t${cyc}z.gcip.emailbody + cat $COMOUT/${RUN}.t${cyc}z.gcip.emailbody | mail.py -s "$subject" $maillist -v + + exit $rc + fi + done + + cp $COMINsat/$satFile . + fi + if [[ -s $satFile ]] ; then + satFiles="$satFiles $satFile" + else + satFiles="" + break + fi + done + + # radar data + sourceRadar=$COMINradar/refd3d.t${vhour}z.grb2f00 + + # check the availability of radar data file + icnt=1 + while [ $icnt -lt $SLEEP_LOOP_MAX ] ; do + if [ -s $sourceRadar ] ; then + break + fi + sleep $SLEEP_INT + icnt=$((icnt + 1)) + if [ $icnt -ge $SLEEP_LOOP_MAX ] ; then + echo "WARNING: radar data is not available after $SLEEP_TIME seconds of waiting!" + fi + done + + radarFile=radarFile.grb + if [ -s $sourceRadar ] ; then + cp $sourceRadar $radarFile + fi + + fi # RUN model name + +######################################################## +# Composite gcip command options + +outputfile=gfs.t${vhour}z.gcip.f00.grib2 + +cmdoptions="-t ${PDY}${vhour} -c $configFile -model $modelFile" +if [[ -s $metarFile ]] ; then + cmdoptions="$cmdoptions -metar $metarFile" +else + err_exit "There are no METAR observations." +fi +if [[ -s $shipFile ]] ; then + cmdoptions="$cmdoptions -ship $shipFile" +fi +# empty if a channel data is missing +if [[ -n $satFiles ]] ; then + cmdoptions="$cmdoptions -sat $satFiles" +else + err_exit "Satellite data are not available or complete." +fi +if [[ -s $lightningFile ]] ; then + cmdoptions="$cmdoptions -lightning $lightningFile" +fi +if [[ -s $pirepFile ]] ; then + cmdoptions="$cmdoptions -pirep $pirepFile" +fi +if [[ -s $radarFile ]] ; then + cmdoptions="$cmdoptions -radar $radarFile" +fi +cmdoptions="$cmdoptions -o $outputfile" + +####################################################### +# Run GCIP + +echo 'after preparing data' `date` + +export pgm=wafs_gcip.x + +cpreq $FIXgfs/gcip_near_ir_refl.table near_ir_refl.table + +startmsg +$EXECgfs/$pgm >> $pgmout $cmdoptions 2> errfile & +wait +export err=$?; err_chk + + +if [[ -s $outputfile ]] ; then + ############################## + # Post Files to COM + ############################## + if [ $SENDCOM = "YES" ] ; then + cp $outputfile $COMOUT/$outputfile + if [ $SENDDBN = "YES" ] ; then + # $DBNROOT/bin/dbn_alert GFS_WAFS GCIP $job $COMOUT/$outputfile +#alert removed in v15.0 $DBNROOT/bin/dbn_alert MODEL GFS_WAFS_GCIP $job $COMOUT/$outputfile + : + fi + fi +else + err_exit "Output $outputfile was not generated" +fi + + +################################################################################ +# GOOD RUN +set +x +echo "**************JOB EXGFS_ATMOS_WAFS_GCIP.SH COMPLETED NORMALLY ON THE IBM" +echo "**************JOB EXGFS_ATMOS_WAFS_GCIP.SH COMPLETED NORMALLY ON THE IBM" +echo "**************JOB EXGFS_ATMOS_WAFS_GCIP.SH COMPLETED NORMALLY ON THE IBM" +set -x +################################################################################ + +exit 0 + +############## END OF SCRIPT ####################### + diff --git a/scripts/exgfs_atmos_wafs_grib.sh b/scripts/exgfs_atmos_wafs_grib.sh new file mode 100755 index 00000000000..e81f0e99dad --- /dev/null +++ b/scripts/exgfs_atmos_wafs_grib.sh @@ -0,0 +1,146 @@ +#!/bin/sh
+###################################################################### +# UTILITY SCRIPT NAME : exgfs_atmos_wafs_grib.sh +# DATE WRITTEN : 10/04/2004 +# +# Abstract: This utility script produces the WAFS GRIB +# +# Input: 1 argument is passed to this script. +# 1st argument - Forecast Hour - format of 2I +# +# Logic: If we are processing fcsthrs 12-30, we have the +# added variable of the a or b in the process accordingly. +# The other fcsthrs, the a or b is dropped. +# +##################################################################### +echo "------------------------------------------------" +echo "JWAFS_00/06/12/18 GFS postprocessing" +echo "------------------------------------------------" +echo "History: OCT 2004 - First implementation of this new script." +echo " Aug 2015 - Modified for Phase II" +echo " Dec 2015 - Modified for input model data in Grib2" +echo " Oct 2021 - Remove jlogfile" +echo " " +##################################################################### +set +x +fcsthrs_list="$1" +num=$# + +if test "$num" -ge 1 +then + echo " Appropriate number of arguments were passed" + set -x + export DBNALERT_TYPE=${DBNALERT_TYPE:-GRIB} +# export job=${job:-interactive} +else + echo "" + echo "Usage: exgfs_atmos_wafs_grib.sh \$fcsthrs " + echo "" + exit 16 +fi + +cd $DATA + +set -x + +# To fix bugzilla 628 ( removing 'j' ahead of $job ) +export jobsuffix=gfs_atmos_wafs_f${fcsthrs}_$cyc + +############################################### +# Wait for the availability of the pgrib file +############################################### +# file name and forecast hour of GFS model data in Grib2 are 3 digits +export fcsthrs000="$(printf "%03d" $(( 10#$fcsthrs )) )" +icnt=1 +while [ $icnt -lt 1000 ] +do +# if [ -s $COMIN/${RUN}.${cycle}.pgrbf$fcsthrs ] + if [ -s $COMIN/${RUN}.${cycle}.pgrb2.1p00.f$fcsthrs000 ] + then + break + fi + + sleep 10 + icnt=$((icnt + 1)) + if [ $icnt -ge 180 ] + then + msg="ABORTING after 30 min of waiting for the pgrib file!" + err_exit $msg + fi +done + +######################################## +echo "HAS BEGUN!" +######################################## + +echo " ------------------------------------------" +echo " BEGIN MAKING GFS WAFS PRODUCTS" +echo " ------------------------------------------" + +#################################################### +# +# GFS WAFS PRODUCTS MUST RUN IN CERTAIN ORDER +# BY REQUIREMENT FROM FAA. +# PLEASE DO NOT ALTER ORDER OF PROCESSING WAFS +# PRODUCTS WITHOUT CONSULTING WITH MR. BRENT GORDON. +# +#################################################### + +set +x +echo " " +echo "#####################################" +echo " Process GRIB WAFS PRODUCTS (mkwafs)" +echo " FORECAST HOURS 00 - 72." +echo "#####################################" +echo " " +set -x + +if test $fcsthrs -eq 0 +then + echo " " +fi + +# If we are processing fcsthrs 12-30, we have the +# added variable of the a or b in the process. +# The other fcsthrs, the a or b is dropped.
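Before the a/b dispatch below, note the hour-padding idiom used above, which recurs in these new WAFS scripts (gcip uses the same idiom with two digits): the 10# prefix forces base-10 arithmetic so that zero-padded hours such as "08" and "09" are not rejected as invalid octal numbers. A minimal, runnable illustration:

for fcsthrs in 6 08 36 120; do
  # 10# forces base-10; printf re-pads the value to three digits
  fcsthrs000="$(printf "%03d" $(( 10#$fcsthrs )) )"
  echo "f${fcsthrs} -> f${fcsthrs000}"
done
# prints: f6 -> f006, f08 -> f008, f36 -> f036, f120 -> f120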
+ +if test $fcsthrs -ge 12 -a $fcsthrs -le 24 +then + sh $USHgfs/wafs_mkgbl.sh ${fcsthrs} a +fi + +if test $fcsthrs -eq 30 +then + sh $USHgfs/wafs_mkgbl.sh ${fcsthrs} a + for fcsthrs in 12 18 24 30 + do + sh $USHgfs/wafs_mkgbl.sh ${fcsthrs} b + done + sh $USHgfs/wafs_mkgbl.sh 00 x + sh $USHgfs/wafs_mkgbl.sh 06 x +fi + +if test $fcsthrs -gt 30 -a $fcsthrs -le 48 +then + sh $USHgfs/wafs_mkgbl.sh ${fcsthrs} x +fi + +if test $fcsthrs -eq 60 -o $fcsthrs -eq 72 +then + sh $USHgfs/wafs_mkgbl.sh ${fcsthrs} x +fi + +################################################################################ +# GOOD RUN +set +x +echo "**************JOB EXGFS_ATMOS_WAFS_GRIB.SH COMPLETED NORMALLY ON THE IBM" +echo "**************JOB EXGFS_ATMOS_WAFS_GRIB.SH COMPLETED NORMALLY ON THE IBM" +echo "**************JOB EXGFS_ATMOS_WAFS_GRIB.SH COMPLETED NORMALLY ON THE IBM" +set -x +################################################################################ + +echo "HAS COMPLETED NORMALLY!" + +exit 0 + +############## END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_wafs_grib2.sh b/scripts/exgfs_atmos_wafs_grib2.sh new file mode 100755 index 00000000000..4631a10d8c5 --- /dev/null +++ b/scripts/exgfs_atmos_wafs_grib2.sh @@ -0,0 +1,227 @@ +#!/bin/sh +###################################################################### +# UTILITY SCRIPT NAME : exgfs_atmos_wafs_grib2.sh +# DATE WRITTEN : 07/15/2009 +# +# Abstract: This utility script produces the WAFS GRIB2. The output +# GRIB files are posted on NCEP ftp server and the grib2 files +# are pushed via dbnet to TOC to WAFS (ICSC). +# This is a joint project of WAFC London and WAFC Washington. +# +# We are processing WAFS grib2 for fcsthrs from 06 - 36 +# with 3-hour time increment. +# +# History: 08/20/2014 +# - ingest master file in grib2 (or grib1 if grib2 fails) +# - output of icng tcld cat cb are in grib2 +# 02/21/2020 +# - Prepare unblended icing severity and GTG turbulence +# for blending at 0.25 degree +# 02/22/2022 +# - Add grib2 data requested by FAA +# - Stop generating grib1 data for WAFS +##################################################################### +echo "-----------------------------------------------------" +echo "JGFS_ATMOS_WAFS_GRIB2 at 00Z/06Z/12Z/18Z GFS postprocessing" +echo "-----------------------------------------------------" +echo "History: AUGUST 2009 - First implementation of this new script."
+echo "Oct 2021 - Remove jlogfile" +echo "Feb 2022 - Add FAA data, stop grib1 data" +echo " " +##################################################################### + +set -x + +fcsthrs=$1 + +DATA=$DATA/$fcsthrs +mkdir -p $DATA +cd $DATA + +########################################################## +# Wait for the availability of the gfs master pgrib file +########################################################## +# file name and forecast hour of GFS model data in Grib2 are 3 digits +export fcsthrs000="$(printf "%03d" $(( 10#$fcsthrs )) )" + +# 2D data +master2=$COMIN/${RUN}.${cycle}.master.grb2f${fcsthrs000} +master2i=$COMIN/${RUN}.${cycle}.master.grb2if${fcsthrs000} +# 3D data +wafs2=$COMIN/${RUN}.${cycle}.wafs.grb2f${fcsthrs000} +wafs2i=$COMIN/${RUN}.${cycle}.wafs.grb2f${fcsthrs000}.idx +# 3D data (on ICAO standard level) +icao2=$COMIN/${RUN}.${cycle}.wafs_icao.grb2f${fcsthrs000} +icao2i=$COMIN/${RUN}.${cycle}.wafs_icao.grb2f${fcsthrs000}.idx + +icnt=1 +while [ $icnt -lt 1000 ] +do + if [[ -s $master2i && -s $wafs2i ]] ; then + break + fi + + sleep 10 + icnt=$((icnt + 1)) + if [ $icnt -ge 180 ] ; then + msg="ABORTING after 30 min of waiting for the gfs master and wafs file!" + err_exit $msg + fi +done + +######################################## +echo "HAS BEGUN!" +######################################## + +echo " ------------------------------------------" +echo " BEGIN MAKING GFS WAFS GRIB2 PRODUCTS" +echo " ------------------------------------------" + +set +x +echo " " +echo "#####################################" +echo " Process GRIB WAFS PRODUCTS " +echo " FORECAST HOURS 06 - 36." +echo "#####################################" +echo " " +set -x + + +if [ $fcsthrs -le 36 -a $fcsthrs -gt 0 ] ; then + wafs_timewindow=yes +else + wafs_timewindow=no +fi + +#--------------------------- +# 1) Grib2 data for FAA +#--------------------------- +$WGRIB2 $master2 | grep -F -f $FIXgfs/grib2_gfs_awf_master.list | $WGRIB2 -i $master2 -grib tmpfile_gfsf${fcsthrs} +# F006 master file has two records of 0-6 hour APCP and ACPCP each, keep only one +# FAA APCP ACPCP: included every 6 forecast hour (0, 48], every 12 forest hour [48, 72] (controlled by $FIXgfs/grib2_gfs_awf_master3d.list) +if [ $fcsthrs -eq 6 ] ; then + $WGRIB2 tmpfile_gfsf${fcsthrs} -not "(APCP|ACPCP)" -grib tmp.grb2 + $WGRIB2 tmpfile_gfsf${fcsthrs} -match APCP -append -grib tmp.grb2 -quit + $WGRIB2 tmpfile_gfsf${fcsthrs} -match ACPCP -append -grib tmp.grb2 -quit + mv tmp.grb2 tmpfile_gfsf${fcsthrs} +fi +# U V will have the same grid message number by using -ncep_uv. +# U V will have the different grid message number without -ncep_uv. +$WGRIB2 tmpfile_gfsf${fcsthrs} \ + -set master_table 6 \ + -new_grid_winds earth -set_grib_type jpeg \ + -new_grid_interpolation bilinear -if ":(UGRD|VGRD):max wind" -new_grid_interpolation neighbor -fi \ + -new_grid latlon 0:288:1.25 90:145:-1.25 gfs.t${cyc}z.awf_grb45f${fcsthrs}.grib2 +$WGRIB2 -s gfs.t${cyc}z.awf_grb45f${fcsthrs}.grib2 > gfs.t${cyc}z.awf_grb45f${fcsthrs}.grib2.idx + +# For FAA, add WMO header. The header is different from WAFS +export pgm=$TOCGRIB2 +. 
prep_step +startmsg +export FORT11=gfs.t${cyc}z.awf_grb45f${fcsthrs}.grib2 +export FORT31=" " +export FORT51=grib2.t${cyc}z.awf_grbf${fcsthrs}.45 +$TOCGRIB2 < $FIXgfs/grib2_gfs_awff${fcsthrs}.45 >> $pgmout 2> errfile +err=$?;export err ;err_chk +echo " error from tocgrib=",$err + +if [ $wafs_timewindow = 'yes' ] ; then +#--------------------------- +# 2) traditional WAFS fields +#--------------------------- + # 3D data from $wafs2, on exact model pressure levels + $WGRIB2 $wafs2 | grep -F -f $FIXgfs/grib2_gfs_wafs_wafsmaster.list | $WGRIB2 -i $wafs2 -grib tmpfile_gfsf${fcsthrs} + # 2D data from $master2 + tail -5 $FIXgfs/grib2_gfs_wafs_wafsmaster.list > grib2_gfs_wafs_wafsmaster.list.2D + $WGRIB2 $master2 | grep -F -f grib2_gfs_wafs_wafsmaster.list.2D | $WGRIB2 -i $master2 -grib tmpfile_gfsf${fcsthrs}.2D + # Complete list of WAFS data + cat tmpfile_gfsf${fcsthrs}.2D >> tmpfile_gfsf${fcsthrs} + # WMO header + cp $FIXgfs/grib2_gfs_wafsf${fcsthrs}.45 wafs_wmo_header45 + # U V will have the same grid message number by using -ncep_uv. + # U V will have the different grid message number without -ncep_uv. + $WGRIB2 tmpfile_gfsf${fcsthrs} \ + -set master_table 6 \ + -new_grid_winds earth -set_grib_type jpeg \ + -new_grid_interpolation bilinear -if ":(UGRD|VGRD):max wind" -new_grid_interpolation neighbor -fi \ + -new_grid latlon 0:288:1.25 90:145:-1.25 gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2 + $WGRIB2 -s gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2 > gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2.idx + + # For WAFS, add WMO header. Processing WAFS GRIB2 grid 45 for ISCS and WIFS + export pgm=$TOCGRIB2 + . prep_step + startmsg + export FORT11=gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2 + export FORT31=" " + export FORT51=grib2.t${cyc}z.wafs_grbf${fcsthrs}.45 + $TOCGRIB2 < wafs_wmo_header45 >> $pgmout 2> errfile + err=$?;export err ;err_chk + echo " error from tocgrib=",$err + +fi # wafs_timewindow + +if [ $SENDCOM = "YES" ] ; then + + ############################## + # Post Files to COM + ############################## + + # FAA data + mv gfs.t${cyc}z.awf_grb45f${fcsthrs}.grib2 $COMOUT/gfs.t${cyc}z.awf_grb45f${fcsthrs}.grib2 + mv gfs.t${cyc}z.awf_grb45f${fcsthrs}.grib2.idx $COMOUT/gfs.t${cyc}z.awf_grb45f${fcsthrs}.grib2.idx + + # WAFS data + if [ $wafs_timewindow = 'yes' ] ; then + mv gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2 $COMOUT/gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2 + mv gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2.idx $COMOUT/gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2.idx + fi + + ############################## + # Post Files to PCOM + ############################## + + mv grib2.t${cyc}z.awf_grbf${fcsthrs}.45 $PCOM/grib2.t${cyc}z.awf_grbf${fcsthrs}.45 + + if [ $wafs_timewindow = 'yes' ] ; then + mv grib2.t${cyc}z.wafs_grbf${fcsthrs}.45 $PCOM/grib2.t${cyc}z.wafs_grbf${fcsthrs}.45 + fi +fi + +###################### +# Distribute Data +###################### + +if [ $SENDDBN = "YES" ] ; then + +# +# Distribute Data to WOC +# + if [ $wafs_timewindow = 'yes' ] ; then + $DBNROOT/bin/dbn_alert MODEL GFS_WAFS_1P25_GB2 $job $COMOUT/gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2 +# +# Distribute Data to TOC TO WIFS FTP SERVER (AWC) +# + $DBNROOT/bin/dbn_alert NTC_LOW $NET $job $PCOM/grib2.t${cyc}z.wafs_grbf${fcsthrs}.45 + fi +# +# Distribute data to FAA +# + $DBNROOT/bin/dbn_alert NTC_LOW $NET $job $PCOM/grib2.t${cyc}z.awf_grbf${fcsthrs}.45 + + +fi + +################################################################################ +# GOOD RUN +set +x +echo "**************JOB EXGFS_ATMOS_WAFS_GRIB2.SH COMPLETED NORMALLY ON 
THE IBM" +echo "**************JOB EXGFS_ATMOS_WAFS_GRIB2.SH COMPLETED NORMALLY ON THE IBM" +echo "**************JOB EXGFS_ATMOS_WAFS_GRIB2.SH COMPLETED NORMALLY ON THE IBM" +set -x +################################################################################ + +echo "HAS COMPLETED NORMALLY!" + +exit 0 + +############## END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_wafs_grib2_0p25.sh b/scripts/exgfs_atmos_wafs_grib2_0p25.sh new file mode 100755 index 00000000000..ec53966430e --- /dev/null +++ b/scripts/exgfs_atmos_wafs_grib2_0p25.sh @@ -0,0 +1,200 @@ +#!/bin/sh +###################################################################### +# UTILITY SCRIPT NAME : exgfs_atmos_wafs_grib2_0p25.sh +# DATE WRITTEN : 03/20/2020 +# +# Abstract: This utility script produces the WAFS GRIB2 at 0.25 degree. +# The output GRIB files are posted on NCEP ftp server and the +# grib2 files are pushed via dbnet to TOC to WAFS (ICSC). +# This is a joint project of WAFC London and WAFC Washington. +# +# We are processing WAFS grib2 for ffhr: +# hourly: 006 - 024 +# 3 hour: 027 - 048 +# 6 hour: 054 - 120 (for U/V/T/RH, not for turbulence/icing/CB) +# +# History: +##################################################################### +echo "-----------------------------------------------------" +echo "JGFS_ATMOS_WAFS_GRIB2_0P25 at 00Z/06Z/12Z/18Z GFS postprocessing" +echo "-----------------------------------------------------" +echo "History: MARCH 2020 - First implementation of this new script." +echo "Oct 2021 - Remove jlogfile" +echo "Aug 2022 - ffhr expanded from 36 to 120" +echo " " +##################################################################### + +cd $DATA + +set -x + + +ffhr=$1 +export ffhr="$(printf "%03d" $(( 10#$ffhr )) )" +export ffhr2="$(printf "%02d" $(( 10#$ffhr )) )" + +DATA=$DATA/$ffhr +mkdir -p $DATA +cd $DATA + + +if [ $ffhr -le 48 ] ; then + hazard_timewindow=yes +else + hazard_timewindow=no +fi + + +########################################################## +# Wait for the availability of the gfs WAFS file +########################################################## + +# 3D data (on new ICAO model pressure levels) and 2D data (CB) +wafs2=$COMIN/${RUN}.${cycle}.wafs.grb2f${ffhr} +wafs2i=$COMIN/${RUN}.${cycle}.wafs.grb2f${ffhr}.idx + +# 2D data from master file (U/V/H on max wind level, T/H at tropopause) +master2=$COMIN/${RUN}.${cycle}.master.grb2f${ffhr} + +# 3D data (on standard atmospheric pressure levels) +# Up to fhour=48 +# Will be removed in GFS.v17 +icao2=$COMIN/${RUN}.${cycle}.wafs_icao.grb2f${ffhr} + +icnt=1 +while [ $icnt -lt 1000 ] +do + if [[ -s $wafs2i ]] ; then + break + fi + + sleep 10 + icnt=$((icnt + 1)) + if [ $icnt -ge 180 ] ; then + msg="ABORTING after 30 min of waiting for the gfs wafs file!" + err_exit $msg + fi +done + + +######################################## +echo "HAS BEGUN!" 
+######################################## + +echo " ------------------------------------------" +echo " BEGIN MAKING GFS WAFS GRIB2 0.25 DEG PRODUCTS" +echo " ------------------------------------------" + +set +x +echo " " +echo "#####################################" +echo " Process GRIB2 WAFS 0.25 DEG PRODUCTS " +echo "#####################################" +echo " " +set -x + +opt1=' -set_grib_type same -new_grid_winds earth ' +opt21=' -new_grid_interpolation bilinear -if ' +opt22="(:ICESEV|parm=37):" +opt23=' -new_grid_interpolation neighbor -fi ' +opt24=' -set_bitmap 1 -set_grib_max_bits 16 ' +opt25=":(UGRD|VGRD):max wind" +newgrid="latlon 0:1440:0.25 90:721:-0.25" + +# WAFS 3D data +$WGRIB2 $wafs2 $opt1 $opt21 $opt22 $opt23 $opt24 -new_grid $newgrid tmp_wafs_0p25.grb2 +# Master 2D data +$WGRIB2 $master2 | grep -F -f $FIXgfs/grib2_0p25_gfs_master2d.list \ + | $WGRIB2 -i $master2 -set master_table 25 -grib tmp_master.grb2 +$WGRIB2 tmp_master.grb2 $opt1 $opt21 ":(UGRD|VGRD):max wind" $opt23 $opt24 -new_grid $newgrid tmp_master_0p25.grb2 + +#--------------------------- +# Product 1: WAFS u/v/t/rh gfs.tHHz.wafs_0p25.fFFF.grib2 +#--------------------------- +$WGRIB2 tmp_wafs_0p25.grb2 | egrep "UGRD|VGRD|TMP|HGT|RH" \ + | $WGRIB2 -i tmp_wafs_0p25.grb2 -set master_table 25 -grib tmp.gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2 +cat tmp_master_0p25.grb2 >> tmp.gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2 +# Convert template 5 to 5.40 +#$WGRIB2 tmp.gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2 -set_grib_type jpeg -grib_out gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2 +mv tmp.gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2 gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2 +$WGRIB2 -s gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2 > gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2.idx + +if [ $hazard_timewindow = 'yes' ] ; then +#--------------------------- +# Product 2: For AWC and Delta airline: EDPARM CAT MWT ICESEV CB gfs.tHHz.awf_0p25.fFFF.grib2 +#--------------------------- + criteria1=":EDPARM:|:ICESEV:|parm=37:" + criteria2=":CATEDR:|:MWTURB:" + criteria3=":CBHE:|:ICAHT:" + $WGRIB2 tmp_wafs_0p25.grb2 | egrep "${criteria1}|$criteria2|$criteria3" \ + | $WGRIB2 -i tmp_wafs_0p25.grb2 -grib gfs.t${cyc}z.awf_0p25.f${ffhr}.grib2 + $WGRIB2 -s gfs.t${cyc}z.awf_0p25.f${ffhr}.grib2 > gfs.t${cyc}z.awf_0p25.f${ffhr}.grib2.idx + +#--------------------------- +# Product 3: WAFS unblended EDPARM, ICESEV, CB (No CAT MWT) gfs.tHHz.wafs_0p25_unblended.fFF.grib2 +#--------------------------- + $WGRIB2 tmp_wafs_0p25.grb2 | grep -F -f $FIXgfs/grib2_0p25_gfs_hazard.list \ + | $WGRIB2 -i tmp_wafs_0p25.grb2 -set master_table 25 -grib tmp_wafs_0p25.grb2.forblend + + # Convert template 5 to 5.40 + #$WGRIB2 tmp_wafs_0p25.grb2.forblend -set_grib_type jpeg -grib_out gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2 + mv tmp_wafs_0p25.grb2.forblend gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2 + $WGRIB2 -s gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2 > gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2.idx +fi + +if [ $SENDCOM = "YES" ] ; then + + ############################## + # Post Files to COM + ############################## + + mv gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2 $COMOUT/gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2 + mv gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2.idx $COMOUT/gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2.idx + + if [ $hazard_timewindow = 'yes' ] ; then + mv gfs.t${cyc}z.awf_0p25.f${ffhr}.grib2 $COMOUT/gfs.t${cyc}z.awf_0p25.f${ffhr}.grib2 + mv gfs.t${cyc}z.awf_0p25.f${ffhr}.grib2.idx $COMOUT/gfs.t${cyc}z.awf_0p25.f${ffhr}.grib2.idx + + mv 
gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2 $COMOUT/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2 + mv gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2.idx $COMOUT/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2.idx + fi + + ############################# + # Post Files to PCOM + ############################## + ## mv gfs.t${cyc}z.wafs_0p25_unblended_wifs.f${ffhr2}.grib2 $PCOM/gfs.t${cyc}z.wafs_0p25_unblended_wifs.f${ffhr2}.grib2 +fi + + +if [ $SENDDBN = "YES" ] ; then + ###################### + # Distribute Data + ###################### + + if [ $hazard_timewindow = 'yes' ] ; then + # Hazard WAFS data (ICESEV EDR CAT MWT on 100mb to 1000mb or on new ICAO 2023 levels) sent to AWC and to NOMADS for US stakeholders + $DBNROOT/bin/dbn_alert MODEL GFS_AWF_0P25_GB2 $job $COMOUT/gfs.t${cyc}z.awf_0p25.f${ffhr}.grib2 + + # Unblended US WAFS data sent to UK for blending, to the same server as 1.25 deg unblended data: wmo/grib2.tCCz.wafs_grb_wifsfFF.45 + $DBNROOT/bin/dbn_alert MODEL GFS_WAFS_0P25_UBL_GB2 $job $COMOUT/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2 + fi + + # WAFS U/V/T/RH data sent to the same server as the unblended data as above + $DBNROOT/bin/dbn_alert MODEL GFS_WAFS_0P25_GB2 $job $COMOUT/gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2 + +fi + +################################################################################ +# GOOD RUN +set +x +echo "**************JOB EXGFS_ATMOS_WAFS_GRIB2_0P25.SH COMPLETED NORMALLY ON THE IBM" +echo "**************JOB EXGFS_ATMOS_WAFS_GRIB2_0P25.SH COMPLETED NORMALLY ON THE IBM" +echo "**************JOB EXGFS_ATMOS_WAFS_GRIB2_0P25.SH COMPLETED NORMALLY ON THE IBM" +set -x +################################################################################ + +echo "HAS COMPLETED NORMALLY!" + +exit 0 + +############## END OF SCRIPT ####################### diff --git a/scripts/exglobal_atmos_analysis.sh b/scripts/exglobal_atmos_analysis.sh index 6da862eb54c..575c1126757 100755 --- a/scripts/exglobal_atmos_analysis.sh +++ b/scripts/exglobal_atmos_analysis.sh @@ -89,6 +89,8 @@ SENDDBN=${SENDDBN:-"NO"} RUN_GETGES=${RUN_GETGES:-"NO"} GETGESSH=${GETGESSH:-"getges.sh"} export gesenvir=${gesenvir:-${envir}} + +export hofx_2m_sfcfile=${hofx_2m_sfcfile:-".false."} # Observations OPREFIX=${OPREFIX:-""} @@ -748,6 +750,7 @@ cat > gsiparm.anl << EOF / &OBS_INPUT dmesh(1)=145.0,dmesh(2)=150.0,dmesh(3)=100.0,dmesh(4)=50.0,time_window_max=3.0, + hofx_2m_sfcfile=${hofx_2m_sfcfile}, ${OBSINPUT} / OBS_INPUT:: diff --git a/scripts/exglobal_forecast.sh b/scripts/exglobal_forecast.sh index 0a6329910e7..3555d4ef336 100755 --- a/scripts/exglobal_forecast.sh +++ b/scripts/exglobal_forecast.sh @@ -149,6 +149,12 @@ if [[ "${esmf_profile:-}" = ".true." ]]; then export ESMF_RUNTIME_PROFILE_OUTPUT=SUMMARY fi +if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then + unset OMP_NUM_THREADS +else + export OMP_NUM_THREADS=${UFS_THREADS:-1} +fi + ${NCP} "${EXECgfs}/${FCSTEXEC}" "${DATA}/" ${APRUN_UFS} "${DATA}/${FCSTEXEC}" 1>&1 2>&2 export ERR=$? 
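A note on two conventions the WAFS scripts above rely on (a sketch, not part of the patch; the helper name wait_for_file is illustrative). Forecast hours are zero-padded with printf while forcing base 10, because $(( 08 )) would otherwise be rejected as an invalid octal constant, and input availability is handled by polling the GRIB2 index file with a bounded wait (10 s x 180 tries = 30 minutes):

    # pad hours safely: 10# forces base 10 so "08"/"09" do not trip octal parsing
    for hr in 6 08 24 120; do printf "f%03d " $(( 10#$hr )); done; echo   # f006 f008 f024 f120

    # poll for a non-empty index file every 10 s; give up after 180 tries (30 min)
    wait_for_file() {
      local file=$1 tries=0
      while [[ ! -s ${file} ]]; do
        tries=$(( tries + 1 ))
        if (( tries >= 180 )); then
          echo "ABORTING after 30 min of waiting for ${file}!" >&2
          return 1
        fi
        sleep 10
      done
    }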
diff --git a/scripts/exglobal_stage_ic.sh b/scripts/exglobal_stage_ic.sh
index 6a1be22e71d..e7dfc3c4950 100755
--- a/scripts/exglobal_stage_ic.sh
+++ b/scripts/exglobal_stage_ic.sh
@@ -106,7 +106,7 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do
 
   # Ocean Perturbation Files
   # Extra zero on MEMDIR ensures we have a number even if the string is empty
-  if (( 0${MEMDIR:3} > 0 )) && [[ "${OCN_ENS_PERTURB_FILES:-false}" == "true" ]]; then
+  if (( 0${MEMDIR:3} > 0 )) && [[ "${USE_OCN_PERTURB_FILES:-false}" == "true" ]]; then
     src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${PDY}.${cyc}0000.mom6_increment.nc"
     tgt="${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.mom6_increment.nc"
     ${NCP} "${src}" "${tgt}"
diff --git a/sorc/build_all.sh b/sorc/build_all.sh
index 3de13b7b605..d8374c269fc 100755
--- a/sorc/build_all.sh
+++ b/sorc/build_all.sh
@@ -27,6 +27,8 @@ Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-d][-h][-j n][-v][-w]
     Print this help message and exit
   -j:
     Specify maximum number of build jobs (n)
+  -k:
+    Kill all builds if any build fails
   -u:
     Build UFS-DA
   -v:
@@ -48,15 +50,17 @@ _build_debug=""
 _verbose_opt=""
 _wave_unst=""
 _build_job_max=20
+_quick_kill="NO"
 
 # Reset option counter in case this script is sourced
 OPTIND=1
-while getopts ":a:dghj:uvw" option; do
+while getopts ":a:dghj:kuvw" option; do
   case "${option}" in
     a) _build_ufs_opt+="-a ${OPTARG} ";;
     d) _build_debug="-d" ;;
     g) _build_gsi="YES" ;;
     h) _usage;;
     j) _build_job_max="${OPTARG} ";;
+    k) _quick_kill="YES" ;;
     u) _build_ufsda="YES" ;;
     v) _verbose_opt="-v";;
     w) _wave_unst="-w";;
@@ -193,6 +197,31 @@ fi
 procs_in_use=0
 declare -A build_ids
 
+check_builds()
+{
+  for chk_build in "${!build_jobs[@]}"; do
+    # Check if the build is complete and if so what the status was
+    if [[ -n "${build_ids[${chk_build}]+0}" ]]; then
+      if ! ps -p "${build_ids[${chk_build}]}" > /dev/null; then
+        wait "${build_ids[${chk_build}]}"
+        build_stat=$?
+        if [[ ${build_stat} != 0 ]]; then
+          echo "build_${chk_build}.sh failed! Exiting!"
+          echo "Check logs/build_${chk_build}.log for details."
+          echo "logs/build_${chk_build}.log" > "${HOMEgfs}/sorc/logs/error.logs"
+          for kill_build in "${!build_jobs[@]}"; do
+            if [[ -n "${build_ids[${kill_build}]+0}" ]]; then
+              pkill -P "${build_ids[${kill_build}]}"
+            fi
+          done
+          return "${build_stat}"
+        fi
+      fi
+    fi
+  done
+  return 0
+}
+
 builds_started=0
 # Now start looping through all of the jobs until everything is done
 while [[ ${builds_started} -lt ${#build_jobs[@]} ]]; do
@@ -228,11 +257,31 @@ while [[ ${builds_started} -lt ${#build_jobs[@]} ]]; do
     fi
   done
 
+  # If requested, check if any build has failed and exit if so
+  if [[ "${_quick_kill}" == "YES" ]]; then
+    check_builds
+    build_stat=$?
+    if (( build_stat != 0 )); then
+      exit "${build_stat}"
+    fi
+  fi
+
   sleep 5s
 done
+
 # Wait for all jobs to complete and check return statuses
-while [[ ${#build_jobs[@]} -gt 0 ]]; do
+while [[ "${#build_jobs[@]}" -gt 0 ]]; do
+
+  # If requested, check if any build has failed and exit if so
+  if [[ "${_quick_kill}" == "YES" ]]; then
+    check_builds
+    build_stat=$?
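+    # check_builds has already pkill'ed the children of the remaining builds
+    # on a failure; all that is left here is to propagate the non-zero status.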
+ if [[ ${build_stat} != 0 ]]; then + exit "${build_stat}" + fi + fi + for build in "${!build_jobs[@]}"; do # Test if each job is complete and if so, notify and remove from the array if [[ -n "${build_ids[${build}]+0}" ]]; then diff --git a/sorc/build_ufs.sh b/sorc/build_ufs.sh index d18dbef9fad..3055179f508 100755 --- a/sorc/build_ufs.sh +++ b/sorc/build_ufs.sh @@ -36,7 +36,7 @@ CLEAN_BEFORE=YES CLEAN_AFTER=NO if [[ "${MACHINE_ID}" != "noaacloud" ]]; then - ./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "intel" "${CLEAN_BEFORE}" "${CLEAN_AFTER}" + BUILD_JOBS=${BUILD_JOBS:-8} ./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "intel" "${CLEAN_BEFORE}" "${CLEAN_AFTER}" mv "./tests/fv3_${COMPILE_NR}.exe" ./tests/ufs_model.x mv "./tests/modules.fv3_${COMPILE_NR}.lua" ./tests/modules.ufs_model.lua cp "./modulefiles/ufs_common.lua" ./tests/ufs_common.lua diff --git a/sorc/build_upp.sh b/sorc/build_upp.sh index e217e171db5..1dca0035fdd 100755 --- a/sorc/build_upp.sh +++ b/sorc/build_upp.sh @@ -26,6 +26,6 @@ if [[ ! -d "../exec" ]]; then mkdir -p ../exec fi -cd ufs_model.fd/FV3/upp/tests +cd upp.fd/tests # shellcheck disable=SC2086 BUILD_JOBS=${BUILD_JOBS:-8} ./compile_upp.sh ${_opts} diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 968146f02b6..2198b419567 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 968146f02b64fb12e9a27ab0975f1ecea90bdcc2 +Subproject commit 2198b419567cf7efa7404cd076e76e01d86f9e58 diff --git a/sorc/gfs_utils.fd b/sorc/gfs_utils.fd index 4b7f6095d26..de3708bfb00 160000 --- a/sorc/gfs_utils.fd +++ b/sorc/gfs_utils.fd @@ -1 +1 @@ -Subproject commit 4b7f6095d260b7fcd9c99c337454e170f1aa7f2f +Subproject commit de3708bfb00cd51900e813b84fdf2a3be5d398b0 diff --git a/sorc/gsi_enkf.fd b/sorc/gsi_enkf.fd index 74ac5942118..b53740a7bd1 160000 --- a/sorc/gsi_enkf.fd +++ b/sorc/gsi_enkf.fd @@ -1 +1 @@ -Subproject commit 74ac5942118d2a83ca84d3a629ec3aaffdb36fc5 +Subproject commit b53740a7bd1cc416f634589075b8c8b89f0ef761 diff --git a/sorc/gsi_monitor.fd b/sorc/gsi_monitor.fd index 149cd811f27..3ecfe079139 160000 --- a/sorc/gsi_monitor.fd +++ b/sorc/gsi_monitor.fd @@ -1 +1 @@ -Subproject commit 149cd811f273883e7d6400eded7b980acd65ca2b +Subproject commit 3ecfe0791391cee11cb0e3a3bcc16957b37057e0 diff --git a/sorc/gsi_utils.fd b/sorc/gsi_utils.fd index 55abe588252..67b014d8d3e 160000 --- a/sorc/gsi_utils.fd +++ b/sorc/gsi_utils.fd @@ -1 +1 @@ -Subproject commit 55abe588252ec6f39047d54a14727cf59f7f6688 +Subproject commit 67b014d8d3e5acc1d21aca15e3fe2d66d327a206 diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 2b54f3ea10b..d4c6f45a122 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -166,12 +166,19 @@ declare -a ufs_templates=("model_configure.IN" \ "MOM6_data_table.IN" \ "ice_in.IN" \ "ufs.configure.atm.IN" \ + "ufs.configure.atm_esmf.IN" \ "ufs.configure.atmaero.IN" \ - "ufs.configure.leapfrog_atm_wav.IN" \ + "ufs.configure.atmaero_esmf.IN" \ + "ufs.configure.s2s.IN" \ "ufs.configure.s2s_esmf.IN" \ + "ufs.configure.s2sa.IN" \ "ufs.configure.s2sa_esmf.IN" \ + "ufs.configure.s2sw.IN" \ "ufs.configure.s2sw_esmf.IN" \ - "ufs.configure.s2swa_esmf.IN" ) + "ufs.configure.s2swa.IN" \ + "ufs.configure.s2swa_esmf.IN" \ + "ufs.configure.leapfrog_atm_wav.IN" \ + "ufs.configure.leapfrog_atm_wav_esmf.IN" ) for file in "${ufs_templates[@]}"; do [[ -s "${file}" ]] && rm -f "${file}" ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/parm/${file}" . 
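The quick-kill machinery added to sorc/build_all.sh above is built from a standard bash pattern: start each build in the background, record its PID in an associative array, detect completion with ps -p, harvest the exit status with wait, and on the first failure pkill -P the children of everything still running. A condensed, self-contained sketch (job names and script paths are illustrative, not the actual build list):

    #!/usr/bin/env bash
    declare -A pids
    for job in ufs gsi upp; do                          # illustrative job names
      "./build_${job}.sh" > "logs/build_${job}.log" 2>&1 &
      pids[${job}]=$!
    done
    while (( ${#pids[@]} > 0 )); do
      for job in "${!pids[@]}"; do
        if ! ps -p "${pids[${job}]}" > /dev/null; then  # job no longer running
          wait "${pids[${job}]}"; stat=$?               # harvest its exit status
          if (( stat != 0 )); then
            echo "build_${job}.sh failed" >&2
            for other in "${!pids[@]}"; do pkill -P "${pids[${other}]}"; done
            exit "${stat}"
          fi
          unset "pids[${job}]"
        fi
      done
      sleep 5
    done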
@@ -345,10 +352,11 @@ fi #--link source code directories #------------------------------ cd "${HOMEgfs}/sorc" || exit 8 -if [[ -d ufs_model.fd ]]; then - [[ -d upp.fd ]] && rm -rf upp.fd - ${LINK} ufs_model.fd/FV3/upp upp.fd -fi +# TODO: Commenting out until UPP is up-to-date with Rocky-8. +#if [[ -d ufs_model.fd ]]; then +# [[ -d upp.fd ]] && rm -rf upp.fd +# ${LINK} ufs_model.fd/FV3/upp upp.fd +#fi if [[ -d gsi_enkf.fd ]]; then [[ -d gsi.fd ]] && rm -rf gsi.fd diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd index 69886627284..7fdb58cad0d 160000 --- a/sorc/ufs_model.fd +++ b/sorc/ufs_model.fd @@ -1 +1 @@ -Subproject commit 698866272846e8c0f8f61ddb1b20d6463460cd63 +Subproject commit 7fdb58cad0dad2f62ce7813c6719554d1c5a17af diff --git a/sorc/ufs_utils.fd b/sorc/ufs_utils.fd index 47956a96a7d..f42fae239d0 160000 --- a/sorc/ufs_utils.fd +++ b/sorc/ufs_utils.fd @@ -1 +1 @@ -Subproject commit 47956a96a7dc14e33fbccbb7fe74422f9bf542bf +Subproject commit f42fae239d0824f7b9a83c9afdc3d980894c7df8 diff --git a/sorc/upp.fd b/sorc/upp.fd new file mode 160000 index 00000000000..15e2a8c1a51 --- /dev/null +++ b/sorc/upp.fd @@ -0,0 +1 @@ +Subproject commit 15e2a8c1a5189621b51d948b3b8f88cf417bb620 diff --git a/sorc/verif-global.fd b/sorc/verif-global.fd index bd1c8f62a18..9377e84ba3f 160000 --- a/sorc/verif-global.fd +++ b/sorc/verif-global.fd @@ -1 +1 @@ -Subproject commit bd1c8f62a1878051e34ff7c6f6a4dd290381f1ef +Subproject commit 9377e84ba3fc9b2fd13c2c84cfd571855dee75ae diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 9173a37bdc2..fcf1c2dbed4 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -251,8 +251,15 @@ EOF ${NLN} "${FIXgfs}/am/global_co2historicaldata_glob.txt" "${DATA}/co2historicaldata_glob.txt" ${NLN} "${FIXgfs}/am/co2monthlycyc.txt" "${DATA}/co2monthlycyc.txt" - if [[ ${ICO2} -gt 0 ]]; then - for file in $(ls "${FIXgfs}/am/fix_co2_proj/global_co2historicaldata"*) ; do + # Set historical CO2 values based on whether this is a reforecast run or not + # Ref. 
issue 2403
+  local co2dir
+  co2dir="fix_co2_proj"
+  if [[ ${reforecast:-"NO"} == "YES" ]]; then
+    co2dir="co2dat_4a"
+  fi
+  if (( ICO2 > 0 )); then
+    for file in $(ls "${FIXgfs}/am/${co2dir}/global_co2historicaldata"*) ; do
       ${NLN} "${file}" "${DATA}/$(basename "${file//global_}")"
     done
   fi
@@ -695,6 +702,13 @@ MOM6_postdet() {
     ${NLN} "${COM_OCEAN_ANALYSIS}/${RUN}.t${cyc}z.ocninc.nc" "${DATA}/INPUT/mom6_increment.nc"
   fi
 
+  # GEFS perturbations
+  # TODO: an if [[ ${RUN} == "gefs" ]] block may be needed
+  # to ensure it does not interfere with the GFS
+  if (( MEMBER > 0 )) && [[ "${ODA_INCUPD:-False}" == "True" ]]; then
+    ${NLN} "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.mom6_increment.nc" "${DATA}/INPUT/mom6_increment.nc"
+  fi
+
   # Copy MOM6 fixed files
   ${NCP} "${FIXgfs}/mom6/${OCNRES}/"* "${DATA}/INPUT/"  # TODO: These need to be explicit
@@ -709,13 +723,11 @@ MOM6_postdet() {
 
   # If using stochastic parameterizations, create a seed that does not exceed the
   # largest signed integer
-  if [[ "${DO_OCN_SPPT}" = "YES" ]] || [[ "${DO_OCN_PERT_EPBL}" = "YES" ]]; then
-    if [[ ${SET_STP_SEED:-"YES"} = "YES" ]]; then
-      ISEED_OCNSPPT=$(( (current_cycle*1000 + MEMBER*10 + 6) % 2147483647 ))
-      ISEED_EPBL=$(( (current_cycle*1000 + MEMBER*10 + 7) % 2147483647 ))
-    else
-      ISEED=${ISEED:-0}
-    fi
+  if [[ ${DO_OCN_SPPT} = "YES" ]]; then
+    ISEED_OCNSPPT=$((current_cycle*10000 + ${MEMBER#0}*100 + 8)),$((current_cycle*10000 + ${MEMBER#0}*100 + 9)),$((current_cycle*10000 + ${MEMBER#0}*100 + 10)),$((current_cycle*10000 + ${MEMBER#0}*100 + 11)),$((current_cycle*10000 + ${MEMBER#0}*100 + 12))
+  fi
+  if [[ ${DO_OCN_PERT_EPBL} = "YES" ]]; then
+    ISEED_EPBL=$((current_cycle*10000 + ${MEMBER#0}*100 + 13)),$((current_cycle*10000 + ${MEMBER#0}*100 + 14)),$((current_cycle*10000 + ${MEMBER#0}*100 + 15)),$((current_cycle*10000 + ${MEMBER#0}*100 + 16)),$((current_cycle*10000 + ${MEMBER#0}*100 + 17))
   fi
 
   # Link output files
@@ -926,7 +938,7 @@ GOCART_rc() {
 GOCART_postdet() {
   echo "SUB ${FUNCNAME[0]}: Linking output data for GOCART"
 
-  for fhr in ${FV3_OUTPUT_FH}; do
+  for fhr in ${GOCART_OUTPUT_FH}; do
     local vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
 
     # Temporarily delete existing files due to noclobber in GOCART
@@ -947,7 +959,7 @@ GOCART_out() {
   # TO DO: this should be linked but there were issues where gocart was crashing if it was linked
   local fhr
   local vdate
-  for fhr in ${FV3_OUTPUT_FH}; do
+  for fhr in ${GOCART_OUTPUT_FH}; do
     if (( fhr == 0 )); then continue; fi
     vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
     ${NCP} "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \
diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh
index ab02270b46e..b5e1ad8e82e 100755
--- a/ush/forecast_predet.sh
+++ b/ush/forecast_predet.sh
@@ -10,30 +10,33 @@
 to_seconds() {
   # Function to convert HHMMSS to seconds since 00Z
-  local hhmmss=${1:?}
-  local hh=${hhmmss:0:2}
-  local mm=${hhmmss:2:2}
-  local ss=${hhmmss:4:2}
-  local seconds=$((10#${hh}*3600+10#${mm}*60+10#${ss}))
-  local padded_seconds=$(printf "%05d" "${seconds}")
+  local hhmmss hh mm ss seconds padded_seconds
+  hhmmss=${1:?}
+  hh=${hhmmss:0:2}
+  mm=${hhmmss:2:2}
+  ss=${hhmmss:4:2}
+  seconds=$((10#${hh}*3600+10#${mm}*60+10#${ss}))
+  padded_seconds=$(printf "%05d" "${seconds}")
   echo "${padded_seconds}"
 }
 
 middle_date(){
   # Function to calculate mid-point date in YYYYMMDDHH between two dates also in YYYYMMDDHH
-  local date1=${1:?}
-  local date2=${2:?}
-  local date1s=$(date --utc -d 
"${date1:0:8} ${date1:8:2}:00:00" +%s) - local date2s=$(date --utc -d "${date2:0:8} ${date2:8:2}:00:00" +%s) - local dtsecsby2=$(( $((date2s - date1s)) / 2 )) - local mid_date=$(date --utc -d "${date1:0:8} ${date1:8:2} + ${dtsecsby2} seconds" +%Y%m%d%H%M%S) + local date1 date2 date1s date2s dtsecsby2 mid_date + date1=${1:?} + date2=${2:?} + date1s=$(date --utc -d "${date1:0:8} ${date1:8:2}:00:00" +%s) + date2s=$(date --utc -d "${date2:0:8} ${date2:8:2}:00:00" +%s) + dtsecsby2=$(( $((date2s - date1s)) / 2 )) + mid_date=$(date --utc -d "${date1:0:8} ${date1:8:2} + ${dtsecsby2} seconds" +%Y%m%d%H%M%S) echo "${mid_date:0:10}" } nhour(){ # Function to calculate hours between two dates (This replicates prod-util NHOUR) - local date1=${1:?} - local date2=${2:?} + local date1 date2 seconds1 seconds2 hours + date1=${1:?} + date2=${2:?} # Convert dates to UNIX timestamps seconds1=$(date --utc -d "${date1:0:8} ${date1:8:2}:00:00" +%s) seconds2=$(date --utc -d "${date2:0:8} ${date2:8:2}:00:00" +%s) @@ -41,21 +44,17 @@ nhour(){ echo "${hours}" } +# shellcheck disable=SC2034 common_predet(){ echo "SUB ${FUNCNAME[0]}: Defining variables for shared through model components" - # Ignore "not used" warning - # shellcheck disable=SC2034 pwd=$(pwd) CDUMP=${CDUMP:-gdas} - CASE=${CASE:-C96} CDATE=${CDATE:-"${PDY}${cyc}"} ENSMEM=${ENSMEM:-000} # Define significant cycles current_cycle="${PDY}${cyc}" previous_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - ${assim_freq} hours" +%Y%m%d%H) - # ignore errors that variable isn't used - # shellcheck disable=SC2034 next_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${assim_freq} hours" +%Y%m%d%H) forecast_end_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${FHMAX} hours" +%Y%m%d%H) @@ -89,6 +88,7 @@ common_predet(){ cd "${DATA}" || ( echo "FATAL ERROR: Unable to 'cd ${DATA}', ABORT!"; exit 8 ) } +# shellcheck disable=SC2034 FV3_predet(){ echo "SUB ${FUNCNAME[0]}: Defining variables for FV3" @@ -105,8 +105,6 @@ FV3_predet(){ fi # Convert output settings into an explicit list for FV3 - # Ignore "not used" warning - # shellcheck disable=SC2034 FV3_OUTPUT_FH="" local fhr=${FHMIN} if (( FHOUT_HF > 0 && FHMAX_HF > 0 )); then @@ -116,8 +114,6 @@ FV3_predet(){ FV3_OUTPUT_FH="${FV3_OUTPUT_FH} $(seq -s ' ' "${fhr}" "${FHOUT}" "${FHMAX}")" # Other options - # ignore errors that variable isn't used - # shellcheck disable=SC2034 MEMBER=$(( 10#${ENSMEM:-"-1"} )) # -1: control, 0: ensemble mean, >0: ensemble member $MEMBER PREFIX_ATMINC=${PREFIX_ATMINC:-""} # allow ensemble to use recentered increment @@ -169,7 +165,6 @@ FV3_predet(){ nstf_name=${nstf_name:-"${NST_MODEL},${NST_SPINUP},${NST_RESV},${ZSEA1},${ZSEA2}"} nst_anl=${nst_anl:-".false."} - # blocking factor used for threading and general physics performance #nyblocks=$(expr \( $npy - 1 \) \/ $layout_y ) #nxblocks=$(expr \( $npx - 1 \) \/ $layout_x \/ 32) @@ -215,6 +210,7 @@ WW3_predet(){ ${NLN} "${COM_WAVE_RESTART}" "restart_wave" } +# shellcheck disable=SC2034 CICE_predet(){ echo "SUB ${FUNCNAME[0]}: CICE before run type determination" @@ -227,12 +223,11 @@ CICE_predet(){ # CICE does not have a concept of high frequency output like FV3 # Convert output settings into an explicit list for CICE - # Ignore "not used" warning - # shellcheck disable=SC2034 CICE_OUTPUT_FH=$(seq -s ' ' "${FHMIN}" "${FHOUT_OCNICE}" "${FHMAX}") } +# shellcheck disable=SC2034 MOM6_predet(){ echo "SUB ${FUNCNAME[0]}: MOM6 before run type determination" @@ -245,8 +240,6 @@ MOM6_predet(){ # MOM6 
does not have a concept of high frequency output like FV3 # Convert output settings into an explicit list for MOM6 - # Ignore "not used" warning - # shellcheck disable=SC2034 MOM6_OUTPUT_FH=$(seq -s ' ' "${FHMIN}" "${FHOUT_OCNICE}" "${FHMAX}") } @@ -260,9 +253,12 @@ CMEPS_predet(){ } +# shellcheck disable=SC2034 GOCART_predet(){ echo "SUB ${FUNCNAME[0]}: GOCART before run type determination" if [[ ! -d "${COM_CHEM_HISTORY}" ]]; then mkdir -p "${COM_CHEM_HISTORY}"; fi + GOCART_OUTPUT_FH=$(seq -s ' ' "${FHMIN}" "6" "${FHMAX}") + # TODO: AERO_HISTORY.rc has hardwired output frequency to 6 hours } diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh index eadd465421d..8b61cac948e 100755 --- a/ush/hpssarch_gen.sh +++ b/ush/hpssarch_gen.sh @@ -681,7 +681,7 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then touch "${DATA}/${RUN}_restartb_grp${n}.txt" m=1 - while (( m <= NMEM_EARCGRP )); do + while (( m <= NMEM_EARCGRP && (n-1)*NMEM_EARCGRP+m <= NMEM_ENS )); do nm=$(((n-1)*NMEM_EARCGRP+m)) mem=$(printf %03i ${nm}) head="${RUN}.t${cyc}z." diff --git a/ush/module-setup.sh b/ush/module-setup.sh index 008ef7ee972..b66e3622d09 100755 --- a/ush/module-setup.sh +++ b/ush/module-setup.sh @@ -36,10 +36,10 @@ elif [[ ${MACHINE_ID} = orion* ]] ; then if ( ! eval module help > /dev/null 2>&1 ) ; then source /apps/lmod/lmod/init/bash fi - export LMOD_SYSTEM_DEFAULT_MODULES=contrib - set +u - module reset - set -u + #export LMOD_SYSTEM_DEFAULT_MODULES=git/2.28.0 # contrib has a lot of stuff we shouldn't put in MODULEPATH + #set +u + module purge # reset causes issues on Orion sometimes. + #set -u elif [[ ${MACHINE_ID} = s4* ]] ; then # We are on SSEC Wisconsin S4 diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh index 14610f1201f..ac18b06e717 100755 --- a/ush/parsing_namelists_FV3.sh +++ b/ush/parsing_namelists_FV3.sh @@ -631,6 +631,24 @@ EOF use_zmtnblck = ${use_zmtnblck:-".true."} EOF fi + + if [[ "${DO_OCN_SPPT:-NO}" == "YES" ]]; then + cat >> input.nml <> input.nml <> input.nml << EOF ${nam_stochy_nml:-} diff --git a/ush/parsing_namelists_MOM6.sh b/ush/parsing_namelists_MOM6.sh index 9c1378fec43..923288c76a2 100755 --- a/ush/parsing_namelists_MOM6.sh +++ b/ush/parsing_namelists_MOM6.sh @@ -25,24 +25,6 @@ EOF # new_lscale=.true. 
#EOF -if [[ "${DO_OCN_SPPT}" == "YES" ]]; then - cat >> input.nml <> input.nml <> input.nml < None: super().initialize() # stage CRTM fix files - crtm_fix_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'gdas', 'aero_crtm_coeff.yaml') - logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") - crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config) + logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") + crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) FileHandler(crtm_fix_list).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'gdas', 'aero_jedi_fix.yaml') - logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") - jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() # stage berror files @@ -93,10 +91,9 @@ def initialize(self: Analysis) -> None: FileHandler(self.get_bkg_dict(AttrDict(self.task_config, **self.task_config))).sync() # generate variational YAML file - logger.debug(f"Generate variational YAML file: {self.task_config.fv3jedi_yaml}") - varda_yaml = parse_j2yaml(self.task_config['AEROVARYAML'], self.task_config) - save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml) - logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}") + logger.debug(f"Generate variational YAML file: {self.task_config.jedi_yaml}") + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) + logger.info(f"Wrote variational YAML to: {self.task_config.jedi_yaml}") # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") @@ -114,7 +111,7 @@ def execute(self: Analysis) -> None: exec_cmd = Executable(self.task_config.APRUN_AEROANL) exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x') exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + exec_cmd.add_default_arg(self.task_config.jedi_yaml) try: logger.debug(f"Executing {exec_cmd}") @@ -212,7 +209,7 @@ def _add_fms_cube_sphere_increments(self: Analysis) -> None: inc_template = os.path.join(self.task_config.DATA, 'anl', 'aeroinc.' 
+ increment_template) bkg_template = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, restart_template) # get list of increment vars - incvars_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'gdas', 'aeroanl_inc_vars.yaml') + incvars_list_path = os.path.join(self.task_config['PARMgfs'], 'gdas', 'aeroanl_inc_vars.yaml') incvars = YAMLFile(path=incvars_list_path)['incvars'] super().add_fv3_increments(inc_template, bkg_template, incvars) diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 5709bc130e9..2221fb7b340 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -4,6 +4,7 @@ import glob import tarfile from logging import getLogger +from pprint import pformat from netCDF4 import Dataset from typing import List, Dict, Any, Union @@ -26,10 +27,14 @@ def __init__(self, config: Dict[str, Any]) -> None: super().__init__(config) self.config.ntiles = 6 # Store location of GDASApp jinja2 templates - self.gdasapp_j2tmpl_dir = os.path.join(self.config.HOMEgfs, 'parm/gdas') + self.gdasapp_j2tmpl_dir = os.path.join(self.config.PARMgfs, 'gdas') def initialize(self) -> None: super().initialize() + + # all JEDI analyses need a JEDI config + self.task_config.jedi_config = self.get_jedi_config() + # all analyses need to stage observations obs_dict = self.get_obs_dict() FileHandler(obs_dict).sync() @@ -41,13 +46,33 @@ def initialize(self) -> None: # link jedi executable to run directory self.link_jediexe() + @logit(logger) + def get_jedi_config(self) -> Dict[str, Any]: + """Compile a dictionary of JEDI configuration from JEDIYAML template file + + Parameters + ---------- + + Returns + ---------- + jedi_config : Dict + a dictionary containing the fully rendered JEDI yaml configuration + """ + + # generate JEDI YAML file + logger.info(f"Generate JEDI YAML config: {self.task_config.jedi_yaml}") + jedi_config = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, searchpath=self.gdasapp_j2tmpl_dir) + logger.debug(f"JEDI config:\n{pformat(jedi_config)}") + + return jedi_config + @logit(logger) def get_obs_dict(self) -> Dict[str, Any]: """Compile a dictionary of observation files to copy - This method uses the OBS_LIST configuration variable to generate a dictionary - from a list of YAML files that specify what observation files are to be - copied to the run directory from the observation input directory + This method extracts 'observers' from the JEDI yaml and from that list, extracts a list of + observation files that are to be copied to the run directory + from the observation input directory Parameters ---------- @@ -57,13 +82,13 @@ def get_obs_dict(self) -> Dict[str, Any]: obs_dict: Dict a dictionary containing the list of observation files to copy for FileHandler """ - logger.debug(f"OBS_LIST: {self.task_config['OBS_LIST']}") - obs_list_config = parse_j2yaml(self.task_config["OBS_LIST"], self.task_config, searchpath=self.gdasapp_j2tmpl_dir) - logger.debug(f"obs_list_config: {obs_list_config}") - # get observers from master dictionary - observers = obs_list_config['observers'] + + logger.info(f"Extracting a list of observation files from {self.task_config.JEDIYAML}") + observations = find_value_in_nested_dict(self.task_config.jedi_config, 'observations') + logger.debug(f"observations:\n{pformat(observations)}") + copylist = [] - for ob in observers: + for ob in observations['observers']: obfile = ob['obs space']['obsdatain']['engine']['obsfile'] basename = os.path.basename(obfile) 
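                # each copylist entry pairs the source file under COM_OBS with its destination in the run directory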
copylist.append([os.path.join(self.task_config['COM_OBS'], basename), obfile]) @@ -77,9 +102,11 @@ def get_obs_dict(self) -> Dict[str, Any]: def get_bias_dict(self) -> Dict[str, Any]: """Compile a dictionary of observation files to copy - This method uses the OBS_LIST configuration variable to generate a dictionary - from a list of YAML files that specify what observation bias correction files - are to be copied to the run directory from the observation input directory + This method extracts 'observers' from the JEDI yaml and from that list, extracts a list of + observation bias correction files that are to be copied to the run directory + from the component directory. + TODO: COM_ATMOS_ANALYSIS_PREV is hardwired here and this method is not appropriate in + `analysis.py` and should be implemented in the component where this is applicable. Parameters ---------- @@ -89,13 +116,13 @@ def get_bias_dict(self) -> Dict[str, Any]: bias_dict: Dict a dictionary containing the list of observation bias files to copy for FileHandler """ - logger.debug(f"OBS_LIST: {self.task_config['OBS_LIST']}") - obs_list_config = parse_j2yaml(self.task_config["OBS_LIST"], self.task_config, searchpath=self.gdasapp_j2tmpl_dir) - logger.debug(f"obs_list_config: {obs_list_config}") - # get observers from master dictionary - observers = obs_list_config['observers'] + + logger.info(f"Extracting a list of bias correction files from {self.task_config.JEDIYAML}") + observations = find_value_in_nested_dict(self.task_config.jedi_config, 'observations') + logger.debug(f"observations:\n{pformat(observations)}") + copylist = [] - for ob in observers: + for ob in observations['observers']: if 'obs bias' in ob.keys(): obfile = ob['obs bias']['input file'] obdir = os.path.dirname(obfile) @@ -104,6 +131,7 @@ def get_bias_dict(self) -> Dict[str, Any]: for file in ['satbias.nc', 'satbias_cov.nc', 'tlapse.txt']: bfile = f"{prefix}.{file}" copylist.append([os.path.join(self.task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)]) + # TODO: Why is this specific to ATMOS? bias_dict = { 'mkdir': [os.path.join(self.runtime_config.DATA, 'bc')], @@ -328,3 +356,74 @@ def tgz_diags(statfile: str, diagdir: str) -> None: # Add diag files to tarball for diagfile in diags: tgz.add(diagfile, arcname=os.path.basename(diagfile)) + + +@logit(logger) +def find_value_in_nested_dict(nested_dict: Dict, target_key: str) -> Any: + """ + Recursively search through a nested dictionary and return the value for the target key. + This returns the first target key it finds. So if a key exists in a subsequent + nested dictionary, it will not be found. + + Parameters + ---------- + nested_dict : Dict + Dictionary to search + target_key : str + Key to search for + + Returns + ------- + Any + Value of the target key + + Raises + ------ + KeyError + If key is not found in dictionary + + TODO: if this gives issues due to landing on an incorrect key in the nested + dictionary, we will have to implement a more concrete method to search for a key + given a more complete address. 
See resolved conversations in PR 2387 + + # Example usage: + nested_dict = { + 'a': { + 'b': { + 'c': 1, + 'd': { + 'e': 2, + 'f': 3 + } + }, + 'g': 4 + }, + 'h': { + 'i': 5 + }, + 'j': { + 'k': 6 + } + } + + user_key = input("Enter the key to search for: ") + result = find_value_in_nested_dict(nested_dict, user_key) + """ + + if not isinstance(nested_dict, dict): + raise TypeError(f"Input is not of type(dict)") + + result = nested_dict.get(target_key) + if result is not None: + return result + + for value in nested_dict.values(): + if isinstance(value, dict): + try: + result = find_value_in_nested_dict(value, target_key) + if result is not None: + return result + except KeyError: + pass + + raise KeyError(f"Key '{target_key}' not found in the nested dictionary") diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 5a90a89e34b..6348bdf3198 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -31,7 +31,7 @@ def __init__(self, config): _res = int(self.config.CASE[1:]) _res_anl = int(self.config.CASE_ANL[1:]) _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2) - _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmvar.yaml") + _jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmvar.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( @@ -48,7 +48,7 @@ def __init__(self, config): 'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", - 'fv3jedi_yaml': _fv3jedi_yaml, + 'jedi_yaml': _jedi_yaml, } ) @@ -71,19 +71,17 @@ def initialize(self: Analysis) -> None: super().initialize() # stage CRTM fix files - crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_crtm_coeff.yaml') - logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") - crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config) + logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") + crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) FileHandler(crtm_fix_list).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_jedi_fix.yaml') - logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") - jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() # stage static background error files, otherwise it will assume ID matrix - logger.debug(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}") + logger.info(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}") FileHandler(self.get_berror_dict(self.task_config)).sync() # stage ensemble files for use in hybrid background error @@ -102,10 +100,9 @@ def initialize(self: Analysis) -> None: FileHandler(self.get_bkg_dict(AttrDict(self.task_config))).sync() # generate variational YAML file - logger.debug(f"Generate variational YAML file: 
{self.task_config.fv3jedi_yaml}") - varda_yaml = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, searchpath=self.gdasapp_j2tmpl_dir) - save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml) - logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}") + logger.debug(f"Generate variational YAML file: {self.task_config.jedi_yaml}") + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) + logger.info(f"Wrote variational YAML to: {self.task_config.jedi_yaml}") # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") @@ -123,7 +120,7 @@ def execute(self: Analysis) -> None: exec_cmd = Executable(self.task_config.APRUN_ATMANL) exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x') exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + exec_cmd.add_default_arg(self.task_config.jedi_yaml) try: logger.debug(f"Executing {exec_cmd}") @@ -170,7 +167,7 @@ def finalize(self: Analysis) -> None: archive.add(diaggzip, arcname=os.path.basename(diaggzip)) # copy full YAML from executable to ROTDIR - logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS}") + logger.info(f"Copying {self.task_config.jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS}") src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml") dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml") logger.debug(f"Copying {src} to {dest}") diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 3e2c0a233c0..1037b557c24 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -31,7 +31,7 @@ def __init__(self, config): _res = int(self.config.CASE_ENS[1:]) _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2) - _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmens.yaml") + _jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmens.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( @@ -45,7 +45,7 @@ def __init__(self, config): 'OPREFIX': f"{self.config.EUPD_CYC}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", - 'fv3jedi_yaml': _fv3jedi_yaml, + 'jedi_yaml': _jedi_yaml, } ) @@ -96,19 +96,17 @@ def initialize(self: Analysis) -> None: FileHandler({'mkdir': dirlist}).sync() # stage CRTM fix files - crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_crtm_coeff.yaml') - logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") - crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config) + logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") + crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) FileHandler(crtm_fix_list).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_jedi_fix.yaml') - logger.debug(f"Staging JEDI fix files from 
{jedi_fix_list_path}") - jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() # stage backgrounds - logger.debug(f"Stage ensemble member background files") + logger.info(f"Stage ensemble member background files") localconf = AttrDict() keys = ['COM_ATMOS_RESTART_TMPL', 'previous_cycle', 'ROTDIR', 'RUN', 'NMEM_ENS', 'DATA', 'current_cycle', 'ntiles'] @@ -118,10 +116,9 @@ def initialize(self: Analysis) -> None: FileHandler(self.get_fv3ens_dict(localconf)).sync() # generate ensemble da YAML file - logger.debug(f"Generate ensemble da YAML file: {self.task_config.fv3jedi_yaml}") - ensda_yaml = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, searchpath=self.gdasapp_j2tmpl_dir) - save_as_yaml(ensda_yaml, self.task_config.fv3jedi_yaml) - logger.info(f"Wrote ensemble da YAML to: {self.task_config.fv3jedi_yaml}") + logger.debug(f"Generate ensemble da YAML file: {self.task_config.jedi_yaml}") + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) + logger.info(f"Wrote ensemble da YAML to: {self.task_config.jedi_yaml}") # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") @@ -153,7 +150,7 @@ def execute(self: Analysis) -> None: exec_cmd = Executable(self.task_config.APRUN_ATMENSANL) exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_letkf.x') exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + exec_cmd.add_default_arg(self.task_config.jedi_yaml) try: logger.debug(f"Executing {exec_cmd}") @@ -206,7 +203,7 @@ def finalize(self: Analysis) -> None: archive.add(diaggzip, arcname=os.path.basename(diaggzip)) # copy full YAML from executable to ROTDIR - logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS_ENS}") + logger.info(f"Copying {self.task_config.jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS_ENS}") src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml") dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml") logger.debug(f"Copying {src} to {dest}") diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index 9eee8314c35..c149f140b61 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -260,20 +260,18 @@ def initialize(self) -> None: FileHandler({'mkdir': dirlist}).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'snow_jedi_fix.yaml.j2') - logger.info(f"Staging JEDI fix files from {jedi_fix_list_path}") - jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() # stage backgrounds logger.info("Staging ensemble backgrounds") FileHandler(self.get_ens_bkg_dict(localconf)).sync() - # generate letkfoi YAML file - logger.info(f"Generate JEDI LETKF YAML file: {self.task_config.jedi_yaml}") - letkfoi_yaml = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, searchpath=self.gdasapp_j2tmpl_dir) - save_as_yaml(letkfoi_yaml, 
self.task_config.jedi_yaml)
+        # Write out letkfoi YAML file
+        save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml)
         logger.info(f"Wrote letkfoi YAML to: {self.task_config.jedi_yaml}")
 
+
         # need output dir for diags and anl
         logger.info("Create empty output [anl, diags] directories to receive output from executable")
         newdirs = [
diff --git a/ush/wafs_mkgbl.sh b/ush/wafs_mkgbl.sh
new file mode 100755
index 00000000000..026a01ffed9
--- /dev/null
+++ b/ush/wafs_mkgbl.sh
@@ -0,0 +1,152 @@
+# UTILITY SCRIPT NAME : wafs_mkgbl.sh
+# AUTHOR : Mary Jacobs
+# DATE WRITTEN : 11/06/96
+#
+# Abstract: This utility script produces the GFS WAFS
+# bulletins.
+#
+# Input: 2 arguments are passed to this script.
+# 1st argument - Forecast Hour - format of 2I
+# 2nd argument - In hours 12-30, the designator of
+# a or b.
+#
+# Logic: If we are processing hours 12-30, we have the
+# added variable of the a or b, and process
+# accordingly. The other hours, the a or b is dropped.
+#
+echo "History: SEPT 1996 - First implementation of this utility script"
+echo "History: AUG 1999 - Modified for implementation on IBM SP"
+echo " - Allows users to run interactively"
+#
+
+set -x
+hour_list="$1"
+sets_key=$2
+num=$#
+
+if test $num -ge 2
+then
+  echo " Appropriate number of arguments were passed"
+  set -x
+  if [ -z "$DATA" ]
+  then
+    export DATA=`pwd`
+    cd $DATA
+    setpdy.sh
+    . PDY
+  fi
+else
+  echo ""
+  echo "Usage: wafs_mkgbl.sh \$hour [a|b]"
+  echo ""
+  exit 16
+fi
+
+echo " ------------------------------------------"
+echo " BEGIN MAKING ${NET} WAFS PRODUCTS"
+echo " ------------------------------------------"
+
+echo "Enter Make WAFS utility."
+
+for hour in $hour_list
+do
+  ##############################
+  # Copy Input Field to $DATA
+  ##############################
+
+  if test ! -f pgrbf${hour}
+  then
+#    cp $COMIN/${RUN}.${cycle}.pgrbf${hour} pgrbf${hour}
+
+#    file name and forecast hour of GFS model data in Grib2 are 3 digits
+#    export fhr3=$hour
+#    if test $fhr3 -lt 100
+#    then
+#      export fhr3="0$fhr3"
+#    fi
+    fhr3="$(printf "%03d" $(( 10#$hour )) )"
+
+#    To solve Bugzilla #408: remove the dependency of grib1 files in gfs wafs job in next GFS upgrade
+#    Reason: It's not efficient if simply converting from grib2 to grib1 (costs 6 seconds with 415 records)
+#    Solution: Need to grep 'selected fields on selected levels' before CNVGRIB (costs 1 second with 92 records)
+    ln -s $COMIN/${RUN}.${cycle}.pgrb2.1p00.f$fhr3 pgrb2f${hour}
+    $WGRIB2 pgrb2f${hour} | grep -F -f $FIXgfs/grib_wafs.grb2to1.list | $WGRIB2 -i pgrb2f${hour} -grib pgrb2f${hour}.tmp
+#    on Cray, IOBUF_PARAMS has to be used to speed up CNVGRIB
+#    export IOBUF_PARAMS='*:size=32M:count=4:verbose'
    $CNVGRIB -g21 pgrb2f${hour}.tmp pgrbf${hour}
+#    unset IOBUF_PARAMS
+  fi
+
+  #
+  # BAG - Put in fix on 20070925 to force the precision of U and V winds
+  #       to default to 1 through the use of the grib_wafs.namelist file.
+  #
+  $COPYGB -g3 -i0 -N$FIXgfs/grib_wafs.namelist -x pgrbf${hour} tmp
+  mv tmp pgrbf${hour}
+  $GRBINDEX pgrbf${hour} pgrbif${hour}
+
+  ##############################
+  # Process WAFS
+  ##############################
+
+  if test $hour -ge '12' -a $hour -le '30'
+  then
+    sets=$sets_key
+    set +x
+    echo "We are processing the primary and secondary sets of hours."
+    echo "These sets are the a and b of hours 12-30."
+    set -x
+  else
+    # This is for hours 00/06 and 36-72.
+    unset sets
+  fi
+
+  export pgm=wafs_makewafs
+  . 
prep_step + + export FORT11="pgrbf${hour}" + export FORT31="pgrbif${hour}" + export FORT51="xtrn.wfs${NET}${hour}${sets}" + export FORT53="com.wafs${hour}${sets}" + + startmsg + $EXECgfs/wafs_makewafs.x < $FIXgfs/grib_wfs${NET}${hour}${sets} >>$pgmout 2>errfile + export err=$?;err_chk + + + ############################## + # Post Files to PCOM + ############################## + + if test "$SENDCOM" = 'YES' + then + cp xtrn.wfs${NET}${hour}${sets} $PCOM/xtrn.wfs${NET}${cyc}${hour}${sets}.$jobsuffix +# cp com.wafs${hour}${sets} $PCOM/com.wafs${cyc}${hour}${sets}.$jobsuffix + +# if test "$SENDDBN_NTC" = 'YES' +# then +# if test "$NET" = 'gfs' +# then +# $DBNROOT/bin/dbn_alert MODEL GFS_WAFS $job \ +# $PCOM/com.wafs${cyc}${hour}${sets}.$jobsuffix +# $DBNROOT/bin/dbn_alert MODEL GFS_XWAFS $job \ +# $PCOM/xtrn.wfs${NET}${cyc}${hour}${sets}.$jobsuffix +# fi +# fi + fi + + ############################## + # Distribute Data + ############################## + + if [ "$SENDDBN_NTC" = 'YES' ] ; then + $DBNROOT/bin/dbn_alert GRIB_LOW $NET $job $PCOM/xtrn.wfs${NET}${cyc}${hour}${sets}.$jobsuffix + else + echo "xtrn.wfs${NET}${cyc}${hour}${sets}.$job file not posted to db_net." + fi + + echo "Wafs Processing $hour hour completed normally" + +done + +exit diff --git a/versions/build.hera.ver b/versions/build.hera.ver index 263e967a970..337d5c32dab 100644 --- a/versions/build.hera.ver +++ b/versions/build.hera.ver @@ -1,5 +1,5 @@ export stack_intel_ver=2021.5.0 export stack_impi_ver=2021.5.1 -export spack_env=gsi-addon-dev +export spack_env=gsi-addon-dev-rocky8 source "${HOMEgfs:-}/versions/build.spack.ver" export spack_mod_path="/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/versions/fix.ver b/versions/fix.ver index a2a9caf8e32..d2828518bcd 100644 --- a/versions/fix.ver +++ b/versions/fix.ver @@ -12,7 +12,7 @@ export gdas_fv3jedi_ver=20220805 export gdas_gsibec_ver=20221031 export gdas_obs_ver=20240213 export glwu_ver=20220805 -export gsi_ver=20230911 +export gsi_ver=20240208 export lut_ver=20220805 export mom6_ver=20231219 export orog_ver=20231027 diff --git a/versions/run.hera.ver b/versions/run.hera.ver index 4529d34821b..b358f9d495e 100644 --- a/versions/run.hera.ver +++ b/versions/run.hera.ver @@ -1,6 +1,6 @@ export stack_intel_ver=2021.5.0 export stack_impi_ver=2021.5.1 -export spack_env=gsi-addon-dev +export spack_env=gsi-addon-dev-rocky8 export hpss_ver=hpss export ncl_ver=6.6.2 diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index b91e974c740..36b57bd6dc4 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -2661,7 +2661,7 @@ def earc(self): # Integer division is floor division, but we need ceiling division n_groups = -(self.nmem // -self._configs['earc']['NMEM_EARCGRP']) - groups = ' '.join([f'{grp:02d}' for grp in range(0, n_groups)]) + groups = ' '.join([f'{grp:02d}' for grp in range(0, n_groups + 1)]) cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump.replace('enkf', '') diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index 01bf586af13..75312ba77db 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -47,7 +47,10 @@ def __init__(self, app_config: AppConfig, cdump: str) -> None: self.HOMEgfs = self._base['HOMEgfs'] self.rotdir = self._base['ROTDIR'] self.pslot = self._base['PSLOT'] - self.nmem = int(self._base['NMEM_ENS']) + if self.cdump == "enkfgfs": + self.nmem = int(self._base['NMEM_ENS_GFS']) + 
else: + self.nmem = int(self._base['NMEM_ENS']) self._base['cycle_interval'] = to_timedelta(f'{self._base["assim_freq"]}H') self.n_tiles = 6 # TODO - this needs to be elsewhere
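For context on the two archive fixes above (the new while-loop guard in ush/hpssarch_gen.sh and the group count in workflow/rocoto/gfs_tasks.py): members are archived in groups of NMEM_EARCGRP, the group count uses ceiling division so a partial final group is still archived, and the guard keeps the global member index from running past the ensemble size. A small shell sketch of the mapping (values illustrative; the real workflow also numbers an extra group for data outside the member groups, omitted here):

    NMEM_ENS=25 NMEM_EARCGRP=10
    n_groups=$(( (NMEM_ENS + NMEM_EARCGRP - 1) / NMEM_EARCGRP ))   # ceiling division -> 3
    for (( n = 1; n <= n_groups; n++ )); do
      m=1
      while (( m <= NMEM_EARCGRP && (n-1)*NMEM_EARCGRP + m <= NMEM_ENS )); do
        printf "grp%02d -> mem%03d\n" "${n}" $(( (n-1)*NMEM_EARCGRP + m ))
        m=$(( m + 1 ))
      done
    done
    # the last group now covers only mem021-mem025; without the guard it would
    # have requested mem026-mem030, which do not exist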