diff --git a/.ansible-lint b/.ansible-lint index 821806e3a..9d40faf3b 100644 --- a/.ansible-lint +++ b/.ansible-lint @@ -35,6 +35,7 @@ exclude_paths: - tests/sanity/ignore-2.11.txt - tests/sanity/ignore-2.12.txt - tests/sanity/ignore-2.13.txt + - tests/sanity/ignore-2.14.txt - venv* parseable: true quiet: false diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 2193cb615..9395c85b1 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -1,7 +1,7 @@ name: Report a bug description: Request that a bug be reviewed. Complete all required fields. title: "[Bug] Enter description" -labels: [Bug] +labels: ["Bug", "Needs Triage" ] assignees: - IBMAnsibleHelper body: diff --git a/.github/workflows/ac-ansible-test-sanity.yml b/.github/workflows/ac-ansible-test-sanity.yml new file mode 100644 index 000000000..1354195a5 --- /dev/null +++ b/.github/workflows/ac-ansible-test-sanity.yml @@ -0,0 +1,71 @@ +name: AC Ansible sanity + +on: + pull_request: + branches: + - dev + - staging* + paths-ignore: + - '**.tar.gz' + - 'pycache/**' + - '.ansible-lint' + - 'cache/**' + - '.DS_Store' + - '.git/**' + - '.github/**' + - '.gitignore' + - '.python-version' + - '.pytest_cache/**' + - '.vscode/**' + - 'Jenkinsfile' + - 'ac' + - 'ansible.cfg' + - 'changelogs/**' + - 'collections/**' + - 'docs/**' + - 'scripts/**' + - 'test_config.yml' + - 'tests/*.ini' + - 'tests/*.py' + - 'tests/.pytest_cache' + - 'tests/pycache' + - 'tests/functional' + - 'tests/helpers' + - 'tests/requirements.txt' + - 'tests/unit' + - 'tests/sanity/ignore-*' + - 'venv*' + +jobs: + ansible-sanity: + runs-on: ubuntu-latest + env: + branch: ${{ github.event.pull_request.head.ref }} + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: 3.11 + + - name: Set up venv + run: | + python -m pip install --upgrade pip + pip install virtualenv + mkdir 
venv + virtualenv venv/venv-2.16 + + - name: Install dependencies + run: | + source venv/venv-2.16/bin/activate + python -m pip install --upgrade pip + pip install ansible + + - name: Run ac-sanity + run: | + source venv/venv-2.16/bin/activate + ./ac --ac-build + ./ac --ac-sanity diff --git a/.github/workflows/ac-bandit.yml b/.github/workflows/ac-bandit.yml new file mode 100644 index 000000000..288fb92b1 --- /dev/null +++ b/.github/workflows/ac-bandit.yml @@ -0,0 +1,38 @@ +name: AC Bandit + +on: + pull_request: + branches: + - dev + - staging* + +jobs: + bandit: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: 3.11 + + - name: Set up venv + run: | + python -m pip install --upgrade pip + pip install virtualenv + mkdir venv + virtualenv venv/venv-2.16 + + - name: Install dependencies + run: | + source venv/venv-2.16/bin/activate + python -m pip install --upgrade pip + pip install bandit + + - name: Run ac-bandit + run: | + source venv/venv-2.16/bin/activate + ./ac --ac-bandit --level l diff --git a/.github/workflows/ac-galaxy-importer.yml b/.github/workflows/ac-galaxy-importer.yml new file mode 100644 index 000000000..271f01c22 --- /dev/null +++ b/.github/workflows/ac-galaxy-importer.yml @@ -0,0 +1,40 @@ +name: AC Galaxy Importer + +on: + pull_request: + branches: + - dev + - staging* + +jobs: + galaxy-importer: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: 3.11 + + - name: Set up venv + run: | + python -m pip install --upgrade pip + pip install virtualenv + mkdir venv + virtualenv venv/venv-2.16 + + - name: Install dependencies + run: | + source venv/venv-2.16/bin/activate + python -m pip install --upgrade pip + pip install ansible + pip install ansible-importer + pip install galaxy-importer + + - name: Run 
ac-galaxy-importer + run: | + source venv/venv-2.16/bin/activate + ./ac --ac-galaxy-importer diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 505a98474..d2f69d546 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,18 +5,23 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics -v1.9.0-beta.1 -============= +v1.9.0 +====== Release Summary --------------- -Release Date: '2024-01-31' +Release Date: '2024-03-11' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review the collections `release notes `__ +Major Changes +------------- + +- zos_job_submit - when job statuses were read, were limited to AC (active), CC (completed normally), ABEND (ended abnormally) and ? (error unknown), SEC (security error), JCLERROR (job had a jcl error). Now the additional statuses are supported, CANCELLED (job was cancelled), CAB (converter abend), CNV (converter error), SYS (system failure) and FLU (job was flushed). (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + Minor Changes ------------- @@ -32,11 +37,22 @@ Minor Changes Bugfixes -------- +- module_utils/job.py - job output containing non-printable characters would crash modules. Fix now handles the error gracefully and returns a message to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1288). +- zos_apf - When operation=list was selected and more than one data set entry was fetched, the module only returned one data set. Fix now returns the complete list. (https://github.com/ansible-collections/ibm_zos_core/pull/1236). - zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1066). - zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1064). +- zos_data_set - Fixes a small parsing bug in module_utils/data_set function which extracts volume serial(s) from a LISTCAT command output. Previously a leading '-' was left behind for volser strings under 6 chars. (https://github.com/ansible-collections/ibm_zos_core/pull/1247). - zos_job_output - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). - zos_job_query - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). - zos_job_query - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). +- zos_job_submit - Was ignoring the default value for location=DATA_SET, now when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1120). +- zos_job_submit - when a JCL error occurred, the ret_code[msg_code] contained JCLERROR followed by an integer where the integer appeared to be a reason code when actually it is a multi line marker used to coordinate errors spanning more than one line. Now when a JCLERROR occurs, only the JCLERROR is returned for property ret_code[msg_code]. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when a response was returned, it contained an undocumented property; ret_code[msg_text]. Now when a response is returned, it correctly returns property ret_code[msg_txt]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=copy was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=hold was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=jclhold was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=scan was used in JCL, it would fail the module. Now typrun=scan no longer fails the module and an appropriate message is returned with appropriate return code values. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when wait_time_s was used, the duration would run approximately 5 seconds longer than reported in the duration. Now when the duration is returned, it is the actual accounting from when the job is submitted to when the module reads the job output.
(https://github.com/ansible-collections/ibm_zos_core/pull/1283). - zos_operator - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). - zos_unarchive - Using a local file with a USS format option failed when sending to remote because dest_data_set option had an empty dictionary. Fix now leaves dest_data_set as None when using a USS format option. (https://github.com/ansible-collections/ibm_zos_core/pull/1045). - zos_unarchive - When unarchiving USS files, the module left temporary files on the remote. Change now removes temporary files. (https://github.com/ansible-collections/ibm_zos_core/pull/1073). diff --git a/README.md b/README.md index da3b114d4..b2345c118 100644 --- a/README.md +++ b/README.md @@ -51,7 +51,7 @@ and ansible-doc to automate tasks on z/OS. Ansible version compatibility ============================= -This collection has been tested against **Ansible Core** versions >=2.14. +This collection has been tested against **Ansible Core** versions >=2.15. The Ansible Core versions supported for this collection align to the [ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). Review the [Ansible community changelogs](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-community-changelogs) for corresponding **Ansible community packages** @@ -64,11 +64,12 @@ for more more information on supported versions of Ansible. Other Dependencies ================== -This release of the **IBM z/OS core collection** requires the z/OS managed node have: -- [z/OS](https://www.ibm.com/docs/en/zos) V2R4 or later. 
+This release of the **IBM z/OS core collection** requires the z/OS managed node have the following: +- [z/OS](https://www.ibm.com/docs/en/zos) - [z/OS shell](https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm). -- [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) 3.9 - 3.11. -- [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau/1.2.x) 1.2.5 (or later) but prior to version 1.3. +- [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) +- [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau/1.2.x) +For specific dependency versions, please review the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html) for the version of the IBM Ansible z/OS core installed. Copyright ========= diff --git a/ac b/ac index bb307f4a6..9aee6a02d 100755 --- a/ac +++ b/ac @@ -242,6 +242,18 @@ ac_build(){ } # ------------------------------------------------------------------------------ +# Run galaxy importer on collection. +# ------------------------------------------------------------------------------ +#->ac-galaxy-importer: +## Build current branch and run galaxy importer on collection. +## Usage: ac [--ac-galaxy-importer] +## Example: +## $ ac --ac-galaxy-importer +ac_galaxy_importer(){ + message "Running Galaxy Importer" + . 
$VENV_BIN/activate && collection_name=$($VENV_BIN/ansible-galaxy collection build --force | awk -F/ '{print $NF}') && python -m galaxy_importer.main $collection_name +} + # Run a changelog lint locally # ------------------------------------------------------------------------------ #->ac-changelog: @@ -679,6 +691,10 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--ac-build" ;; + --ac-galaxy-importer) # Command + ensure_managed_venv_exists $1 + option_submitted="--ac-galaxy-importer" + ;; --ac-changelog) # Command ensure_managed_venv_exists $1 option_submitted="--ac-changelog" @@ -835,6 +851,8 @@ if [ "$option_submitted" ] && [ "$option_submitted" = "--ac-bandit" ] ; then ac_bandit $level elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-build" ] ; then ac_build +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-galaxy-importer" ] ; then + ac_galaxy_importer elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-changelog" ] ; then ac_changelog $command elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-install" ] ; then diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index 899014cd9..4e2979ebb 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -6,7 +6,11 @@ plugins: callback: {} cliconf: {} connection: {} - filter: {} + filter: + filter_wtor_messages: + description: Filter a list of WTOR messages + name: filter_wtor_messages + version_added: 1.2.0 httpapi: {} inventory: {} lookup: {} @@ -131,4 +135,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.9.0-beta.1 +version: 1.9.0 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index c05af6436..a8404bf84 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -1176,6 +1176,76 @@ releases: name: zos_script namespace: '' release_date: '2023-10-24' + 1.9.0: + changes: + bugfixes: + - module_utils/job.py - job output containing non-printable characters would + crash 
modules. Fix now handles the error gracefully and returns a message + to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1288). + - zos_apf - When operation=list was selected and more than one data set entry + was fetched, the module only returned one data set. Fix now returns the complete + list. (https://github.com/ansible-collections/ibm_zos_core/pull/1236). + - zos_data_set - Fixes a small parsing bug in module_utils/data_set function + which extracts volume serial(s) from a LISTCAT command output. Previously + a leading '-' was left behind for volser strings under 6 chars. (https://github.com/ansible-collections/ibm_zos_core/pull/1247). + - zos_job_submit - Was ignoring the default value for location=DATA_SET, now + when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1120). + - zos_job_submit - when a JCL error occurred, the ret_code[msg_code] contained + JCLERROR followed by an integer where the integer appeared to be a reason + code when actually it is a multi line marker used to coordinate errors spanning + more than one line. Now when a JCLERROR occurs, only the JCLERROR is returned + for property ret_code[msg_code]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when a response was returned, it contained an undocumented + property; ret_code[msg_text]. Now when a response is returned, it correctly + returns property ret_code[msg_txt]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when typrun=copy was used in JCL it would fail the module + with an improper message and error condition. While this case continues to + be considered a failure, the message has been corrected and it fails under + the condition that not enough time has been added to the modules execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1283). 
+ - zos_job_submit - when typrun=hold was used in JCL it would fail the module + with an improper message and error condition. While this case continues to + be considered a failure, the message has been corrected and it fails under + the condition that not enough time has been added to the modules execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when typrun=jclhold was used in JCL it would fail the module + with an improper message and error condition. While this case continues to + be considered a failure, the message has been corrected and it fails under + the condition that not enough time has been added to the modules execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when typrun=scan was used in JCL, it would fail the module. + Now typrun=scan no longer fails the module and an appropriate message is returned + with appropriate return code values. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when wait_time_s was used, the duration would run approximately + 5 seconds longer than reported in the duration. Now when the duration is returned, + it is the actual accounting from when the job is submitted to when the module + reads the job output. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + major_changes: + - zos_job_submit - when job statuses were read, were limited to AC (active), + CC (completed normally), ABEND (ended abnormally) and ? (error unknown), SEC + (security error), JCLERROR (job had a jcl error). Now the additional statuses + are supported, CANCELLED (job was cancelled), CAB (converter abend), CNV (converter + error), SYS (system failure) and FLU (job was flushed). (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + release_summary: 'Release Date: ''2024-03-11'' + + This changelog describes all changes made to the modules and plugins included + + in this collection.
The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes `__' + fragments: + - 1120-bugfix-zos_job_submit-default_value.yml + - 1236-bugfix-zos_apf-return-list.yml + - 1246-bugfix-zos_job_submit-typrun.yml + - 1247-volser-parsing-leading-dash-bugfix.yml + - 1288-job-submit-non-utf8-chars.yml + - 1292-doc-zos_tso_command-example.yml + - 1294-doc-zos_ping-scp.yml + - 1296-doc-sftp-collection-requirements.yml + - v1.9.0_summary.yml + release_date: '2024-03-16' 1.9.0-beta.1: changes: bugfixes: diff --git a/changelogs/fragments/1032-clean-job_submit-test.yml b/changelogs/fragments/1032-clean-job_submit-test.yml new file mode 100644 index 000000000..bb4248aec --- /dev/null +++ b/changelogs/fragments/1032-clean-job_submit-test.yml @@ -0,0 +1,3 @@ +trivial: + - test_zos_job_submit_func.py - Removed test setting that was covering a missing duration value. + (https://github.com/ansible-collections/ibm_zos_core/pull/1364). diff --git a/changelogs/fragments/1307-update-sanity-zos_copy.yml b/changelogs/fragments/1307-update-sanity-zos_copy.yml new file mode 100644 index 000000000..858f0b64c --- /dev/null +++ b/changelogs/fragments/1307-update-sanity-zos_copy.yml @@ -0,0 +1,10 @@ +minor_changes: + - zos_copy - Documented `group` and `owner` options. + (https://github.com/ansible-collections/ibm_zos_core/pull/1307). + +trivial: + - zos_copy - Removed many of the variables that were passed from the + action plugin to the module, reimplementing the logic inside the + module instead. Removed the use of temp_path variable inside zos_copy + in favor of using remote_src to deal with files copied to remote. + (https://github.com/ansible-collections/ibm_zos_core/pull/1307). 
\ No newline at end of file diff --git a/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml b/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml new file mode 100644 index 000000000..058faf66e --- /dev/null +++ b/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_mvs_raw - The module ignored the value of `tmp_hlq` option when creating temporary data sets. + Fix now honors the value if provided and uses it as High Level Qualifier for temporary data sets created + during the module execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1320). \ No newline at end of file diff --git a/docs/source/filters.rst b/docs/source/filters.rst index 51e3a034f..bbf24c6d4 100644 --- a/docs/source/filters.rst +++ b/docs/source/filters.rst @@ -5,13 +5,9 @@ Filters ======= -Filters in Ansible are from Jinja2, and are used to transform data inside -a template expression. The templates operate on the Ansible controller, and not -on the target host. Therefore, filters execute on the controller as they augment -the data locally. - -Jinja2 ships with many filters as does Ansible, and also allows users to add -their own custom filters. +Filters are used to transform data inside a template expression. The templates +operate on the Ansible controller, not on the managed node. Therefore, +filters execute on the controller as they augment the data locally. The **IBM z/OS core collection** includes filters and their usage in sample playbooks. 
Unlike collections that can be identified at the top level using the diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index 525c7c0be..fe93474f0 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -342,7 +342,7 @@ Examples # Simple archive - name: Archive file into a tar zos_archive: - path: /tmp/archive/foo.txt + src: /tmp/archive/foo.txt dest: /tmp/archive/foo_archive_test.tar format: name: tar @@ -350,7 +350,7 @@ Examples # Archive multiple files - name: Compress list of files into a zip zos_archive: - path: + src: - /tmp/archive/foo.txt - /tmp/archive/bar.txt dest: /tmp/archive/foo_bar_archive_test.zip @@ -360,7 +360,7 @@ Examples # Archive one data set into terse - name: Compress data set into a terse zos_archive: - path: "USER.ARCHIVE.TEST" + src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: name: terse @@ -368,7 +368,7 @@ Examples # Use terse with different options - name: Compress data set into a terse, specify pack algorithm and use adrdssu zos_archive: - path: "USER.ARCHIVE.TEST" + src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: name: terse @@ -379,7 +379,7 @@ Examples # Use a pattern to store - name: Compress data set pattern using xmit zos_archive: - path: "USER.ARCHIVE.*" + src: "USER.ARCHIVE.*" exclude_sources: "USER.ARCHIVE.EXCLUDE.*" dest: "USER.ARCHIVE.RESULT.XMIT" format: diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst index cc6c60d66..d70efc7a1 100644 --- a/docs/source/modules/zos_backup_restore.rst +++ b/docs/source/modules/zos_backup_restore.rst @@ -200,6 +200,15 @@ hlq | **type**: str +tmp_hlq + Override the default high level qualifier (HLQ) for temporary and backup data sets. + + The default HLQ is the Ansible user that executes the module and if that is not available, then the value of ``TMPHLQ`` is used. 
+ + | **required**: False + | **type**: str + + Examples diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 86a3a9463..00e274b00 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -91,6 +91,8 @@ dest If ``dest`` is a nonexistent USS file, it will be created. + If ``dest`` is a new USS file or replacement, the file will be appropriately tagged with either the system's default locale or the encoding option defined. If the USS file is a replacement, the user must have write authority to the file either through ownership, group or other permissions, else the copy will fail. + If ``dest`` is a nonexistent data set, it will be created following the process outlined here and in the ``volume`` option. If ``dest`` is a nonexistent data set, the attributes assigned will depend on the type of ``src``. If ``src`` is a USS file, ``dest`` will have a Fixed Block (FB) record format and the remaining attributes will be computed. If *is_binary=true*, ``dest`` will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. If *executable=true*,``dest`` will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. @@ -787,9 +789,9 @@ Notes For supported character sets used to encode data, refer to the `documentation `_. - `zos_copy <./zos_copy.html>`_ uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. + This module uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. 
If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. - Beginning in version 1.8.x, zos_copy will no longer attempt to autocorrect a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option executable that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will be responded with a (FSUM8976,./zos_copy.html) error. + Beginning in version 1.8.x, zos_copy will no longer attempt to correct a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option ``executable`` that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will result in a (FSUM8976,./zos_copy.html) error. diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 70e798a08..0ea34875f 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -97,7 +97,7 @@ type ``MEMBER`` expects to be used with an existing partitioned data set. - Choices are case-insensitive. + Choices are case-sensitive. | **required**: False | **type**: str @@ -139,7 +139,7 @@ space_type record_format The format of the data set. (e.g ``FB``) - Choices are case-insensitive. + Choices are case-sensitive. When *type=KSDS*, *type=ESDS*, *type=RRDS*, *type=LDS* or *type=ZFS* then *record_format=None*, these types do not have a default *record_format*. @@ -370,7 +370,7 @@ batch ``MEMBER`` expects to be used with an existing partitioned data set. - Choices are case-insensitive. + Choices are case-sensitive. | **required**: False | **type**: str @@ -412,7 +412,7 @@ batch record_format The format of the data set. (e.g ``FB``) - Choices are case-insensitive. + Choices are case-sensitive. 
When *type=KSDS*, *type=ESDS*, *type=RRDS*, *type=LDS* or *type=ZFS* then *record_format=None*, these types do not have a default *record_format*. @@ -568,7 +568,7 @@ Examples - name: Create a sequential data set if it does not exist zos_data_set: name: someds.name.here - type: seq + type: SEQ state: present - name: Create a PDS data set if it does not exist @@ -577,26 +577,26 @@ Examples type: pds space_primary: 5 space_type: M - record_format: fba + record_format: FBA record_length: 25 - name: Attempt to replace a data set if it exists zos_data_set: name: someds.name.here - type: pds + type: PDS space_primary: 5 space_type: M - record_format: u + record_format: U record_length: 25 replace: yes - name: Attempt to replace a data set if it exists. If not found in the catalog, check if it is available on volume 222222, and catalog if found. zos_data_set: name: someds.name.here - type: pds + type: PDS space_primary: 5 space_type: M - record_format: u + record_format: U record_length: 25 volumes: "222222" replace: yes @@ -604,19 +604,19 @@ Examples - name: Create an ESDS data set if it does not exist zos_data_set: name: someds.name.here - type: esds + type: ESDS - name: Create a KSDS data set if it does not exist zos_data_set: name: someds.name.here - type: ksds + type: KSDS key_length: 8 key_offset: 0 - name: Create an RRDS data set with storage class MYDATA if it does not exist zos_data_set: name: someds.name.here - type: rrds + type: RRDS sms_storage_class: mydata - name: Delete a data set if it exists @@ -661,7 +661,7 @@ Examples type: PDS space_primary: 5 space_type: M - record_format: fb + record_format: FB replace: yes - name: someds.name.here1(member1) type: MEMBER diff --git a/docs/source/modules/zos_fetch.rst b/docs/source/modules/zos_fetch.rst index 21b573a2a..87a50a65a 100644 --- a/docs/source/modules/zos_fetch.rst +++ b/docs/source/modules/zos_fetch.rst @@ -204,7 +204,7 @@ Notes For supported character sets used to encode data, refer to the `documentation 
`_. - `zos_fetch <./zos_fetch.html>`_ uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. + This module uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 6cff37a6a..8f4dda61b 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -16,9 +16,8 @@ zos_job_submit -- Submit JCL Synopsis -------- -- Submit JCL from a data set, USS, or from the controller. -- Submit a job and optionally monitor for completion. -- Optionally, wait a designated time until the job finishes. +- Submit JCL in a data set, USS file, or file on the controller. +- Submit a job and monitor for completion. - For an uncataloged dataset, specify the volume serial number. @@ -57,18 +56,6 @@ location | **choices**: DATA_SET, USS, LOCAL -wait - Setting this option will yield no change, it is deprecated. There is no no need to set *wait*; setting *wait_times_s* is the correct way to configure the amount of tme to wait for a job to execute. - - Configuring wait used by the `zos_job_submit <./zos_job_submit.html>`_ module has been deprecated and will be removed in ibm.ibm_zos_core collection. - - See option *wait_time_s*. - - | **required**: False - | **type**: bool - | **default**: False - - wait_time_s Option *wait_time_s* is the total time that module `zos_job_submit <./zos_job_submit.html>`_ will wait for a submitted job to complete. 
The time begins when the module is executed on the managed node. @@ -333,6 +320,8 @@ Notes .. note:: For supported character sets used to encode data, refer to the `documentation `_. + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + @@ -689,24 +678,46 @@ jobs } msg - Return code resulting from the job submission. Jobs that take longer to assign a value can have a value of '?'. + Job status resulting from the job submission. + + Job status `ABEND` indicates the job ended abnormally. + + Job status `AC` indicates the job is active, often a started task or job taking long. + + Job status `CAB` indicates a converter abend. + + Job status `CANCELED` indicates the job was canceled. + + Job status `CNV` indicates a converter error. + + Job status `FLU` indicates the job was flushed. + + Job status `JCLERR` or `JCL ERROR` indicates the JCL has an error. + + Job status `SEC` or `SEC ERROR` indicates the job as encountered a security error. + + Job status `SYS` indicates a system failure. + + Job status `?` indicates status can not be determined. | **type**: str - | **sample**: CC 0000 + | **sample**: AC msg_code - Return code extracted from the `msg` so that it can be evaluated as a string. Jobs that take longer to assign a value can have a value of '?'. + The return code from the submitted job as a string. | **type**: str msg_txt - Returns additional information related to the job. Jobs that take longer to assign a value can have a value of '?'. + Returns additional information related to the submitted job. 
| **type**: str - | **sample**: The job completion code (CC) was not available in the job output, please review the job log." + | **sample**: The job JOB00551 was run with special job processing TYPRUN=SCAN. This will result in no completion, return code or job steps and changed will be false. code - Return code converted to an integer value (when possible). For JCL ERRORs, this will be None. + The return code converted to an integer value when available. + + Jobs which have no return code will return NULL, such is the case of a job that errors or is active. | **type**: int diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst index f51096361..31b237588 100644 --- a/docs/source/modules/zos_script.rst +++ b/docs/source/modules/zos_script.rst @@ -296,9 +296,7 @@ Notes For supported character sets used to encode data, refer to the `documentation `_. - This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine. - - `zos_copy <./zos_copy.html>`_ uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets or CLISTs, consider issuing a TSO command with `zos_tso_command <./zos_tso_command.html>`_. 
diff --git a/docs/source/modules/zos_tso_command.rst b/docs/source/modules/zos_tso_command.rst index f3cdb0254..4af6b1b52 100644 --- a/docs/source/modules/zos_tso_command.rst +++ b/docs/source/modules/zos_tso_command.rst @@ -72,7 +72,7 @@ Examples - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC max_rc: 4 - - name: Execute TSO command to run explicitly a REXX script from a data set. + - name: Execute TSO command to run a REXX script explicitly from a data set. zos_tso_command: commands: - EXEC HLQ.DATASET.REXX exec diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index da80bd31a..91fa597ee 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -362,14 +362,14 @@ Examples # Simple extract - name: Copy local tar file and unpack it on the managed z/OS node. zos_unarchive: - path: "./files/archive_folder_test.tar" + src: "./files/archive_folder_test.tar" format: name: tar # use include - name: Unarchive a bzip file selecting only a file to unpack. zos_unarchive: - path: "/tmp/test.bz2" + src: "/tmp/test.bz2" format: name: bz2 include: @@ -378,7 +378,7 @@ Examples # Use exclude - name: Unarchive a terse data set and excluding data sets from unpacking. zos_unarchive: - path: "USER.ARCHIVE.RESULT.TRS" + src: "USER.ARCHIVE.RESULT.TRS" format: name: terse exclude: @@ -388,7 +388,7 @@ Examples # List option - name: List content from XMIT zos_unarchive: - path: "USER.ARCHIVE.RESULT.XMIT" + src: "USER.ARCHIVE.RESULT.XMIT" format: name: xmit format_options: @@ -404,6 +404,8 @@ Notes .. note:: VSAMs are not supported. + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. 
If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + See Also @@ -411,7 +413,7 @@ See Also .. seealso:: - - :ref:`zos_unarchive_module` + - :ref:`zos_archive_module` @@ -420,14 +422,14 @@ Return Values ------------- -path - File path or data set name unarchived. +src + File path or data set name unpacked. | **returned**: always | **type**: str dest_path - Destination path where archive was extracted. + Destination path where archive was unpacked. | **returned**: always | **type**: str diff --git a/docs/source/plugins.rst b/docs/source/plugins.rst index 5c8605ad3..ef0f6c183 100644 --- a/docs/source/plugins.rst +++ b/docs/source/plugins.rst @@ -5,30 +5,33 @@ Plugins ======= -Plugins that come with the **IBM z/OS core collection** augment Ansible's core +Plugins that come with the **IBM z/OS core collection** complement Ansible's core functionality. Ansible uses a plugin architecture to enable a rich, flexible and expandable feature set. Action ------ -* ``zos_ping``: Manages the REXX source transferred to the z/OS managed node for - `zos_ping`_. -* ``zos_copy``: Used to `copy data`_ from the controller to the z/OS managed - node. -* ``zos_fetch``: Used to `fetch data`_ from the z/OS managed node to the - controller. -* ``zos_job_submit``: Used to `submit a job`_ from the controller and optionally - monitor the job completion. +Action plugins integrate local processing and local data with module functionality. +Action plugins are executed by default when an associated module is used; no additional +user action is required, this documentation is reference only. -.. _normal: - https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/action/normal.py -.. _zos_ping: - modules/zos_ping.html -.. _copy data: +* `zos_copy`_: Used to copy data from the controller to the z/OS manage node. +* `zos_fetch`_: Used to fetch data from the z/OS managed node to the controller. 
+* `zos_job_submit`_: Used to submit a job from the controller to the z/OS managed node. +* `zos_ping`_: Used to transfer the module's REXX source to the z/OS managed node. +* `zos_script`_: Used to transfer scripts from the controller to the z/OS managed node. +* `zos_unarchive`_: Used to transfer archives from the controller to the z/OS managed node. + +.. _zos_copy: modules/zos_copy.html -.. _fetch data: +.. _zos_fetch: modules/zos_fetch.html -.. _submit a job: +.. _zos_job_submit: modules/zos_job_submit.html - +.. _zos_ping: + modules/zos_ping.html +.. _zos_script: + modules/zos_script.html +.. _zos_unarchive: + modules/zos_unarchive.html diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 726c1b64c..7c2c3a929 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -1,13 +1,22 @@ .. ........................................................................... -.. © Copyright IBM Corporation 2020, 2021, 2023 . +.. © Copyright IBM Corporation 2020, 2024 . .. ........................................................................... ======== Releases ======== -Version 1.9.0-beta.1 -==================== +Version 1.9.0 +============= + +Major Changes +------------- + - IBM Ansible z/OS core collection (**ibm_zos_core**) version 1.9.0 will be the last release to support ZOAU 1.2.x. + + - IBM Ansible z/OS core version 1.9.0 will continue to receive security updates and bug fixes. + + - Starting with IBM Ansible z/OS core version 1.10.0, ZOAU version 1.3.0 will be required. + - IBM Open Enterprise SDK for Python version 3.9.x is no longer supported. Minor Changes ------------- @@ -21,7 +30,24 @@ Minor Changes - Improved messages in the action plugin. - Improved the action plugin performance, flow and use of undocumented variables. - Improved the modules handling of ZOAU import errors allowing for the traceback to flow back to the source.
-- ``zos_tso_command`` - Has been updated with a new example demonstrating how to explicitly execute a REXX script in a data set. + - Improved job status support, now the supported statuses for property **ret_code[msg]** are: + + - Job status **ABEND** indicates the job ended abnormally. + - Job status **AC** indicates the job is active, often a started task or job taking long. + - Job status **CAB** indicates a converter abend. + - Job status **CANCELED** indicates the job was canceled. + - Job status **CNV** indicates a converter error. + - Job status **FLU** indicates the job was flushed. + - Job status **JCLERR** or **JCL ERROR** indicates the JCL has an error. + - Job status **SEC** or **SEC ERROR** indicates the job has encountered a security error. + - Job status **SYS** indicates a system failure. + - Job status **?** indicates status can not be determined. + +- ``zos_tso_command`` + + - Has been updated with a new example demonstrating how to explicitly execute a REXX script in a data set. + - Has been updated with a new example demonstrating how to chain multiple TSO commands into one invocation using semicolons. + - ``zos_mvs_raw`` - Has been enhanced to ensure that **instream-data** for option **dd_input** contain blanks in columns 1 and 2 while retaining a maximum length @@ -33,40 +59,69 @@ Minor Changes Bugfixes -------- +- ``zos_apf`` - Fixed an issue that when **operation=list** was selected and more than one data set entry was fetched, only one + data set was returned, now the complete list is returned. + - ``zos_copy`` - - Fixed an issue when copying an aliased executable from a data set to a non-existent data set, the destination data sets primary - and secondary extents would not match the source data set extent sizes. + - Fixed an issue that when copying an aliased executable from a data set to a non-existent data set, the destination + datasets primary and secondary extents would not match the source data set extent sizes.
- Fixed an issue when performing a copy operation to an existing file, the copied file resulted in having corrupted contents. -- ``zos_job_output`` - Fixed an issue that when using a job ID with less than 8 characters would result in a traceback. The fix +- ``zos_job_submit`` + + - Fixed an issue that when no **location** is set, the default is not correctly configured to **location=DATA_SET**. + - Fixed an issue that when a JCL error is encountered, the **ret_code[msg_code]** no longer will contain the multi line marker used to coordinate errors. + - Fixed an issue that when a response was returned, the property **ret_code[msg_text]** was incorrectly returned over **ret_code[msg_txt]**. + - Fixed an issue that when JCL contained **TYPRUN=SCAN**, the module would fail. The module no longer fails and an appropriate message and response is returned. + - Fixed an issue that when JCL contained either **TYPRUN=COPY**, **TYPRUN=HOLD**, or **TYPRUN=JCLHOLD** an improper message was returned and the job submission failed. + Now the job will fail under the condition that the module has exceeded its wait time and return a proper message. + - Fixed an issue where when option **wait_time_s** was used, the duration would be approximately 5 seconds longer than what was reported in the duration. + Now the duration is from when the job is submitted to when the module reads the job output. + +- ``zos_job_output`` - Fixed an issue that when using a job ID with less than 8 characters, would result in a traceback. The fix supports shorter job IDs as well as the use of wildcards. -- ``zos_job_query`` - Fixed an issue that when using a job ID with less than 8 characters would result in a traceback. The fix +- ``zos_job_query`` - Fixed an issue that when using a job ID with less than 8 characters, would result in a traceback. The fix supports shorter job IDs as well as the use of wildcards. 
- ``zos_unarchive`` - - Fixed an issue when using a local file with the USS format option that would fail sending it to the managed node. - - Fixed an issue that occurred when unarchiving USS files that would leave temporary files behind on the managed node. + - Fixed an issue that when using a local file with the USS format option, the module would fail to send the archive to the managed node. + - Fixed an issue that occurred when unarchiving USS files, the module would leave temporary files behind on the managed node. + +- ``module_utils`` + + - ``job.py`` - Improved exception handling and added a message inside the **content** of the **ddname** when a non-printable + character (character that can not be converted to UTF-8) is encountered. + - ``data_set.py`` - Fixed an issue that when a volser name less than 6 characters was encountered, the volser name was padded with hyphens to have length 6. + Known Issues ------------ Several modules have reported UTF-8 decoding errors when interacting with results that contain non-printable UTF-8 characters in the response. -This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules ``zos_job_submit``, ``zos_job_output``, -``zos_operator_action_query``` but are not limited to this list. This will be addressed in **ibm_zos_core** version 1.10.0-beta.1. Each case is -unique, some options to work around the error are below. +- This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules ``zos_job_submit``, ``zos_job_output``, + ``zos_operator_action_query``` but are not limited to this list. This has been addressed in this release and corrected with **ZOAU version 1.2.5.6**. +- If the appropriate level of ZOAU can not be installed, some options are to: -- Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. 
-- Add **ignore_errors:true** to the playbook task so the task error will not fail the playbook. -- If the error is resulting from a batch job, add **ignore_errors:true** to the task and capture the output into a variable and extract the job ID with - a regular expression and then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. + - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. + - Ignore module errors by using **ignore_errors:true** for a specific playbook task. + - If the error is resulting from a batch job, add **ignore_errors:true** to the task and capture the output into a registered variable to extract the + job ID with a regular expression. Then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. + - If the error is the result of a batch job, set option **return_output** to false so that no DDs are read which could contain the non-printable UTF-8 characters. + +An undocumented option **size** was defined in module **zos_data_set**, this has been removed to satisfy collection certification, use the intended +and documented **space_primary** option. + +In the past, choices could be defined in either lower or upper case. Now, only the case that is identified in the docs can be set, +this is so that the collection can continue to maintain certified status. Availability ------------ +* `Automation Hub`_ * `Galaxy`_ * `GitHub`_ @@ -75,7 +130,7 @@ Reference * Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ +* Supported by `IBM Open Enterprise SDK for Python`_ `3.10`_ - `3.12`_ * Supported by IBM `Z Open Automation Utilities 1.2.5`_ (or later) but prior to version 1.3. 
Version 1.8.0 @@ -978,6 +1033,8 @@ Known issues https://www.ibm.com/docs/en/python-zos/3.10 .. _3.11: https://www.ibm.com/docs/en/python-zos/3.11 +.. _3.12: + https://www.ibm.com/docs/en/python-zos/3.12 .. _Z Open Automation Utilities 1.1.0: https://www.ibm.com/docs/en/zoau/1.1.x .. _Z Open Automation Utilities 1.1.1: diff --git a/galaxy.yml b/galaxy.yml index 93af5d038..c408424aa 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.9.0-beta.1 +version: 1.10.0-beta.1 # Collection README file readme: README.md @@ -96,4 +96,5 @@ build_ignore: - tests/sanity/ignore-2.11.txt - tests/sanity/ignore-2.12.txt - tests/sanity/ignore-2.13.txt + - tests/sanity/ignore-2.14.txt - venv* diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index abab47f9c..7e24bc280 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,10 +1,10 @@ name: ibm_zos_core -version: "1.9.0-beta.1" +version: "1.10.0-beta.1" managed_requirements: - name: "IBM Open Enterprise SDK for Python" - version: ">=3.9" + version: ">=3.10" - name: "Z Open Automation Utilities" version: - - "1.2.5" + - "1.3.0" diff --git a/meta/runtime.yml b/meta/runtime.yml index be99ccf4b..898ad8ff5 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.14.0' +requires_ansible: '>=2.15.0' diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index 592126b00..e9c238b87 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -29,11 +29,10 @@ from ansible import cli from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( - is_member, - is_data_set + is_member ) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode, validation +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import template @@ -69,8 +68,8 @@ 
def run(self, tmp=None, task_vars=None): owner = task_args.get("owner", None) group = task_args.get("group", None) - is_pds = is_src_dir = False - temp_path = is_uss = is_mvs_dest = src_member = None + is_src_dir = False + temp_path = is_uss = None if dest: if not isinstance(dest, string_types): @@ -78,7 +77,6 @@ def run(self, tmp=None, task_vars=None): return self._exit_action(result, msg, failed=True) else: is_uss = "/" in dest - is_mvs_dest = is_data_set(dest) else: msg = "Destination is required" return self._exit_action(result, msg, failed=True) @@ -96,13 +94,11 @@ def run(self, tmp=None, task_vars=None): msg = "'src' or 'dest' must not be empty" return self._exit_action(result, msg, failed=True) else: - src_member = is_member(src) if not remote_src: if src.startswith('~'): src = os.path.expanduser(src) src = os.path.realpath(src) is_src_dir = os.path.isdir(src) - is_pds = is_src_dir and is_mvs_dest if not src and not content: msg = "'src' or 'content' is required" @@ -196,11 +192,6 @@ def run(self, tmp=None, task_vars=None): src = rendered_dir - task_args["size"] = sum( - os.stat(os.path.join(validation.validate_safe_path(path), validation.validate_safe_path(f))).st_size - for path, dirs, files in os.walk(src) - for f in files - ) else: if mode == "preserve": task_args["mode"] = "0{0:o}".format( @@ -231,7 +222,6 @@ def run(self, tmp=None, task_vars=None): src = rendered_file - task_args["size"] = os.stat(src).st_size display.vvv(u"ibm_zos_copy calculated size: {0}".format(os.stat(src).st_size), host=self._play_context.remote_addr) transfer_res = self._copy_to_remote( src, is_dir=is_src_dir, ignore_stderr=ignore_sftp_stderr @@ -242,15 +232,31 @@ def run(self, tmp=None, task_vars=None): return transfer_res display.vvv(u"ibm_zos_copy temp path: {0}".format(transfer_res.get("temp_path")), host=self._play_context.remote_addr) + if not encoding: + encoding = { + "from": encode.Defaults.get_default_system_charset(), + } + + """ + We format temp_path correctly to 
pass it as src option to the module, + we keep the original source to return to the user and avoid confusion + by returning the temp_path created. + """ + original_src = task_args.get("src") + if original_src: + if not remote_src: + base_name = os.path.basename(original_src) + if original_src.endswith("/"): + src = temp_path + "/" + else: + src = temp_path + else: + src = temp_path + task_args.update( dict( - is_uss=is_uss, - is_pds=is_pds, - is_src_dir=is_src_dir, - src_member=src_member, - temp_path=temp_path, - is_mvs_dest=is_mvs_dest, - local_charset=encode.Defaults.get_default_system_charset() + src=src, + encoding=encoding, ) ) copy_res = self._execute_module( @@ -284,17 +290,20 @@ def run(self, tmp=None, task_vars=None): self._remote_cleanup(dest, copy_res.get("dest_exists"), task_vars) return result - return _update_result(is_binary, copy_res, self._task.args) + return _update_result(is_binary, copy_res, self._task.args, original_src) def _copy_to_remote(self, src, is_dir=False, ignore_stderr=False): """Copy a file or directory to the remote z/OS system """ - temp_path = "/{0}/{1}".format(gettempprefix(), _create_temp_path_name()) + temp_path = "/{0}/{1}/{2}".format(gettempprefix(), _create_temp_path_name(), os.path.basename(src)) + self._connection.exec_command("mkdir -p {0}".format(os.path.dirname(temp_path))) _src = src.replace("#", "\\#") _sftp_action = 'put' + full_temp_path = temp_path if is_dir: src = src.rstrip("/") if src.endswith("/") else src + temp_path = os.path.dirname(temp_path) base = os.path.basename(src) self._connection.exec_command("mkdir -p {0}/{1}".format(temp_path, base)) _sftp_action += ' -r' # add '-r` to clone the source trees @@ -379,7 +388,7 @@ def _copy_to_remote(self, src, is_dir=False, ignore_stderr=False): display.vvv(u"ibm_zos_copy SSH transfer method restored to {0}".format(user_ssh_transfer_method), host=self._play_context.remote_addr) is_ssh_transfer_method_updated = False - return dict(temp_path=temp_path) + return 
dict(temp_path=full_temp_path) def _remote_cleanup(self, dest, dest_exists, task_vars): """Remove all files or data sets pointed to by 'dest' on the remote @@ -417,7 +426,7 @@ def _exit_action(self, result, msg, failed=False): return result -def _update_result(is_binary, copy_res, original_args): +def _update_result(is_binary, copy_res, original_args, original_src): """ Helper function to update output result with the provided values """ ds_type = copy_res.get("ds_type") src = copy_res.get("src") @@ -431,7 +440,7 @@ def _update_result(is_binary, copy_res, original_args): invocation=dict(module_args=original_args), ) if src: - updated_result["src"] = src + updated_result["src"] = original_src if note: updated_result["note"] = note if backup_name: diff --git a/plugins/filter/wtor.py b/plugins/filter/wtor.py index 28e908376..17b530218 100644 --- a/plugins/filter/wtor.py +++ b/plugins/filter/wtor.py @@ -12,6 +12,61 @@ from __future__ import absolute_import, division, print_function __metaclass__ = type + +DOCUMENTATION = r""" +name: filter_wtor_messages +author: Demetrios Dimatos (@ddimatos) +version_added: "1.2.0" +short_description: Filter a list of WTOR messages +description: + - Filter a list of WTOR (write to operator with reply) messages found by + module zos_operator_action_query. + - Filter using a string or regular expression. +options: + wtor_response: + description: + - A list containing response property `message_text`, provided by the + module zos_operator_action_query. + - The list can be the outstanding messages found in the module's + response under the `actions` property or the entire module + response. + type: list + required: true + text: + description: + - String of text to match or a regular expression to use as filter criteria. + type: str + required: true + ignore_case: + description: + - Should the filter ignore case when performing a match.
+ type: bool + required: false + default: false +""" + +EXAMPLES = r""" +- name: Filter actionable messages that match 'IEE094D SPECIFY OPERAND' and if so, set is_specify_operand = true. + set_fact: + is_specify_operand: "{{ result | ibm.ibm_zos_core.filter_wtor_messages('IEE094D SPECIFY OPERAND') }}" + when: result is defined and not result.failed + +- name: Evaluate if there are any existing dump messages matching 'IEE094D SPECIFY OPERAND' + assert: + that: + - is_specify_operand is defined + - bool_zos_operator_action_continue + success_msg: "Found 'IEE094D SPECIFY OPERAND' message." + fail_msg: "Did not find 'IEE094D SPECIFY OPERAND' message." +""" + +RETURN = r""" + _value: + description: A list containing dictionaries matching the WTOR. + type: list + elements: dict +""" + import re diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 1f49a2b26..25483b45d 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -21,7 +21,7 @@ # Only importing this module so we can catch a JSONDecodeError that sometimes happens # when a job's output has non-printable chars that conflict with JSON's control # chars. -from json import decoder +from json import JSONDecodeError from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, ) @@ -29,6 +29,12 @@ ZOAUImportError ) +try: + from zoautil_py import exceptions +except ImportError: + exceptions = ZOAUImportError(traceback.format_exc()) + + try: # For files that import individual functions from a ZOAU module, # we'll replace the imports to instead get the module. 
@@ -40,6 +46,18 @@ except Exception: jobs = ZOAUImportError(traceback.format_exc()) +JOB_ERROR_STATUSES = frozenset(["ABEND", # ZOAU job ended abnormally + "SEC ERROR", # Security error (legacy Ansible code) + "SEC", # ZOAU security error + "JCL ERROR", # Job had a JCL error (legacy Ansible code) + "JCLERR", # ZOAU job had a JCL error + "CANCELED", # ZOAU job was cancelled + "CAB", # ZOAU converter abend + "CNV", # ZOAU converter error + "SYS", # ZOAU system failure + "FLU" # ZOAU job was flushed + ]) + def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): """Get the output from a z/OS job based on various search criteria. @@ -89,11 +107,6 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru start_time=start_time ) - # while ((job_detail is None or len(job_detail) == 0) and duration <= timeout): - # current_time = timer() - # duration = round(current_time - start_time) - # sleep(1) - if len(job_detail) == 0: # some systems have issues with "*" while some require it to see results job_id = "" if job_id == "*" else job_id @@ -238,17 +251,9 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T # Preserve the original job_id for the failure path job_id_temp = job_id - # jls output: owner=job[0], name=job[1], id=job[2], status=job[3], rc=job[4] - # e.g.: OMVSADM HELLO JOB00126 JCLERR ? - # jobs.listing(job_id, owner) in 1.2.0 has owner param, 1.1 does not - # jls output has expanded in zoau 1.2.3 and later: jls -l -v shows headers - # jobclass=job[5] serviceclass=job[6] priority=job[7] asid=job[8] - # creationdatetime=job[9] queueposition=job[10] - # starting in zoau 1.2.4, program_name[11] was added. In 1.3.0, include_extended - # has to be set to true so we get the program name for a job. 
- # Testing has shown that the program_name impact is minor, so we're removing that option - final_entries = [] + + # In 1.3.0, include_extended has to be set to true so we get the program name for a job. entries = jobs.fetch_multiple(job_id=job_id_temp, include_extended=True) while ((entries is None or len(entries) == 0) and duration <= timeout): @@ -276,25 +281,17 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["system"] = "" job["owner"] = entry.owner - job["ret_code"] = dict() - # From v1.3.0, ZOAU sets unavailable job fields as None, instead of '?'. - # This new way of constructing msg allows for a better empty message. - # "" instead of "None None". - job["ret_code"]["msg"] = "{0} {1}".format( - entry.status if entry.status else "", - entry.return_code if entry.return_code else "" - ).strip() - + job["ret_code"] = {} + job["ret_code"]["msg"] = entry.status job["ret_code"]["msg_code"] = entry.return_code job["ret_code"]["code"] = None if entry.return_code and len(entry.return_code) > 0: if entry.return_code.isdigit(): job["ret_code"]["code"] = int(entry.return_code) - job["ret_code"]["msg_text"] = entry.status if entry.status else "?" + job["ret_code"]["msg_txt"] = entry.status - # Beginning in ZOAU v1.3.0, the Job class changes svc_class to - # service_class. + # Beginning in ZOAU v1.3.0, the Job class changes svc_class to service_class. 
job["svc_class"] = entry.service_class job["job_class"] = entry.job_class job["priority"] = entry.priority @@ -310,16 +307,45 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["duration"] = duration if dd_scan: - list_of_dds = jobs.list_dds(entry.job_id) - while ((list_of_dds is None or len(list_of_dds) == 0) and duration <= timeout): + # If true, it means the job is not ready for DD queries and the duration and + # timeout should apply here instructing the user to add more time + is_dd_query_exception = False + is_jesjcl = False + list_of_dds = [] + + try: + list_of_dds = jobs.list_dds(entry.job_id) + except exceptions.DDQueryException as err: + if 'BGYSC5201E' in str(err): + is_dd_query_exception = True + pass + + # Check if the Job has JESJCL, if not, its in the JES INPUT queue, thus wait the full wait_time_s. + # Idea here is to force a TYPRUN{HOLD|JCLHOLD|COPY} job to go the full wait duration since we have + # currently no way to detect them, but if we know the job is one of the JOB_ERROR_STATUS lets + # exit the wait time supplied as we know it is a job failure. 
+ is_jesjcl = True if search_dictionaries("dd_name", "JESJCL", list_of_dds) else False + is_job_error_status = True if entry.status in JOB_ERROR_STATUSES else False + + while ((list_of_dds is None or len(list_of_dds) == 0 or is_dd_query_exception) and + (not is_jesjcl and not is_job_error_status and duration <= timeout)): current_time = timer() duration = round(current_time - start_time) sleep(1) - list_of_dds = jobs.list_dds(entry.job_id) + try: + # Note, in the event of an exception, eg job has TYPRUN=HOLD + # list_of_dds will still be populated with valuable content + list_of_dds = jobs.list_dds(entry.job_id) + is_jesjcl = True if search_dictionaries("dd_name", "JESJCL", list_of_dds) else False + is_job_error_status = True if entry.status in JOB_ERROR_STATUSES else False + except exceptions.DDQueryException as err: + if 'BGYSC5201E' in str(err): + is_dd_query_exception = True + continue job["duration"] = duration - for single_dd in list_of_dds: + dd = {} if "dd_name" not in single_dd: @@ -360,23 +386,24 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T tmpcont = None if "step_name" in single_dd: if "dd_name" in single_dd: - # In case ZOAU fails when reading the job output, we'll - # add a message to the user telling them of this. - # ZOAU cannot read partial output from a job, so we - # have to make do with nothing from this step if it fails. + # In case ZOAU fails when reading the job output, we'll add a + # message to the user telling them of this. ZOAU cannot read + # partial output from a job, so we have to make do with nothing + # from this step if it fails. try: tmpcont = jobs.read_output( entry.job_id, single_dd["step_name"], single_dd["dd_name"] ) - except (UnicodeDecodeError, decoder.JSONDecodeError): + except (UnicodeDecodeError, JSONDecodeError, TypeError, KeyError) as e: tmpcont = ( "Non-printable UTF-8 characters were present in this output. " - "Please access it manually." + "Please access it from the job log." 
) dd["content"] = tmpcont.split("\n") + job["ret_code"]["steps"].extend(_parse_steps(tmpcont)) job["ddnames"].append(dd) @@ -397,16 +424,6 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["subsystem"] = (tmptext.split("\n")[ 0]).replace(" ", "") - # Extract similar: "19.49.44 JOB06848 IEFC452I DOCEASYT - JOB NOT RUN - JCL ERROR 029 " - # then further reduce down to: 'JCL ERROR 029' - if job["ret_code"]["msg_code"] == "?": - if "JOB NOT RUN -" in tmpcont: - tmptext = tmpcont.split( - "JOB NOT RUN -")[1].split("\n")[0] - job["ret_code"]["msg"] = tmptext.strip() - job["ret_code"]["msg_code"] = None - job["ret_code"]["code"] = None - final_entries.append(job) if not final_entries: final_entries = _job_not_found(job_id, owner, job_name, "unavailable") @@ -439,3 +456,25 @@ def _ddname_pattern(contents, resolve_dependencies): ) ) return str(contents) + + +def search_dictionaries(key, value, list_of_dictionaries): + """ Searches a list of dictionaries given key and returns + the value dictionary. + + Arguments: + key {str} -- dictionary key to search for. 
+ value {str} -- value to match for the dictionary key + list {str} -- list of dictionaries + + Returns: + dictionary -- dictionary matching the key and value + + Raises: + TypeError -- When input is not a list of dictionaries + """ + if not isinstance(list_of_dictionaries, list): + raise TypeError( + "Unsupported type for 'list_of_dictionaries', must be a list of dictionaries") + + return [element for element in list_of_dictionaries if element[key] == value] diff --git a/plugins/module_utils/zos_mvs_raw.py b/plugins/module_utils/zos_mvs_raw.py index 7c2badf84..466775939 100644 --- a/plugins/module_utils/zos_mvs_raw.py +++ b/plugins/module_utils/zos_mvs_raw.py @@ -24,7 +24,7 @@ class MVSCmd(object): """ @staticmethod - def execute(pgm, dds, parm="", debug=False, verbose=False): + def execute(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=None): """Execute an unauthorized MVS command. Args: @@ -36,9 +36,10 @@ def execute(pgm, dds, parm="", debug=False, verbose=False): MVSCmdResponse: The response of the command. """ module = AnsibleModuleHelper(argument_spec={}) - command = "mvscmd {0} {1} {2} ".format( + command = "mvscmd {0} {1} {2} {3}".format( "-d" if debug else "", "-v" if verbose else "", + "--tmphlq={0}".format(tmp_hlq.upper()) if tmp_hlq else "", MVSCmd._build_command(pgm, dds, parm), ) rc, out, err = module.run_command(command) @@ -64,7 +65,6 @@ def execute_authorized(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=No "--tmphlq={0}".format(tmp_hlq.upper()) if tmp_hlq else "", MVSCmd._build_command(pgm, dds, parm), ) - rc, out, err = module.run_command(command) return MVSCmdResponse(rc, out, err) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index a854d1cae..9acb3c1c6 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -94,6 +94,10 @@ - C(dest) can be a USS file, directory or MVS data set name. - If C(dest) has missing parent directories, they will be created. 
- If C(dest) is a nonexistent USS file, it will be created. + - If C(dest) is a new USS file or replacement, the file will be appropriately tagged with + either the system's default locale or the encoding option defined. If the USS file is + a replacement, the user must have write authority to the file either through ownership, + group or other permissions, else the module will fail. - If C(dest) is a nonexistent data set, it will be created following the process outlined here and in the C(volume) option. - If C(dest) is a nonexistent data set, the attributes assigned will depend on the type of @@ -143,7 +147,7 @@ to: description: - The encoding to be converted to - required: true + required: false type: str tmp_hlq: description: @@ -243,6 +247,15 @@ type: bool default: true required: false + group: + description: + - Name of the group that will own the file system objects. + - When left unspecified, it uses the current group of the current user + unless you are root, in which case it can preserve the previous + ownership. + - This option is only applicable if C(dest) is USS, otherwise ignored. + type: str + required: false mode: description: - The permission of the destination file or directory. @@ -261,6 +274,15 @@ the source file. type: str required: false + owner: + description: + - Name of the user that should own the filesystem object, as would be + passed to the chown command. + - When left unspecified, it uses the current user unless you are root, + in which case it can preserve the previous ownership. + - This option is only applicable if C(dest) is USS, otherwise ignored. + type: str + required: false remote_src: description: - If set to C(false), the module searches for C(src) at the local machine. @@ -449,15 +471,16 @@ - VSAM data sets can only be copied to other VSAM data sets. 
- For supported character sets used to encode data, refer to the L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). - - L(zos_copy,./zos_copy.html) uses SFTP (Secure File Transfer Protocol) for the underlying - transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, - you can exempt the Ansible userid on z/OS from using Co:Z thus falling back - to using standard SFTP. - - Beginning in version 1.8.x, zos_copy will no longer attempt to autocorrect a copy of a data type member - into a PDSE that contains program objects. You can control this behavior using module option - executable that will signify an executable is being copied into a PDSE with other - executables. Mixing data type members with program objects will be responded with a - (FSUM8976,./zos_copy.html) error. + - This module uses SFTP (Secure File Transfer Protocol) for the underlying + transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the + case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling + back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for + transfers, if not available, the module will fail. + - Beginning in version 1.8.x, zos_copy will no longer attempt to correct a copy of + a data type member into a PDSE that contains program objects. You can control this + behavior using module option C(executable) that will signify an executable is being + copied into a PDSE with other executables. Mixing data type members with program + objects will result in a (FSUM8976,./zos_copy.html) error. 
seealso: - module: zos_fetch - module: zos_data_set @@ -803,37 +826,35 @@ """ -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - ZOAUImportError, -) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import ( - idcams -) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( - better_arg_parser, data_set, encode, backup, copy, validation, -) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( - AnsibleModuleHelper, -) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( - is_member -) -from ansible.module_utils._text import to_bytes, to_native -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils.six import PY3 -from re import IGNORECASE -from hashlib import sha256 import glob +import math +import os import shutil import stat -import math import tempfile -import os import traceback +from hashlib import sha256 +from re import IGNORECASE + +from ansible.module_utils._text import to_bytes, to_native +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.six import PY3 +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + backup, better_arg_parser, copy, data_set, encode, validation) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import \ + AnsibleModuleHelper +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( + is_member, + is_data_set +) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import \ + ZOAUImportError +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import \ + idcams if PY3: - from re import fullmatch import pathlib + from re import fullmatch else: from re import match as fullmatch @@ -892,7 +913,6 @@ def run_command(self, cmd, **kwargs): def copy_to_seq( self, src, - temp_path, conv_path, dest, src_type @@ -904,13 +924,11 @@ def 
copy_to_seq( Arguments: src {str} -- Path to USS file or data set name - temp_path {str} -- Path to the location where the control node - transferred data to conv_path {str} -- Path to the converted source file dest {str} -- Name of destination data set src_type {str} -- Type of the source """ - new_src = conv_path or temp_path or src + new_src = conv_path or src copy_args = dict() copy_args["options"] = "" @@ -1031,15 +1049,15 @@ def copy_tree(self, src_dir, dest_dir, dirs_exist_ok=False): entries = list(itr) return self._copy_tree(entries, src_dir, dest_dir, dirs_exist_ok=dirs_exist_ok) - def convert_encoding(self, src, temp_path, encoding): + def convert_encoding(self, src, encoding, remote_src): """Convert encoding for given src Arguments: src {str} -- Path to the USS source file or directory - temp_path {str} -- Path to the location where the control node - transferred data to encoding {dict} -- Charsets that the source is to be converted from and to + remote_src {bool} -- Whether the file was already on the remote + node or not. 
Raises: CopyOperationError -- When the encoding of a USS file is not @@ -1051,19 +1069,10 @@ def convert_encoding(self, src, temp_path, encoding): from_code_set = encoding.get("from") to_code_set = encoding.get("to") enc_utils = encode.EncodeUtils() - new_src = temp_path or src - + new_src = src if os.path.isdir(new_src): - if temp_path: - if src.endswith("/"): - new_src = "{0}/{1}".format( - temp_path, os.path.basename(os.path.dirname(src)) - ) - else: - new_src = "{0}/{1}".format(temp_path, - os.path.basename(src)) try: - if not temp_path: + if remote_src: temp_dir = tempfile.mkdtemp() shutil.copytree(new_src, temp_dir, dirs_exist_ok=True) new_src = temp_dir @@ -1081,7 +1090,7 @@ def convert_encoding(self, src, temp_path, encoding): raise CopyOperationError(msg=str(err)) else: try: - if not temp_path: + if remote_src: fd, temp_src = tempfile.mkstemp() os.close(fd) shutil.copy(new_src, temp_src) @@ -1270,24 +1279,23 @@ def copy_to_uss( src, dest, conv_path, - temp_path, src_ds_type, src_member, member_name, - force + force, + content_copy, ): """Copy a file or data set to a USS location Arguments: src {str} -- The USS source dest {str} -- Destination file or directory on USS - temp_path {str} -- Path to the location where the control node - transferred data to conv_path {str} -- Path to the converted source file or directory src_ds_type {str} -- Type of source src_member {bool} -- Whether src is a data set member member_name {str} -- The name of the source data set member force {bool} -- Whether to copy files to an already existing directory + content_copy {bool} -- Whether copy is using content option or not. 
Returns: {str} -- Destination where the file was copied to @@ -1322,11 +1330,11 @@ def copy_to_uss( if "File exists" not in err: raise CopyOperationError(msg=to_native(err)) - if os.path.isfile(temp_path or conv_path or src): - dest = self._copy_to_file(src, dest, conv_path, temp_path) + if os.path.isfile(conv_path or src): + dest = self._copy_to_file(src, dest, content_copy, conv_path) changed_files = None else: - dest, changed_files = self._copy_to_dir(src, dest, conv_path, temp_path, force) + dest, changed_files = self._copy_to_dir(src, dest, conv_path, force) if self.common_file_args is not None: mode = self.common_file_args.get("mode") @@ -1347,14 +1355,13 @@ def copy_to_uss( self.module.set_owner_if_different(dest, owner, False) return dest - def _copy_to_file(self, src, dest, conv_path, temp_path): + def _copy_to_file(self, src, dest, content_copy, conv_path): """Helper function to copy a USS src to USS dest. Arguments: src {str} -- USS source file path dest {str} -- USS dest file path - temp_path {str} -- Path to the location where the control node - transferred data to + content_copy {bool} -- Whether copy is using content option or not. conv_path {str} -- Path to the converted source file or directory Raises: @@ -1363,11 +1370,10 @@ def _copy_to_file(self, src, dest, conv_path, temp_path): Returns: {str} -- Destination where the file was copied to """ - src_path = os.path.basename(src) if src else "inline_copy" + src_path = os.path.basename(src) if not content_copy else "inline_copy" if os.path.isdir(dest): dest = os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(src_path)) - - new_src = temp_path or conv_path or src + new_src = conv_path or src try: if self.is_binary: copy.copy_uss2uss_binary(new_src, dest) @@ -1402,7 +1408,6 @@ def _copy_to_dir( src_dir, dest_dir, conv_path, - temp_path, force ): """Helper function to copy a USS directory to another USS directory. 
@@ -1413,8 +1418,6 @@ def _copy_to_dir( src_dir {str} -- USS source directory dest_dir {str} -- USS dest directory conv_path {str} -- Path to the converted source directory - temp_path {str} -- Path to the location where the control node - transferred data to force {bool} -- Whether to copy files to an already existing directory Raises: @@ -1426,14 +1429,7 @@ def _copy_to_dir( that got copied. """ copy_directory = True if not src_dir.endswith("/") else False - - if temp_path: - temp_path = "{0}/{1}".format( - temp_path, - os.path.basename(os.path.normpath(src_dir)) - ) - - new_src_dir = temp_path or conv_path or src_dir + new_src_dir = conv_path or src_dir new_src_dir = os.path.normpath(new_src_dir) dest = dest_dir changed_files, original_permissions = self._get_changed_files(new_src_dir, dest_dir, copy_directory) @@ -1661,7 +1657,6 @@ def __init__( def copy_to_pdse( self, src, - temp_path, conv_path, dest, src_ds_type, @@ -1676,8 +1671,6 @@ def copy_to_pdse( Arguments: src {str} -- Path to USS file/directory or data set name. - temp_path {str} -- Path to the location where the control node - transferred data to. conv_path {str} -- Path to the converted source file/directory. dest {str} -- Name of destination data set. src_ds_type {str} -- The type of source. @@ -1685,7 +1678,7 @@ def copy_to_pdse( dest_member {str, optional} -- Name of destination member in data set. encoding {dict, optional} -- Dictionary with encoding options. 
""" - new_src = conv_path or temp_path or src + new_src = conv_path or src src_members = [] dest_members = [] @@ -2660,15 +2653,10 @@ def run_module(module, arg_def): owner = module.params.get('owner') encoding = module.params.get('encoding') volume = module.params.get('volume') - is_uss = module.params.get('is_uss') - is_pds = module.params.get('is_pds') - is_src_dir = module.params.get('is_src_dir') - is_mvs_dest = module.params.get('is_mvs_dest') - temp_path = module.params.get('temp_path') - src_member = module.params.get('src_member') tmphlq = module.params.get('tmp_hlq') force = module.params.get('force') force_lock = module.params.get('force_lock') + content = module.params.get('content') dest_data_set = module.params.get('dest_data_set') if dest_data_set: @@ -2676,6 +2664,13 @@ def run_module(module, arg_def): dest_data_set["volumes"] = [volume] copy_member = is_member(dest) + # This section we initialize different variables + # that we used to pass from the action plugin. + is_src_dir = os.path.isdir(src) + is_uss = "/" in dest + is_mvs_dest = is_data_set(dest) + is_pds = is_src_dir and is_mvs_dest + src_member = is_member(src) # ******************************************************************** # When copying to and from a data set member, 'dest' or 'src' will be @@ -2722,18 +2717,17 @@ def run_module(module, arg_def): # data sets with record format 'FBA' or 'VBA'. src_has_asa_chars = dest_has_asa_chars = False try: - # If temp_path, the plugin has copied a file from the controller to USS. - if temp_path or "/" in src: + if "/" in src: src_ds_type = "USS" - if remote_src and os.path.isdir(src): + if os.path.isdir(src): is_src_dir = True # When the destination is a dataset, we'll normalize the source # file to UTF-8 for the record length computation as Python # generally uses UTF-8 as the default encoding. 
if not is_binary and not is_uss and not executable: - new_src = temp_path or src + new_src = src new_src = os.path.normpath(new_src) # Normalizing encoding when src is a USS file (only). encode_utils = encode.EncodeUtils() @@ -2790,9 +2784,8 @@ def run_module(module, arg_def): if is_uss: dest_ds_type = "USS" if src_ds_type == "USS" and not is_src_dir and (dest.endswith("/") or os.path.isdir(dest)): - src_basename = os.path.basename(src) if src else "inline_copy" + src_basename = os.path.basename(src) if not content else "inline_copy" dest = os.path.normpath("{0}/{1}".format(dest, src_basename)) - if dest.startswith("//"): dest = dest.replace("//", "/") @@ -2841,12 +2834,7 @@ def run_module(module, arg_def): if copy_member: dest_member_exists = dest_exists and data_set.DataSet.data_set_member_exists(dest) elif src_ds_type == "USS": - if temp_path: - root_dir = "{0}/{1}".format(temp_path, os.path.basename(os.path.normpath(src))) - root_dir = os.path.normpath(root_dir) - else: - root_dir = src - + root_dir = src dest_member_exists = dest_exists and data_set.DataSet.files_in_data_set_members(root_dir, dest) elif src_ds_type in data_set.DataSet.MVS_PARTITIONED: dest_member_exists = dest_exists and data_set.DataSet.data_set_shared_members(src, dest) @@ -2987,17 +2975,13 @@ def run_module(module, arg_def): # original one. This change applies only to the # allocate_destination_data_set call. 
if converted_src: - if remote_src: - original_src = src - src = converted_src - else: - original_temp = temp_path - temp_path = converted_src + original_src = src + src = converted_src try: if not is_uss: res_args["changed"], res_args["dest_data_set_attrs"] = allocate_destination_data_set( - temp_path or src, + src, dest_name, src_ds_type, dest_ds_type, dest_exists, @@ -3010,20 +2994,14 @@ def run_module(module, arg_def): ) except Exception as err: if converted_src: - if remote_src: - src = original_src - else: - temp_path = original_temp + src = original_src module.fail_json( msg="Unable to allocate destination data set: {0}".format(str(err)), dest_exists=dest_exists ) if converted_src: - if remote_src: - src = original_src - else: - temp_path = original_temp + src = original_src # ******************************************************************** # Encoding conversion is only valid if the source is a local file, @@ -3044,7 +3022,7 @@ def run_module(module, arg_def): # if is_mvs_dest: # encoding["to"] = encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET - conv_path = copy_handler.convert_encoding(src, temp_path, encoding) + conv_path = copy_handler.convert_encoding(src, encoding, remote_src) # ------------------------------- o ----------------------------------- # Copy to USS file or directory @@ -3068,17 +3046,17 @@ def run_module(module, arg_def): src, dest, conv_path, - temp_path, src_ds_type, src_member, member_name, - force + force, + bool(content) ) res_args['size'] = os.stat(dest).st_size remote_checksum = dest_checksum = None try: - remote_checksum = get_file_checksum(temp_path or src) + remote_checksum = get_file_checksum(src) dest_checksum = get_file_checksum(dest) if validate: @@ -3100,12 +3078,11 @@ def run_module(module, arg_def): elif dest_ds_type in data_set.DataSet.MVS_SEQ: # TODO: check how ASA behaves with this if src_ds_type == "USS" and not is_binary: - new_src = conv_path or temp_path or src + new_src = conv_path or src conv_path = 
normalize_line_endings(new_src, encoding) copy_handler.copy_to_seq( src, - temp_path, conv_path, dest, src_ds_type @@ -3117,8 +3094,6 @@ def run_module(module, arg_def): # Copy to PDS/PDSE # --------------------------------------------------------------------- elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED or dest_ds_type == "LIBRARY": - if not remote_src and not copy_member and os.path.isdir(temp_path): - temp_path = os.path.join(validation.validate_safe_path(temp_path), validation.validate_safe_path(os.path.basename(src))) pdse_copy_handler = PDSECopyHandler( module, @@ -3132,7 +3107,6 @@ def run_module(module, arg_def): pdse_copy_handler.copy_to_pdse( src, - temp_path, conv_path, dest_name, src_ds_type, @@ -3163,7 +3137,7 @@ def run_module(module, arg_def): ) ) - return res_args, temp_path, conv_path + return res_args, conv_path def main(): @@ -3185,7 +3159,7 @@ def main(): ), "to": dict( type='str', - required=True, + required=False, ) } ), @@ -3255,14 +3229,6 @@ def main(): auto_reload=dict(type='bool', default=False), ) ), - is_uss=dict(type='bool'), - is_pds=dict(type='bool'), - is_src_dir=dict(type='bool'), - is_mvs_dest=dict(type='bool'), - size=dict(type='int'), - temp_path=dict(type='str'), - src_member=dict(type='bool'), - local_charset=dict(type='str'), force=dict(type='bool', default=False), force_lock=dict(type='bool', default=False), mode=dict(type='str', required=False), @@ -3333,15 +3299,16 @@ def main(): ) if ( - not module.params.get("encoding") + not module.params.get("encoding").get("to") and not module.params.get("remote_src") and not module.params.get("is_binary") and not module.params.get("executable") ): - module.params["encoding"] = { - "from": module.params.get("local_charset"), - "to": encode.Defaults.get_default_system_charset(), - } + module.params["encoding"]["to"] = encode.Defaults.get_default_system_charset() + elif ( + not module.params.get("encoding").get("to") + ): + module.params["encoding"] = None if 
module.params.get("encoding"): module.params.update( @@ -3357,15 +3324,15 @@ def main(): ) ) - res_args = temp_path = conv_path = None + res_args = conv_path = None try: - res_args, temp_path, conv_path = run_module(module, arg_def) + res_args, conv_path = run_module(module, arg_def) module.exit_json(**res_args) except CopyOperationError as err: cleanup([]) module.fail_json(**(err.json_args)) finally: - cleanup([temp_path, conv_path]) + cleanup([conv_path]) class EncodingConversionError(Exception): diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index dc4bc8071..cc26b622b 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -146,10 +146,11 @@ - Fetching HFS or ZFS type data sets is currently not supported. - For supported character sets used to encode data, refer to the L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). - - L(zos_fetch,./zos_fetch.html) uses SFTP (Secure File Transfer Protocol) for the underlying - transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, - you can exempt the Ansible userid on z/OS from using Co:Z thus falling back - to using standard SFTP. + - This module uses SFTP (Secure File Transfer Protocol) for the underlying + transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the + case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling + back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for + transfers, if not available, the module will fail. 
seealso: - module: zos_data_set - module: zos_copy diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 1fd5030b5..7c66c2543 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -25,9 +25,8 @@ - "Demetrios Dimatos (@ddimatos)" short_description: Submit JCL description: - - Submit JCL from a data set, USS, or from the controller. - - Submit a job and optionally monitor for completion. - - Optionally, wait a designated time until the job finishes. + - Submit JCL in a data set, USS file, or file on the controller. + - Submit a job and monitor for completion. - For an uncataloged dataset, specify the volume serial number. version_added: "1.0.0" options: @@ -126,6 +125,13 @@ notes: - For supported character sets used to encode data, refer to the L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). + - This module uses L(zos_copy,./zos_copy.html) to copy local scripts to + the remote machine which uses SFTP (Secure File Transfer Protocol) for the + underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not + supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS + from using Co:Z thus falling back to using standard SFTP. If the module detects + SCP, it will temporarily use SFTP for transfers, if not available, the module + will fail. """ RETURN = r""" @@ -217,28 +223,40 @@ contains: msg: description: - Return code resulting from the job submission. Jobs that take - longer to assign a value can have a value of '?'. + - Job status resulting from the job submission. 
+ - Job status `ABEND` indicates the job ended abnormally. + - Job status `AC` indicates the job is active, often a started task or job taking long. + - Job status `CAB` indicates a converter abend. + - Job status `CANCELED` indicates the job was canceled. + - Job status `CNV` indicates a converter error. + - Job status `FLU` indicates the job was flushed. + - Job status `JCLERR` or `JCL ERROR` indicates the JCL has an error. + - Job status `SEC` or `SEC ERROR` indicates the job as encountered a security error. + - Job status `SYS` indicates a system failure. + - Job status `?` indicates status can not be determined. + - Jobs where status can not be determined will result in None (NULL). type: str - sample: CC 0000 + sample: AC msg_code: description: - Return code extracted from the `msg` so that it can be evaluated - as a string. Jobs that take longer to assign a value can have a - value of '?'. + - The return code from the submitted job as a string. + - Jobs which have no return code will result in None (NULL), such + is the case of a job that errors or is active. type: str sample: 0000 msg_txt: description: - Returns additional information related to the job. Jobs that take - longer to assign a value can have a value of '?'. + - Returns additional information related to the submitted job. + - Jobs which have no additional information will result in None (NULL). type: str - sample: The job completion code (CC) was not available in the job - output, please review the job log." + sample: The job JOB00551 was run with special job processing TYPRUN=SCAN. + This will result in no completion, return code or job steps and + changed will be false. code: description: - Return code converted to an integer value (when possible). - For JCL ERRORs, this will be None. + - The return code converted to an integer value when available. + - Jobs which have no return code will result in None (NULL), such + is the case of a job that errors or is active. 
type: int sample: 0 steps: @@ -537,15 +555,10 @@ "system": "STL1" } ] -message: - description: This option is being deprecated - returned: success - type: str - sample: Submit JCL operation succeeded. """ EXAMPLES = r""" -- name: Submit JCL in a PDSE member +- name: Submit JCL in a PDSE member. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) location: DATA_SET @@ -597,7 +610,7 @@ BetterArgParser, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.job import ( - job_output, + job_output, search_dictionaries, JOB_ERROR_STATUSES ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( ZOAUImportError, @@ -627,8 +640,10 @@ jobs = ZOAUImportError(traceback.format_exc()) -JOB_COMPLETION_MESSAGES = frozenset(["CC", "ABEND", "SEC ERROR", "JCL ERROR", "JCLERR"]) -JOB_ERROR_MESSAGES = frozenset(["ABEND", "SEC ERROR", "SEC", "JCL ERROR", "JCLERR"]) +JOB_STATUSES = list(dict.fromkeys(JOB_ERROR_STATUSES)) +JOB_STATUSES.append("CC") + +JOB_SPECIAL_PROCESSING = frozenset(["TYPRUN"]) MAX_WAIT_TIME_S = 86400 @@ -693,23 +708,39 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, volume=N # which is what ZOAU sends back, opitonally we can check the 'status' as # that is sent back as `AC` when the job is not complete but the problem # with monitoring 'AC' is that STARTED tasks never exit the AC status. 
+ job_fetched = None + job_fetch_rc = None + job_fetch_status = None + if job_submitted: - job_fetch_rc = jobs.fetch_multiple(job_submitted.job_id)[0].return_code - job_fetch_status = jobs.fetch_multiple(job_submitted.job_id)[0].status + try: + job_fetched = jobs.fetch_multiple(job_submitted.job_id)[0] + job_fetch_rc = job_fetched.return_code + job_fetch_status = job_fetched.status + except zoau_exceptions.JobFetchException: + pass # Before moving forward lets ensure our job has completed but if we see - # status that matches one in JOB_ERROR_MESSAGES, don't wait, let the code - # drop through and get analyzed in the main as it will scan the job ouput. - # Any match to JOB_ERROR_MESSAGES ends our processing and wait times. - while (job_fetch_status not in JOB_ERROR_MESSAGES and + # status that matches one in JOB_STATUSES, don't wait, let the code + # drop through and get analyzed in the main as it will scan the job ouput + # Any match to JOB_STATUSES ends our processing and wait times + while (job_fetch_status not in JOB_STATUSES and job_fetch_status == 'AC' and ((job_fetch_rc is None or len(job_fetch_rc) == 0 or job_fetch_rc == '?') and duration < timeout)): current_time = timer() duration = round(current_time - start_time) sleep(1) - job_fetch_rc = jobs.fetch_multiple(job_submitted.job_id)[0].return_code - job_fetch_status = jobs.fetch_multiple(job_submitted.job_id)[0].status + try: + job_fetched = jobs.fetch_multiple(job_submitted.job_id)[0] + job_fetch_rc = job_fetched.return_code + job_fetch_status = job_fetched.status + # Allow for jobs that need more time to be fectched to run the wait_time_s + except zoau_exceptions.JobFetchException as err: + if duration >= timeout: + raise err + else: + continue # ZOAU throws a JobSubmitException when the job sumbission fails thus there is no # JCL RC to share with the user, if there is a RC, that will be processed @@ -736,11 +767,12 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, volume=N 
result["stderr"] = to_text(err) result["duration"] = duration result["job_id"] = job_submitted.job_id + _msg_detail = "the job with status {0}".format(job_fetch_status) if job_fetch_status else "its status" result["msg"] = ("The JCL has been submitted {0} with ID {1} but there was an " - "error while fetching its status within the allocated time of {2} " + "error while fetching {2} within the allocated time of {3} " "seconds. Consider using module zos_job_query to poll for the " "job for more information. Standard error may have additional " - "information.".format(src_name, job_submitted.job_id, str(timeout))) + "information.".format(src_name, job_submitted.job_id, _msg_detail, str(timeout))) module.fail_json(**result) # Between getting a job_submitted and the jobs.fetch_multiple(job_submitted.job_id)[0].return_code @@ -882,7 +914,7 @@ def run_module(): if wait_time_s <= 0 or wait_time_s > MAX_WAIT_TIME_S: result["failed"] = True - result["msg"] = ("The value for option `wait_time_s` is not valid, it must " + result["msg"] = ("The value for option 'wait_time_s' is not valid, it must " "be greater than 0 and less than {0}.".format(str(MAX_WAIT_TIME_S))) module.fail_json(**result) @@ -899,29 +931,39 @@ def run_module(): job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, is_unix=True) - try: - # Explictly pass None for the unused args else a default of '*' will be - # used and return undersirable results - job_output_txt = None + # Explictly pass None for the unused args else a default of '*' will be + # used and return undersirable results + job_output_txt = None + try: job_output_txt = job_output( job_id=job_submitted_id, owner=None, job_name=None, dd_name=None, dd_scan=return_output, duration=duration, timeout=wait_time_s, start_time=start_time) + # This is resolvig a bug where the duration coming from job_output is passed by value, duration + # being an immutable type can not be changed and must be returned or accessed 
from the job.py. + if job_output is not None: + duration = job_output_txt[0].get("duration") if not None else duration + result["duration"] = duration if duration >= wait_time_s: result["failed"] = True result["changed"] = False + _msg = ("The JCL submitted with job id {0} but appears to be a long " + "running job that exceeded its maximum wait time of {1} " + "second(s). Consider using module zos_job_query to poll for " + "a long running job or increase option 'wait_times_s' to a value " + "greater than {2}.".format(str(job_submitted_id), str(wait_time_s), str(duration))) + _msg_suffix = ("Consider using module zos_job_query to poll for " + "a long running job or increase option 'wait_times_s' to a value " + "greater than {0}.".format(str(duration))) + if job_output_txt is not None: result["jobs"] = job_output_txt - result["msg"] = ( - "The JCL submitted with job id {0} but appears to be a long " - "running job that exceeded its maximum wait time of {1} " - "second(s). Consider using module zos_job_query to poll for " - "a long running job or increase option 'wait_times_s` to a value " - "greater than {2}.".format( - str(job_submitted_id), str(wait_time_s), str(duration))) + job_ret_code = job_output_txt[0].get("ret_code") + job_ret_code.update({"msg_txt": _msg_suffix}) + result["msg"] = _msg module.exit_json(**result) # Job has submitted, the module changed the managed node @@ -932,35 +974,76 @@ def run_module(): job_ret_code = job_output_txt[0].get("ret_code") if job_ret_code: - job_msg = job_ret_code.get("msg") - job_code = job_ret_code.get("code") - - # retcode["msg"] should never be empty where a retcode["code"] can be None, - # "msg" could be an ABEND which has no corresponding "code" - if job_msg is None: - _msg = ("Unable to find a 'msg' in the 'ret_code' dictionary, " - "please review the job log.") - result["stderr"] = _msg - raise Exception(_msg) + job_ret_code_msg = job_ret_code.get("msg") + job_ret_code_code = job_ret_code.get("code") + 
job_ret_code_msg_code = job_ret_code.get("msg_code") if return_output is True and max_rc is not None: - is_changed = assert_valid_return_code(max_rc, job_code, job_ret_code) - - if re.search("^(?:{0})".format("|".join(JOB_COMPLETION_MESSAGES)), job_msg): - # If the job_msg doesn't have a CC, it is an improper completion (error/abend) - if re.search("^(?:CC)", job_msg) is None: - _msg = ("The job completion code (CC) was not in the job log. " - "Please review the error {0} and the job log.".format(job_msg)) - result["stderr"] = _msg + is_changed = assert_valid_return_code(max_rc, job_ret_code_code, job_ret_code, result) + + if job_ret_code_msg is not None: + if re.search("^(?:{0})".format("|".join(JOB_STATUSES)), job_ret_code_msg): + # If the job_ret_code_msg doesn't have a CC (completion code), the job failed. + if re.search("^(?:CC)", job_ret_code_msg) is None: + _msg = ("The job completion code (CC) was not in the job log. " + "please review the job log for status {0}.".format(job_ret_code_msg)) + result["stderr"] = _msg + job_ret_code.update({"msg_txt": _msg}) + raise Exception(_msg) + + if job_ret_code_code is None: + # If there is no job_ret_code_code (Job return code) it may NOT be an error, + # some jobs will never return have an RC, eg Jobs with TYPRUN=*, + # Started tasks (which are not supported) so further analyze the + # JESJCL DD to figure out if its a TYPRUN job + + job_dd_names = job_output_txt[0].get("ddnames") + jes_jcl_dd = search_dictionaries("ddname", "JESJCL", job_dd_names) + + # Its possible jobs don't have a JESJCL which are active and this would + # cause an index out of range error. + if not jes_jcl_dd: + _msg_detail = " for status {0}.".format(job_ret_code_msg) if job_ret_code_msg else "." 
+ _msg = ("The job return code was not available in the job log, " + "please review the job log{0}".format(_msg_detail)) + job_ret_code.update({"msg_txt": _msg}) raise Exception(_msg) - if job_code is None: - raise Exception("The job return code was not available in the job log, " - "please review the job log and error {0}.".format(job_msg)) - - if job_code != 0 and max_rc is None: - raise Exception("The job return code {0} was non-zero in the " - "job output, this job has failed.".format(str(job_code))) + jes_jcl_dd_content = jes_jcl_dd[0].get("content") + jes_jcl_dd_content_str = " ".join(jes_jcl_dd_content) + + # The regex can be r"({0})\s*=\s*(COPY|HOLD|JCLHOLD|SCAN)" once zoau support is in. + special_processing_keyword = re.search(r"({0})\s*=\s*(SCAN)" + .format("|".join(JOB_SPECIAL_PROCESSING)), jes_jcl_dd_content_str) + + if special_processing_keyword: + job_ret_code.update({"msg": special_processing_keyword[0]}) + job_ret_code.update({"code": None}) + job_ret_code.update({"msg_code": None}) + job_ret_code.update({"msg_txt": "The job {0} was run with special job " + "processing {1}. This will result in no completion, " + "return code or job steps and changed will be false." + .format(job_submitted_id, special_processing_keyword[0])}) + is_changed = False + else: + # The job_ret_code_code is None at this point, but the job_ret_code_msg_code could be populated + # so check both and provide a proper response. + + if job_ret_code_msg_code is None: + _msg_detail = " for status {0}.".format(job_ret_code_msg) if job_ret_code_msg else "." 
+ _msg = ("The job return code was not available in the job log, " + "please review the job log{0}".format(_msg_detail)) + job_ret_code.update({"msg_txt": _msg}) + raise Exception(_msg) + + # raise Exception("The job return code was not available in the job log, " + # "please review the job log and error {0}.".format(job_ret_code_msg)) + elif job_ret_code_code != 0 and max_rc is None: + _msg = ("The job return code {0} was non-zero in the " + "job output, this job has failed.".format(str(job_ret_code_code))) + job_ret_code.update({"msg_txt": _msg}) + result["stderr"] = _msg + raise Exception(_msg) if not return_output: for job in result.get("jobs", []): @@ -975,7 +1058,6 @@ def run_module(): result["stderr"] = _msg result["jobs"] = None raise Exception(_msg) - except Exception as err: result["failed"] = True result["changed"] = False @@ -995,27 +1077,32 @@ def run_module(): module.exit_json(**result) -def assert_valid_return_code(max_rc, job_rc, ret_code): +def assert_valid_return_code(max_rc, job_rc, ret_code, result): if job_rc is None: raise Exception( "The job return code (ret_code[code]) was not available in the jobs output, " "this job has failed.") if job_rc > max_rc: - raise Exception("The job return code, 'ret_code[code]' {0} for the submitted job is " - "greater than the value set for option 'max_rc' {1}. " - "Increase the value for 'max_rc' otherwise this job submission " - "has failed.".format(str(job_rc), str(max_rc))) + _msg = ("The job return code, 'ret_code[code]' {0} for the submitted job is " + "greater than the value set for option 'max_rc' {1}. 
" + "Increase the value for 'max_rc' otherwise this job submission " + "has failed.".format(str(job_rc), str(max_rc))) + ret_code.update({"msg_txt": _msg}) + result["stderr"] = _msg + raise Exception(_msg) for step in ret_code["steps"]: step_cc_rc = int(step["step_cc"]) step_name_for_rc = step["step_name"] if step_cc_rc > max_rc: - raise Exception("The step name {0} with return code {1} for the submitted job is " - "greater than the value set for option 'max_rc' {2}. " - "Increase the value for 'max_rc' otherwise this job submission " - "has failed.".format(step_name_for_rc, str(step_cc_rc), str(max_rc))) - + _msg = ("The step name {0} with return code {1} for the submitted job is " + "greater than the value set for option 'max_rc' {2}. " + "Increase the value for 'max_rc' otherwise this job submission " + "has failed.".format(step_name_for_rc, str(step_cc_rc), str(max_rc))) + ret_code.update({"msg_txt": _msg}) + result["stderr"] = _msg + raise Exception(_msg) # If there is NO exception rasied it means that max_rc is larger than the # actual RC from the submitted job. In this case, the ansible changed status # should NOT be 'changed=true' even though the user did override the return code, diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index 502d2ead7..a440c31c6 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -96,16 +96,16 @@ - Maps to DSNTYPE on z/OS. type: str choices: - - library - - pds - - pdse - - large - - basic - - seq - - rrds - - esds - - lds - - ksds + - LIBRARY + - PDS + - PDSE + - LARGE + - BASIC + - SEQ + - RRDS + - ESDS + - LDS + - KSDS disposition: description: - I(disposition) indicates the status of a data set. @@ -174,12 +174,12 @@ using I(space_primary) and I(space_secondary). type: str choices: - - trk - - cyl - - b - - k - - m - - g + - TRK + - CYL + - B + - K + - M + - G space_primary: description: - The primary amount of space to allocate for a new data set. 
@@ -325,11 +325,11 @@ - The format and characteristics of the records for new data set. type: str choices: - - u - - vb - - vba - - fb - - fba + - U + - VB + - VBA + - FB + - FBA return_content: description: - Determines how content should be returned to the user. @@ -505,11 +505,11 @@ a UNIX file would normally be treated as a stream of bytes. type: str choices: - - u - - vb - - vba - - fb - - fba + - U + - VB + - VBA + - FB + - FBA return_content: description: - Determines how content should be returned to the user. @@ -717,16 +717,16 @@ - Maps to DSNTYPE on z/OS. type: str choices: - - library - - pds - - pdse - - large - - basic - - seq - - rrds - - esds - - lds - - ksds + - LIBRARY + - PDS + - PDSE + - LARGE + - BASIC + - SEQ + - RRDS + - ESDS + - LDS + - KSDS disposition: description: - I(disposition) indicates the status of a data set. @@ -795,12 +795,12 @@ using I(space_primary) and I(space_secondary). type: str choices: - - trk - - cyl - - b - - k - - m - - g + - TRK + - CYL + - B + - K + - M + - G space_primary: description: - The primary amount of space to allocate for a new data set. @@ -946,11 +946,11 @@ - The format and characteristics of the records for new data set. type: str choices: - - u - - vb - - vba - - fb - - fba + - U + - VB + - VBA + - FB + - FBA return_content: description: - Determines how content should be returned to the user. @@ -1124,11 +1124,11 @@ a UNIX file would normally be treated as a stream of bytes. type: str choices: - - u - - vb - - vba - - fb - - fba + - U + - VB + - VBA + - FB + - FBA return_content: description: - Determines how content should be returned to the user. 
@@ -1300,13 +1300,13 @@ data_set_name: mypgm.output.ds disposition: new reuse: yes - type: seq + type: SEQ space_primary: 5 space_secondary: 1 - space_type: m + space_type: M volumes: - "000000" - record_format: fb + record_format: FB return_content: type: text - dd_input: @@ -1324,13 +1324,13 @@ data_set_name: mypgm.output.ds disposition: new reuse: yes - type: seq + type: SEQ space_primary: 5 space_secondary: 1 - space_type: m + space_type: M volumes: - "000000" - record_format: fb + record_format: FB return_content: type: text - dd_input: @@ -1369,13 +1369,13 @@ data_set_name: mypgm.output.ds disposition: new reuse: yes - type: seq + type: SEQ space_primary: 5 space_secondary: 1 - space_type: m + space_type: M volumes: - "000000" - record_format: fb + record_format: FB return_content: type: text - dd_input: @@ -1398,15 +1398,15 @@ disposition: new replace: yes backup: yes - type: seq + type: SEQ space_primary: 5 space_secondary: 1 - space_type: m + space_type: M volumes: - "000000" - "111111" - "SCR002" - record_format: fb + record_format: FB return_content: type: text - dd_input: @@ -1628,10 +1628,6 @@ backups = [] -# Use of global tmphlq to keep coherent classes definitions -g_tmphlq = "" - - def run_module(): """Executes all module-related functions. 
@@ -1651,7 +1647,7 @@ def run_module(): type="str", choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"], ), - space_type=dict(type="str", choices=["trk", "cyl", "b", "k", "m", "g"]), + space_type=dict(type="str", choices=["TRK", "CYL", "B", "K", "M", "G"]), space_primary=dict(type="int"), space_secondary=dict(type="int"), volumes=dict(type="raw"), @@ -1664,16 +1660,16 @@ def run_module(): type=dict( type="str", choices=[ - "library", - "pds", - "pdse", - "seq", - "basic", - "large", - "ksds", - "rrds", - "lds", - "esds", + "LIBRARY", + "PDS", + "PDSE", + "SEQ", + "BASIC", + "LARGE", + "KSDS", + "RRDS", + "LDS", + "ESDS", ], ), encryption_key_1=dict( @@ -1695,7 +1691,7 @@ def run_module(): key_length=dict(type="int", no_log=False), key_offset=dict(type="int", no_log=False), record_length=dict(type="int"), - record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), + record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), return_content=dict( type="dict", options=dict( @@ -1770,7 +1766,7 @@ def run_module(): ), block_size=dict(type="int"), record_length=dict(type="int"), - record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), + record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), return_content=dict( type="dict", options=dict( @@ -1839,8 +1835,7 @@ def run_module(): if not module.check_mode: try: parms = parse_and_validate_args(module.params) - global g_tmphlq - g_tmphlq = parms.get("tmp_hlq") + tmphlq = parms.get("tmp_hlq") dd_statements = build_dd_statements(parms) program = parms.get("program_name") program_parm = parms.get("parm") @@ -1852,6 +1847,7 @@ def run_module(): dd_statements=dd_statements, authorized=authorized, verbose=verbose, + tmp_hlq=tmphlq, ) if program_response.rc != 0 and program_response.stderr: raise ZOSRawError( @@ -1894,7 +1890,7 @@ def parse_and_validate_args(params): type="str", choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"], ), 
- space_type=dict(type="str", choices=["trk", "cyl", "b", "k", "m", "g"]), + space_type=dict(type="str", choices=["TRK", "CYL", "B", "K", "M", "G"]), space_primary=dict(type="int"), space_secondary=dict(type="int"), volumes=dict(type=volumes), @@ -1907,16 +1903,16 @@ def parse_and_validate_args(params): type=dict( type="str", choices=[ - "library", - "pds", - "pdse", - "seq", - "basic", - "large", - "ksds", - "rrds", - "lds", - "esds", + "LIBRARY", + "PDS", + "PDSE", + "SEQ", + "BASIC", + "LARGE", + "KSDS", + "RRDS", + "LDS", + "ESDS", ], ), encryption_key_1=dict( @@ -1940,7 +1936,7 @@ def parse_and_validate_args(params): type=key_offset, default=key_offset_default, dependencies=["type"] ), record_length=dict(type="int"), - record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), + record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), return_content=dict( type="dict", options=dict( @@ -1996,7 +1992,7 @@ def parse_and_validate_args(params): ), block_size=dict(type="int"), record_length=dict(type="int"), - record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), + record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), return_content=dict( type="dict", options=dict( @@ -2088,8 +2084,8 @@ def key_length(contents, dependencies): """ if contents is None: return contents - if contents is not None and dependencies.get("type") != "ksds": - raise ValueError('key_length is only valid when "type=ksds".') + if contents is not None and dependencies.get("type") != "KSDS": + raise ValueError('key_length is only valid when "type=KSDS".') if not re.fullmatch(r"[0-9]+", str(contents)): raise ValueError( 'Invalid argument "{0}" for type "key_length".'.format(str(contents)) @@ -2109,8 +2105,8 @@ def key_offset(contents, dependencies): """ if contents is None: return contents - if contents is not None and dependencies.get("type") != "ksds": - raise ValueError('key_offset is only valid when "type=ksds".') + if contents 
is not None and dependencies.get("type") != "KSDS": + raise ValueError('key_offset is only valid when "type=KSDS".') if not re.fullmatch(r"[0-9]+", str(contents)): raise ValueError( @@ -2131,9 +2127,9 @@ def key_length_default(contents, dependencies): """ KEY_LENGTH = 5 length = None - if contents is None and dependencies.get("type") == "ksds": + if contents is None and dependencies.get("type") == "KSDS": length = KEY_LENGTH - elif dependencies.get("type") == "ksds": + elif dependencies.get("type") == "KSDS": length = contents return length @@ -2149,9 +2145,9 @@ def key_offset_default(contents, dependencies): """ KEY_OFFSET = 0 offset = None - if contents is None and dependencies.get("type") == "ksds": + if contents is None and dependencies.get("type") == "KSDS": offset = KEY_OFFSET - elif dependencies.get("type") == "ksds": + elif dependencies.get("type") == "KSDS": offset = contents return offset @@ -2408,7 +2404,7 @@ def build_dd_statements(parms): dd_statements = [] for dd in parms.get("dds"): dd_name = get_dd_name(dd) - dd = set_extra_attributes_in_dd(dd) + dd = set_extra_attributes_in_dd(dd, parms) data_definition = build_data_definition(dd) if data_definition is None: raise ValueError("No valid data definition found.") @@ -2444,26 +2440,27 @@ def get_dd_name(dd): return dd_name -def set_extra_attributes_in_dd(dd): +def set_extra_attributes_in_dd(dd, parms): """ - Set any extra attributes in dds like in global g_tmphlq. + Set any extra attributes in dds like in global tmp_hlq. Args: dd (dict): A single DD parm as specified in module parms. Returns: dd (dict): A single DD parm as specified in module parms. 
""" + tmphlq = parms.get("tmp_hlq") if dd.get("dd_data_set"): - dd.get("dd_data_set")["tmphlq"] = g_tmphlq + dd.get("dd_data_set")["tmphlq"] = tmphlq elif dd.get("dd_input"): - dd.get("dd_input")["tmphlq"] = g_tmphlq + dd.get("dd_input")["tmphlq"] = tmphlq elif dd.get("dd_output"): - dd.get("dd_output")["tmphlq"] = g_tmphlq + dd.get("dd_output")["tmphlq"] = tmphlq elif dd.get("dd_vio"): - dd.get("dd_vio")["tmphlq"] = g_tmphlq + dd.get("dd_vio")["tmphlq"] = tmphlq elif dd.get("dd_concat"): for single_dd in dd.get("dd_concat").get("dds", []): - set_extra_attributes_in_dd(single_dd) + set_extra_attributes_in_dd(single_dd, parms) return dd @@ -2572,6 +2569,7 @@ def __init__( """ self.backup = None self.return_content = ReturnContent(**(return_content or {})) + self.tmphlq = tmphlq primary_unit = space_type secondary_unit = space_type key_label1 = None @@ -2698,7 +2696,6 @@ def __init__( ) -# TODO: potentially extend the available parameters to end user class RawInputDefinition(InputDefinition): """Wrapper around InputDefinition to contain information about desired return contents. @@ -2707,7 +2704,7 @@ class RawInputDefinition(InputDefinition): InputDefinition (InputDefinition): Input DD data type to be used in a DDStatement. """ - def __init__(self, content="", return_content=None, **kwargs): + def __init__(self, content="", return_content=None, tmphlq="", **kwargs): """Initialize RawInputDefinition Args: @@ -2715,7 +2712,7 @@ def __init__(self, content="", return_content=None, **kwargs): return_content (dict, optional): Determines how content should be returned to the user. Defaults to {}. """ self.return_content = ReturnContent(**(return_content or {})) - super().__init__(content=content) + super().__init__(content=content, tmphlq=tmphlq) class RawOutputDefinition(OutputDefinition): @@ -2726,7 +2723,7 @@ class RawOutputDefinition(OutputDefinition): OutputDefinition (OutputDefinition): Output DD data type to be used in a DDStatement. 
""" - def __init__(self, return_content=None, **kwargs): + def __init__(self, return_content=None, tmphlq="", **kwargs): """Initialize RawOutputDefinition Args: @@ -2734,7 +2731,7 @@ def __init__(self, return_content=None, **kwargs): return_content (dict, optional): Determines how content should be returned to the user. Defaults to {}. """ self.return_content = ReturnContent(**(return_content or {})) - super().__init__() + super().__init__(tmphlq=tmphlq) class ReturnContent(object): @@ -2761,28 +2758,6 @@ def __init__(self, type=None, src_encoding=None, response_encoding=None): self.response_encoding = response_encoding -def to_bytes(size, unit): - """Convert sizes of various units to bytes. - - Args: - size (int): The size to convert. - unit (str): The unit of size. - - Returns: - int: The size converted to bytes. - """ - num_bytes = 0 - if unit == "b": - num_bytes = size - elif unit == "k": - num_bytes = size * 1024 - elif unit == "m": - num_bytes = size * 1048576 - elif unit == "g": - num_bytes = size * 1073741824 - return num_bytes - - def rename_parms(parms, name_map): """Rename parms based on a provided dictionary. @@ -2839,7 +2814,7 @@ def data_set_exists(name, volumes=None): def run_zos_program( - program, parm="", dd_statements=None, authorized=False, verbose=False + program, parm="", dd_statements=None, authorized=False, verbose=False, tmp_hlq=None ): """Run a program on z/OS. @@ -2848,6 +2823,7 @@ def run_zos_program( parm (str, optional): Additional argument string if required. Defaults to "". dd_statements (list[DDStatement], optional): DD statements to allocate for the program. Defaults to []. authorized (bool, optional): Determines if program will execute as an authorized user. Defaults to False. + tmp_hlq (str, optional): Arguments overwrite variable tmp_hlq Returns: MVSCmdResponse: Holds the response information for program execution. 
@@ -2857,11 +2833,11 @@ def run_zos_program( response = None if authorized: response = MVSCmd.execute_authorized( - pgm=program, parm=parm, dds=dd_statements, verbose=verbose + pgm=program, parm=parm, dds=dd_statements, verbose=verbose, tmp_hlq=tmp_hlq ) else: response = MVSCmd.execute( - pgm=program, parm=parm, dds=dd_statements, verbose=verbose + pgm=program, parm=parm, dds=dd_statements, verbose=verbose, tmp_hlq=tmp_hlq ) return response diff --git a/plugins/modules/zos_ping.py b/plugins/modules/zos_ping.py index 6de0cccf0..5f134cd90 100644 --- a/plugins/modules/zos_ping.py +++ b/plugins/modules/zos_ping.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_ping.rexx b/plugins/modules/zos_ping.rexx index a881146b0..beca54c3b 100644 --- a/plugins/modules/zos_ping.rexx +++ b/plugins/modules/zos_ping.rexx @@ -85,7 +85,7 @@ If (rc <> 0 | returnCode <> HWTJ_OK) Then Do failModule(errmsg, "", retC) End -/* Check for Python version >= 3.8 eg: 'Python 3.10.0' */ +/* Check for Python version >= 3.10 eg: 'Python 3.10.0' */ retC = bpxwunix('python3 --version', out., err.) If (err.0 > 0) Then Do Do index=1 To err.0 diff --git a/plugins/modules/zos_script.py b/plugins/modules/zos_script.py index b69d70b2d..0677d187d 100644 --- a/plugins/modules/zos_script.py +++ b/plugins/modules/zos_script.py @@ -116,11 +116,12 @@ - For supported character sets used to encode data, refer to the L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). - This module uses L(zos_copy,./zos_copy.html) to copy local scripts to - the remote machine. 
- - L(zos_copy,./zos_copy.html) uses SFTP (Secure File Transfer Protocol) - for the underlying transfer protocol; Co:Z SFTP is not supported. In - the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from - using Co:Z thus falling back to using standard SFTP. + the remote machine which uses SFTP (Secure File Transfer Protocol) for the + underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not + supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS + from using Co:Z thus falling back to using standard SFTP. If the module detects + SCP, it will temporarily use SFTP for transfers, if not available, the module + will fail. - This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets or CLISTs, consider issuing a TSO command with L(zos_tso_command,./zos_tso_command.html). diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index 6c2cb6ef6..17e190fb2 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index e9b17766c..aa315b3fb 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -29,8 +29,6 @@ - Supported sources are USS (UNIX System Services) or z/OS data sets. - Mixing MVS data sets with USS files for unarchiving is not supported. - The archive is sent to the remote as binary, so no encoding is performed. - - options: src: description: @@ -311,12 +309,17 @@ type: bool required: false default: false - notes: - VSAMs are not supported. 
- + - This module uses L(zos_copy,./zos_copy.html) to copy local scripts to + the remote machine which uses SFTP (Secure File Transfer Protocol) for the + underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not + supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS + from using Co:Z thus falling back to using standard SFTP. If the module detects + SCP, it will temporarily use SFTP for transfers, if not available, the module + will fail. seealso: - - module: zos_unarchive + - module: zos_archive ''' EXAMPLES = r''' diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index cf7f1494b..6e6a9a073 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -794,6 +794,12 @@ def test_copy_subdirs_folders_and_validate_recursive_encoding_local(ansible_zos_ @pytest.mark.uss @pytest.mark.parametrize("copy_directory", [False, True]) def test_copy_local_dir_to_non_existing_dir(ansible_zos_module, copy_directory): + """ + This test evaluates the behavior of testing copy of a directory when src ends + with '/' versus only the dir name. Expectation is that when only dir name is provided + that directory is also created on the remote, when directory name ends with '/' + this means we only copy that directory contents without creating it on the remote. 
+ """ hosts = ansible_zos_module dest_path = "/tmp/new_dir" @@ -1684,7 +1690,7 @@ def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="seq", + type="SEQ", replace=True ) @@ -1733,7 +1739,7 @@ def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="seq", + type="SEQ", replace=True ) @@ -1784,7 +1790,7 @@ def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="pdse", + type="PDSE", replace=True ) @@ -1834,7 +1840,7 @@ def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="pdse", + type="PDSE", replace=True ) @@ -1884,7 +1890,7 @@ def test_copy_asa_data_set_to_text_file(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="seq", + type="SEQ", record_format="FBA", record_length=80, block_size=27920, @@ -1977,8 +1983,8 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): hosts.all.zos_data_set(name=data_set_1, state="present", type=ds_type, replace=True) hosts.all.zos_data_set(name=data_set_2, state="present", type=ds_type, replace=True) if ds_type == "PDS" or ds_type == "PDSE": - hosts.all.zos_data_set(name=src_data_set, state="present", type="member", replace=True) - hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True) + hosts.all.zos_data_set(name=src_data_set, state="present", type="MEMBER", replace=True) + hosts.all.zos_data_set(name=dest_data_set, state="present", type="MEMBER", replace=True) # copy text_in source hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(DUMMY_DATA, src_data_set)) # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) @@ -2266,7 +2272,7 @@ def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - 
hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="SEQ", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2294,7 +2300,7 @@ def test_copy_file_to_non_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") + hosts.all.zos_data_set(name=dest, type="SEQ", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2432,7 +2438,7 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="SEQ", state="present") copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) verify_copy = hosts.all.shell( @@ -2458,7 +2464,7 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") + hosts.all.zos_data_set(name=dest, type="SEQ", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2489,7 +2495,7 @@ def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") + hosts.all.zos_data_set(name=dest, type="SEQ", state="absent") hosts.all.zos_copy(content=DUMMY_DATA_SPECIAL_CHARS, dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2520,7 +2526,7 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", 
state="present") + hosts.all.zos_data_set(name=dest, type="SEQ", state="present") if backup: copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup) @@ -2565,10 +2571,10 @@ def test_copy_file_to_non_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="pdse", + type="PDSE", space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=80, replace=True ) @@ -2611,14 +2617,14 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="pdse", + type="PDSE", space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=80, replace=True ) - hosts.all.zos_data_set(name=dest, type="member", state="present") + hosts.all.zos_data_set(name=dest, type="MEMBER", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, force=src["force"], remote_src=src["is_remote"]) @@ -2647,31 +2653,31 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="seq", is_binary=False), - dict(type="seq", is_binary=True), - dict(type="pds", is_binary=False), - dict(type="pds", is_binary=True), - dict(type="pdse", is_binary=False), - dict(type="pdse", is_binary=True) + dict(type="SEQ", is_binary=False), + dict(type="SEQ", is_binary=True), + dict(type="PDS", is_binary=False), + dict(type="PDS", is_binary=True), + dict(type="PDSE", is_binary=False), + dict(type="PDSE", is_binary=True) ]) def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "SEQ" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: 
hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "seq": - hosts.all.zos_data_set(name=src, type="member") + if args["type"] != "SEQ": + hosts.all.zos_data_set(name=src, type="MEMBER") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) + hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) copy_result = hosts.all.zos_copy(src=src, dest=dest, is_binary=args["is_binary"], remote_src=True) verify_copy = hosts.all.shell( @@ -2694,32 +2700,32 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="seq", force=False), - dict(type="seq", force=True), - dict(type="pds", force=False), - dict(type="pds", force=True), - dict(type="pdse", force=False), - dict(type="pdse", force=True) + dict(type="SEQ", force=False), + dict(type="SEQ", force=True), + dict(type="PDS", force=False), + dict(type="PDS", force=True), + dict(type="PDSE", force=False), + dict(type="PDSE", force=True) ]) def test_copy_data_set_to_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "SEQ" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "seq": - hosts.all.zos_data_set(name=src, type="member") + if args["type"] != "SEQ": + hosts.all.zos_data_set(name=src, type="MEMBER") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) - hosts.all.zos_data_set(name=dest, type="member") + 
hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) + hosts.all.zos_data_set(name=dest, type="MEMBER") copy_result = hosts.all.zos_copy(src=src, dest=dest, force=args["force"], remote_src=True) verify_copy = hosts.all.shell( @@ -2838,7 +2844,7 @@ def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_dir = "/tmp/testdir" @@ -2854,7 +2860,7 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): type=src_type, space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=80, ) @@ -2877,18 +2883,18 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["SEQ", "PDS", "PDSE"]) def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if src_type == "seq" else "{0}(TEST)".format(src_data_set) + src = src_data_set if src_type == "SEQ" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=src_type) - if src_type != "seq": - hosts.all.zos_data_set(name=src, type="member") + if src_type != "SEQ": + hosts.all.zos_data_set(name=src, type="MEMBER") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), @@ -2918,10 +2924,10 @@ def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(src_type="pds", dest_type="pds"), - dict(src_type="pds", dest_type="pdse"), - dict(src_type="pdse", dest_type="pds"), - dict(src_type="pdse", 
dest_type="pdse"), + dict(src_type="PDS", dest_type="PDS"), + dict(src_type="PDS", dest_type="PDSE"), + dict(src_type="PDSE", dest_type="PDS"), + dict(src_type="PDSE", dest_type="PDSE"), ]) def test_copy_pds_to_existing_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -2973,7 +2979,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDS", space_primary=2, record_format="FB", record_length=80, @@ -2984,7 +2990,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3006,7 +3012,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3018,7 +3024,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3111,7 +3117,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3122,7 +3128,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDS", space_primary=2, record_format="FB", record_length=80, @@ -3132,7 +3138,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3143,7 +3149,7 @@ def 
test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3261,7 +3267,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDS", space_primary=2, record_format="FB", record_length=80, @@ -3272,7 +3278,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3300,7 +3306,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3312,7 +3318,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3453,7 +3459,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDS", space_primary=2, record_format="FB", record_length=80, @@ -3464,7 +3470,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3524,7 +3530,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3615,7 +3621,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): 
hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDS", space_primary=2, record_format="FB", record_length=80, @@ -3626,7 +3632,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3651,7 +3657,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3663,7 +3669,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3827,7 +3833,7 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3878,7 +3884,7 @@ def test_copy_pds_member_with_system_symbol(ansible_zos_module): hosts.all.zos_data_set( name=dest, state="present", - type="pdse", + type="PDSE", replace=True ) @@ -3914,8 +3920,8 @@ def test_copy_multiple_data_set_members(ansible_zos_module): ds_list = ["{0}({1})".format(src, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="pds") - hosts.all.zos_data_set(name=dest, type="pds") + hosts.all.zos_data_set(name=src, type="PDS") + hosts.all.zos_data_set(name=dest, type="PDS") for member in ds_list: hosts.all.shell( @@ -3960,8 +3966,8 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): dest_ds_list = ["{0}({1})".format(dest, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="pds") - hosts.all.zos_data_set(name=dest, type="pds") + hosts.all.zos_data_set(name=src, type="PDS") + hosts.all.zos_data_set(name=dest, 
type="PDS") for src_member in src_ds_list: hosts.all.shell( @@ -3994,7 +4000,7 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("ds_type", ["pds", "pdse"]) +@pytest.mark.parametrize("ds_type", ["PDS", "PDSE"]) def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): hosts = ansible_zos_module data_set = get_tmp_ds_name() @@ -4032,10 +4038,10 @@ def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(ds_type="pds", force=False), - dict(ds_type="pds", force=True), - dict(ds_type="pdse", force=False), - dict(ds_type="pdse", force=True) + dict(ds_type="PDS", force=False), + dict(ds_type="PDS", force=True), + dict(ds_type="PDSE", force=False), + dict(ds_type="PDSE", force=True) ]) def test_copy_member_to_existing_uss_file(ansible_zos_module, args): hosts = ansible_zos_module @@ -4079,7 +4085,7 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4124,7 +4130,7 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) def test_copy_member_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4170,7 +4176,7 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) def 
test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4206,10 +4212,10 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="pds", force=False), - dict(type="pds", force=True), - dict(type="pdse", force=False), - dict(type="pdse", force=True), + dict(type="PDS", force=False), + dict(type="PDS", force=True), + dict(type="PDSE", force=False), + dict(type="PDSE", force=True), ]) def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): hosts = ansible_zos_module @@ -4218,7 +4224,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present", replace=True) + hosts.all.zos_data_set(name=dest, type="SEQ", state="present", replace=True) hosts.all.zos_data_set(name=src_ds, type=args["type"], state="present") for data_set in [src, dest]: @@ -4251,7 +4257,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("dest_type", ["pds", "pdse"]) +@pytest.mark.parametrize("dest_type", ["PDS", "PDSE"]) def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): hosts = ansible_zos_module src = "/etc/profile" @@ -4262,7 +4268,7 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): type=dest_type, space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=25, ) @@ -4294,10 +4300,10 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="pds", backup=None), - dict(type="pds", backup="USER.TEST.PDS.BACKUP"), - dict(type="pdse", backup=None), - dict(type="pdse", backup="USER.TEST.PDSE.BACKUP"), + dict(type="PDS", backup=None), 
+ dict(type="PDS", backup="USER.TEST.PDS.BACKUP"), + dict(type="PDSE", backup=None), + dict(type="PDSE", backup="USER.TEST.PDSE.BACKUP"), ]) def test_backup_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -4343,7 +4349,7 @@ def test_backup_pds(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["SEQ", "PDS", "PDSE"]) def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_type): hosts = ansible_zos_module source = get_tmp_ds_name() @@ -4359,8 +4365,8 @@ def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_typ try: hosts.all.zos_data_set(name=source, type=src_type, state='present') - if src_type != "seq": - hosts.all.zos_data_set(name=source_member, type="member", state='present') + if src_type != "SEQ": + hosts.all.zos_data_set(name=source_member, type="MEMBER", state='present') copy_res = hosts.all.zos_copy( src=source, @@ -4631,7 +4637,7 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( src_file = "/etc/profile" tmphlq = "TMPHLQ" try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="SEQ", state="present") copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, force=force) copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, backup=True, tmp_hlq=tmphlq, force=force) diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index ee7b03157..8f6c6e072 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in 
compliance with the License. # You may obtain a copy of the License at @@ -57,7 +57,7 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) ) hosts.all.zos_data_set( - name=JDATA_SET_NAME, state="present", type="pds", replace=True + name=JDATA_SET_NAME, state="present", type="PDS", replace=True ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, JDATA_SET_NAME) @@ -90,7 +90,7 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) ) hosts.all.zos_data_set( - name=NDATA_SET_NAME, state="present", type="pds", replace=True + name=NDATA_SET_NAME, state="present", type="PDS", replace=True ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, NDATA_SET_NAME) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 394a087ad..c148b6223 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -163,7 +163,7 @@ //****************************************************************************** //* Job containing a non existent DSN that will force an error. //* Returns: -//* ret_code->(code=null, msg=JCLERR ?, msg_text=JCLERR, msg_code=?) +//* ret_code->(code=null, msg=JCLERR, msg_txt=JCLERR, msg_code=None) //* msg --> The JCL submitted with job id JOB00532 but there was an error, //* please review the error for further details: The job completion //* code (CC) was not in the job log. 
Please review the error @@ -198,7 +198,7 @@ //* Another job containing no job card resulting in a JCLERROR with an value. It //* won't always be 952, it will increment. //* Returns: -//* ret_code->(code=null, msg=JCL ERROR 952, msg_text=JCLERR, msg_code=null) +//* ret_code->(code=null, msg=JCLERR, msg_text=JCLERR, msg_code=null) //* msg --> The JCL submitted with job id JOB00728 but there was an error, //* please review the error for further details: The job completion //* code (CC) was not in the job log. Please review the error @@ -214,11 +214,11 @@ //* Job containing a USER=FOOBAR that will cause JES to return a SEC ERROR which //* is a security error. //* Returns: -//* ret_code->(code=null, msg=SEC ?, msg_text=SEC, msg_code=?) -//* msg --> The JCL submitted with job id JOB00464 but there was an error, +//* ret_code->(code=None, msg=SEC, msg_txt=, msg_code=?) +//* msg --> The JCL submitted with job id JOB01062 but there was an error, //* please review the error for further details: The job return code -//* was not available in the job log, please review the job log -//* and error SEC ?.", +//* was not available in the job log, please review the job log and +//* status SEC. //****************************************************************************** //INVUSER JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, // MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM,USER=FOOBAR @@ -234,22 +234,102 @@ JCL_FILE_CONTENTS_TYPRUN_SCAN = """//* //****************************************************************************** -//* Job containing a TYPRUN=SCAN that will cause JES to run a syntax check and -//* not actually run the JCL. +//* Job containing a TYPRUN=SCAN will cause JES to run a syntax check and +//* not actually run the JCL. The job will be put on the H output queue, DDs +//* JESJCL and JESMSGLG are available. Ansible considers this a passing job. //* Returns: -//* ret_code->(code=null, msg=? ?, msg_text=?, msg_code=?) 
-//* msg --> The JCL submitted with job id JOB00620 but there was an error, -//* please review the error for further details: The job return code -//* was not available in the job log, please review the job log -//* and error ? ?.", +//* ret_code->(code=null, msg=TYPRUN=SCAN, msg_txt=, msg_code=null) +//* msg --> The job JOB00551 was run with special job processing TYPRUN=SCAN. +//* This will result in no completion, return code or job steps and +//* changed will be false." //****************************************************************************** -//TYPESCAN JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, -// MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=SCAN +//SCAN JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=H,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=SCAN //STEP0001 EXEC PGM=IEBGENER //SYSIN DD DUMMY //SYSPRINT DD SYSOUT=* //SYSUT1 DD * -HELLO, WORLD +HELLO, WORLD. SCAN OPERATION +/* +//SYSUT2 DD SYSOUT=* +// +""" + +JCL_FILE_CONTENTS_TYPRUN_COPY = """//* +//****************************************************************************** +//* Job containing a TYPRUN=COPY will cause JES to copy the input job +//* (source content) stream directly to a sysout data set (device specified in +//* the message class parameter (H)) and schedule it for output processing, in +//* other words, the job will be put on the H output queue; DD's +//* JESMSGLG and JESJCLIN are available. Ansible considers this a failing job +//* given currently the jobs status can not be determined so it times out. +//* Returns: +//* ret_code->(code=None, msg=None, msg_txt=, msg_code=None) +//* msg --> The JCL submitted with job id JOB00555 but appears to be a long +//* running job that exceeded its maximum wait time of 10 second(s). +//* Consider using module zos_job_query to poll for a long running +//* job or increase option 'wait_times_s' to a value greater than 11. 
+//****************************************************************************** +//COPY JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=H,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=COPY +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD DUMMY +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +HELLO, WORLD. COPY OPERATION +/* +//SYSUT2 DD SYSOUT=* +// +""" + +JCL_FILE_CONTENTS_TYPRUN_HOLD = """//* +//****************************************************************************** +//* Job containing a TYPRUN=HOLD will cause JES to hold this JCL without +//* executing it until a special event occurs at which time, the operator will +//* release the job from HOLD and allow the job to continue processing. +//* Ansible considers this a failing job +//* given currently the jobs status can not be determined so it times out. +//* Returns: +//* ret_code->(code=None, msg=None, msg_txt=, msg_code=None) +//* msg --> The JCL submitted with job id JOB00555 but appears to be a long +//* running job that exceeded its maximum wait time of 10 second(s). +//* Consider using module zos_job_query to poll for a long running +//* job or increase option 'wait_times_s' to a value greater than 11. +//****************************************************************************** +//HOLD JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=H,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=HOLD +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD DUMMY +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +HELLO, WORLD. HOLD OPERATION +/* +//SYSUT2 DD SYSOUT=* +// +""" + +JCL_FILE_CONTENTS_TYPRUN_JCLHOLD = """//* +//****************************************************************************** +//* Job containing a TYPRUN=JCLHOLD will cause JES to will keep the submitted +//* job in the input queue until it's released by an operator or by the default +//* time assigned to the class parameter. As the operator you enter 'A' or 'R' +//* to release it from the queue. 
+//* Ansible considers this a failing job +//* given currently the jobs status can not be determined so it times out. +//* Returns: +//* ret_code->(code=None, msg=None, msg_txt=, msg_code=None) +//* msg --> The JCL submitted with job id JOB00555 but appears to be a long +//* running job that exceeded its maximum wait time of 10 second(s). +//* Consider using module zos_job_query to poll for a long running +//* job or increase option 'wait_times_s' to a value greater than 11. +//****************************************************************************** +//JCLHOLD JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=H,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=JCLHOLD +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD DUMMY +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +HELLO, WORLD. JCLHOLD OPERATION /* //SYSUT2 DD SYSOUT=* // @@ -342,9 +422,11 @@ def test_job_submit_PDS(ansible_zos_module, location): hosts.all.shell( cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) + hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True + name=data_set_name, state="present", type="PDS", replace=True ) + hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, data_set_name) ) @@ -362,8 +444,8 @@ def test_job_submit_PDS(ansible_zos_module, location): assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True finally: - hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=data_set_name, state="absent") + hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.zos_data_set(name=data_set_name, state="absent") def test_job_submit_PDS_special_characters(ansible_zos_module): @@ -374,7 +456,7 @@ def test_job_submit_PDS_special_characters(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) hosts.all.zos_data_set( - name=DATA_SET_NAME_SPECIAL_CHARS, state="present", type="pds", replace=True + 
name=DATA_SET_NAME_SPECIAL_CHARS, state="present", type="PDS", replace=True ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format( @@ -465,7 +547,7 @@ def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True, volumes=volume_1 + name=data_set_name, state="present", type="PDS", replace=True, volumes=volume_1 ) hosts.all.shell( @@ -473,7 +555,7 @@ def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): ) hosts.all.zos_data_set( - name=data_set_name, state="uncataloged", type="pds" + name=data_set_name, state="uncataloged", type="PDS" ) results = hosts.all.zos_job_submit(src=data_set_name+"(SAMPLE)", location="DATA_SET", volume=volume_1) @@ -498,7 +580,7 @@ def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True + name=data_set_name, state="present", type="PDS", replace=True ) hosts.all.shell( @@ -531,7 +613,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True + name=data_set_name, state="present", type="PDS", replace=True ) hosts.all.shell( @@ -564,7 +646,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True + name=data_set_name, state="present", type="PDS", replace=True ) hosts.all.shell( @@ -613,10 +695,8 @@ def test_job_submit_max_rc(ansible_zos_module, args): #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" # - Consider using module zos_job_query to poll for a long running job or # increase option \\'wait_times_s` to a value greater than 10.", - if result.get('duration'): - duration = result.get('duration') - else: - duration = 0 + + duration = result.get('duration') if duration >= 
args["wait_time_s"]: re.search(r'long running job', repr(result.get("msg"))) @@ -734,43 +814,113 @@ def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_NO_DSN) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + results = hosts.all.zos_job_submit(src=tmp_file.name, wait_time_s=20, location="LOCAL") + import pprint for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." assert result.get("changed") is False assert re.search(r'completion code', repr(result.get("msg"))) assert result.get("jobs")[0].get("job_id") is not None -# Should have a JCL ERROR def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_INVALID_USER) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." 
assert result.get("changed") is False - assert re.search(r'return code was not available', repr(result.get("msg"))) - assert re.search(r'error SEC', repr(result.get("msg"))) + assert re.search(r'please review the error for further details', repr(result.get("msg"))) + assert re.search(r'please review the job log for status SEC', repr(result.get("msg"))) assert result.get("jobs")[0].get("job_id") is not None - assert re.search(r'SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_text"))) + assert re.search(r'please review the job log for status SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) -def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): +def test_job_submit_local_jcl_typrun_scan(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + results = hosts.all.zos_job_submit(src=tmp_file.name, + location="LOCAL", + wait_time_s=20, + encoding={ + "from": "UTF-8", + "to": "IBM-1047" + },) for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." assert result.get("changed") is False - assert re.search(r'return code was not available', repr(result.get("msg"))) - assert re.search(r'error ? ?', repr(result.get("msg"))) assert result.get("jobs")[0].get("job_id") is not None - assert result.get("jobs")[0].get("ret_code").get("msg_text") == "?" 
+ assert re.search(r'run with special job processing TYPRUN=SCAN', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert result.get("jobs")[0].get("ret_code").get("code") is None + assert result.get("jobs")[0].get("ret_code").get("msg") == "TYPRUN=SCAN" + assert result.get("jobs")[0].get("ret_code").get("msg_code") is None + + +def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_TYPRUN_COPY) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, + location="LOCAL", + wait_time_s=20, + encoding={ + "from": "UTF-8", + "to": "IBM-1047" + },) + import pprint + for result in results.contacted.values(): + pprint.pprint(result) + assert result.get("changed") is False + assert result.get("jobs")[0].get("job_id") is not None + assert re.search(r'please review the job log', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert result.get("jobs")[0].get("ret_code").get("code") is None + assert result.get("jobs")[0].get("ret_code").get("msg") is None + assert result.get("jobs")[0].get("ret_code").get("msg_code") is None + + +def test_job_submit_local_jcl_typrun_hold(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_TYPRUN_HOLD) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, + location="LOCAL", + wait_time_s=20, + encoding={ + "from": "UTF-8", + "to": "IBM-1047" + },) + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get("jobs")[0].get("job_id") is not None + assert re.search(r'long running job', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert result.get("jobs")[0].get("ret_code").get("code") is None + assert result.get("jobs")[0].get("ret_code").get("msg") == "AC" + assert 
result.get("jobs")[0].get("ret_code").get("msg_code") is None + + +def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_TYPRUN_JCLHOLD) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, + location="LOCAL", + wait_time_s=20, + encoding={ + "from": "UTF-8", + "to": "IBM-1047" + },) + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get("jobs")[0].get("job_id") is not None + assert re.search(r'long running job', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert result.get("jobs")[0].get("ret_code").get("code") is None + assert result.get("jobs")[0].get("ret_code").get("msg") == "AC" + assert result.get("jobs")[0].get("ret_code").get("msg_code") is None # This test case is related to the following GitHub issues: @@ -807,4 +957,4 @@ def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True finally: - hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.file(path=TEMP_PATH, state="absent") \ No newline at end of file diff --git a/tests/functional/modules/test_zos_mvs_raw_func.py b/tests/functional/modules/test_zos_mvs_raw_func.py index fd20a6a92..ca5b6384d 100644 --- a/tests/functional/modules/test_zos_mvs_raw_func.py +++ b/tests/functional/modules/test_zos_mvs_raw_func.py @@ -62,7 +62,7 @@ def test_disposition_new(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", return_content=dict(type="text"), ), ), @@ -86,7 +86,7 @@ def test_dispositions_for_existing_data_set(ansible_zos_module, disposition): hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=default_data_set, type="seq", state="present", replace=True + 
name=default_data_set, type="SEQ", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -118,7 +118,7 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module, default_volume = volumes.get_available_vol() default_data_set = get_tmp_ds_name()[:25] hosts.all.zos_data_set( - name=default_data_set, type="seq", state="present", replace=True + name=default_data_set, type="SEQ", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -133,12 +133,12 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module, return_content=dict(type="text"), replace=True, backup=True, - type="seq", + type="SEQ", space_primary=5, space_secondary=1, - space_type="m", + space_type="M", volumes=default_volume, - record_format="fb" + record_format="FB" ), ), dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), @@ -172,7 +172,7 @@ def test_new_disposition_for_data_set_members(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=DEFAULT_DATA_SET_WITH_MEMBER, disposition="new", - type="pds", + type="PDS", directory_blocks=15, return_content=dict(type="text"), ), @@ -197,7 +197,7 @@ def test_dispositions_for_existing_data_set_members(ansible_zos_module, disposit default_data_set = get_tmp_ds_name() DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' hosts.all.zos_data_set( - name=default_data_set, type="pds", state="present", replace=True + name=default_data_set, type="PDS", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -234,7 +234,7 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( name=default_data_set, - type="seq", + type="SEQ", state="present", replace=True, volumes=[volume_1], @@ -267,11 +267,11 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch @pytest.mark.parametrize( 
"space_type,primary,secondary,expected", [ - ("trk", 3, 1, 169992), - ("cyl", 3, 1, 2549880), - ("b", 3, 1, 56664), - ("k", 3, 1, 56664), - ("m", 3, 1, 2889864), + ("TRK", 3, 1, 169992), + ("CYL", 3, 1, 2549880), + ("B", 3, 1, 56664), + ("K", 3, 1, 56664), + ("M", 3, 1, 3003192), ], ) def test_space_types(ansible_zos_module, space_type, primary, secondary, expected): @@ -288,7 +288,7 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", space_primary=primary, space_secondary=secondary, space_type=space_type, @@ -315,7 +315,7 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte @pytest.mark.parametrize( "data_set_type", - ["pds", "pdse", "large", "basic", "seq"], + ["PDS", "PDSE", "LARGE", "BASIC", "SEQ"], ) def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_systems): try: @@ -351,7 +351,7 @@ def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_s @pytest.mark.parametrize( "data_set_type", - ["ksds", "rrds", "lds", "esds"], + ["KSDS", "RRDS", "LDS", "ESDS"], ) def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_systems): try: @@ -374,7 +374,7 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_syste volumes=[volume_1], ), ) - if data_set_type != "ksds" + if data_set_type != "KSDS" else dict( dd_data_set=dict( dd_name=SYSPRINT_DD, @@ -393,14 +393,14 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_syste # * because that means data set exists and is VSAM so we can't read it results = hosts.all.command(cmd="head \"//'{0}'\"".format(default_data_set)) for result in results.contacted.values(): - assert "EDC5041I" in result.get("stderr", "") + assert "EDC5041I" or "EDC5049I" in result.get("stderr", "") finally: hosts.all.zos_data_set(name=default_data_set, state="absent") 
@pytest.mark.parametrize( "record_format", - ["u", "vb", "vba", "fb", "fba"], + ["U", "VB", "VBA", "FB", "FBA"], ) def test_record_formats(ansible_zos_module, record_format, volumes_on_systems): try: @@ -453,7 +453,7 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected, default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( name=default_data_set, - type="seq", + type="SEQ", state="present", replace=True, volumes=[volume_1], @@ -505,7 +505,7 @@ def test_return_text_content_encodings( default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( name=default_data_set, - type="seq", + type="SEQ", state="present", replace=True, volumes=[volume_1], @@ -544,7 +544,7 @@ def test_reuse_existing_data_set(ansible_zos_module): hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=default_data_set, type="seq", state="present", replace=True + name=default_data_set, type="SEQ", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="IDCAMS", @@ -555,7 +555,7 @@ def test_reuse_existing_data_set(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", reuse=True, return_content=dict(type="text"), ), @@ -577,7 +577,7 @@ def test_replace_existing_data_set(ansible_zos_module): hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=default_data_set, type="seq", state="present", replace=True + name=default_data_set, type="SEQ", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="IDCAMS", @@ -588,7 +588,7 @@ def test_replace_existing_data_set(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", replace=True, return_content=dict(type="text"), ), @@ -619,7 +619,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): dd_name=SYSPRINT_DD, 
data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", replace=True, return_content=dict(type="text"), ), @@ -636,7 +636,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", replace=True, backup=True, return_content=dict(type="text"), @@ -687,7 +687,7 @@ def test_input_empty(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", return_content=dict(type="text"), ), ), @@ -719,7 +719,7 @@ def test_input_large(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", return_content=dict(type="text"), ), ), @@ -752,7 +752,7 @@ def test_input_provided_as_list(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", return_content=dict(type="text"), ), ), @@ -792,7 +792,7 @@ def test_input_return_content_types(ansible_zos_module, return_content_type, exp dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", ), ), dict( @@ -844,7 +844,7 @@ def test_input_return_text_content_encodings( dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", ), ), dict( @@ -1164,7 +1164,7 @@ def test_file_record_length(ansible_zos_module, record_length): @pytest.mark.parametrize( "record_format", - ["u", "vb", "vba", "fb", "fba"], + ["U", "VB", "VBA", "FB", "FBA"], ) def test_file_record_format(ansible_zos_module, record_format): try: @@ -1353,7 +1353,7 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): dd_data_set=dict( data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", return_content=dict(type="text"), ) ), @@ -1361,7 +1361,7 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): 
dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="seq", + type="SEQ", ) ), ], @@ -1391,8 +1391,8 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu hosts = ansible_zos_module default_data_set = get_tmp_ds_name() DEFAULT_DATA_SET_2 = get_tmp_ds_name() - hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="present", type="seq") + hosts.all.zos_data_set(name=default_data_set, state="present", type="SEQ") + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="present", type="SEQ") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1405,7 +1405,7 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu dd_data_set=dict( data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", replace=True, backup=True, return_content=dict(type="text"), @@ -1415,7 +1415,7 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="seq", + type="SEQ", replace=True, backup=True, ) @@ -1462,7 +1462,7 @@ def test_concatenation_with_data_set_member(ansible_zos_module): default_data_set = get_tmp_ds_name() DEFAULT_DATA_SET_2 = get_tmp_ds_name() DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' - hosts.all.zos_data_set(name=default_data_set, state="present", type="pds") + hosts.all.zos_data_set(name=default_data_set, state="present", type="PDS") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -1482,7 +1482,7 @@ def test_concatenation_with_data_set_member(ansible_zos_module): dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="seq", + type="SEQ", ) ), ], @@ -1538,7 +1538,7 @@ def test_concatenation_with_unix_dd_and_response_datasets(ansible_zos_module): dd_data_set=dict( 
data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="seq", + type="SEQ", ) ), ], @@ -1766,7 +1766,7 @@ def test_concatenation_all_dd_types(ansible_zos_module, dds, input_pos, input_co try: hosts = ansible_zos_module default_data_set = "ANSIBLE.USER.PRIVATE.TEST" - hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") + hosts.all.zos_data_set(name=default_data_set, state="present", type="SEQ") hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") results = hosts.all.zos_mvs_raw(program_name="idcams", auth=True, dds=dds) diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 0167d6c81..c04ae2328 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -2,8 +2,6 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 0167d6c81..c04ae2328 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -2,8 +2,6 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und 
plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index 0167d6c81..c04ae2328 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -2,8 +2,6 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/unit/test_zoau_version_checker_unit.py b/tests/unit/test_zoau_version_checker_unit.py index 
96031f4a1..15bcce58b 100644 --- a/tests/unit/test_zoau_version_checker_unit.py +++ b/tests/unit/test_zoau_version_checker_unit.py @@ -45,10 +45,24 @@ (['1','2','1'], "2022/08/17 21:25:13 CUT V1.2.1"), (['1','2','1'], "2022/08/25 21:44:21 CUT V1.2.1 31163ab 1856"), (['1','2','1'], "2022/09/07 15:26:50 CUT V1.2.1 d2f6557 1880"), + (['1','2','1','1'], ""), (['1','2','3'], "2022/12/03 13:33:22 CUT V1.2.3 6113dc9 2512"), (['1','2','2'], "2022/12/06 20:44:00 CUT V1.2.2 ee30137 2525"), (['1','2','3'], "2023/03/16 18:17:00 CUT V1.2.3 1aa591fb 2148 PH50145"), - (['1', '2', '4', '0'], "2023/06/02 13:28:30 CUT V1.2.4.0 3b866824 2873 PH52034 826 267d9646"), + (['1','2','3','1'], ""), + (['1','2','3','2'], ""), + (['1','2','4','0'], "2023/06/02 13:28:30 CUT V1.2.4.0 3b866824 2873 PH52034 826 267d9646"), + (['1','2','4','1'], ""), + (['1','2','4','2'], ""), + (['1','2','4','3'], ""), + (['1','2','4','4'], ""), + (['1','2','4','5'], ""), + (['1','2','5','0'], ""), + (['1','2','5','1'], ""), + (['1','2','5','2'], ""), + (['1','2','5','3'], ""), + (['1','2','5','4'], ""), + (['1','2','5','6'], ""), ] diff --git a/tests/unit/test_zos_mvs_raw_unit.py b/tests/unit/test_zos_mvs_raw_unit.py index e50734756..f528412da 100644 --- a/tests/unit/test_zos_mvs_raw_unit.py +++ b/tests/unit/test_zos_mvs_raw_unit.py @@ -59,7 +59,7 @@ def run_command(self, *args, **kwargs): "new", "keep", "keep", - "cyl", + "CYL", 5, 1, "smsclas1", @@ -67,17 +67,17 @@ def run_command(self, *args, **kwargs): "smsclas1", 80, "SOMEKEYLAB100", - "library", + "LIBRARY", {"label": "keyforme", "encoding": "h"}, {"label": "keyforme2", "encoding": "h"}, - "u", + "U", ), ( "data.set.name(mem1)", "shr", "delete", "keep", - "trk", + "TRK", "5", 1, "smsclas1", @@ -85,17 +85,17 @@ def run_command(self, *args, **kwargs): "smsclas3", 120, "somekeylab1", - "basic", + "BASIC", {"label": "keyforme", "encoding": "l"}, {"label": "keyforme2", "encoding": "h"}, - "fb", + "FB", ), ( "DATA.NAME.HERE.NOW", "old", "catalog", "uncatalog", - 
"b", + "B", 55, "100", "SMSCLASS", @@ -103,17 +103,17 @@ def run_command(self, *args, **kwargs): "smscD@s3", 120, "keyfor342fdsme", - "large", + "LARGE", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "fba", + "FBA", ), ( "DAT@.now", "mod", "delete", "uncatalog", - "g", + "G", 1, "9", "SMSCLASS", @@ -121,17 +121,17 @@ def run_command(self, *args, **kwargs): "", 120, "keyfor342fdsme", - "pdse", + "PDSE", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "vb", + "VB", ), ( "DAT$.now", "new", "delete", "keep", - "m", + "M", 1, 9, "SMSCLASS", @@ -139,10 +139,10 @@ def run_command(self, *args, **kwargs): "", 0, "", - "lds", + "LDS", {"label": "keyforME", "encoding": "l"}, {"label": "keyyyyy343asdfasfsdfa", "encoding": "l"}, - "vba", + "VBA", ), ], ) @@ -237,7 +237,7 @@ def test_argument_parsing_data_set( "delete", 0, 100, - "fb", + "FB", "record", "r", ["ocreat", "oappend", "onoctty"], @@ -248,14 +248,14 @@ def test_argument_parsing_data_set( "delete", 200, "100", - "fba", + "FBA", "record", "w", ["oappend", "osync"], ), - ("/u/OEUSR01", "keep", "delete", 0, 100, "vb", "binary", "rw", ["ononblock"]), - ("/u/testmeee", "keep", "delete", 0, 100, "vba", "record", "read_only", []), - ("/u/hellow/d/or4ld", "keep", "keep", 0, 100, "u", "text", "write_only", []), + ("/u/OEUSR01", "keep", "delete", 0, 100, "VB", "binary", "rw", ["ononblock"]), + ("/u/testmeee", "keep", "delete", 0, 100, "VBA", "record", "read_only", []), + ("/u/hellow/d/or4ld", "keep", "keep", 0, 100, "U", "text", "write_only", []), ], ) def test_argument_parsing_unix( @@ -338,7 +338,7 @@ def test_argument_parsing_unix( "old", "keep", "keep", - "cyl", + "CYL", 5, 1, "smsclas1", @@ -346,17 +346,17 @@ def test_argument_parsing_unix( "smsclas1", 80, "SOMEKEYLAB100", - "library", + "LIBRARY", {"label": "keyforme", "encoding": "h"}, {"label": "keyforme2", "encoding": "h"}, - "u", + "U", ), ( "data.set.name(mem1waytoolong)", "excl", "delete", "keep", - 
"trk", + "TRK", "5", 1, "smsclas1", @@ -364,10 +364,10 @@ def test_argument_parsing_unix( "smsclas3", 120, "somekeylab1", - "basic", + "BASIC", {"label": "keyforme", "encoding": "l"}, {"label": "keyforme2", "encoding": "h"}, - "fb", + "FB", ), ( "DATA.NAME.HERE.NOW", @@ -382,17 +382,17 @@ def test_argument_parsing_unix( "smscD@s3", 120, "keyfor342fdsme", - "large", + "LARGE", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "fba", + "FBA", ), ( "DAT@.now", "mod", "delete", "uncatalog", - "g", + "G", 1, "9", "SMSCLASSsss", @@ -400,17 +400,17 @@ def test_argument_parsing_unix( "", 120, "keyfor342fdsme", - "pdse", + "PDSE", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "vb", + "VB", ), ( "DAT$.now", "new", "delete", "meep", - "m", + "M", 1, 9, "SMSCLASS", @@ -418,10 +418,10 @@ def test_argument_parsing_unix( "", 0, "", - "ksdss", + "KSDSS", {"label": "keyforME", "encoding": "l"}, {"label": "keyyyyy343asdfasfsdfa", "encoding": "l"}, - "vba", + "VBA", ), ], ) @@ -525,7 +525,7 @@ def test_argument_parsing_data_set_failure_path( "delete", 200, "100", - "fba", + "FBA", "record", "w", ["append", "osync"], @@ -537,12 +537,12 @@ def test_argument_parsing_data_set_failure_path( "delete", 0, 100, - "vba", + "VBA", "record", "read_only", ["hello"], ), - ("/u/hellow/d/or4ld", "meep", "keep", 0, 100, "u", "text", None, []), + ("/u/hellow/d/or4ld", "meep", "keep", 0, 100, "U", "text", None, []), ], ) def test_argument_parsing_unix_failure_path( @@ -620,7 +620,7 @@ def test_ksds_defaults( "dd_name": "MYDD1", "data_set_name": "my.ds", "disposition": "new", - "type": "ksds", + "type": "KSDS", } }, ], @@ -663,7 +663,7 @@ def test_ksds_exception_key_length( "dd_name": "MYDD1", "data_set_name": "my.ds", "disposition": "new", - "type": "esds", + "type": "ESDS", "key_length": 5, } }, @@ -693,7 +693,7 @@ def test_ksds_exception_key_offset( "dd_name": "MYDD1", "data_set_name": "my.ds", "disposition": "new", - "type": "esds", + 
"type": "ESDS", "key_offset": 5, } },