From ab4849c370241ed2cae6dcf84eb468e995d97958 Mon Sep 17 00:00:00 2001
From: Joshua Ramirez
Date: Tue, 4 Jun 2024 20:13:19 +0000
Subject: [PATCH 1/2] Pull request #2772: Remove dev files

Merge in SDK/oci-go-sdk from remove_dev_files to github

Squashed commit of the following:

commit ec3c4c96e5b1e8779eb0f6f2358c936a1aa2d5be
Author: Joshuram
Date:   Tue Jun 4 08:45:48 2024 -0700

    removing dev files

commit 590af01d6cb398df852705b4a43be8cb4d12287b
Author: Joshuram
Date:   Tue Jun 4 08:45:39 2024 -0700

    removing dev files
---
 MakefileDevelopment.mk | 114 -- ocibuild.conf | 94 -- scripts/IntegTestsDocker | 31 - scripts/README.md | 26 - scripts/SCAJobDocker | 15 - scripts/add_or_update_spec-requirements.txt | 1 - scripts/add_or_update_spec.py | 400 ----- scripts/auto_gen_utils/.gitignore | 12 - .../1_process_preview_jira_queue.py | 1314 ---------------- .../auto_gen_utils/2_pre_generation_set_up.py | 63 - .../3_report_generation_status.py | 342 ----- .../4_on_generation_complete.py | 444 ------ .../5_mark_preview_tickets_done_post_merge.py | 292 ---- scripts/auto_gen_utils/README.md | 253 ---- scripts/auto_gen_utils/__init__.py | 0 .../add_or_update_scripts/__init__.py | 0 .../add_or_update_spec_utils.py | 81 - .../cli_add_or_update_spec.py | 253 ---- .../datagen_add_or_update_spec.py | 147 -- .../dotnet_sdk_add_or_update_spec.py | 497 ------ .../go_sdk_add_or_update_spec.py | 173 --- .../java_sdk_add_or_update_spec.py | 721 --------- .../legacy_java_sdk_add_or_update_spec.py | 773 ---------- .../module_pom_file_add_or_update_spec.py | 382 ----- .../powershell_add_or_update_spec.py | 437 ------ .../python_sdk_add_or_update_spec.py | 225 --- .../ruby_sdk_add_or_update_spec.py | 188 --- .../single_pom_file_add_or_update_spec.py | 346 ----- .../spec_updater_base.py | 377 ----- .../templates/pom-template.xml | 103 -- .../typescript_sdk_add_or_update_spec.py | 454 ------ .../auto_gen_utils/autogen_issue_advisor.py | 801 ---------- .../autogen_issue_advisor_preview.py | 486 ------ .../autogen_issue_advisor_public.py | 1342 ----------------- .../autogen_issue_advisor_shared.py | 781 ---------- .../change_dexreq_to_release_approved.py | 33 - .../change_dexreq_to_release_approved.sh | 15 - scripts/auto_gen_utils/clean_auto_branches.py | 228 --- .../cli_manual_changes_required_check.py | 209 --- scripts/auto_gen_utils/codecov_baseline.html | 35 - .../comment_with_fields_to_be_deleted.py | 116 -- scripts/auto_gen_utils/config.py | 1148 -------------- ...olidate_feature_id_enabled_groups_files.py | 114 -- .../create_cli_design_review_ticket.py | 571 ------- .../create_recordings_ticket.py | 153 -- scripts/auto_gen_utils/dexreq_migration.py | 281 ---- scripts/auto_gen_utils/generate_button.js | 114 -- .../auto_gen_utils/get_jira_access_token.py | 34 - scripts/auto_gen_utils/hooks/pre-commit | 2 - scripts/auto_gen_utils/ocibuild.conf | 59 - .../pull_request_tracker/.gitignore | 2 - .../pull_request_tracker/README.md | 74 - .../pull_request_tracker/__init__.py | 0 .../pull_request_tracker/dexreq_pr_tracker.py | 918 ----------- .../homepage/publish_homepage.py | 21 - .../homepage/reports_homepage.html | 88 -- .../homepage/reports_index.json | 3 - .../pull_request_tracker/resources/config | 6 - .../resources/dexreq_pr_tracker.pem | 30 - .../resources/dexreq_pr_tracker_public.pem | 9 - .../pull_request_tracker/tc_run.sh | 49 - .../templates/issue_table.html | 142 -- .../templates/pull_request_table.html | 45 - .../templates/pull_requests_table.html | 15 - .../templates/report_table.html | 250 ---
scripts/auto_gen_utils/python_cli/__init__.py | 0 .../auto-route-jira-sd/auto_close_ticket.py | 33 - .../auto-route-jira-sd/auto_suggest.py | 338 ----- .../confluence_api_utils/utils.py | 51 - .../confluence_automation.py | 38 - .../auto-route-jira-sd/constants.py | 41 - .../auto-route-jira-sd/jira_client_util.py | 60 - .../auto-route-jira-sd/mos_mapping.py | 24 - .../python_cli/check_design_review_tickets.py | 33 - .../auto_gen_utils/python_cli/constants.py | 21 - .../auto_gen_utils/python_cli/exceptions.py | 9 - .../python_cli/execute_manual_changes.py | 20 - .../python_cli/generate_local_changes.py | 51 - .../python_cli/generate_manual_changes.py | 49 - scripts/auto_gen_utils/python_cli/git_util.py | 50 - .../python_cli/install_python_cli_local.sh | 5 - .../auto_gen_utils/python_cli/jira_config.py | 36 - .../auto_gen_utils/python_cli/jira_util.py | 555 ------- .../python_cli/manual_change_examples.py | 51 - .../python_cli/rename_root_group.py | 15 - .../python_cli/self_service_manual_change.py | 426 ------ .../self_service_manual_change_util.py | 437 ------ .../copy_params_from_generated.py.js2 | 2 - .../templates/flatten_parameter.py.js2 | 24 - .../templates/flatten_parameter_kwargs.py.js2 | 13 - .../templates/flatten_parameter_option.py.js2 | 3 - .../templates/jira_exception_message.py.js2 | 4 - .../python_cli/templates/move_command.py.js2 | 6 - .../python_cli/templates/move_group.py.js2 | 8 - .../templates/new_extended_file.py.js2 | 14 - .../templates/remove_command.py.js2 | 9 - .../templates/remove_parameter.py.js2 | 11 - .../templates/rename_command.py.js2 | 9 - .../templates/rename_parameter.py.js2 | 20 - .../templates/rename_parameter_kwargs.py.js2 | 6 - .../templates/rename_parameter_option.py.js2 | 3 - .../auto_gen_utils/requirements-ocibuild.txt | 83 - scripts/auto_gen_utils/requirements.txt | 59 - .../sdk_regions_updater/README.md | 47 - .../sdk_regions_updater/__init__.py | 0 .../auto_gen_utils/sdk_regions_updater/config | 6 - .../dotnet_sdk_region_updater.py | 71 - .../go_sdk_region_updater.py | 95 -- .../java_sdk_region_updater.py | 131 -- .../python_sdk_region_updater.py | 62 - .../region_updater_utils.py | 211 --- .../ruby_sdk_region_updater.py | 63 - .../sdk_regions_updater.py | 106 -- .../templates/dotnet-sdk-realms.tpl | 17 - .../templates/dotnet-sdk-regions.tpl | 17 - .../templates/go-sdk-regions.tpl | 20 - .../templates/python-sdk-regions.tpl | 16 - .../templates/ruby-sdk-regions.tpl | 30 - .../typescript_sdk_region_updater.py | 128 -- scripts/auto_gen_utils/setup.cfg | 4 - scripts/auto_gen_utils/shared/__init__.py | 0 .../auto_gen_utils/shared/bitbucket_utils.py | 603 -------- .../shared/buildsvc_tc_compatibility.py | 93 -- .../auto_gen_utils/shared/version_utils.py | 160 -- .../api_review/GATE_CHECK_README.md | 66 - .../gate_check_api_review_tickets.py | 540 ------- .../api_review/process_api_review_tickets.py | 80 - .../bug_bash/process_bug_bash_tickets.py | 273 ---- ...t_cli_and_python_sdk_generation_success.sh | 28 - .../11_commit_and_push_generated_changes.sh | 35 - ...etup_cli_and_python_sdk_public_branches.sh | 61 - .../3_record_python_sdk_generation_success.sh | 5 - .../public/5_record_cli_generation_success.sh | 5 - ..._python_sdk_build_and_publish_artifacts.sh | 119 -- .../7_record_python_sdk_build_success.sh | 5 - .../8_cli_build_and_publish_artifacts.sh | 47 - .../cli/public/9_record_cli_build_success.sh | 5 - .../team_city_scripts/cli/public/constants.py | 36 - .../cli/public/gather_cli_changelog_entry.py | 266 ---- 
.../set_changed_service_env_variable.py | 48 - .../1_setup_testdata_preview_branch.sh | 33 - .../preview/6_report_gen_and_build_status.sh | 26 - .../preview/9_mark_done_post_bulk_merge.sh | 32 - .../datagen/public/1_setup_testdata_branch.sh | 31 - .../public/6_report_gen_and_build_status.sh | 26 - .../public/9_mark_done_post_bulk_merge.sh | 32 - .../1_setup_dotnet_sdk_preview_branch.sh | 33 - .../3_record_sdk_generation_success.sh | 5 - .../preview/5_record_sdk_build_success.sh | 5 - .../preview/6_report_gen_and_build_status.sh | 26 - .../preview/7_commit_generated_changes.sh | 26 - .../preview/9_mark_done_post_bulk_merge.sh | 36 - .../public/1_setup_dotnet_sdk_branch.sh | 31 - .../public/3_record_sdk_generation_success.sh | 5 - .../public/5_record_sdk_build_success.sh | 5 - .../public/6_report_gen_and_build_status.sh | 26 - .../public/7_commit_generated_changes.sh | 26 - .../public/9_mark_done_post_bulk_merge.sh | 36 - .../github_issue_summary_header.py | 100 -- .../github_label_routing_config.yaml | 757 ---------- .../github_issues/github_repo_config.yaml | 12 - .../github_issues/issue_config.py | 128 -- .../github_issues/jira_wrapper.py | 230 --- .../github_issues/route_issues_from_github.py | 442 ------ .../github_issues/update_github_labels.py | 116 -- .../team_city_scripts/github_issues/util.py | 36 - .../go/public/1_setup_go_public_branch.sh | 41 - .../public/3_record_sdk_generation_success.sh | 5 - .../go/public/5_record_sdk_build_success.sh | 5 - .../public/6_report_gen_and_build_status.sh | 42 - .../go/public/7_commit_generated_changes.sh | 27 - .../java/check_for_pom_version_mismatch.py | 110 -- .../java/checkout_source_branch.py | 88 -- .../compare-all-versions-using-clirr.sh | 160 -- ...pare-all-versions-using-codegen-version.sh | 158 -- .../java/determine_build_profile.py | 229 --- .../determine_codegen_projects_from_commit.py | 102 -- .../java/determine_full_version.py | 130 -- .../java/public/1_setup_java_public_branch.sh | 41 - .../public/3_record_sdk_generation_success.sh | 5 - .../java/public/5_record_sdk_build_success.sh | 5 - .../public/6_report_gen_and_build_status.sh | 26 - .../java/public/7_commit_generated_changes.sh | 26 - ...arn_about_backward_incompatible_changes.py | 338 ----- .../java/warn_about_source_formatting.py | 118 -- .../check_for_pom_version_mismatch.py | 110 -- .../legacy_java_sdk/checkout_source_branch.py | 88 -- .../determine_build_profile.py | 229 --- .../determine_codegen_projects_from_commit.py | 102 -- .../legacy_java_sdk/determine_full_version.py | 130 -- .../1_setup_legacy_java_public_branch.sh | 41 - .../public/3_record_sdk_generation_success.sh | 5 - .../public/5_record_sdk_build_success.sh | 5 - .../public/6_report_gen_and_build_status.sh | 26 - .../public/7_commit_generated_changes.sh | 26 - ...arn_about_backward_incompatible_changes.py | 335 ---- .../warn_about_source_formatting.py | 117 -- .../determine_issue_routing_info_tag.py | 211 --- .../determine_java_sdk_versions_used.py | 79 - .../determine_pr_target_branch.py | 51 - .../determine_test_classes.py | 286 ---- .../jira_ticket_reporter.py | 87 -- .../oci_testing_service/ocits_shared.py | 499 ------ .../report_test_results.py | 505 ------- .../report_testing_service_failure.py | 98 -- .../report_testing_service_stderr.py | 171 --- .../oci_testing_service/save_codecov_data.py | 94 -- .../switch_to_matching_sdk_branch.py | 313 ---- .../orm/fail_build_on_code_coverage.py | 189 --- .../1_setup_powershell_preview_branch.sh | 29 - .../preview/3_record_ps_generation_success.sh | 5 - 
.../preview/5_record_ps_build_success.sh | 5 - .../preview/6_report_gen_and_build_status.sh | 26 - .../preview/7_commit_generated_changes.sh | 26 - .../preview/9_mark_done_post_bulk_merge.sh | 32 - .../public/1_setup_powershell_branch.sh | 29 - .../public/3_record_ps_generation_success.sh | 5 - .../public/5_record_ps_build_success.sh | 5 - .../public/6_report_gen_and_build_status.sh | 26 - .../public/7_commit_generated_changes.sh | 26 - .../public/9_mark_done_post_bulk_merge.sh | 32 - .../team_city_scripts/process_jira_queue.sh | 198 --- .../shared/1_setup_python_public_branch.sh | 31 - .../shared/3_record_sdk_generation_success.sh | 5 - .../shared/4_build_and_publish_artifacts.sh | 111 -- .../shared/5_record_sdk_build_success.sh | 5 - .../shared/6_report_gen_and_build_status.sh | 18 - .../7_commit_and_push_generated_changes.sh | 13 - .../shared/disableStrictHostKeyChecking.sh | 14 - .../preview/1_setup_ruby_preview_branch.sh | 43 - .../ruby/preview/2_configure_rbenv.sh | 59 - .../4_record_sdk_generation_success.sh | 7 - .../preview/5_build_and_publish_artifacts.sh | 85 -- .../preview/6_record_sdk_build_success.sh | 7 - .../preview/7_report_gen_and_build_status.sh | 26 - .../preview/8_commit_generated_changes.sh | 25 - .../preview/9_mark_done_post_bulk_merge.sh | 41 - .../ruby/public/1_setup_ruby_public_branch.sh | 43 - .../ruby/public/2_configure_rbenv.sh | 62 - .../public/4_record_sdk_generation_success.sh | 7 - .../public/5_build_and_publish_artifacts.sh | 95 -- .../ruby/public/6_record_sdk_build_success.sh | 7 - .../public/7_report_gen_and_build_status.sh | 28 - .../ruby/public/8_commit_generated_changes.sh | 25 - .../public/9_mark_done_post_bulk_merge.sh | 46 - .../run_autogen_issue_advisor.sh | 72 - .../run_autogen_issue_advisor_calendar.sh | 90 -- .../1_setup_typescript_preview_branch.sh | 35 - .../3_record_sdk_generation_success.sh | 5 - .../preview/5_record_sdk_build_success.sh | 5 - .../preview/6_report_gen_and_build_status.sh | 26 - .../preview/7_commit_generated_changes.sh | 26 - .../preview/9_mark_done_post_bulk_merge.sh | 32 - .../public/1_setup_typescript_branch.sh | 33 - .../public/3_record_sdk_generation_success.sh | 5 - .../public/5_record_sdk_build_success.sh | 5 - .../public/6_report_gen_and_build_status.sh | 26 - .../public/7_commit_generated_changes.sh | 26 - .../public/9_mark_done_post_bulk_merge.sh | 32 - .../udx_automation/README.md | 40 - .../udx_automation/__init__.py | 0 .../udx_ticket_review_automation.py | 417 ----- .../team_city_scripts/zip_and_delete.py | 40 - scripts/auto_gen_utils/tests/__init__.py | 0 .../tests/test_bitbucket_utils.py | 96 -- .../tests/test_buildsvc_tc_compatibility.py | 577 ------- .../tests/test_clean_auto_branches.py | 101 -- .../tests/test_cli_branch_text.py | 32 - scripts/auto_gen_utils/tests/test_config.py | 221 --- ...test_module_pom_file_add_or_update_spec.py | 32 - .../test_python_sdk_add_or_update_spec.py | 52 - .../tests/test_version_utils.py | 70 - .../auto_gen_utils/update-codegen-version.sh | 259 ---- scripts/auto_gen_utils/update_region.py | 156 -- .../update_testing_service_dependency.py | 46 - scripts/auto_gen_utils/util.py | 1064 ------------- scripts/auto_gen_utils/verify.sh | 12 - scripts/clone_key_repo.sh | 7 - scripts/generate_docs.sh | 126 -- scripts/setup_test_docker.sh | 23 - scripts/test_pom.original.xml | 1269 ---------------- shared-build-service-scripts/.gitignore | 20 - shared-build-service-scripts/README.md | 18 - shared-build-service-scripts/check-secrets.sh | 64 - .../git-submodule-helpers.sh | 111 -- 
.../make-pr-comment.sh | 136 -- shared-build-service-scripts/relpath.sh | 3 - .../setup-git-secrets-readwrite.sh | 60 - .../setup-git-secrets.sh | 60 - shared-build-service-scripts/ssh_give_pass.sh | 6 - 290 files changed, 37345 deletions(-) delete mode 100644 MakefileDevelopment.mk delete mode 100644 ocibuild.conf delete mode 100644 scripts/IntegTestsDocker delete mode 100644 scripts/README.md delete mode 100644 scripts/SCAJobDocker delete mode 100644 scripts/add_or_update_spec-requirements.txt delete mode 100644 scripts/add_or_update_spec.py delete mode 100644 scripts/auto_gen_utils/.gitignore delete mode 100755 scripts/auto_gen_utils/1_process_preview_jira_queue.py delete mode 100644 scripts/auto_gen_utils/2_pre_generation_set_up.py delete mode 100644 scripts/auto_gen_utils/3_report_generation_status.py delete mode 100644 scripts/auto_gen_utils/4_on_generation_complete.py delete mode 100644 scripts/auto_gen_utils/5_mark_preview_tickets_done_post_merge.py delete mode 100644 scripts/auto_gen_utils/README.md delete mode 100644 scripts/auto_gen_utils/__init__.py delete mode 100644 scripts/auto_gen_utils/add_or_update_scripts/__init__.py delete mode 100644 scripts/auto_gen_utils/add_or_update_scripts/add_or_update_spec_utils.py delete mode 100644 scripts/auto_gen_utils/add_or_update_scripts/cli_add_or_update_spec.py delete mode 100644 scripts/auto_gen_utils/add_or_update_scripts/datagen_add_or_update_spec.py delete mode 100755 scripts/auto_gen_utils/add_or_update_scripts/dotnet_sdk_add_or_update_spec.py delete mode 100644 scripts/auto_gen_utils/add_or_update_scripts/go_sdk_add_or_update_spec.py delete mode 100644 scripts/auto_gen_utils/add_or_update_scripts/java_sdk_add_or_update_spec.py delete mode 100644 scripts/auto_gen_utils/add_or_update_scripts/legacy_java_sdk_add_or_update_spec.py delete mode 100644 scripts/auto_gen_utils/add_or_update_scripts/module_pom_file_add_or_update_spec.py delete mode 100644 scripts/auto_gen_utils/add_or_update_scripts/powershell_add_or_update_spec.py delete mode 100644 scripts/auto_gen_utils/add_or_update_scripts/python_sdk_add_or_update_spec.py delete mode 100644 scripts/auto_gen_utils/add_or_update_scripts/ruby_sdk_add_or_update_spec.py delete mode 100644 scripts/auto_gen_utils/add_or_update_scripts/single_pom_file_add_or_update_spec.py delete mode 100644 scripts/auto_gen_utils/add_or_update_scripts/spec_updater_base.py delete mode 100644 scripts/auto_gen_utils/add_or_update_scripts/templates/pom-template.xml delete mode 100644 scripts/auto_gen_utils/add_or_update_scripts/typescript_sdk_add_or_update_spec.py delete mode 100644 scripts/auto_gen_utils/autogen_issue_advisor.py delete mode 100644 scripts/auto_gen_utils/autogen_issue_advisor_preview.py delete mode 100644 scripts/auto_gen_utils/autogen_issue_advisor_public.py delete mode 100644 scripts/auto_gen_utils/autogen_issue_advisor_shared.py delete mode 100644 scripts/auto_gen_utils/change_dexreq_to_release_approved.py delete mode 100755 scripts/auto_gen_utils/change_dexreq_to_release_approved.sh delete mode 100644 scripts/auto_gen_utils/clean_auto_branches.py delete mode 100644 scripts/auto_gen_utils/cli_manual_changes_required_check.py delete mode 100644 scripts/auto_gen_utils/codecov_baseline.html delete mode 100644 scripts/auto_gen_utils/comment_with_fields_to_be_deleted.py delete mode 100644 scripts/auto_gen_utils/config.py delete mode 100644 scripts/auto_gen_utils/consolidate_feature_id_enabled_groups_files.py delete mode 100644 scripts/auto_gen_utils/create_cli_design_review_ticket.py delete mode 
100644 scripts/auto_gen_utils/create_recordings_ticket.py delete mode 100644 scripts/auto_gen_utils/dexreq_migration.py delete mode 100644 scripts/auto_gen_utils/generate_button.js delete mode 100644 scripts/auto_gen_utils/get_jira_access_token.py delete mode 100755 scripts/auto_gen_utils/hooks/pre-commit delete mode 100644 scripts/auto_gen_utils/ocibuild.conf delete mode 100644 scripts/auto_gen_utils/pull_request_tracker/.gitignore delete mode 100644 scripts/auto_gen_utils/pull_request_tracker/README.md delete mode 100644 scripts/auto_gen_utils/pull_request_tracker/__init__.py delete mode 100644 scripts/auto_gen_utils/pull_request_tracker/dexreq_pr_tracker.py delete mode 100644 scripts/auto_gen_utils/pull_request_tracker/homepage/publish_homepage.py delete mode 100644 scripts/auto_gen_utils/pull_request_tracker/homepage/reports_homepage.html delete mode 100644 scripts/auto_gen_utils/pull_request_tracker/homepage/reports_index.json delete mode 100644 scripts/auto_gen_utils/pull_request_tracker/resources/config delete mode 100644 scripts/auto_gen_utils/pull_request_tracker/resources/dexreq_pr_tracker.pem delete mode 100644 scripts/auto_gen_utils/pull_request_tracker/resources/dexreq_pr_tracker_public.pem delete mode 100755 scripts/auto_gen_utils/pull_request_tracker/tc_run.sh delete mode 100644 scripts/auto_gen_utils/pull_request_tracker/templates/issue_table.html delete mode 100644 scripts/auto_gen_utils/pull_request_tracker/templates/pull_request_table.html delete mode 100644 scripts/auto_gen_utils/pull_request_tracker/templates/pull_requests_table.html delete mode 100644 scripts/auto_gen_utils/pull_request_tracker/templates/report_table.html delete mode 100644 scripts/auto_gen_utils/python_cli/__init__.py delete mode 100644 scripts/auto_gen_utils/python_cli/auto-route-jira-sd/auto_close_ticket.py delete mode 100644 scripts/auto_gen_utils/python_cli/auto-route-jira-sd/auto_suggest.py delete mode 100644 scripts/auto_gen_utils/python_cli/auto-route-jira-sd/confluence_api_utils/utils.py delete mode 100644 scripts/auto_gen_utils/python_cli/auto-route-jira-sd/confluence_automation.py delete mode 100644 scripts/auto_gen_utils/python_cli/auto-route-jira-sd/constants.py delete mode 100644 scripts/auto_gen_utils/python_cli/auto-route-jira-sd/jira_client_util.py delete mode 100644 scripts/auto_gen_utils/python_cli/auto-route-jira-sd/mos_mapping.py delete mode 100644 scripts/auto_gen_utils/python_cli/check_design_review_tickets.py delete mode 100644 scripts/auto_gen_utils/python_cli/constants.py delete mode 100644 scripts/auto_gen_utils/python_cli/exceptions.py delete mode 100644 scripts/auto_gen_utils/python_cli/execute_manual_changes.py delete mode 100644 scripts/auto_gen_utils/python_cli/generate_local_changes.py delete mode 100644 scripts/auto_gen_utils/python_cli/generate_manual_changes.py delete mode 100644 scripts/auto_gen_utils/python_cli/git_util.py delete mode 100755 scripts/auto_gen_utils/python_cli/install_python_cli_local.sh delete mode 100644 scripts/auto_gen_utils/python_cli/jira_config.py delete mode 100644 scripts/auto_gen_utils/python_cli/jira_util.py delete mode 100644 scripts/auto_gen_utils/python_cli/manual_change_examples.py delete mode 100644 scripts/auto_gen_utils/python_cli/rename_root_group.py delete mode 100644 scripts/auto_gen_utils/python_cli/self_service_manual_change.py delete mode 100644 scripts/auto_gen_utils/python_cli/self_service_manual_change_util.py delete mode 100644 scripts/auto_gen_utils/python_cli/templates/copy_params_from_generated.py.js2 delete mode 
100644 scripts/auto_gen_utils/python_cli/templates/flatten_parameter.py.js2 delete mode 100644 scripts/auto_gen_utils/python_cli/templates/flatten_parameter_kwargs.py.js2 delete mode 100644 scripts/auto_gen_utils/python_cli/templates/flatten_parameter_option.py.js2 delete mode 100644 scripts/auto_gen_utils/python_cli/templates/jira_exception_message.py.js2 delete mode 100644 scripts/auto_gen_utils/python_cli/templates/move_command.py.js2 delete mode 100644 scripts/auto_gen_utils/python_cli/templates/move_group.py.js2 delete mode 100644 scripts/auto_gen_utils/python_cli/templates/new_extended_file.py.js2 delete mode 100644 scripts/auto_gen_utils/python_cli/templates/remove_command.py.js2 delete mode 100644 scripts/auto_gen_utils/python_cli/templates/remove_parameter.py.js2 delete mode 100644 scripts/auto_gen_utils/python_cli/templates/rename_command.py.js2 delete mode 100644 scripts/auto_gen_utils/python_cli/templates/rename_parameter.py.js2 delete mode 100644 scripts/auto_gen_utils/python_cli/templates/rename_parameter_kwargs.py.js2 delete mode 100644 scripts/auto_gen_utils/python_cli/templates/rename_parameter_option.py.js2 delete mode 100644 scripts/auto_gen_utils/requirements-ocibuild.txt delete mode 100644 scripts/auto_gen_utils/requirements.txt delete mode 100644 scripts/auto_gen_utils/sdk_regions_updater/README.md delete mode 100644 scripts/auto_gen_utils/sdk_regions_updater/__init__.py delete mode 100644 scripts/auto_gen_utils/sdk_regions_updater/config delete mode 100644 scripts/auto_gen_utils/sdk_regions_updater/dotnet_sdk_region_updater.py delete mode 100644 scripts/auto_gen_utils/sdk_regions_updater/go_sdk_region_updater.py delete mode 100644 scripts/auto_gen_utils/sdk_regions_updater/java_sdk_region_updater.py delete mode 100644 scripts/auto_gen_utils/sdk_regions_updater/python_sdk_region_updater.py delete mode 100644 scripts/auto_gen_utils/sdk_regions_updater/region_updater_utils.py delete mode 100644 scripts/auto_gen_utils/sdk_regions_updater/ruby_sdk_region_updater.py delete mode 100644 scripts/auto_gen_utils/sdk_regions_updater/sdk_regions_updater.py delete mode 100644 scripts/auto_gen_utils/sdk_regions_updater/templates/dotnet-sdk-realms.tpl delete mode 100644 scripts/auto_gen_utils/sdk_regions_updater/templates/dotnet-sdk-regions.tpl delete mode 100644 scripts/auto_gen_utils/sdk_regions_updater/templates/go-sdk-regions.tpl delete mode 100644 scripts/auto_gen_utils/sdk_regions_updater/templates/python-sdk-regions.tpl delete mode 100644 scripts/auto_gen_utils/sdk_regions_updater/templates/ruby-sdk-regions.tpl delete mode 100644 scripts/auto_gen_utils/sdk_regions_updater/typescript_sdk_region_updater.py delete mode 100644 scripts/auto_gen_utils/setup.cfg delete mode 100644 scripts/auto_gen_utils/shared/__init__.py delete mode 100644 scripts/auto_gen_utils/shared/bitbucket_utils.py delete mode 100644 scripts/auto_gen_utils/shared/buildsvc_tc_compatibility.py delete mode 100644 scripts/auto_gen_utils/shared/version_utils.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/api_review/GATE_CHECK_README.md delete mode 100644 scripts/auto_gen_utils/team_city_scripts/api_review/gate_check_api_review_tickets.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/api_review/process_api_review_tickets.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/bug_bash/process_bug_bash_tickets.py delete mode 100755 scripts/auto_gen_utils/team_city_scripts/cli/public/10_report_cli_and_python_sdk_generation_success.sh delete mode 100755 
scripts/auto_gen_utils/team_city_scripts/cli/public/11_commit_and_push_generated_changes.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/cli/public/1_setup_cli_and_python_sdk_public_branches.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/cli/public/3_record_python_sdk_generation_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/cli/public/5_record_cli_generation_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/cli/public/6_python_sdk_build_and_publish_artifacts.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/cli/public/7_record_python_sdk_build_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/cli/public/8_cli_build_and_publish_artifacts.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/cli/public/9_record_cli_build_success.sh delete mode 100644 scripts/auto_gen_utils/team_city_scripts/cli/public/constants.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/cli/public/gather_cli_changelog_entry.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/cli/public/set_changed_service_env_variable.py delete mode 100755 scripts/auto_gen_utils/team_city_scripts/datagen/preview/1_setup_testdata_preview_branch.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/datagen/preview/6_report_gen_and_build_status.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/datagen/preview/9_mark_done_post_bulk_merge.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/datagen/public/1_setup_testdata_branch.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/datagen/public/6_report_gen_and_build_status.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/datagen/public/9_mark_done_post_bulk_merge.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/1_setup_dotnet_sdk_preview_branch.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/3_record_sdk_generation_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/5_record_sdk_build_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/6_report_gen_and_build_status.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/7_commit_generated_changes.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/9_mark_done_post_bulk_merge.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/1_setup_dotnet_sdk_branch.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/3_record_sdk_generation_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/5_record_sdk_build_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/6_report_gen_and_build_status.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/7_commit_generated_changes.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/9_mark_done_post_bulk_merge.sh delete mode 100644 scripts/auto_gen_utils/team_city_scripts/github_issues/github_issue_summary_header.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/github_issues/github_label_routing_config.yaml delete mode 100644 scripts/auto_gen_utils/team_city_scripts/github_issues/github_repo_config.yaml delete mode 100644 scripts/auto_gen_utils/team_city_scripts/github_issues/issue_config.py delete mode 100644 
scripts/auto_gen_utils/team_city_scripts/github_issues/jira_wrapper.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/github_issues/route_issues_from_github.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/github_issues/update_github_labels.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/github_issues/util.py delete mode 100755 scripts/auto_gen_utils/team_city_scripts/go/public/1_setup_go_public_branch.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/go/public/3_record_sdk_generation_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/go/public/5_record_sdk_build_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/go/public/6_report_gen_and_build_status.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/go/public/7_commit_generated_changes.sh delete mode 100644 scripts/auto_gen_utils/team_city_scripts/java/check_for_pom_version_mismatch.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/java/checkout_source_branch.py delete mode 100755 scripts/auto_gen_utils/team_city_scripts/java/compatibility/compare-all-versions-using-clirr.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/java/compatibility/compare-all-versions-using-codegen-version.sh delete mode 100644 scripts/auto_gen_utils/team_city_scripts/java/determine_build_profile.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/java/determine_codegen_projects_from_commit.py delete mode 100755 scripts/auto_gen_utils/team_city_scripts/java/determine_full_version.py delete mode 100755 scripts/auto_gen_utils/team_city_scripts/java/public/1_setup_java_public_branch.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/java/public/3_record_sdk_generation_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/java/public/5_record_sdk_build_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/java/public/6_report_gen_and_build_status.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/java/public/7_commit_generated_changes.sh delete mode 100644 scripts/auto_gen_utils/team_city_scripts/java/warn_about_backward_incompatible_changes.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/java/warn_about_source_formatting.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/check_for_pom_version_mismatch.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/checkout_source_branch.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/determine_build_profile.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/determine_codegen_projects_from_commit.py delete mode 100755 scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/determine_full_version.py delete mode 100755 scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/1_setup_legacy_java_public_branch.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/3_record_sdk_generation_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/5_record_sdk_build_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/6_report_gen_and_build_status.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/7_commit_generated_changes.sh delete mode 100644 scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/warn_about_backward_incompatible_changes.py delete mode 
100644 scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/warn_about_source_formatting.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/oci_testing_service/determine_issue_routing_info_tag.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/oci_testing_service/determine_java_sdk_versions_used.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/oci_testing_service/determine_pr_target_branch.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/oci_testing_service/determine_test_classes.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/oci_testing_service/jira_ticket_reporter.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/oci_testing_service/ocits_shared.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/oci_testing_service/report_test_results.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/oci_testing_service/report_testing_service_failure.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/oci_testing_service/report_testing_service_stderr.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/oci_testing_service/save_codecov_data.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/oci_testing_service/switch_to_matching_sdk_branch.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/orm/fail_build_on_code_coverage.py delete mode 100755 scripts/auto_gen_utils/team_city_scripts/powershell/preview/1_setup_powershell_preview_branch.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/powershell/preview/3_record_ps_generation_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/powershell/preview/5_record_ps_build_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/powershell/preview/6_report_gen_and_build_status.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/powershell/preview/7_commit_generated_changes.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/powershell/preview/9_mark_done_post_bulk_merge.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/powershell/public/1_setup_powershell_branch.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/powershell/public/3_record_ps_generation_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/powershell/public/5_record_ps_build_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/powershell/public/6_report_gen_and_build_status.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/powershell/public/7_commit_generated_changes.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/powershell/public/9_mark_done_post_bulk_merge.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/process_jira_queue.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/1_setup_python_public_branch.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/3_record_sdk_generation_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/4_build_and_publish_artifacts.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/5_record_sdk_build_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/6_report_gen_and_build_status.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/7_commit_and_push_generated_changes.sh delete mode 100755 
scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/disableStrictHostKeyChecking.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/ruby/preview/1_setup_ruby_preview_branch.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/ruby/preview/2_configure_rbenv.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/ruby/preview/4_record_sdk_generation_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/ruby/preview/5_build_and_publish_artifacts.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/ruby/preview/6_record_sdk_build_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/ruby/preview/7_report_gen_and_build_status.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/ruby/preview/8_commit_generated_changes.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/ruby/preview/9_mark_done_post_bulk_merge.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/ruby/public/1_setup_ruby_public_branch.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/ruby/public/2_configure_rbenv.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/ruby/public/4_record_sdk_generation_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/ruby/public/5_build_and_publish_artifacts.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/ruby/public/6_record_sdk_build_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/ruby/public/7_report_gen_and_build_status.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/ruby/public/8_commit_generated_changes.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/ruby/public/9_mark_done_post_bulk_merge.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/run_autogen_issue_advisor.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/run_autogen_issue_advisor_calendar.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/typescript/preview/1_setup_typescript_preview_branch.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/typescript/preview/3_record_sdk_generation_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/typescript/preview/5_record_sdk_build_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/typescript/preview/6_report_gen_and_build_status.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/typescript/preview/7_commit_generated_changes.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/typescript/preview/9_mark_done_post_bulk_merge.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/typescript/public/1_setup_typescript_branch.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/typescript/public/3_record_sdk_generation_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/typescript/public/5_record_sdk_build_success.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/typescript/public/6_report_gen_and_build_status.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/typescript/public/7_commit_generated_changes.sh delete mode 100755 scripts/auto_gen_utils/team_city_scripts/typescript/public/9_mark_done_post_bulk_merge.sh delete mode 100644 scripts/auto_gen_utils/team_city_scripts/udx_automation/README.md delete mode 100644 scripts/auto_gen_utils/team_city_scripts/udx_automation/__init__.py delete mode 100644 scripts/auto_gen_utils/team_city_scripts/udx_automation/udx_ticket_review_automation.py delete mode 100644 
scripts/auto_gen_utils/team_city_scripts/zip_and_delete.py delete mode 100644 scripts/auto_gen_utils/tests/__init__.py delete mode 100644 scripts/auto_gen_utils/tests/test_bitbucket_utils.py delete mode 100644 scripts/auto_gen_utils/tests/test_buildsvc_tc_compatibility.py delete mode 100644 scripts/auto_gen_utils/tests/test_clean_auto_branches.py delete mode 100644 scripts/auto_gen_utils/tests/test_cli_branch_text.py delete mode 100644 scripts/auto_gen_utils/tests/test_config.py delete mode 100644 scripts/auto_gen_utils/tests/test_module_pom_file_add_or_update_spec.py delete mode 100644 scripts/auto_gen_utils/tests/test_python_sdk_add_or_update_spec.py delete mode 100644 scripts/auto_gen_utils/tests/test_version_utils.py delete mode 100755 scripts/auto_gen_utils/update-codegen-version.sh delete mode 100644 scripts/auto_gen_utils/update_region.py delete mode 100644 scripts/auto_gen_utils/update_testing_service_dependency.py delete mode 100644 scripts/auto_gen_utils/util.py delete mode 100755 scripts/auto_gen_utils/verify.sh delete mode 100755 scripts/clone_key_repo.sh delete mode 100755 scripts/generate_docs.sh delete mode 100644 scripts/setup_test_docker.sh delete mode 100644 scripts/test_pom.original.xml delete mode 100644 shared-build-service-scripts/.gitignore delete mode 100644 shared-build-service-scripts/README.md delete mode 100755 shared-build-service-scripts/check-secrets.sh delete mode 100755 shared-build-service-scripts/git-submodule-helpers.sh delete mode 100755 shared-build-service-scripts/make-pr-comment.sh delete mode 100755 shared-build-service-scripts/relpath.sh delete mode 100755 shared-build-service-scripts/setup-git-secrets-readwrite.sh delete mode 100755 shared-build-service-scripts/setup-git-secrets.sh delete mode 100755 shared-build-service-scripts/ssh_give_pass.sh
diff --git a/MakefileDevelopment.mk b/MakefileDevelopment.mk
deleted file mode 100644
index 09505a5a62..0000000000
--- a/MakefileDevelopment.mk
+++ /dev/null
@@ -1,114 +0,0 @@
-#### Project generation setup
-PROJECT_NAME=github.com/oracle/oci-go-sdk
-PROJECT_PATH=$(GOPATH)/src/$(PROJECT_NAME)
-REMOVE_AFTER_GENERATE=audit/audit_waiters.go objectstorage/objectstorage_waiters.go
-DOC_SERVER_URL_DEV=https:\/\/docs.cloud.oracle.com
-
-#### Versions
-#### If you are doing a release, do not forget to increment these versions
-VER_MAJOR=65
-VER_MINOR=67
-VER_PATCH=0
-###################
-
-#### AutoTest setup
-AUTOTEST_DIR = autotest
-AUTOTEST_HELPERS = client.go configurations.go main_test.go
-AUTOTEST_FILES = $(notdir $(wildcard $(AUTOTEST_DIR)/*client_auto_test.go))
-AUTOTEST_TARGETS = $(patsubst %_client_auto_test.go, autotest-%, $(AUTOTEST_FILES))
-
-define HELP_MESSAGE
-make -f MakefileDevelopment.mk build: builds and generates the sdk
-make -f MakefileDevelopment.mk build-sdk: to build the sdk only
-make -f MakefileDevelopment.mk generate: to generate the sdk
-make -f MakefileDevelopment.mk list-autotest-services to list all autogenerated test targets at the service level
-make -f MakefileDevelopment.mk autotest-[name of the package] to run autogenerated tests
-make -f MakefileDevelopment.mk autotest-all runs all autogenerated tests
-endef
-
-
-.PHONY: help
-
-export HELP_MESSAGE
-help:
-	@echo "$$HELP_MESSAGE"
-
-list-autotest-services:
-	@echo $(AUTOTEST_TARGETS)
-
-test-all: build-sdk build-autotest test-sdk-only test-integ-test
-
-autotest-all: build-sdk test-sdk-only $(AUTOTEST_TARGETS)
-
-autotest: build-autotest
-	go test -v -run $(TEST_NAME) -count 1 -timeout 3h github.com/oracle/oci-go-sdk/autotest
-
-$(AUTOTEST_TARGETS): autotest-%:%
-	@echo Testing $(AUTOTEST_DIR)/$<_client_auto_test.go
-	@(cd $(AUTOTEST_DIR) && go test -v $(AUTOTEST_HELPERS) $<_client_auto_test.go)
-
-generate:
-	@echo "Cleaning and generating sdk"
-	@(cd $(PROJECT_PATH) && make clean-generate)
-	@echo "Cleaning autotest files"
-	@find autotest -name \*_auto_test.go|xargs rm -f
-	PROJECT_NAME=$(PROJECT_NAME) mvn clean install
-	@(cd $(PROJECT_PATH) && rm -f $(REMOVE_AFTER_GENERATE))
-	find . -name \*.go |xargs sed -i "" "s#\"$(PROJECT_NAME)/\(v[0-9]*/\)*#\"$(PROJECT_NAME)/v$(VER_MAJOR)/#g"
-
-generate-local: generate
-	@make pre-doc-local
-	@make test
-
-build-autotest:
-	@echo "building autotests"
-	@(cd $(AUTOTEST_DIR) && gofmt -s -w . && go test -c)
-
-build-sdk:
-	@echo "Building sdk"
-	@(cd $(PROJECT_PATH) && make build)
-
-test-sdk-only:
-	@echo "Testing sdk common"
-	@(cd $(PROJECT_PATH) && make test)
-
-test-integ-test:
-	@echo "Testing sdk integ test"
-	@(cd $(PROJECT_PATH) && make test-integ)
-
-release-sdk:
-	@echo "Building oci-go-sdk with major:$(VER_MAJOR) minor:$(VER_MINOR) patch:$(VER_PATCH) tag:$(VER_TAG)"
-	@(cd $(PROJECT_PATH))
-	find . -name \*.go |xargs sed -i "s#\"$(PROJECT_NAME)/\(v[0-9]*/\)*#\"$(PROJECT_NAME)/v$(VER_MAJOR)/#g"
-	@(VER_MAJOR=$(VER_MAJOR) VER_MINOR=$(VER_MINOR) VER_PATCH=$(VER_PATCH) VER_TAG=$(VER_TAG) make release)
-
-build: generate build-sdk build-autotest
-	@(cd $(PROJECT_PATH) && make pre-doc)
-
-release: generate release-sdk build-autotest
-
-generate-pipeline: build-sdk build-autotest test-sdk-only
-
-# command used by the self-service pipeline to clean all generated files
-clean-pipeline:
-	@echo "Cleaning generated files"
-	@(cd $(PROJECT_PATH) && make clean-generate)
-	@echo "Cleaning autotest files"
-	@find autotest -name \*_auto_test.go|xargs rm -f
-	@(cd $(PROJECT_PATH) && rm -f $(REMOVE_AFTER_GENERATE))
-
-# build and lint generated code in the self-service pipeline
-lint-pipeline: update-import build-autotest test-sdk-only
-	@echo "Rendering doc server to ${DOC_SERVER_URL_DEV}"
-	find . -name \*.go |xargs sed -i 's/{{DOC_SERVER_URL}}/${DOC_SERVER_URL_DEV}/g'
-	# Note: This should stay the old docs URL (with us-phoenix-1), because it
-	# processes go files and changes the old URL into the new URL
-	find . -name \*.go |xargs sed -i 's/https:\/\/docs.us-phoenix-1.oraclecloud.com/${DOC_SERVER_URL_DEV}/g'
-
-# update all imports to match the latest major version
-update-import:
-	find . -name \*.go |xargs sed -i "s#\"$(PROJECT_NAME)/\(v[0-9]*/\)*#\"$(PROJECT_NAME)/v$(VER_MAJOR)/#g"
-
-# clone SDK-CLI keys repo for use in integration tests
-build-clone-keys:
-	./scripts/clone_key_repo.sh
\ No newline at end of file
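The sed substitution shared by the `generate`, `release-sdk`, and `update-import` targets above is dense. As a hedged illustration (not part of the repository), the same rewrite expressed in Python shows its effect on Go import lines:

```python
import re

# Mirrors PROJECT_NAME and VER_MAJOR from MakefileDevelopment.mk above.
PROJECT_NAME = "github.com/oracle/oci-go-sdk"
VER_MAJOR = 65

# Python rendering of: s#"$(PROJECT_NAME)/\(v[0-9]*/\)*#"$(PROJECT_NAME)/v$(VER_MAJOR)/#g
# Any SDK import, with or without an existing /vNN/ segment, is pinned to the
# current major version.
pattern = re.compile('"' + re.escape(PROJECT_NAME) + '/(v[0-9]*/)*')
replacement = '"{}/v{}/'.format(PROJECT_NAME, VER_MAJOR)

print(pattern.sub(replacement, 'import "github.com/oracle/oci-go-sdk/v64/common"'))
# -> import "github.com/oracle/oci-go-sdk/v65/common"
print(pattern.sub(replacement, 'import "github.com/oracle/oci-go-sdk/audit"'))
# -> import "github.com/oracle/oci-go-sdk/v65/audit"
```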
".oraclecorp.com" - "GOROOT": "/usr/local/go" - "GOPATH": "/usr/src/app" - "EXCLUSIONS": ".git/,vendor/" - "EXCEPTIONS_FLAG": "true" - } - }, - ] \ No newline at end of file diff --git a/scripts/IntegTestsDocker b/scripts/IntegTestsDocker deleted file mode 100644 index 7d142acb91..0000000000 --- a/scripts/IntegTestsDocker +++ /dev/null @@ -1,31 +0,0 @@ -ARG TF_TEST_ARTIFACT=$TF_TEST_ARTIFACT -ARG TEST_ARTIFACT_VERSION=$TEST_ARTIFACT_VERSION -FROM odo-docker-signed-local.artifactory.oci.oraclecorp.com/${TF_TEST_ARTIFACT}:${TEST_ARTIFACT_VERSION} - -RUN yum install git - -# Set up environment -ENV GOPROXY=https://artifactory.oci.oraclecorp.com/api/go/go-proxy - -RUN mkdir -p $GOPATH/src/github.com/oracle/oci-go-sdk/ -COPY ./ $GOPATH/src/github.com/oracle/oci-go-sdk -COPY ./scripts/setup_test_docker.sh ./ -COPY ./oci-sdk-cli-keys/go-sdk/resources/go_sdk_test_user_key.pem ./ - -ARG SOURCE_DIR=$SOURCE_DIR -ENV TF_VAR_private_key_path=/usr/src/app/tf_private_key.pem -RUN chmod a+x ./setup_test_docker.sh && export TF_VAR_private_key_path=/usr/src/app/tf_private_key.pem && ./setup_test_docker.sh - -WORKDIR $GOPATH/src/github.com/oracle/oci-go-sdk/ - -# Run tests -ARG TF_VAR_compartment_id=$TF_VAR_compartment_id -ARG TF_VAR_compartment_ocid=$TF_VAR_compartment_ocid -ARG TF_VAR_fingerprint=$TF_VAR_fingerprint -ARG TF_VAR_tenancy_ocid=$TF_VAR_tenancy_ocid -ARG TF_VAR_user_ocid=$TF_VAR_user_ocid -ARG TestSuite=$TestSuite -ARG DBG=$DBG -ARG TF_VAR_region=$TF_VAR_region - -RUN OCI_GO_SDK_DEBUG=$DBG make test \ No newline at end of file diff --git a/scripts/README.md b/scripts/README.md deleted file mode 100644 index 15a79caf38..0000000000 --- a/scripts/README.md +++ /dev/null @@ -1,26 +0,0 @@ -# go-sdk SDK Utilities - -This README describes the various scripts and utilities included in this folder. - -## add_or_update_spec.py -This simplifies the process of updating a pom.xml file for a new service, or changing the spec version for an existing service (it is more useful in the case of the former, rather than the latter). - -### Setup -You should run this script in a Python virtual environment (see [here](https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/python-sdk/browse/Internal-README.rst) for more information on doing the basic setup for a virtual environment if you don't already have one). - -Once you are in a virtual environment, you can run `pip install -r add_or_update_spec-requirements.txt` to install the dependencies for the script. - -### Execution -You can run the script with `python add_or_update_spec.py --help` to get an overview of the various options/parameters which can be fed to it. - -If you want to test the script out, this folder has a sample pom.xml file in it. 
Here is an example of running the script to add the Container Engine service to the pom.xml file: - -``` -python add_or_update_spec.py --artifact-id clusters-api-spec \ ---group-id com.oracle.pic.clusters \ ---spec-name container_engine \ ---relative-spec-path clusters-api-spec.yaml \ ---subdomain containerengine \ ---version 1.0.7 \ ---pom-location test_pom.xml -``` diff --git a/scripts/SCAJobDocker b/scripts/SCAJobDocker deleted file mode 100644 index 6fe03fabfc..0000000000 --- a/scripts/SCAJobDocker +++ /dev/null @@ -1,15 +0,0 @@ -FROM odo-docker-signed-local.artifactory.oci.oraclecorp.com/oci-sca-go-docker:latest - -RUN mkdir -p /usr/src/app -COPY ./ /usr/src/app -WORKDIR /usr/src/app - - -RUN echo "Use of package should be reviewed: 'crypto/sha1' @utils.go, ignore" >> exceptions.csv -RUN echo "Use of package should be reviewed: 'crypto/md5' @multipart_manifest.go, ignore" >> exceptions.csv - -RUN cat exceptions.csv - -ENV EXCLUSIONS=".git/,vendor/" - -RUN scanner -exclude $EXCLUSIONS | tee result-scanner.txt \ No newline at end of file diff --git a/scripts/add_or_update_spec-requirements.txt b/scripts/add_or_update_spec-requirements.txt deleted file mode 100644 index e5fcdf077b..0000000000 --- a/scripts/add_or_update_spec-requirements.txt +++ /dev/null @@ -1 +0,0 @@ -click==6.7 \ No newline at end of file diff --git a/scripts/add_or_update_spec.py b/scripts/add_or_update_spec.py deleted file mode 100644 index c54733e813..0000000000 --- a/scripts/add_or_update_spec.py +++ /dev/null @@ -1,400 +0,0 @@ -#!/usr/bin/env python - -# -# This script manipulates pom.xml to either add new specs or update the versions of existing specs. -# - -import xml.etree.ElementTree as ET -import re -import click -from click.exceptions import UsageError - -try: - # when run locally - from auto_gen_utils.shared.version_utils import is_version_increasing -except ImportError: - # when run from the self-service pipeline - from shared.version_utils import is_version_increasing - -DEFAULT_POM_LOCATION = "../pom.xml" -DEFAULT_GITHUB_WHITELIST_LOCATION = "../github.whitelist" -DEFAULT_MAKE_FILE_LOCATION = "../Makefile" - -PROPERTIES_ELEMENT_ARTIFACT_VERSION = """<{artifact_id}.artifact.version>{version}""" -PROPERTIES_ELEMENT_ARTIFACT_ID = """<{artifact_id}.artifact.id>{artifact_id}""" -PROPERTIES_ELEMENT_SPEC_NAME = """<{artifact_id}.spec.name>{spec_path_relative_to_jar}""" - -UNPACK_EXECUTION_TEMPLATE = """ - - unpack-{spec_name} - initialize - - unpack - - - - - {group_id} - ${{{artifact_id}.artifact.id}} - jar - **/* - ${{spec.temp.dir}}/{spec_name} - - - - -""" - -PREFER_EXECUTION_TEMPLATE = """ - - spec-conditionals-prefer-{spec_name} - initialize - - prefer - - - - - ${{spec.temp.dir}}/{spec_name}/source/${{{artifact_id}.spec.name}} - - ${{spec.temp.dir}}/{spec_name}/${{{artifact_id}.spec.name}} - - ${{preferred.temp.dir}}/{spec_name}/${{{artifact_id}.spec.name}} - - -""" - -PREPROCESS_EXECUTION_TEMPLATE = """ - - spec-conditionals-preprocess-{spec_name} - initialize - - preprocess - - - ${{preferred.temp.dir}}/{spec_name}/${{{artifact_id}.spec.name}} - ${{preprocessed.temp.dir}}/{spec_name}/${{{artifact_id}.spec.name}} - ${{enabled.groups.file}} - ${{enabled.groups.dir}} - - -""" - -GENERATE_EXECUTION_TEMPLATE = """ - - go-public-sdk-{spec_name} - compile - - generate - - - oracle-go-sdk - ${{preprocessed.temp.dir}}/{spec_name}/${{{artifact_id}.spec.name}} - ${{env.GOPATH}}/src/${{fullyQualifiedProjectName}} - {spec_name} - ${{generationType}} - - {spec_name} - ${{fullyQualifiedProjectName}} - {subdomain} - 
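Before the script itself, a hedged sketch (not part of the patch) of the `<properties>` entries that `add_or_update_spec.py` below writes into the pom for the README's Container Engine example; the exact tag layout is an approximation based on the script's `PROPERTIES_ELEMENT_*` templates, whose closing tags are reconstructed here:

```python
# Values taken from the example invocation in scripts/README.md above.
artifact_id = "clusters-api-spec"
version = "1.0.7"
relative_spec_path = "clusters-api-spec.yaml"

# Approximates what generate_and_add_property_element appends under <properties>.
print("<{a}.artifact.version>{v}</{a}.artifact.version>".format(a=artifact_id, v=version))
print("<{a}.artifact.id>{a}</{a}.artifact.id>".format(a=artifact_id))
print("<{a}.spec.name>{p}</{a}.spec.name>".format(a=artifact_id, p=relative_spec_path))
```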
{regional_non_regional_service_overrides} - - ${{project.basedir}}/featureId.yaml - ${{project.basedir}}/codegenConfig/featureIds - - -""" - -CLEAN_ELEMENT_TEMPLATE = """ - - lib/oci/{spec_name} - - **/* - - - util.rb - - -""" - -DEPENDENCY_MANAGEMENT_TEMPLATE = """ - - {group_id} - ${{{artifact_id}.artifact.id}} - ${{{artifact_id}.artifact.version}} - -""" - -ns = {"ns":"http://maven.apache.org/POM/4.0.0"} - -# allow default namespace for output, dont print ns0: prefixes everywhere -ET.register_namespace('',"http://maven.apache.org/POM/4.0.0") - -def parse_pom(pom_location): - return ET.parse(pom_location) - - -def generate_and_add_property_element(pom, artifact_id, version, spec_path_relative_to_jar): - artifact_version_content = PROPERTIES_ELEMENT_ARTIFACT_VERSION.format( - artifact_id=artifact_id, - version=version - ) - artifact_version_element = ET.fromstring(artifact_version_content) - - artifact_id_content = PROPERTIES_ELEMENT_ARTIFACT_ID.format( - artifact_id=artifact_id - ) - artifact_id_element = ET.fromstring(artifact_id_content) - - spec_name_content = PROPERTIES_ELEMENT_SPEC_NAME.format( - artifact_id=artifact_id, - spec_path_relative_to_jar=spec_path_relative_to_jar - ) - spec_name_element = ET.fromstring(spec_name_content) - - xpath = ".//ns:properties" - properties = pom.findall(xpath, ns)[0] - properties.append(artifact_version_element) - properties.append(artifact_id_element) - properties.append(spec_name_element) - - -def update_relative_spec_path(pom, artifact_id, spec_path_relative_to_jar): - xpath = ".//ns:properties/ns:{artifact_id}.spec.name".format(artifact_id=artifact_id) - spec_file_node = pom.findall(xpath, ns)[0] - spec_file_node.text = spec_path_relative_to_jar - - -def generate_and_add_unpack_element(pom, spec_name, group_id, artifact_id, spec_path_relative_to_jar): - content = UNPACK_EXECUTION_TEMPLATE.format( - spec_name=spec_name, - group_id=group_id, - artifact_id=artifact_id, - spec_path_relative_to_jar=spec_path_relative_to_jar) - - unpack_element = ET.fromstring(content) - - # find maven-dependency-plugin where unpacking happens - unpack_plugin_executions = pom.findall(".//ns:plugin[ns:artifactId='maven-dependency-plugin']/ns:executions", ns)[0] - unpack_plugin_executions.append(unpack_element) - - -def generate_and_add_prefer_element(pom, spec_name, group_id, artifact_id, spec_path_relative_to_jar): - content = PREFER_EXECUTION_TEMPLATE.format( - spec_name=spec_name, - group_id=group_id, - artifact_id=artifact_id, - spec_path_relative_to_jar=spec_path_relative_to_jar) - - unpack_element = ET.fromstring(content) - - # find maven-dependency-plugin where unpacking happens - unpack_plugin_executions = pom.findall(".//ns:plugin[ns:artifactId='spec-conditionals-preprocessor-plugin']/ns:executions", ns)[0] - unpack_plugin_executions.append(unpack_element) - - -def generate_and_add_preprocess_element(pom, spec_name, group_id, artifact_id, spec_path_relative_to_jar): - content = PREPROCESS_EXECUTION_TEMPLATE.format( - spec_name=spec_name, - group_id=group_id, - artifact_id=artifact_id, - spec_path_relative_to_jar=spec_path_relative_to_jar) - - unpack_element = ET.fromstring(content) - - # find maven-dependency-plugin where unpacking happens - unpack_plugin_executions = pom.findall(".//ns:plugin[ns:artifactId='spec-conditionals-preprocessor-plugin']/ns:executions", ns)[0] - unpack_plugin_executions.append(unpack_element) - - -def generate_and_add_generate_section(pom, spec_name, artifact_id, spec_path_relative_to_jar, subdomain, regional_sub_service_overrides, 
-def generate_and_add_generate_section(pom, spec_name, artifact_id, spec_path_relative_to_jar, subdomain, regional_sub_service_overrides, non_regional_sub_service_overrides): - regional_non_regional_service_overrides_content = '' - if regional_sub_service_overrides or non_regional_sub_service_overrides: - if regional_sub_service_overrides: - for override in regional_sub_service_overrides: - # one override element per service; the tag name is the snake_cased service name - regional_non_regional_service_overrides_content += '<{service_name}>true</{service_name}>\n'.format(service_name=override) - - if non_regional_sub_service_overrides: - for override in non_regional_sub_service_overrides: - regional_non_regional_service_overrides_content += '<{service_name}>false</{service_name}>\n'.format(service_name=override) - - content = GENERATE_EXECUTION_TEMPLATE.format( - artifact_id=artifact_id, - spec_name=spec_name, - spec_path_relative_to_jar=spec_path_relative_to_jar, - subdomain=subdomain, - regional_non_regional_service_overrides=regional_non_regional_service_overrides_content) - - generate_element = ET.fromstring(content) - - # find bmc-sdk-swagger-maven-plugin where generation happens - generate_plugin_executions = pom.findall(".//ns:plugin[ns:artifactId='bmc-sdk-swagger-maven-plugin']/ns:executions", ns)[0] - generate_plugin_executions.append(generate_element) - - -def generate_and_add_clean_section(pom, spec_name): - content = CLEAN_ELEMENT_TEMPLATE.format( - spec_name=spec_name) - - clean_element = ET.fromstring(content) - - # find the filesets element where the clean fileset goes - filesets = pom.findall(".//ns:plugin[ns:artifactId='maven-clean-plugin']//ns:filesets", ns)[0] - filesets.append(clean_element) - - -def generate_and_add_dependency_management_section(pom, spec_name, group_id, artifact_id, version): - content = DEPENDENCY_MANAGEMENT_TEMPLATE.format( - spec_name=spec_name, - group_id=group_id, - artifact_id=artifact_id, - version=version) - - dep_mgt_element = ET.fromstring(content) - - # find dependencies where version is specified - dependencies = pom.findall(".//ns:dependencyManagement/ns:dependencies", ns)[0] - dependencies.append(dep_mgt_element) - - -def update_version_of_existing_spec(pom, artifact_id, version): - xpath = ".//ns:properties//ns:{artifact_id}.artifact.version".format(artifact_id=artifact_id) - dependency = pom.findall(xpath, ns)[0] - - old_version = dependency.text - - if not is_version_increasing(old_version, version): - return old_version - - dependency.text = version - return None # updated; the old version was lower - -def indent(elem, level=0): - indent_str = " " - i = "\n" + level*indent_str - if len(elem): - if not elem.text or not elem.text.strip(): - elem.text = i + indent_str - for e in elem: - indent(e, level+1) - if not e.tail or not e.tail.strip(): - e.tail = i + indent_str - if not e.tail or not e.tail.strip(): - e.tail = i - else: - if level and (not elem.tail or not elem.tail.strip()): - elem.tail = i - - -def add_spec_module_to_github_whitelist(spec_name, github_whitelist_location): - with open(github_whitelist_location, 'a') as f: - f.write('\n^{}/'.format(spec_name)) - -def add_spec_name_to_make_file(spec_name, make_file_location): - spec_name_token = '##SPECNAME##' - with open(make_file_location) as f: - new_text = f.read().replace(spec_name_token, "{} {}".format(spec_name, spec_name_token)) - - with open(make_file_location, "w") as f: - f.write(new_text) - -def goify_specname(name): - return name.replace('_', '').lower() - -# Returns True if the artifact id is already referenced in the pom's properties section -def artifact_exists_in_pom(pom, artifact_id): - property_xpath = ".//ns:properties" - properties = pom.findall(property_xpath, ns)[0] - - for child in properties.iter(): - if child.text and child.text.lower() == artifact_id.lower(): - return True - return False - -
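# For reference, a sketch of how the update_version_of_existing_spec contract
# above is consumed (artifact id and version are made up; is_version_increasing
# is assumed to come from the shared version utilities this script imports):
#
#   old = update_version_of_existing_spec(pom, 'foo-api-spec', '2.0.0')
#   if old:        # the existing version was >= 2.0.0, nothing was written
#       print('kept existing version {}'.format(old))
#   else:          # None means the version property now reads 2.0.0
#       indent(pom.getroot())
#       pom.write('pom.xml', encoding="UTF-8", xml_declaration=True)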
-def add_or_update_spec(artifact_id=None, group_id=None, spec_name=None, relative_spec_path=None, subdomain=None, version=None, spec_generation_type=None, regional_sub_service_overrides=None, non_regional_sub_service_overrides=None, pom_location=None, github_whitelist_location=None, makefile_location=None): - if not artifact_id: - raise click.exceptions.MissingParameter(param_type='option', param_hint='--artifact-id') - - if spec_generation_type: - print('Note: --spec-generation-type is ignored for the Go SDK, since it is set in the ../pom.xml file for all modules') - - pom = parse_pom(pom_location) - - # force the format of spec_name by removing underscores and lower-casing - if spec_name: - spec_name = goify_specname(spec_name) - - # determine if this artifact is already in the pom - if artifact_exists_in_pom(pom, artifact_id): - print('Artifact {} already exists in pom.xml. Updating specified fields...'.format(artifact_id)) - - if version: - newer_version = update_version_of_existing_spec(pom, artifact_id, version) - if newer_version: - print('The version was not updated to {}, because it was already at {}.'.format(version, newer_version)) - - if relative_spec_path: - update_relative_spec_path(pom, artifact_id, relative_spec_path) - else: - if not version: - raise UsageError('Must specify --version for a new spec') - - if not subdomain: - raise UsageError('Must specify --subdomain for a new spec') - - if not group_id: - raise UsageError('Must specify --group-id for a new spec') - - if not spec_name: - raise UsageError('Must specify --spec-name for a new spec') - - if not relative_spec_path: - raise UsageError('Must specify --relative-spec-path for a new spec') - - print('Artifact {} does not exist in pom.xml. Adding it...'.format(artifact_id)) - generate_and_add_property_element(pom, artifact_id, version, relative_spec_path) - generate_and_add_unpack_element(pom, spec_name, group_id, artifact_id, relative_spec_path) - generate_and_add_prefer_element(pom, spec_name, group_id, artifact_id, relative_spec_path) - generate_and_add_preprocess_element(pom, spec_name, group_id, artifact_id, relative_spec_path) - generate_and_add_generate_section(pom, spec_name, artifact_id, relative_spec_path, subdomain, regional_sub_service_overrides, non_regional_sub_service_overrides) - generate_and_add_clean_section(pom, spec_name) - generate_and_add_dependency_management_section(pom, spec_name, group_id, artifact_id, version) - add_spec_module_to_github_whitelist(spec_name, github_whitelist_location) - add_spec_name_to_make_file(spec_name, makefile_location) - - # pretty print pom - indent(pom.getroot()) - pom.write(pom_location, encoding="UTF-8", xml_declaration=True) - - print('Success!') - - -@click.command() -@click.option('--artifact-id', help='The artifact id for the spec artifact (e.g. coreservices-api-spec)') -@click.option('--group-id', help='The group id for the spec artifact (e.g. com.oracle.pic.commons)') -@click.option('--spec-name', help='The name of the spec (e.g. core, identity, object_storage). This is also used as the module name.') -@click.option('--relative-spec-path', help='The relative path of the spec within the artifact (e.g. coreservices-api-spec-20160918-external.yaml)') -@click.option('--subdomain', help='The subdomain for the service (e.g. if the endpoint is https://iaas.{domain}/20160918, the subdomain is "iaas")') -@click.option('--version', help='The version of the spec artifact (e.g. 0.0.1-SNAPSHOT)') -@click.option('--spec-generation-type', help='The generation type: PUBLIC or PREVIEW') -@click.option('--regional-sub-service-overrides', multiple=True, help="""For specs that contain multiple services -(because there are operations with different tags in the spec), which of those services should be considered regional. -Services are considered regional by default. - -This should be the snake_cased name of the tag/service. For example kms_provisioning instead of kmsProvisioning. - -This parameter can be provided multiple times""") -@click.option('--non-regional-sub-service-overrides', multiple=True, help="""For specs that contain multiple services -(because there are operations with different tags in the spec), which of those services should be considered non-regional. - -This should be the snake_cased name of the tag/service. For example kms_provisioning instead of kmsProvisioning. - -This parameter can be provided multiple times""") -@click.option('--pom-location', type=click.Path(exists=True), default=DEFAULT_POM_LOCATION, help='Location of the pom.xml file to update') -@click.option('--github-whitelist-location', type=click.Path(exists=True), default=DEFAULT_GITHUB_WHITELIST_LOCATION, help='Location of the github.whitelist file to update') -@click.option('--makefile-location', type=click.Path(exists=True), default=DEFAULT_MAKE_FILE_LOCATION, help='Location of the Makefile to update') -def add_or_update_spec_command(artifact_id, group_id, spec_name, relative_spec_path, subdomain, version, spec_generation_type, regional_sub_service_overrides, non_regional_sub_service_overrides, pom_location, github_whitelist_location, makefile_location): - add_or_update_spec(artifact_id, group_id, spec_name, relative_spec_path, subdomain, version, spec_generation_type, regional_sub_service_overrides, non_regional_sub_service_overrides, pom_location, github_whitelist_location, makefile_location) - - -if __name__ == '__main__': - add_or_update_spec_command() diff --git a/scripts/auto_gen_utils/.gitignore b/scripts/auto_gen_utils/.gitignore deleted file mode 100644 index 2790495e37..0000000000 --- a/scripts/auto_gen_utils/.gitignore +++ /dev/null @@ -1,12 +0,0 @@ -.idea/ -.vscode/ -*.pyc -/temp -.pytest_cache/ -.DS_STORE -*.python-version -auto-gen-utils/ -/team_city_scripts/java/compatibility/output/ -requirements-mricken.txt -/input_ocibuild/ -/output_ocibuild*/ \ No newline at end of file diff --git a/scripts/auto_gen_utils/1_process_preview_jira_queue.py b/scripts/auto_gen_utils/1_process_preview_jira_queue.py deleted file mode 100755 index 1f97ec24ce..0000000000 --- a/scripts/auto_gen_utils/1_process_preview_jira_queue.py +++ /dev/null @@ -1,1314 +0,0 @@ -import argparse -import click -import datetime -import json -import os -import re -import requests -import shutil -import tempfile -import textwrap -import traceback -import zipfile - -import util -import config - -import shared.version_utils -from shared.buildsvc_tc_compatibility import build_log_link - -from jira import JIRAError - -DEFAULT_JIRA_ISSUE_FIELDS = ['summary', 'description', 'status', 'labels'] -CUSTOM_JIRA_ISSUE_FIELDS = [ - config.CUSTOM_FIELD_ID_ARTIFACT_ID, - config.CUSTOM_FIELD_ID_GROUP_ID, - config.CUSTOM_FIELD_ID_ARTIFACT_VERSION, - config.CUSTOM_FIELD_ID_SPEC_LOCATION_IN_ARTIFACT, - config.CUSTOM_FIELD_ID_SPEC_FRIENDLY_NAME, - config.CUSTOM_FIELD_ID_SERVICE_SUBDOMAIN, - config.CUSTOM_FIELD_ID_FEATURE_IDS, - config.CUSTOM_FIELD_ID_UDX_TICKET, - config.CUSTOM_FIELD_ID_JAVA_SDK_STATUS, -
config.CUSTOM_FIELD_ID_PYTHON_SDK_STATUS, - config.CUSTOM_FIELD_ID_RUBY_SDK_STATUS, - config.CUSTOM_FIELD_ID_GO_SDK_STATUS, - config.CUSTOM_FIELD_ID_TYPESCRIPT_SDK_STATUS, - config.CUSTOM_FIELD_ID_DOTNET_SDK_STATUS, - config.CUSTOM_FIELD_ID_CLI_STATUS, - config.CUSTOM_FIELD_ID_POWERSHELL_STATUS, - config.CUSTOM_FIELD_ID_TEST_DATA_STATUS, - config.CUSTOM_FIELD_ID_LEGACY_JAVA_SDK_STATUS -] - -CUSTOM_JIRA_ISSUE_FIELDS_PER_BUILD_TYPE = { - config.BUILD_TYPE_INDIVIDUAL_PREVIEW: [], - config.BUILD_TYPE_BULK_PENDING_MERGE_PREVIEW: [], - config.BUILD_TYPE_INDIVIDUAL_PUBLIC: [ - config.CUSTOM_FIELD_ID_PREVIEW_ISSUE, - config.CUSTOM_FIELD_ID_SDK_CLI_GA_DATE, - config.CUSTOM_FIELD_ID_CHANGELOG, - config.CUSTOM_FIELD_ID_ACKNOWLEDGE_RESPONSIBILITIES - ], - config.BUILD_TYPE_BULK_PENDING_MERGE_PUBLIC: [ - config.CUSTOM_FIELD_ID_PREVIEW_ISSUE, - config.CUSTOM_FIELD_ID_SDK_CLI_GA_DATE, - config.CUSTOM_FIELD_ID_CHANGELOG, - config.CUSTOM_FIELD_ID_ACKNOWLEDGE_RESPONSIBILITIES - ] -} - -FEATURE_ID_DIR = 'featureIds' -CODEGEN_CONFIG_DIR = 'codegenConfig' -CONDITIONAL_PRE_PROCESSOR_DIR = 'enabledGroups' - -FEATURE_ID_FILES_FOR_TOOL = { - config.JAVA_SDK_NAME: [os.path.join(config.JAVA_SDK_REPO_RELATIVE_LOCATION, "bmc-codegen", FEATURE_ID_DIR)], - config.PYTHON_SDK_NAME: [os.path.join(config.PYTHON_SDK_REPO_RELATIVE_LOCATION, CODEGEN_CONFIG_DIR, FEATURE_ID_DIR)], - config.CLI_NAME: [ - os.path.join(config.PYTHON_SDK_REPO_RELATIVE_LOCATION, CODEGEN_CONFIG_DIR, FEATURE_ID_DIR), - os.path.join(config.CLI_REPO_RELATIVE_LOCATION, CODEGEN_CONFIG_DIR, FEATURE_ID_DIR)], - config.RUBY_SDK_NAME: [os.path.join(config.RUBY_SDK_REPO_RELATIVE_LOCATION, "codegen", CODEGEN_CONFIG_DIR, FEATURE_ID_DIR)], - config.GO_SDK_NAME: [os.path.join(config.GO_SDK_REPO_RELATIVE_LOCATION, CODEGEN_CONFIG_DIR, FEATURE_ID_DIR)], - config.TEST_DATA_GEN_NAME: [os.path.join(config.TEST_DATA_GEN_REPO_RELATIVE_LOCATION, "codegen", CODEGEN_CONFIG_DIR, FEATURE_ID_DIR)], - config.TYPESCRIPT_SDK_NAME: [os.path.join(config.TYPESCRIPT_SDK_REPO_RELATIVE_LOCATION, "codegen", FEATURE_ID_DIR)], - config.DOTNET_SDK_NAME: [os.path.join(config.DOTNET_SDK_REPO_RELATIVE_LOCATION, "Codegen", CODEGEN_CONFIG_DIR, FEATURE_ID_DIR)], - config.POWERSHELL_NAME: [ - os.path.join(config.POWERSHELL_REPO_RELATIVE_LOCATION, "codegen", CODEGEN_CONFIG_DIR, FEATURE_ID_DIR), - os.path.join(config.DOTNET_SDK_REPO_RELATIVE_LOCATION, "Codegen", CODEGEN_CONFIG_DIR, FEATURE_ID_DIR) - ], - config.LEGACY_JAVA_SDK_NAME: [os.path.join(config.LEGACY_JAVA_SDK_REPO_RELATIVE_LOCATION, "bmc-codegen", FEATURE_ID_DIR)] -} - -ENABLED_GROUPS_FILES_FOR_TOOL = { - config.JAVA_SDK_NAME: [os.path.join(config.JAVA_SDK_REPO_RELATIVE_LOCATION, "bmc-codegen", CONDITIONAL_PRE_PROCESSOR_DIR)], - config.PYTHON_SDK_NAME: [os.path.join(config.PYTHON_SDK_REPO_RELATIVE_LOCATION, CODEGEN_CONFIG_DIR, CONDITIONAL_PRE_PROCESSOR_DIR)], - config.CLI_NAME: [ - os.path.join(config.PYTHON_SDK_REPO_RELATIVE_LOCATION, CODEGEN_CONFIG_DIR, CONDITIONAL_PRE_PROCESSOR_DIR), - os.path.join(config.CLI_REPO_RELATIVE_LOCATION, CODEGEN_CONFIG_DIR, CONDITIONAL_PRE_PROCESSOR_DIR)], - config.RUBY_SDK_NAME: [os.path.join(config.RUBY_SDK_REPO_RELATIVE_LOCATION, "codegen", CODEGEN_CONFIG_DIR, CONDITIONAL_PRE_PROCESSOR_DIR)], - config.GO_SDK_NAME: [os.path.join(config.GO_SDK_REPO_RELATIVE_LOCATION, CODEGEN_CONFIG_DIR, CONDITIONAL_PRE_PROCESSOR_DIR)], - config.TEST_DATA_GEN_NAME: [os.path.join(config.TEST_DATA_GEN_REPO_RELATIVE_LOCATION, "codegen", CODEGEN_CONFIG_DIR, CONDITIONAL_PRE_PROCESSOR_DIR)], - config.TYPESCRIPT_SDK_NAME: 
[os.path.join(config.TYPESCRIPT_SDK_REPO_RELATIVE_LOCATION, "codegen",CONDITIONAL_PRE_PROCESSOR_DIR)], - config.DOTNET_SDK_NAME: [os.path.join(config.DOTNET_SDK_REPO_RELATIVE_LOCATION, "Codegen", CODEGEN_CONFIG_DIR, CONDITIONAL_PRE_PROCESSOR_DIR)], - config.POWERSHELL_NAME: [ - os.path.join(config.POWERSHELL_REPO_RELATIVE_LOCATION, "codegen", CODEGEN_CONFIG_DIR, CONDITIONAL_PRE_PROCESSOR_DIR), - os.path.join(config.DOTNET_SDK_REPO_RELATIVE_LOCATION, "Codegen", CODEGEN_CONFIG_DIR, CONDITIONAL_PRE_PROCESSOR_DIR) - ], - config.LEGACY_JAVA_SDK_NAME: [os.path.join(config.LEGACY_JAVA_SDK_REPO_RELATIVE_LOCATION, "bmc-codegen", CONDITIONAL_PRE_PROCESSOR_DIR)] -} - - -# What we call these things in the JIRA ticket -RESULT_FIELD_TO_JIRA_FIELD_NAME = { - 'subdomain': 'Service Subdomain', - 'spec_name': 'Service Friendly Name', - 'group_id': 'Spec Artifact Group Id', - 'artifact_id': 'Spec Artifact Id', - 'version': 'Spec Artifact Version', - 'relative_spec_path': 'Spec Location in Artifact' -} - - -TOOL_ARGUMENT_ALL = 'ALL' - - -BRANCH_TIMESTAMP = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S') - - -def limit_query_to_issue_keys(query, issues, for_any_status): - if issues: - issues_query = ' OR '.join(["key = '{}'".format(i) for i in issues]) - if for_any_status: - query = issues_query - else: - query = '({}) AND ({})'.format(issues_query, query) - return query - - -# returns True if processing of this issue should be skipped -def report_errors_and_warnings(issue, errors_map, warnings_map, tool_names): - errors = errors_map[issue.key] if issue.key in errors_map else [] - warnings = warnings_map[issue.key] if issue.key in warnings_map else [] - - if warnings and not errors: - # comment on issue - util.add_jira_comment( - issue.key, - """Several non-blocking issues in the ticket were found and had to be fixed: - - {warnings} - - The full build log can be found {build_log_link}. - - For TeamCity access see the [self-service FAQ|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=132774928#SDK/CLISelfServiceFrequentlyAskedQuestions-HowdoIgetaccesstoTeamCitylogsandartifacts?]. - - There is no need to re-start this ticket, but please keep these problems in mind when filing future tickets.""".format( - warnings='\n'.join(['*- {}*'.format(warning) for warning in warnings]), - build_id=build_id, - build_log_link=build_log_link(build_id) - ), - comment_type=config.COMMENT_TYPE_INFO - ) - return False # continue - if errors: - if warnings: - # comment on issue - util.add_jira_comment( - issue.key, - """The job failed due to the following errors in the ticket: - - {errors} - - There were also other issues that had to be fixed: - - {warnings} - - The full build log can be found {build_log_link}. - - For TeamCity access see the [self-service FAQ|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=132774928#SDK/CLISelfServiceFrequentlyAskedQuestions-HowdoIgetaccesstoTeamCitylogsandartifacts?]. 
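# For illustration, limit_query_to_issue_keys above composes JQL like this
# (inputs made up):
#
#   limit_query_to_issue_keys("status = 'To Do'", ['DEXREQ-1', 'DEXREQ-2'], False)
#   # -> "(key = 'DEXREQ-1' OR key = 'DEXREQ-2') AND (status = 'To Do')"
#
# With for_any_status=True only the key clause is returned, so the status
# filter in the original query is bypassed.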
- - Please fix the referenced input parameters and update status to '{processing_requested_state}' to re-run.""".format( - errors='\n'.join(['*- {}*'.format(error) for error in errors]), - warnings='\n'.join(['*- {}*'.format(warning) for warning in warnings]), - processing_requested_state=config.STATUS_PROCESSING_REQUESTED, - build_log_link=build_log_link(build_id) - ), - comment_type=config.COMMENT_TYPE_ERROR - ) - else: - # comment on issue - util.add_jira_comment( - issue.key, - """The job failed due to the following errors in the ticket: - - {errors} - - The full build log can be found {build_log_link}. - - For TeamCity access see the [self-service FAQ|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=132774928#SDK/CLISelfServiceFrequentlyAskedQuestions-HowdoIgetaccesstoTeamCitylogsandartifacts?]. - - Please fix the referenced input parameters and update status to '{processing_requested_state}' to re-run.""".format( - errors='\n'.join(['*- {}*'.format(error) for error in errors]), - processing_requested_state=config.STATUS_PROCESSING_REQUESTED, - build_log_link=build_log_link(build_id) - ), - comment_type=config.COMMENT_TYPE_ERROR - ) - - if tool_names: - for tool_name in tool_names: - if tool_name in config.CUSTOM_FIELD_ID_FOR_TOOL and config.CUSTOM_FIELD_ID_FOR_TOOL[tool_name] and tool_name in config.CUSTOM_FIELD_NAME_FOR_TOOL: - # Only transition if there is a field in the Jira item for it - util.transition_issue_per_tool_status(util.JIRA_CLIENT(), issue, config.CUSTOM_STATUS_FAILURE, tool_name) - - # Reload after the transition - issue = util.get_dexreq_issue(issue.key, - fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS_PER_BUILD_TYPE[build_type])) - - # if an issue is already in 'DEX Support Required' based on failure for another tool, we do not want to overwrite that - util.transition_issue_overall_status_if_not_in_status(util.JIRA_CLIENT(), issue, desired_status=config.STATUS_SERVICE_TEAM_FAILURE_INVESTIGATION, blacklisted_status=config.STATUS_DEX_SUPPORT_REQUIRED) - return True # skip - return False # continue - - -def generate_individual_to_do_requests(all_issues, build_id, base_branch, tool_name, allow_individual_tool_generation, build_type, for_any_status): - warnings = {} # key: issue_key, value: list - errors = {} # key: issue_key, value: list - branches = {} - - if build_type == config.BUILD_TYPE_INDIVIDUAL_PUBLIC: - build_description = "public release" - branch_prefix = config.INDIVIDUAL_PUBLIC_BRANCH_PREFIX - spec_generation_type = config.PUBLIC_SPEC_GENERATION_TYPE - else: - build_description = "preview" - branch_prefix = config.INDIVIDUAL_PREVIEW_BRANCH_PREFIX - spec_generation_type = config.PREVIEW_SPEC_GENERATION_TYPE - - is_tool_jira_reportable = util.is_tool_jira_reportable(tool_name) - - for issue in all_issues: - custom_field_id_for_tool = config.CUSTOM_FIELD_ID_FOR_TOOL[tool_name] - if custom_field_id_for_tool: - custom_status_for_tool = getattr(issue.fields, custom_field_id_for_tool) - if not for_any_status and allow_individual_tool_generation and issue.fields and (custom_status_for_tool is None or custom_status_for_tool.value != config.CUSTOM_STATUS_TODO): - print('Skipping generating tool: {} for issue: {} because allow_individual_tool_generation = True and this tool is not set to To Do'.format(tool_name, issue.key)) - continue - - print("==============================================================================") - print('Generating {} {} for {} - {}'.format(build_description, tool_name,
issue.key, issue.fields.summary)) - print("==============================================================================") - - if is_tool_jira_reportable: - util.transition_issue_per_tool_status(util.JIRA_CLIENT(), issue, config.CUSTOM_STATUS_PROCESSING, tool_name) - # Remove a possible BACKWARD_INCOMPATIBLE_CHANGES_LABEL - # Since we're restarting the generation, there may not be any more backward incompatible changes in this version - # If so, we'll find out later again in warn_about_backward_incompatible_changes.py run as part of the Java SDK generation - if config.BACKWARD_INCOMPATIBLE_CHANGES_LABEL in issue.fields.labels: - new_labels = list(issue.fields.labels) - new_labels.remove(config.BACKWARD_INCOMPATIBLE_CHANGES_LABEL) - issue.update(fields={"labels": new_labels}) - - # Reload after the transition - issue = util.get_dexreq_issue(issue.key, - fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS_PER_BUILD_TYPE[build_type])) - - try: - # start by making sure we are on the base branch with no other changes present - checkout_sdk_and_cli_branches(base_branch, tool_name) - for repo in config.REPOS_FOR_TOOL[tool_name]: - repo.git.reset('HEAD','--hard') - repo.git.clean('-f') - - feature_ids = getattr(issue.fields, config.CUSTOM_FIELD_ID_FEATURE_IDS) - if feature_ids: - feature_ids = [feature_id.strip().lower() for feature_id in feature_ids.split(',')] - print('Found the following feature IDs:') - print(feature_ids) - for feature_id_file in FEATURE_ID_FILES_FOR_TOOL[tool_name]: - util.update_feature_id_file(feature_id_file, feature_ids, issue.key) - - for enabled_groups_file in ENABLED_GROUPS_FILES_FOR_TOOL[tool_name]: - util.update_pre_processor_file(enabled_groups_file, feature_ids, issue.key) - - add_or_update_spec_params = convert_issue_to_script_params(issue) - print('Parameters for add or update script:') - pretty_print(add_or_update_spec_params) - - try: - result = invoke_add_or_update_spec(util.JIRA_CLIENT(), issue, add_or_update_spec_params, tool_name, spec_generation_type) - warn_on_unexpected_changes(issue, add_or_update_spec_params, tool_name, build_id, result, warnings) - except click.exceptions.MissingParameter as e: - print('ERROR: {}'.format(str(e))) - add_error(errors, issue.key, - """The job failed due to a missing required parameter. {exception}. - - The full build log can be found {build_log_link}. - - For TeamCity access see the [self-service FAQ|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=132774928#SDK/CLISelfServiceFrequentlyAskedQuestions-HowdoIgetaccesstoTeamCitylogsandartifacts?]. 
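# For reference, the feature-ID normalisation used in this file reduces a
# comma-separated custom-field value to a clean list (sample input made up):
#
#   raw = ' Feature-A , feature-B '
#   [f.strip().lower() for f in raw.split(',')]   # -> ['feature-a', 'feature-b']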
- - Please add the missing parameter and update status to '{processing_requested_state}' to re-run.""".format( - exception=str(e), - build_id=build_id, - processing_requested_state=config.STATUS_PROCESSING_REQUESTED, - build_log_link=build_log_link(build_id) - ) - ) - if is_tool_jira_reportable: - util.transition_issue_per_tool_status(util.JIRA_CLIENT(), issue, config.CUSTOM_STATUS_FAILURE, tool_name) - - # Reload after the transition - issue = util.get_dexreq_issue(issue.key, - fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS_PER_BUILD_TYPE[build_type])) - - # if an issue is already in 'DEX Support Required' based on failure for another tool, we do not want to overwrite that - util.transition_issue_overall_status_if_not_in_status(util.JIRA_CLIENT(), - issue, - desired_status=config.STATUS_SERVICE_TEAM_FAILURE_INVESTIGATION, - blacklisted_status=config.STATUS_DEX_SUPPORT_REQUIRED) - continue - - commit_message = '{commit_prefix} [[{issue_key}]]: {issue_summary}'.format( - commit_prefix=config.POM_UPDATE_COMMIT_MESSAGE_PREFIX, - issue_key=issue.key, - issue_summary=issue.fields.summary - ) - - # Include the tool name in the branch so we can differentiate between Python SDK branches created for the Python SDK - # preview vs Python SDK branches created as a dependency of the CLI preview - prefix = '{}-{}-{}'.format(branch_prefix, tool_name, issue.key) - - branch_name = generate_time_stamped_branch_name(prefix) - branches[issue.key] = branch_name - for repo in config.REPOS_FOR_TOOL[tool_name]: - git = repo.git - git.checkout(b=branch_name) - git.add(A=True) - message = commit_message - if 'nothing to commit' in git.status(): - message = "{} (no change in spec version or feature ids)".format(message) - print(message) - git.commit("-m", message, "--allow-empty") - if config.IS_DRY_RUN: - print('DRY-RUN: not pushing to branch {}'.format(branch_name)) - else: - git.push('-u','origin','HEAD') - - except Exception as e: - print('EXCEPTION: {}'.format(str(e))) - status_field = config.CUSTOM_FIELD_NAME_FOR_TOOL[tool_name] if util.is_tool_jira_reportable(tool_name) else "N/A" - add_error(errors, issue.key, - config.SELF_SERVICE_BUG_TEMPLATE.format( - exception=str(e), - custom_status_field=status_field, - build_log_link=build_log_link(build_id) - ) - ) - if is_tool_jira_reportable: - util.transition_issue_per_tool_status(util.JIRA_CLIENT(), issue, config.CUSTOM_STATUS_FAILURE, tool_name) - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_DEX_SUPPORT_REQUIRED) - - # Reload after the transition - issue = util.get_dexreq_issue(issue.key, - fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS_PER_BUILD_TYPE[build_type])) - - traceback.print_exc() - continue - - return warnings, errors, branches - - -def generate_bulk_pending_merge(spec_generation_type, all_issues, build_id, base_branch, tool_name, allow_individual_tool_generation, for_any_status): - warnings = {} # key: issue_key, value: list - errors = {} # key: issue_key, value: list - branches = {} - - # start by making sure we are on the base branch with no other changes present - checkout_sdk_and_cli_branches(base_branch, tool_name) - for repo in config.REPOS_FOR_TOOL[tool_name]: - repo.git.reset('HEAD','--hard') - repo.git.clean('-f') - - is_tool_jira_reportable = util.is_tool_jira_reportable(tool_name) - - successful_issues = [] - for issue in all_issues: - custom_field_id_for_tool = config.CUSTOM_FIELD_ID_FOR_TOOL[tool_name] - if
custom_field_id_for_tool: - custom_status_for_tool = getattr(issue.fields, custom_field_id_for_tool) - if allow_individual_tool_generation and not for_any_status and issue.fields and (custom_status_for_tool is None or custom_status_for_tool.value != config.CUSTOM_STATUS_TODO): - print('Skipping generating tool: {} for issue: {} because allow_individual_tool_generation = True and this tool is not set to To Do'.format(tool_name, issue.key)) - continue - - if tool_name == config.CLI_NAME: - if util.is_cli_pr_required(issue): - print('Skipping generating tool: {} for issue: {} as it requires manual changes'.format(tool_name, issue.key)) - continue - - print("==============================================================================") - print('Generating {} {} for {} - {}'.format(spec_generation_type, tool_name, issue.key, issue.fields.summary)) - print("==============================================================================") - - if is_tool_jira_reportable: - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_PROCESSING_BULK) - util.transition_issue_per_tool_status(util.JIRA_CLIENT(), issue, config.CUSTOM_STATUS_PROCESSING, tool_name) - - # Reload after the transition - issue = util.get_dexreq_issue(issue.key, - fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS_PER_BUILD_TYPE[build_type])) - - try: - feature_ids = getattr(issue.fields, config.CUSTOM_FIELD_ID_FEATURE_IDS) - if feature_ids: - feature_ids = [feature_id.strip().lower() for feature_id in feature_ids.split(',')] - print('Found the following feature IDs:') - print(feature_ids) - for feature_id_file in FEATURE_ID_FILES_FOR_TOOL[tool_name]: - util.update_feature_id_file(feature_id_file, feature_ids, issue.key) - - for enabled_groups_file in ENABLED_GROUPS_FILES_FOR_TOOL[tool_name]: - util.update_pre_processor_file(enabled_groups_file, feature_ids, issue.key) - - add_or_update_spec_params = convert_issue_to_script_params(issue) - print('Parameters for add or update script:') - pretty_print(add_or_update_spec_params) - - try: - result = invoke_add_or_update_spec(util.JIRA_CLIENT(), issue, add_or_update_spec_params, tool_name, spec_generation_type) - warn_on_unexpected_changes(issue, add_or_update_spec_params, tool_name, build_id, result, warnings) - except click.exceptions.MissingParameter as e: - add_error(errors, issue.key, - """The job failed due to a missing required parameter. {exception}. - - The full build log can be found {build_log_link}. - - For TeamCity access see the [self-service FAQ|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=132774928#SDK/CLISelfServiceFrequentlyAskedQuestions-HowdoIgetaccesstoTeamCitylogsandartifacts?]. 
- - Please add the missing parameter and update status to {processing_requested_state} to re-run.""".format( - exception=str(e), - build_id=build_id, - processing_requested_state=config.STATUS_PROCESSING_REQUESTED, - build_log_link=build_log_link(build_id) - ) - ) - - print('ERROR: {}'.format(str(e))) - if config.IS_DRY_RUN: - print("DRY-RUN: not transitioning {} to {}".format(issue, config.CUSTOM_STATUS_FAILURE)) - elif not is_tool_jira_reportable: - print("Not transitioning {} to {} as tool {} is not reported to Jira".format(issue, - config.CUSTOM_STATUS_FAILURE, - tool_name)) - else: - util.transition_issue_per_tool_status(util.JIRA_CLIENT(), issue, config.CUSTOM_STATUS_FAILURE, tool_name) - # if an issue is already in 'DEX Support Required' based on failure for another tool, we do not want to overwrite that - util.transition_issue_overall_status_if_not_in_status(util.JIRA_CLIENT(), issue, desired_status=config.STATUS_SERVICE_TEAM_FAILURE_INVESTIGATION, blacklisted_status=config.STATUS_DEX_SUPPORT_REQUIRED) - - # Reload after the transition - issue = util.get_dexreq_issue(issue.key, - fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS_PER_BUILD_TYPE[build_type])) - continue - except Exception as e: - status_field = config.CUSTOM_FIELD_NAME_FOR_TOOL[tool_name] if util.is_tool_jira_reportable(tool_name) else "N/A" - add_error(errors, issue.key, - config.SELF_SERVICE_BUG_TEMPLATE.format( - exception=str(e), - custom_status_field=status_field, - build_log_link=build_log_link(build_id) - ) - ) - - if config.IS_DRY_RUN: - print("DRY-RUN: not transitioning {} to {}".format(issue, config.CUSTOM_STATUS_FAILURE)) - elif not is_tool_jira_reportable: - print("Not transitioning {} to {} as tool {} is not reported to Jira".format(issue, - config.CUSTOM_STATUS_FAILURE, - tool_name)) - else: - # Mark the individual SDK status values to the correct state regardless of whether it's bypassed/ignored. - util.transition_issue_per_tool_status(util.JIRA_CLIENT(), issue, config.CUSTOM_STATUS_FAILURE, tool_name)
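# For reference, add_error (defined below) accumulates one list of messages
# per issue key, so repeated failures on the same ticket are reported together
# (the issue key here is made up):
#
#   errors = {}
#   add_error(errors, 'DEXREQ-42', 'first problem')
#   add_error(errors, 'DEXREQ-42', 'second problem')
#   # errors == {'DEXREQ-42': ['first problem', 'second problem']}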
- # Only transition the overall ticket status to DEX SUPPORT REQUIRED if the SDK isn't bypassed/ignored. - if (config.BYPASS_CHECK_GENERATION_PREFIX + tool_name) not in issue.fields.labels: - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_DEX_SUPPORT_REQUIRED) - - # Reload after the transition - issue = util.get_dexreq_issue(issue.key, - fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS_PER_BUILD_TYPE[build_type])) - traceback.print_exc() - continue - - successful_issues.append(issue) - - # only commit changes if some of the issues were successful - if len(successful_issues) > 0: - print('Committing changes for issues that successfully generated...') - successful_issue_keys = ', '.join([si.key for si in successful_issues]) - commit_message = '{commit_prefix} [[{issue_keys}]]'.format( - commit_prefix=config.POM_UPDATE_COMMIT_MESSAGE_PREFIX, - issue_keys=successful_issue_keys, - ) - - # Include the tool name in the branch so we can differentiate between Python SDK branches created for the Python SDK - # preview/master vs Python SDK branches created as a dependency of the CLI preview/master - branch_prefix_to_use = config.BULK_PUBLIC_BRANCH_PREFIX \ - if spec_generation_type == config.PUBLIC_SPEC_GENERATION_TYPE else config.BULK_PREVIEW_BRANCH_PREFIX - prefix = '{}-{}'.format(branch_prefix_to_use, tool_name) - branch_name = generate_time_stamped_branch_name(prefix) - - # map each successful issue to the shared bulk branch (successful_issue_keys is a - # display string, so iterate over the issue objects instead) - for si in successful_issues: - branches[si.key] = branch_name - - for repo in config.REPOS_FOR_TOOL[tool_name]: - git = repo.git - git.checkout(b=branch_name) - git.add(A=True) - message = commit_message - if 'nothing to commit' in git.status(): - message = "{} (no change in spec version or feature ids)".format(message) - print(message) - git.commit("-m", message, "--allow-empty") - if config.IS_DRY_RUN: - print('DRY-RUN: not pushing to branch {}'.format(branch_name)) - else: - git.push('-u','origin','HEAD') - - return warnings, errors, branches - - -def add_error(errors, issue_key, message): - print(message) - errors_for_issue = [] - if issue_key in errors: - errors_for_issue = errors[issue_key] - else: - errors[issue_key] = errors_for_issue - errors_for_issue.append(message) - - -# Returns a singleton list [{'field_id': ..., 'new_value': ..., 'field_name': ...}] if anything was changed, else an empty list -def validate_issue_properties(issue, warnings, field_id, converters): - field_name = config.CUSTOM_FIELD_NAME_FOR_ID[field_id] or field_id - original_value = getattr(issue.fields, field_id) - if original_value: - converted_value = original_value - for c in converters: - converted_value = c(converted_value) - - if converted_value != original_value: - setattr(issue.fields, field_id, converted_value.encode('utf-8')) - add_error(warnings, issue.key, "The field '{}' had formatting issues.".format(field_name)) - return [{"field_name": field_name, "field_id": field_id, "new_value": converted_value}] # changed - - return [] # not changed - else: - return [] # not changed - - -def check_for_internal_ticket(issue): - preview_issue = util.get_dexreq_issue(getattr(issue.fields, config.CUSTOM_FIELD_ID_PREVIEW_ISSUE)) - preview_summary = getattr(preview_issue.fields, "summary") - preview_labels = getattr(preview_issue.fields, "labels") - summary = getattr(issue.fields, "summary") - feature_ids = getattr(issue.fields, config.CUSTOM_FIELD_ID_FEATURE_IDS) - labels = getattr(issue.fields, "labels") - if labels: - for label in labels: - if "internal" in label.lower(): - return True - if preview_labels: - for label in preview_labels: - if "internal" in
label.lower(): - return True - # the feature IDs field is a comma-separated string, so search it directly - # instead of iterating over it character by character - if feature_ids and "internal" in feature_ids.lower(): - return True - if summary and "internal" in summary.lower(): - return True - if preview_summary and "internal" in preview_summary.lower(): - return True - - return False - - -def validate_helper_whitespace(original_value): - return original_value.strip() - - -def validate_helper_upper(original_value): - return original_value.upper() - - -def validate_helper_dexreq_jira(original_value): - issues = util.get_dexreq_issue_keys(original_value) - return ", ".join(issues) - - -def validate_helper_udx_jira(original_value): - issues = util.get_jira_issue_keys(original_value, "UDX") - return ", ".join(issues) - - -def validate_issue_whitespace(issue, warnings, field_id): - return validate_issue_properties(issue, warnings, field_id, [validate_helper_whitespace]) - - -def validate_issue_whitespace_upper(issue, warnings, field_id): - return validate_issue_properties(issue, warnings, field_id, [validate_helper_whitespace, validate_helper_upper]) - - -def validate_issue_whitespace_upper_dexreq_jira(issue, warnings, field_id): - return validate_issue_properties(issue, warnings, field_id, [validate_helper_whitespace, validate_helper_upper, validate_helper_dexreq_jira]) - - -def validate_issue_whitespace_upper_udx_jira(issue, warnings, field_id): - return validate_issue_properties(issue, warnings, field_id, [validate_helper_whitespace, validate_helper_upper, validate_helper_udx_jira]) - - -# Returns True if "name" contains dashes, periods, or whitespace; else returns False -def check_disallowed_separators_in_name(name): - disallowed_separators = ["-", ".", " "] - for separator in disallowed_separators: - if separator in name: - return True - return False - - -# Returns True if "name" is in camel case; else returns False -def check_camel_casing(name): - pattern = '[a-z]+[A-Z]' - if re.search(pattern, name) and "_" not in name: - return True - return False - - -def validate_issue(issue, build_type, dexreq_public_errors, dexreq_public_warnings): - errors = {} - warnings = {} - - errors.update(dexreq_public_errors) - warnings.update(dexreq_public_warnings) - - print('Checking for whitespace and other formatting issues...') - - # These apply both to preview tickets and public tickets - # (though for public tickets, they will look at/update the preview ticket) - updates = [] - updates.extend(validate_issue_whitespace(issue, warnings, config.CUSTOM_FIELD_ID_ARTIFACT_ID)) - updates.extend(validate_issue_whitespace(issue, warnings, config.CUSTOM_FIELD_ID_ARTIFACT_VERSION)) - updates.extend(validate_issue_whitespace(issue, warnings, config.CUSTOM_FIELD_ID_GROUP_ID)) - updates.extend(validate_issue_whitespace(issue, warnings, config.CUSTOM_FIELD_ID_SPEC_LOCATION_IN_ARTIFACT)) - updates.extend(validate_issue_whitespace(issue, warnings, config.CUSTOM_FIELD_ID_SPEC_FRIENDLY_NAME)) - updates.extend(validate_issue_whitespace(issue, warnings, config.CUSTOM_FIELD_ID_SERVICE_SUBDOMAIN)) - updates.extend(validate_issue_whitespace(issue, warnings, config.CUSTOM_FIELD_ID_FEATURE_IDS)) - updates.extend(validate_issue_whitespace_upper_udx_jira(issue, warnings, config.CUSTOM_FIELD_ID_UDX_TICKET)) - - if build_type == config.BUILD_TYPE_INDIVIDUAL_PUBLIC: - updates.extend(validate_issue_whitespace_upper_dexreq_jira(issue, warnings, config.CUSTOM_FIELD_ID_PREVIEW_ISSUE)) - updates.extend(validate_issue_whitespace(issue, warnings, config.CUSTOM_FIELD_ID_CHANGELOG)) - updates.extend(validate_issue_whitespace(issue, warnings, config.CUSTOM_FIELD_ID_SDK_CLI_GA_DATE))
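# The validate_issue_* wrappers above compose their converter chains
# left-to-right; e.g. the whitespace+upper chain behaves like this
# (sample value made up):
#
#   value = '  dexreq-123  '
#   for c in [validate_helper_whitespace, validate_helper_upper]:
#       value = c(value)
#   # value == 'DEXREQ-123'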
- - if updates: - # Make all updates at once - try: - error = util.update_issue_fields(util.JIRA_CLIENT(), issue, updates) - if error: - add_error(warnings, issue.key, "One or more of the fields '{}' had formatting issues, and we couldn't fix them (Error: {}).".format(", ".join(u['field_name'] for u in updates), error)) - except JIRAError as e: - add_error(warnings, issue.key, "One or more of the fields '{}' had formatting issues, and we couldn't fix them (Error: {}).".format(", ".join(u['field_name'] for u in updates), str(e))) - - # Reload after making changes - issue = util.get_dexreq_issue(issue.key, - fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS_PER_BUILD_TYPE[build_type])) - - version = getattr(issue.fields, config.CUSTOM_FIELD_ID_ARTIFACT_VERSION) - if not version: - add_error(errors, issue.key, 'You did not provide an artifact version. This field is always mandatory.') - - artifact_id = getattr(issue.fields, config.CUSTOM_FIELD_ID_ARTIFACT_ID) - if not artifact_id: - add_error(errors, issue.key, 'You did not provide an artifact id. This field is always mandatory.') - - if config.BYPASS_CHECK_FEATURE_ID_LABEL not in issue.fields.labels: - feature_ids = getattr(issue.fields, config.CUSTOM_FIELD_ID_FEATURE_IDS) - if not feature_ids: - add_error(errors, issue.key, 'You did not provide a feature id. This field is mandatory. If you have an exceptional case, or if you are a new service launching in the SDK/CLI for the first time, you can request an exception in the slack channel #oci_public_sdks.') - - if config.BYPASS_CHECK_SPEC_EXTENSION_LABEL not in issue.fields.labels: - spec_location = getattr(issue.fields, config.CUSTOM_FIELD_ID_SPEC_LOCATION_IN_ARTIFACT) - if spec_location and (not spec_location.endswith('.cond.yaml')): - add_error(errors, issue.key, 'The spec file provided is not a ".cond.yaml" file. Please provide a ".cond.yaml" file. If you have an exceptional case, you can request an exception in the slack channel #oci_public_sdks.') - - if config.BYPASS_CHECK_SERVICE_FRIENDLY_NAME_LABEL not in issue.fields.labels: - service_friendly_name = getattr(issue.fields, config.CUSTOM_FIELD_ID_SPEC_FRIENDLY_NAME) - confluence_doc_refer_msg = "Please check [here|https://confluence.oci.oraclecorp.com/pages/viewpage.action?spaceKey=DEX&title=Requesting+a+preview+SDK+CLI#RequestingapreviewSDKCLI-II)IfthisisthefirsttimeyouarerequestingapreviewSDK/CLIforthisspec] for clarity." - if service_friendly_name and check_disallowed_separators_in_name(service_friendly_name): - err_message = "The service friendly name you provided contains one or more dashes ('-'), periods ('.'), or whitespace (' ') as word separators. This is not allowed. Please use underscores ('_') to separate words. " - err_message += confluence_doc_refer_msg - add_error(errors, issue.key, err_message) - if service_friendly_name and check_camel_casing(service_friendly_name): - err_message = "The service friendly name you provided is in camelCase, which is not allowed. Please use snake_case instead, i.e., use underscores ('_') to separate words. " - err_message += confluence_doc_refer_msg - add_error(errors, issue.key, err_message) - if service_friendly_name and ("service" in service_friendly_name.lower()): - err_message = "The service friendly name you provided contains the keyword 'service'. This is not allowed.
If you have an exceptional case, you can request an exception in the slack channel #oci_public_sdks." - add_error(errors, issue.key, err_message) - - if config.BYPASS_CHECK_UDX_TICKET_LABEL not in issue.fields.labels: - udx_ticket = getattr(issue.fields, config.CUSTOM_FIELD_ID_UDX_TICKET) - if not udx_ticket: - add_error(errors, issue.key, 'You did not provide a UDX ticket. This field is always mandatory.') - else: - udx_ticket_keys = util.get_udx_issue_keys(udx_ticket) - if udx_ticket_keys: - jira_client = util.JIRA_CLIENT() - for u in udx_ticket_keys: - try: - udx_ticket_issue = jira_client.issue(u) - udx_ticket_status = str(getattr(udx_ticket_issue.fields, "status")) - if udx_ticket_status in config.UDX_TICKET_DISALLOWED_STATES: - add_error(errors, issue.key, 'The UDX ticket provided: {} is {}. It has to be an open ticket.'.format(u, udx_ticket_status)) - jira_client.create_issue_link(config.UDX_TICKET_LINK_RELATIONSHIP, udx_ticket_issue, issue) - except JIRAError as e: - if e.status_code == 404: - add_error(errors, issue.key, 'The UDX ticket provided could not be resolved. Please ensure {} is a valid JIRA issue.'.format(u)) - else: - add_error(errors, issue.key, 'The UDX ticket provided: {} could not be resolved. Error: {}.'.format(u, str(e))) - else: - add_error(errors, issue.key, 'The value provided \'{}\' is not valid for the \'UDX Ticket\' field.'.format(udx_ticket)) - - if build_type == config.BUILD_TYPE_INDIVIDUAL_PUBLIC: - sdk_cli_ga_date = getattr(issue.fields, config.CUSTOM_FIELD_ID_SDK_CLI_GA_DATE) - if not sdk_cli_ga_date: - add_error(errors, issue.key, 'You did not provide an SDK / CLI GA Date. This field is mandatory for public tickets.') - - acknowledge_responsibilities = getattr(issue.fields, config.CUSTOM_FIELD_ID_ACKNOWLEDGE_RESPONSIBILITIES) - if not acknowledge_responsibilities: - add_error(errors, issue.key, 'You did not check the box "Acknowledge Responsibilities". Please review the corresponding document and check the box.') - - changelog = getattr(issue.fields, config.CUSTOM_FIELD_ID_CHANGELOG) - if not changelog: - add_error(errors, issue.key, 'You did not provide a CHANGELOG entry. This field is mandatory for public tickets.') - -
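# The snapshot-version handling below strips the timestamp from Maven
# "timed snapshot" versions before building the libs-snapshot URL; for
# example (regex copied from the code below, sample version made up):
#
#   import re
#   v = '1.2.3-20240604.081500-7'
#   re.sub(r'-[0-9]{4}[0-1]{1}[0-9]{1}[0-3]{1}[0-9]{1}\.[0-2]{1}[0-9]{1}[0-5]{1}[0-9]{1}[0-5]{1}[0-9]{1}-[0-9]+$', '', v)
#   # -> '1.2.3', so the snapshot URL path uses '1.2.3-SNAPSHOT'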
if config.BYPASS_CHECK_INTERNAL_TICKET_LABEL not in issue.fields.labels: - if check_for_internal_ticket(issue): - add_error(errors, issue.key, 'Public ticket failed as the preview ticket linked to it is marked as "internal", either in the summary, feature ID(s), and/or labels fields of the preview ticket. Internal features should not be added to the public OCI SDKs/CLI, as they\'re not meant to be consumed by external customers. If you have an exceptional case, you can request an exception in the slack channel #oci_public_sdks.') - - version_error = None - - if version: - print('Checking for unacceptable versions...') - version_error = shared.version_utils.is_version_not_acceptable(version) - - if version_error: - add_error(errors, issue.key, version_error) - - if version and artifact_id and getattr(issue.fields, config.CUSTOM_FIELD_ID_GROUP_ID): - print('Checking fully specified artifact...') - # if we are fully specifying a spec, make sure it exists in artifactory - full_version = getattr(issue.fields, config.CUSTOM_FIELD_ID_ARTIFACT_VERSION) - ARTIFACTORY_URL_FORMAT = 'https://artifactory.oci.oraclecorp.com/libs-release/{group}/{artifact_id}/{version}/{artifact_id}-{version}.jar' - artifact_url = ARTIFACTORY_URL_FORMAT.format( - group=getattr(issue.fields, config.CUSTOM_FIELD_ID_GROUP_ID).replace('.', '/'), - artifact_id=artifact_id, - version=full_version - ) - print('artifact_url: "{}"'.format(artifact_url)) - - snapshot_artifactory_url = None - # we don't allow -SNAPSHOT versions, but we do allow timestamped snapshots, which live in a separate location in Artifactory, so we need to check there - # this regex attempts to remove the timestamp from a timed snapshot build, which we need in order to build the artifactory URL - version_without_timed_snapshot = re.sub(r'-[0-9]{4}[0-1]{1}[0-9]{1}[0-3]{1}[0-9]{1}\.[0-2]{1}[0-9]{1}[0-5]{1}[0-9]{1}[0-5]{1}[0-9]{1}-[0-9]+$', '', full_version) - print('Version: "{}"'.format(full_version)) - print('Version without timed snapshot: "{}"'.format(version_without_timed_snapshot)) - if version_without_timed_snapshot != full_version: - SNAPSHOT_ARTIFACTORY_URL_FORMAT = 'https://artifactory.oci.oraclecorp.com/libs-snapshot/{group}/{artifact_id}/{version_without_timed_snapshot}-SNAPSHOT/{artifact_id}-{version}.jar' - snapshot_artifactory_url = SNAPSHOT_ARTIFACTORY_URL_FORMAT.format( - group=getattr(issue.fields, config.CUSTOM_FIELD_ID_GROUP_ID).replace('.', '/'), - artifact_id=artifact_id, - version=full_version, - version_without_timed_snapshot=version_without_timed_snapshot - ) - print('snapshot_artifactory_url: "{}"'.format(snapshot_artifactory_url)) - - download_success = False - try: - print('attempting to download from artifact URL: {}'.format(artifact_url)) - response = requests.get(artifact_url, verify=False) - download_success = response.status_code == 200 - print('download_success: {}'.format(download_success)) - - if not download_success and snapshot_artifactory_url: - # if that didn't work, try snapshot - print('attempting to download from artifact URL: {}'.format(snapshot_artifactory_url)) - response = requests.get(snapshot_artifactory_url, verify=False) - download_success = response.status_code == 200 - print('download_success: {}'.format(download_success)) - - if not download_success: - if snapshot_artifactory_url: - add_error(errors, issue.key, - 'Failed to download specified artifact from {release_artifactory_url} or {snapshot_artifactory_url}. Please confirm that your group id and artifact id are correct.'.format( - release_artifactory_url=artifact_url, - snapshot_artifactory_url=snapshot_artifactory_url - ) - ) - else: - add_error(errors, issue.key, - 'Failed to download specified artifact from {release_artifactory_url}.
Please confirm that your group id and artifact id are correct.'.format( - release_artifactory_url=artifact_url, - snapshot_artifactory_url=snapshot_artifactory_url - ) - ) - except requests.exceptions.RequestException as e: - print('Request error while attempting to verify artifact existence. Allowing process to continue. ' + str(e)) - - # only attempt to look inside the artifact if it was downloaded successfully, and we have a relative spec path - if getattr(issue.fields, config.CUSTOM_FIELD_ID_SPEC_LOCATION_IN_ARTIFACT): - if download_success: - if not os.path.exists("temp"): - os.makedirs("temp") - - f = tempfile.NamedTemporaryFile(delete=False, prefix="{issue}-{artifact_id}".format(issue=issue.key, artifact_id=artifact_id), suffix=".jar", dir="temp") - f.write(response.content) - f.close() - print("File name: {}".format(f.name)) - - with zipfile.ZipFile(f.name, "r") as zip_ref: - # extract to temp dir - temp_dir = tempfile.mkdtemp() - zip_ref.extractall(temp_dir) - spec_location = os.path.join(temp_dir, getattr(issue.fields, config.CUSTOM_FIELD_ID_SPEC_LOCATION_IN_ARTIFACT)) - if not os.path.isfile(spec_location): - add_error(errors, issue.key, 'Spec file did not exist inside artifact. Please verify that a file exists inside your artifact at the location specified by "Spec Location in Artifact".') - -
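# For reference, the existence check above could also be done without
# unpacking the whole jar; a sketch (jar_path and spec_path are placeholder
# names, not variables from this script):
#
#   import zipfile
#   with zipfile.ZipFile(jar_path) as zf:
#       present = spec_path in zf.namelist()   # True if the spec file is in the jar
#
# Extracting, as done here, additionally lets later steps read the spec from disk.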
# This check causes the OraCache SDK generation to fail as the endpoint format is different. For more details see https://jira.oci.oraclecorp.com/browse/DEX-4083 - # if getattr(issue.fields, config.CUSTOM_FIELD_ID_SERVICE_SUBDOMAIN): - # full_domain = '{subdomain}.us-phoenix-1.oraclecloud.com'.format(subdomain=getattr(issue.fields, config.CUSTOM_FIELD_ID_SERVICE_SUBDOMAIN)) - # - # try: - # socket.gethostbyname(full_domain) - # except socket.gaierror: - # add_error(errors, issue.key, 'Failed to resolve service domain: {}. Please ensure the subdomain specified in the ticket is correct.'.format(full_domain)) - - return errors, warnings, bool(updates) - - -def convert_issue_to_script_params(issue): - custom_field_id_to_param_name = { - config.CUSTOM_FIELD_ID_ARTIFACT_ID: 'artifact_id', - config.CUSTOM_FIELD_ID_GROUP_ID: 'group_id', - config.CUSTOM_FIELD_ID_ARTIFACT_VERSION: 'version', - config.CUSTOM_FIELD_ID_SPEC_FRIENDLY_NAME: 'spec_name', - config.CUSTOM_FIELD_ID_SPEC_LOCATION_IN_ARTIFACT: 'relative_spec_path', - config.CUSTOM_FIELD_ID_SERVICE_SUBDOMAIN: 'subdomain' - } - - params = {} - - for field, param_name in custom_field_id_to_param_name.items(): - value = getattr(issue.fields, field) - if value: - params[param_name] = value - - return params - - -def invoke_add_or_update_spec(jira_client, issue, params, tool_name, spec_generation_type): - # def add_or_update_spec(artifact_id, group_id, spec_name, relative_spec_path, version, spec_generation_type, add_sub_groups): - # Example: - # { - # "artifact_id": "maestro-spec", - # "endpoint": "https://orchestration.{domain}/20170630", - # "group_id": "com.oracle.pic.orchestration", - # "relative_spec_path": "api.yaml", - # "spec_generation_type": "PREVIEW", - # "spec_name": "orchestration", - # "version": "0.0.1-SNAPSHOT" - # } - # - # python ./scripts/add_or_update_spec.py \ - # --group-id "$GROUP_ID" \ - # --artifact-id "$ARTIFACT_ID" \ - # --version "$VERSION" \ - # --spec-name "$SPEC_NAME" \ - # --relative-spec-path "$RELATIVE_SPEC_PATH" \ - # --endpoint "$ENDPOINT" - - # pass the requested spec generation type (preview or public) through to the per-tool scripts - params['spec_generation_type'] = spec_generation_type - - result = None - - if tool_name in [config.PYTHON_SDK_NAME, config.CLI_NAME]: - # This runs for both Python SDK and Python CLI - from add_or_update_scripts import python_sdk_add_or_update_spec - # Update Python SDK pom.xml - sdk_params = dict(params) - sdk_params.update({ - 'pom_location': os.path.join(config.PYTHON_SDK_REPO_RELATIVE_LOCATION, 'pom.xml'), - 'github_whitelist_location': os.path.join(config.PYTHON_SDK_REPO_RELATIVE_LOCATION, 'github.whitelist') - }) - result = python_sdk_add_or_update_spec.add_or_update_spec(**sdk_params) - # Note, we cannot return here, since CLI executes both the above block and the one below - - if tool_name == config.CLI_NAME: - from add_or_update_scripts import cli_add_or_update_spec - # Update CLI pom.xml - cli_params = dict(params) - cli_params.update({ - 'pom_location': os.path.join(config.CLI_REPO_RELATIVE_LOCATION, 'services') - }) - - # strip out '--endpoint' because it is not a valid argument for the CLI script - if 'endpoint' in cli_params: - del cli_params['endpoint'] - - if 'subdomain' in cli_params: - del cli_params['subdomain'] - - # invoke directly through python - result = cli_add_or_update_spec.add_or_update_spec(**cli_params) - - if tool_name == config.JAVA_SDK_NAME: - from add_or_update_scripts import java_sdk_add_or_update_spec - # Update Java SDK pom.xml - sdk_params = dict(params) - sdk_params.update({ - 'pom_location': os.path.join(config.JAVA_SDK_REPO_RELATIVE_LOCATION, 'pom.xml'), - 'github_whitelist_location': os.path.join(config.JAVA_SDK_REPO_RELATIVE_LOCATION, 'github.whitelist') - }) - result = java_sdk_add_or_update_spec.add_or_update_spec(**sdk_params) - - if tool_name == config.GO_SDK_NAME: - from add_or_update_scripts import go_sdk_add_or_update_spec - # Update Go SDK pom.xml - sdk_params = dict(params) - sdk_params.update({ -
'pom_location': os.path.join(config.GO_SDK_REPO_RELATIVE_LOCATION, 'pom.xml'), - 'github_whitelist_location': os.path.join(config.GO_SDK_REPO_RELATIVE_LOCATION, 'github.whitelist'), - 'makefile_location': os.path.join(config.GO_SDK_REPO_RELATIVE_LOCATION, 'Makefile') - }) - result = go_sdk_add_or_update_spec.add_or_update_spec(**sdk_params) - - if tool_name == config.RUBY_SDK_NAME: - from add_or_update_scripts.ruby_sdk_add_or_update_spec import RubySpecUpdater, RUBY_MODULE_LOCATION, \ - RUBY_POM_FILE_TEMPLATE, RUBY_SPEC_PARAMS_XML_PATH_DICT - # Update Ruby SDK pom.xml - sdk_params = dict(params) - sdk_params.update({ - 'pom_location': os.path.join(config.RUBY_SDK_REPO_RELATIVE_LOCATION, 'pom.xml'), - # Note: github_whitelist isn't used in Ruby for preview - }) - ruby_spec_updater = RubySpecUpdater(RUBY_MODULE_LOCATION, RUBY_POM_FILE_TEMPLATE, RUBY_SPEC_PARAMS_XML_PATH_DICT) - result = ruby_spec_updater.add_or_update_spec(**sdk_params) - - if tool_name == config.TEST_DATA_GEN_NAME: - from add_or_update_scripts.datagen_add_or_update_spec import TestDataGenSpecUpdater, \ - TEST_DATA_GEN_MODULE_LOCATION, TEST_DATA_GEN_POM_FILE_TEMPLATE, TEST_DATA_GEN_SPEC_PARAMS_XML_PATH_DICT - # Update sdk-client-test-data pom.xml - sdk_params = dict(params) - sdk_params.update({ - 'pom_location': os.path.join(config.TEST_DATA_GEN_REPO_RELATIVE_LOCATION, 'pom.xml'), - }) - test_data_gen_spec_updater = TestDataGenSpecUpdater(TEST_DATA_GEN_MODULE_LOCATION, - TEST_DATA_GEN_POM_FILE_TEMPLATE, - TEST_DATA_GEN_SPEC_PARAMS_XML_PATH_DICT) - result = test_data_gen_spec_updater.add_or_update_spec(**sdk_params) - - if tool_name == config.TYPESCRIPT_SDK_NAME: - from add_or_update_scripts import typescript_sdk_add_or_update_spec - # Update Typescript SDK pom.xml - sdk_params = dict(params) - sdk_params.update({ - 'pom_location': os.path.join(config.TYPESCRIPT_SDK_REPO_RELATIVE_LOCATION, 'package_version'), - 'github_whitelist_location': os.path.join(config.TYPESCRIPT_SDK_REPO_RELATIVE_LOCATION, 'github.whitelist') - }) - result = typescript_sdk_add_or_update_spec.add_or_update_spec(**sdk_params) - - if tool_name in [config.DOTNET_SDK_NAME, config.POWERSHELL_NAME]: - from add_or_update_scripts import dotnet_sdk_add_or_update_spec - # Update .NET SDK pom.xml - sdk_params = dict(params) - sdk_params.update({ - 'pom_location': os.path.join(config.DOTNET_SDK_REPO_RELATIVE_LOCATION, 'pom.xml'), - 'github_whitelist_location': os.path.join(config.DOTNET_SDK_REPO_RELATIVE_LOCATION, 'github.whitelist') - }) - result = dotnet_sdk_add_or_update_spec.add_or_update_spec(**sdk_params) - - if tool_name == config.POWERSHELL_NAME: - from add_or_update_scripts import powershell_add_or_update_spec - # Update .NET SDK pom.xml - sdk_params = dict(params) - sdk_params.update({ - 'pom_location': os.path.join(config.POWERSHELL_REPO_RELATIVE_LOCATION, 'pom.xml'), - 'github_whitelist_location': os.path.join(config.POWERSHELL_REPO_RELATIVE_LOCATION, 'github.whitelist') - }) - result = powershell_add_or_update_spec.add_or_update_spec(**sdk_params) - - if tool_name == config.LEGACY_JAVA_SDK_NAME: - from add_or_update_scripts import legacy_java_sdk_add_or_update_spec - # Update Legacy Java SDK pom.xml - sdk_params = dict(params) - sdk_params.update({ - 'pom_location': os.path.join(config.LEGACY_JAVA_SDK_REPO_RELATIVE_LOCATION, 'pom.xml'), - 'github_whitelist_location': os.path.join(config.LEGACY_JAVA_SDK_REPO_RELATIVE_LOCATION, 'github.whitelist') - }) - result = legacy_java_sdk_add_or_update_spec.add_or_update_spec(**sdk_params) - - if not result: - 
raise ValueError("Unknown tool: '{}'".format(tool_name)) - - return result - - -def warn_on_unexpected_changes(issue, params, tool_name, build_id, result, warnings): - if not result: - return - - if not result.existing: - return - - print(result) - - if "subdomain" in result.changed: - previous_subdomain = result.previous["subdomain"] - new_subdomain = params["subdomain"] - add_error(warnings, issue.key, "For the {}, the field '{}' was changed from '{}' to '{}'.".format(tool_name, RESULT_FIELD_TO_JIRA_FIELD_NAME["subdomain"], previous_subdomain, new_subdomain)) - if "relative_spec_path" in result.changed: - previous_path = result.previous["relative_spec_path"] - new_path = params["relative_spec_path"] - add_error(warnings, issue.key, "For the {}, the field '{}' was changed from '{}' to '{}'.".format(tool_name, RESULT_FIELD_TO_JIRA_FIELD_NAME["relative_spec_path"], previous_path, new_path)) - - for ignored in result.ignored: - add_error(warnings, issue.key, "For the {}, the field '{}' was ignored. It cannot be changed using self-service.".format(tool_name, RESULT_FIELD_TO_JIRA_FIELD_NAME[ignored])) - - -def convert_underscore_name_to_param(name): - return '--' + name.replace('_', '-') - - -def pretty_print(input): - print(json.dumps(input, indent=4, sort_keys=True)) - - -# Note: This uses the globally set BRANCH_TIMESTAMP; therefore, all branches created using one -# execution of this script will use the same timestamp -def generate_time_stamped_branch_name(prefix): - return '{prefix}-{timestamp}'.format(prefix=prefix, timestamp=BRANCH_TIMESTAMP) - - -def checkout_sdk_and_cli_branches(base_branch, tool_name): - for repo in config.REPOS_FOR_TOOL[tool_name]: - repo.git.checkout(base_branch) - - -# Branch suffix is "DEXREQ-673-2019-08-16-20-58-17" -def push_spec_baseline(spec_dir, build_type, issue, branch_suffix): - branch_prefix = util.get_branch_prefix_for_spec_diff(build_type) - branch_name = "{}-{}".format(branch_prefix, branch_suffix) - - repo = config.DEXREQ_REPO - git = repo.git - git.checkout(B=branch_name) - - for filename in os.listdir(spec_dir): - source = os.path.join(spec_dir, filename) - destination = os.path.join(config.DEXREQ_DIFF_REPO_RELATIVE_LOCATION, filename) - print("Copying {} -> {}".format(source, destination)) - shutil.rmtree(destination, True) - shutil.copytree(source, destination, ignore=shutil.ignore_patterns('*.lineNumberMapping')) - - git.add(A=True) - - commit_message = '{commit_prefix} [[{issue_key}]]: {issue_summary}'.format( - commit_prefix=config.SPEC_BASELINE_COMMIT_MESSAGE_PREFIX, - issue_key=issue.key, - issue_summary=issue.fields.summary - ) - - message = commit_message - if 'nothing to commit' in git.status(): - message = "{} (no change)".format(message) - print(message) - git.commit("-m", message, "--allow-empty") - if config.IS_DRY_RUN: - print('DRY-RUN: not pushing to branch {}'.format(branch_name)) - else: - git.push('-u','origin','HEAD') - - -# default is used when the attribute is missing -# noneDefault is used when the attribute exists but returns None -def getAttr(o, attribute, default=None, noneDefault=None): - Attr = getattr(o, attribute, default) - return noneDefault if Attr is None else Attr - - -# This script requires two inputs: -# - build_id: The team city build id for the build that is running this script. 
This is used to update the relevant JIRA tickets with links to the team city build -# - build_type: Value can be sneak_peek or preview (default will be preview) -# -# For preview builds, we will aggregate all open preview tickets and include them in the same build -# For sneak_peek builds, we will aggregate all open sneak peek tickets and include them in the same build -# -# TODO: It likely isn't an issue that any given sneak peek may contain extra changes from other tickets but we may update this in the future to generate individual -# builds with *only* those changes for each sneak peek ticket -# -# The script assumes it is being invoked from the root directory of the python-cli repository and that there is a sibling directory 'python-sdk' -# with the Python SDK in it -# -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Process the DEXREQ JIRA queue (preview and public).') - parser.add_argument('--build-id', - required=True, - help='The team city build id for the build that is running this script. This is used to update the relevant JIRA tickets with links to the team city build') - parser.add_argument('--build-type', - default=config.BUILD_TYPE_INDIVIDUAL_PREVIEW, - help='The build type to use, can be one of the following: {}'.format(', '.join(config.VALID_BUILD_TYPES))) - parser.add_argument('--base-branch', - default='preview', - help='The base branch to start from') - parser.add_argument('--tool', - default=config.CLI_NAME, - help='The tool for which to generate the preview. Accepted values: {}'.format(', '.join(config.TOOL_NAMES + [TOOL_ARGUMENT_ALL]))) - parser.add_argument('--dry-run', - default=False, - action='store_true', - help='Perform a dry-run') - parser.add_argument('--allow-individual-tool-generation', - default=False, - action='store_true', - help=textwrap.dedent("""\ - By default, we require ALL SDK / CLI statuses to be ready before running automation. For one-off previews this - means all statuses must be in "To Do" and for bulk previews all statuses must be in "Pending Merge". This flag allows - generating previews for all tools that are ready, and ignoring those which are not.""")) - parser.add_argument('--issue', - action='append', - help='By default, we query JIRA. This allows you to specify a DEXREQ issue to process instead: --issue DEXREQ-123') - parser.add_argument('--for-any-status', - default=False, - action='store_true', - help='Ignore status fields (don\'t require them to be in "To Do"). 
Can only be used together with --issue.') - parser.add_argument('--push-spec-baseline', - help='Push the baseline spec (after pre-processing, before generation) from the specified directory into a branch in the SDK/dexreq repo.') - parser.add_argument('--verbose', - default=False, - action='store_true', - help='Verbose logging') - - args = parser.parse_args() - - util.IS_VERBOSE = args.verbose - build_id = args.build_id - build_type = args.build_type - base_branch = args.base_branch - allow_individual_tool_generation = args.allow_individual_tool_generation - tool_name = args.tool - config.IS_DRY_RUN = args.dry_run - issues_filter = args.issue - if args.for_any_status and not issues_filter: - raise ValueError("--for-any-status can only be used with --issue") - for_any_status = args.for_any_status - - if base_branch.lower() == "preview" and build_type in config.PUBLIC_BUILD_TYPES: - raise ValueError("Used base branch '{}' with build type '{}'".format(base_branch, build_type)) - if base_branch.lower() == "master" and build_type in config.PREVIEW_BUILD_TYPES: - raise ValueError("Used base branch '{}' with build type '{}'".format(base_branch, build_type)) - - if tool_name not in config.TOOL_NAMES and not tool_name == TOOL_ARGUMENT_ALL: - raise ValueError("Tool name must be one of: {}".format(', '.join(config.TOOL_NAMES + [TOOL_ARGUMENT_ALL]))) - - if args.push_spec_baseline and (tool_name not in [TOOL_ARGUMENT_ALL, config.GO_SDK_NAME]): - raise ValueError("--push-spec-baseline can only be used with --tool ALL or --tool GoSDK") - - tools_to_run = config.TOOL_NAMES if tool_name == TOOL_ARGUMENT_ALL else [tool_name] - try: - if build_type == config.BUILD_TYPE_BULK_PENDING_MERGE_PUBLIC: - query = config.READY_FOR_PUBLIC_ANY_TOOLS_REQUESTED \ - if allow_individual_tool_generation else config.READY_FOR_PUBLIC_ALL_TOOLS - generate_func = lambda all_issues: generate_bulk_pending_merge(config.PUBLIC_SPEC_GENERATION_TYPE, # noqa:E731 - all_issues, - build_id, - base_branch, - tool_name, - allow_individual_tool_generation, - for_any_status) - processing_status = config.STATUS_PROCESSING_BULK - elif build_type == config.BUILD_TYPE_BULK_PENDING_MERGE_PREVIEW: - # in order to allow re-running bulk preview for a single tool we need to get all tickets in 'Ready for Preview' with any tool status set to 'To Do' - # inside the generation logic, we will only build for tools that are explicitly set to 'To Do' - query = config.READY_FOR_PREVIEW_ANY_TOOLS_REQUESTED \ - if allow_individual_tool_generation else config.READY_FOR_PREVIEW_ALL_TOOLS - generate_func = lambda all_issues: generate_bulk_pending_merge(config.PREVIEW_SPEC_GENERATION_TYPE, # noqa:E731 - all_issues, - build_id, - base_branch, - tool_name, - allow_individual_tool_generation, - for_any_status) - processing_status = config.STATUS_PROCESSING_BULK - elif build_type == config.BUILD_TYPE_INDIVIDUAL_PREVIEW: - query = config.TODO_PREVIEW_ALL_TOOLS - generate_func = lambda all_issues: generate_individual_to_do_requests(all_issues, # noqa:E731 - build_id, - base_branch, - tool_name, - allow_individual_tool_generation, - build_type, - for_any_status) - processing_status = config.STATUS_PROCESSING - elif build_type == config.BUILD_TYPE_INDIVIDUAL_PUBLIC: - query = config.TODO_PUBLIC_ALL_TOOLS - generate_func = lambda all_issues: generate_individual_to_do_requests(all_issues, # noqa:E731 - build_id, - base_branch, - tool_name, - allow_individual_tool_generation, - build_type, - for_any_status) - processing_status = config.STATUS_PROCESSING - else: - raise 
ValueError('Build type must be one of: {}'.format(', '.join(config.VALID_BUILD_TYPES))) - - query = limit_query_to_issue_keys(query, issues_filter, for_any_status) - print("query = {}".format(query)) - - dexreq_public_errors = {} - dexreq_public_warnings = {} - - # TODO: actual loading of public ticket data from preview ticket not yet enabled - all_issues = util.search_dexreq_issues(query, fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS_PER_BUILD_TYPE[build_type]), - errors=dexreq_public_errors, warnings=dexreq_public_warnings) - - # sort issues so that latest version is applied last if there are multiple tickets for the same spec - # if no version is set, default to 0 (it will not succeed without a version so it doesn't matter when it happens) - # this is only necessary for bulk preview, but doesn't hurt to do for other build types - all_issues.sort(key=lambda x: getAttr(x.fields, config.CUSTOM_FIELD_ID_ARTIFACT_VERSION, noneDefault="0")) - - print('Found the following issues to generate {} builds for:'.format(build_type)) - issues_to_be_processed = [] - for issue in all_issues: - if config.should_ignore_issue(issue.key): - print('{} - {} - being ignored per env var {}'.format(issue.key, issue.fields.summary, config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME)) - continue - - # set all issues to processing now, if we do this as we generate for each tool it could overwrite meaningful statuses - # for previous tools like 'DEX Support Required' or 'Service Team Failure Investigation' - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, processing_status) - - # Reload after the transition - issue = util.get_dexreq_issue(issue.key, - fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS_PER_BUILD_TYPE[build_type])) - - print('{} - {}'.format(issue.key, issue.fields.summary)) - - validation_errors, validation_warnings, changed = validate_issue(issue, build_type, dexreq_public_errors, dexreq_public_warnings) - if not report_errors_and_warnings(issue, validation_errors, validation_warnings, tools_to_run): - if changed: - # Reload after making changes (but only reload if the issue is to be processed; if not, it's ok if the issue is out of date; we're never referring to it again) - issue = util.get_dexreq_issue(issue.key, - fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS_PER_BUILD_TYPE[build_type])) - - issues_to_be_processed.append(issue) - - generation_warnings = {} # key: issue_key, value: list - generation_errors = {} # key: issue_key, value: list - for tool_name in tools_to_run: - checkout_sdk_and_cli_branches(base_branch, tool_name) - per_sdk_generation_warnings, per_sdk_generation_errors, branches = generate_func(issues_to_be_processed) - generation_warnings.update(per_sdk_generation_warnings) - generation_errors.update(per_sdk_generation_errors) - - is_individual_build = build_type in [config.BUILD_TYPE_INDIVIDUAL_PREVIEW, config.BUILD_TYPE_INDIVIDUAL_PUBLIC] - is_go_build = (tool_name == config.GO_SDK_NAME) - if args.push_spec_baseline and is_individual_build and is_go_build: - for issue in issues_to_be_processed: - print("Branches: {}".format(branches)) - if issue.key in branches: - m = re.search("^.*{}-(.*)$".format(config.GO_SDK_NAME), branches[issue.key]) - if m: - branch_suffix = m.group(1) - push_spec_baseline(args.push_spec_baseline, build_type, issue, branch_suffix) - else: - print("Could not push spec baseline, there was no {} branch for {}, probably (probably 
'Skipping generating tool: GoSDK' above)".format(config.GO_SDK_NAME, issue.key)) - - # these are warnings that are SDK specific - # examples: "For the GoSDK, the field 'Spec Artifact Group Id' was ignored. It cannot be changed using self-service." - if generation_warnings: - for issue_key in generation_warnings: - # comment on issue - util.add_jira_comment( - issue_key, - """Please check that the following settings indeed reflect what you wanted to do: - - {warnings} - - The full build log can be found {build_log_link}. - - For TeamCity access see the [self-service FAQ|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=132774928#SDK/CLISelfServiceFrequentlyAskedQuestions-HowdoIgetaccesstoTeamCitylogsandartifacts?]. - - There is no need to re-start this ticket, unless you determine that some settings were incorrect.""".format( - warnings='\n'.join(['*- {}*'.format(warning) for warning in generation_warnings[issue_key]]), - build_log_link=build_log_link(build_id) - ), - comment_type=config.COMMENT_TYPE_INFO - ) - - # these are errors that are SDK specific - # examples: Missing required parameter for given SDK add_or_update script, unexpected exception thrown in add_or_update script - if generation_errors: - for issue_key in generation_errors: - for generation_error in generation_errors[issue_key]: - util.add_jira_comment( - issue_key, - generation_error, - comment_type=config.COMMENT_TYPE_ERROR - ) - - except Exception as e: # noqa:F841 - # TODO: report error to JIRA tasks - raise diff --git a/scripts/auto_gen_utils/2_pre_generation_set_up.py b/scripts/auto_gen_utils/2_pre_generation_set_up.py deleted file mode 100644 index cc1d347c7c..0000000000 --- a/scripts/auto_gen_utils/2_pre_generation_set_up.py +++ /dev/null @@ -1,63 +0,0 @@ -# - this script is run as the first build step of a job triggered on commits to SDK/CLI branches -# of the form auto-preview-* or auto-public-* -# - process_preview_jira_queue.py will make a commit to an auto-preview-* or auto-public-* branch in the -# SDK (for Python CLI, in Python SDK AND the CLI) -# - this script will check out the equivalent branch - -import argparse -import util -import config - -BUILD_TYPE_DESCRIPTION = { - config.BUILD_TYPE_INDIVIDUAL_PREVIEW: "preview SDK", - config.BUILD_TYPE_INDIVIDUAL_PUBLIC: "public SDK", - config.BUILD_TYPE_BULK_PENDING_MERGE_PREVIEW: "bulk build of the preview SDK", - config.BUILD_TYPE_BULK_PENDING_MERGE_PUBLIC: "bulk build of the public SDK" -} - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Post "beginning" status update to DEXREQ tickets.') - parser.add_argument('--build-id', - required=True, - help='The team city build id for the build that is running this script. This is used to update the relevant JIRA tickets with links to the team city build') - parser.add_argument('--tool', - default=config.CLI_NAME, - help='The tool for which to generate the build. 
Accepted values: {}'.format(', '.join(config.TOOL_NAMES)))
-    parser.add_argument('--build-type',
-                        required=False,
-                        help='The build type to use, can be one of the following: {}'.format(', '.join(config.VALID_BUILD_TYPES)))
-    parser.add_argument('--dry-run',
-                        default=False,
-                        action='store_true',
-                        help='Perform a dry-run')
-    parser.add_argument('--issue',
-                        action='append',
-                        help='Only DEXREQ issues in this filter will be affected: --issue DEXREQ-123')
-
-    args = parser.parse_args()
-    build_id = args.build_id
-    tool_name = args.tool
-    build_type = args.build_type
-    issues_whitelist = args.issue
-    if build_type:
-        build_description = " for the {}".format(BUILD_TYPE_DESCRIPTION[build_type])
-    else:
-        build_description = ""
-    config.IS_DRY_RUN = args.dry_run
-
-    # get current branch of the first repo
-    current_branch = [branch.strip()[2:] for branch in config.REPOS_FOR_TOOL[tool_name][0].git.branch().split('\n') if branch.startswith('* ')][0]
-
-    last_commit_message = util.get_last_commit_message(tool_name)
-
-    # will be of the form 'Updating pom.xml for DEXREQ-123: Preview for RQS'
-    print('Found last commit: {}'.format(last_commit_message))
-
-    issue_keys = util.parse_issue_keys_from_commit_message(last_commit_message)
-    if issue_keys:
-        for issue_key in issue_keys:
-            if issues_whitelist and issue_key not in issues_whitelist:
-                print('Skipping processing ticket {} because it was not included in --issue filter'.format(issue_key))
-                continue
-    else:
-        print('Did not find any issue keys in commit message (text between double brackets: "[[issue-key]]"). Not updating any JIRA issues.')
diff --git a/scripts/auto_gen_utils/3_report_generation_status.py b/scripts/auto_gen_utils/3_report_generation_status.py
deleted file mode 100644
index cbbfcc3ffc..0000000000
--- a/scripts/auto_gen_utils/3_report_generation_status.py
+++ /dev/null
@@ -1,342 +0,0 @@
-# - this script is run as the last build step of a job triggered on commits to SDK/CLI branches
-#   of the form auto-preview-* or auto-public-*
-# - at this point the pom.xml has been updated, and the new code has been generated
-# - this script reports back to JIRA whether or not the code generation + build was successful and commits / pushes the generated code
-
-import argparse
-import util
-import config
-import os
-from os.path import exists
-import shutil
-import re
-import shared.bitbucket_utils
-from shared.buildsvc_tc_compatibility import build_log_link
-
-try:
-    from urllib import quote
-except ImportError:
-    from urllib.parse import quote
-
-
-IGNORED_TESTS_FILE_PATH = "../python-cli/changed_services.txt"
-SPEC_PR_DESCRIPTION = """
-Please review these spec changes, and if they represent (1) your entire change, and (2) nothing else, approve this pull request.{merge_ticket_action}
-
-If something is wrong with these spec changes, please set this pull request to "Needs Work".{decline_ticket_action}
-
-Note that merging or declining this spec change does not affect your service's spec in any way. It is used merely in the automation of the SDK/CLI self-service pipeline.
-""" - - -# Branch suffix is "DEXREQ-673-2019-08-16-20-58-17" -def push_spec_diff(spec_dir, build_type, issue_key, branch_suffix, description, diff_branch_suffix="-diff", is_pr_enabled=True, - commit_issue_summary_template="Updated spec for {issue_key}{description_text}", - pr_title_template="Spec changes for {issue_key}{description_text}", - pr_description=""): - branch_prefix = util.get_branch_prefix_for_spec_diff(build_type) - baseline_branch_name = "{}-{}".format(branch_prefix, branch_suffix) - diff_branch_name = baseline_branch_name + diff_branch_suffix - - repo = config.DEXREQ_REPO - git = repo.git - git.fetch("origin") - print("Switching to dexreq repo branch {}".format(baseline_branch_name)) - git.checkout(baseline_branch_name) - print("Creating new dexreq repo branch {}".format(diff_branch_name)) - git.checkout(B=diff_branch_name) - - for filename in os.listdir(spec_dir): - source = os.path.join(spec_dir, filename) - destination = os.path.join(config.DEXREQ_DIFF_REPO_RELATIVE_LOCATION, filename) - print("Copying {} -> {}".format(source, destination)) - shutil.rmtree(destination, True) - shutil.copytree(source, destination, ignore=shutil.ignore_patterns('*.lineNumberMapping')) - - git.add(A=True) - - commit_message = '{commit_prefix} [[{issue_key}]]: {issue_summary}'.format( - commit_prefix=config.SPEC_BASELINE_COMMIT_MESSAGE_PREFIX, - issue_key=issue_key, - issue_summary=commit_issue_summary_template.format( - issue_key=issue_key, - description_text="" if not description else (": " + description)) - ) - - message = commit_message - if 'nothing to commit' in git.status(): - message = "{} (no change)".format(message) - print(message) - git.commit("-m", message, "--allow-empty") - if config.IS_DRY_RUN: - print('DRY-RUN: not pushing to branch {}'.format(diff_branch_name)) - else: - git.push('-u','origin','HEAD') - - pr_url = None - title = pr_title_template.format( - issue_key=issue_key, - description_text="" if not description else (": " + description) - ) - if is_pr_enabled: - - if build_type == config.BUILD_TYPE_INDIVIDUAL_PREVIEW: - merge_ticket_action = " Then transition your DEXREQ ticket to 'Ready for Preview'." - decline_ticket_action = " Please revise your spec and generate a new spec artifact. Then update the spec version in your DEXREQ ticket and set the ticket status back to 'Processing Requested'." 
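The branch layout shared by push_spec_baseline and push_spec_diff is easiest to see with concrete values. The sketch below is illustrative only: the suffix is taken from the "Branch suffix" comment above, and "spec-diff-preview" is a hypothetical stand-in for whatever util.get_branch_prefix_for_spec_diff(build_type) actually returns.

# Illustrative sketch, not part of the deleted script.
branch_prefix = "spec-diff-preview"  # hypothetical; the real value comes from util.get_branch_prefix_for_spec_diff(build_type)
branch_suffix = "DEXREQ-673-2019-08-16-20-58-17"

baseline_branch_name = "{}-{}".format(branch_prefix, branch_suffix)
diff_branch_name = baseline_branch_name + "-diff"

# baseline branch holds the spec before generation, the -diff branch holds it after,
# so a PR from diff -> baseline shows exactly what generation changed.
print(baseline_branch_name)  # spec-diff-preview-DEXREQ-673-2019-08-16-20-58-17
print(diff_branch_name)      # spec-diff-preview-DEXREQ-673-2019-08-16-20-58-17-diff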
-        else:
-            merge_ticket_action = ""
-            decline_ticket_action = ""
-
-        pr_url = util.create_pull_request(config.DEXREQ_REPO_NAME, diff_branch_name, title,
-                                          SPEC_PR_DESCRIPTION.format(merge_ticket_action=merge_ticket_action,
-                                                                     decline_ticket_action=decline_ticket_action),
-                                          "", config.DEXREQ_REPO_NAME, baseline_branch_name)
-
-        print("Pull request created: {}".format(pr_url))
-
-        pr_id = None
-        if pr_url:
-            m = re.search("^.*bitbucket.*/projects/([^/]*)/repos/([^/]*)/pull-requests/([0-9]*).*$", pr_url)
-            if m:
-                pr_id = m.group(3)
-
-        issue = util.get_dexreq_issue(issue_key, fields=['created'])
-        created_date = getattr(issue.fields, 'created')
-        print("To get all spec diff PRs for {}, listing all PRs newer than {}".format(issue_key, created_date))
-
-        # The spec diff PR can't be older than the DEXREQ ticket, so only search that far
-
-        prs = shared.bitbucket_utils.get_all_pullrequest_with_string_after('SDK', config.DEXREQ_REPO_NAME, issue.key, created_date)
-
-        for pr in prs:
-            print("Spec change PR {} is {}".format(pr['id'], pr['state']))
-
-            if str(pr['id']) == str(pr_id):
-                print("\tThis is the PR that was just opened")
-                continue
-
-            if pr['state'] == config.PULL_REQUEST_STATUS_OPEN:
-                # Decline other open PRs
-                print("\tDeclining spec change PR {}".format(pr['id']))
-                shared.bitbucket_utils.decline_pr("SDK", config.DEXREQ_REPO_NAME, pr['id'], pr['version'])
-                shared.bitbucket_utils.make_general_comment("SDK", config.DEXREQ_REPO_NAME, pr['id'], "This PR has been declined in favor of a more recent spec diff PR: {}".format(pr_url))
-    else:
-        # Not technically a PR (yet)
-        pr_url = 'https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/{repo}/compare?targetBranch=refs%2Fheads%2F{target_branch}&sourceBranch=refs%2Fheads%2F{diff_branch}&title={title}&description={description}'.format(
-            repo=config.DEXREQ_REPO_NAME,
-            diff_branch=diff_branch_name,
-            title=quote(title),
-            description=quote(description),
-            target_branch=baseline_branch_name)
-
-        print('Link with spec diff: {}'.format(pr_url))
-
-    return pr_url
-
-
-def get_pipeline(build_type):
-    if build_type == config.BUILD_TYPE_INDIVIDUAL_PREVIEW or build_type == config.BUILD_TYPE_BULK_PENDING_MERGE_PREVIEW:
-        return config.PREVIEW_ISSUE_TYPE_NAME
-    elif build_type == config.BUILD_TYPE_BULK_PENDING_MERGE_PUBLIC or build_type == config.BUILD_TYPE_INDIVIDUAL_PUBLIC:
-        return config.PUBLIC_ISSUE_TYPE_NAME
-    else:
-        raise ValueError("Unknown build type: {}".format(build_type))
-
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description='Post generation status updates to DEXREQ tickets.')
-    parser.add_argument('--build-id',
-                        required=True,
-                        help='The team city build id for the build that is running this script. This is used to update the relevant JIRA tickets with links to the team city build')
-    parser.add_argument('--tool',
-                        default=config.CLI_NAME,
-                        help='The tool for which to generate the build. 
Accepted values: {}'.format(', '.join(config.TOOL_NAMES))) - parser.add_argument('--dry-run', - default=False, - action='store_true', - help='Perform a dry-run') - parser.add_argument('--push-spec-diff', - help='Push the changed spec (after generation and pre-processing) from the specified directory into a branch in the SDK/dexreq repo.') - parser.add_argument('--push-spec-diff-unprotected-by-conditional-groups', - help='Push the changed spec WITHOUT ADDING CONDITIONAL GROUPS (after pre-processing) from the specified directory into a branch in the SDK/dexreq repo.') - parser.add_argument('--build-type', - default=config.BUILD_TYPE_INDIVIDUAL_PREVIEW, - help='The build type to use, can be one of the following: {}'.format(', '.join(config.VALID_BUILD_TYPES))) - parser.add_argument('--optional-file-for-dexreq-ticket', - default=None, - help='An optional text file to be included in the DEXREQ ticket.') - - args = parser.parse_args() - - shared.bitbucket_utils.setup_bitbucket(args) - - build_id = args.build_id - tool_name = args.tool - build_type = args.build_type - config.IS_DRY_RUN = args.dry_run - shared.bitbucket_utils.dry_run = args.dry_run - - # this script runs after both generation and build have completed - # it will run even 'If previous steps have failed' to ensure we can report failures - generation_pass, build_pass = util.were_steps_successful(tool_name) - - # report to JIRA tasks whether or not jobs succeeded - # if either failed, just give link to build log and say that generation failed for some reason - # this will be hard for external users to investigate so we want to cover easy errors earlier in the process with explicit errors: - # - spec artifact / group / version doesnt exist in artifactory - # - invalid param set - # - relative spec path doesn't point at a spec (yaml file) - - failure_step = None - if not generation_pass: - failure_step = 'Generation' - elif not build_pass: - failure_step = 'Build' - - # get current branch of the first repo - current_branch = [branch.strip()[2:] for branch in config.REPOS_FOR_TOOL[tool_name][-1].git.branch().split('\n') if branch.startswith('* ')][0] - - # TODO: parse last commit message to related DEXREQ issue - last_commit_message = config.REPOS_FOR_TOOL[tool_name][-1].git.log(n=1, format='%s%n%b') - - # will be of the form 'Updating pom.xml for DEXREQ-123: Preview for RQS - print('Found last commit: {}'.format(last_commit_message)) - - issue_keys = util.parse_issue_keys_from_commit_message(last_commit_message) - print("Issue keys found: '{}'".format(", ".join(issue_keys))) - - descriptions = {} - for issue_key in issue_keys: - m = re.search(r"Updating pom.xml for \[\[.*{}.*\]\]:(.*)".format(issue_key), last_commit_message) - if m: - descriptions[issue_key] = m.group(1).strip() - - if failure_step: - if issue_keys: - for issue_key in issue_keys: - if config.IS_DRY_RUN: - print("DRY-RUN: not transitioning {} {} status to {}".format(issue_key, tool_name, config.CUSTOM_STATUS_FAILURE)) - else: - additional_comment = "" - if args.optional_file_for_dexreq_ticket and exists(args.optional_file_for_dexreq_ticket): - # additional comment - with open(args.optional_file_for_dexreq_ticket) as f: - optional_file_contents_for_dexreq_ticket = f.read() - additional_comment = "\n\nSpec validator output:\n\n{code}" + optional_file_contents_for_dexreq_ticket + "\n{code}" - - util.add_jira_comment( - issue_key, - config.STEP_FAILED_MESSAGE_TEMPLATE.format( - failure_step=failure_step, - tool_name=tool_name, - 
repos=util.join(config.REPO_NAMES_FOR_TOOL[tool_name]), - build_log_link=build_log_link(build_id, "build log"), - dex_support_required_status=config.STATUS_DEX_SUPPORT_REQUIRED, - additional_comment=additional_comment - ), - comment_type=config.COMMENT_TYPE_ERROR - ) - - # if it makes sense we can assign to different states based on build failing - issue = util.get_dexreq_issue(issue_key) - util.transition_issue_per_tool_status(util.JIRA_CLIENT(), issue, config.CUSTOM_STATUS_FAILURE, tool_name) - # Only transition the overall status to SERVICE TEAM FAILURE INVESTIGATION for non bulk gen build types - if build_type not in config.BULK_BUILD_TYPES and util.is_tool_jira_reportable(tool_name): - # if an issue is already in 'DEX Support Required' based on failure for another tool, we do not want to overwrite that - util.transition_issue_overall_status_if_not_in_status(util.JIRA_CLIENT(), issue, desired_status=config.STATUS_SERVICE_TEAM_FAILURE_INVESTIGATION, blacklisted_status=config.STATUS_DEX_SUPPORT_REQUIRED) - else: - print('Did not find any issue keys in commit message (text between double brackets: "[[issue-key]]"). Not updating any JIRA issues.') - - print('{failure_step} for tool: {tool_name} (repos: {repos}) failed. See previous build step(s) for more information'.format( - failure_step=failure_step, - tool_name=tool_name, - repos=", ".join(config.REPO_NAMES_FOR_TOOL[tool_name])) - ) - else: - print(config.GENERATION_AND_BUILD_SUCCESSFUL_TEMPLATE.format( - tool_name=tool_name, - repos=", ".join(config.REPO_NAMES_FOR_TOOL[tool_name])) - ) - - if args.push_spec_diff and issue_keys: - for issue_key in issue_keys: - m = re.search("^.*{}-(.*)$".format(tool_name), current_branch) - if m: - branch_suffix = m.group(1) - push_spec_diff(args.push_spec_diff, build_type, issue_key, branch_suffix, descriptions[issue_key] or "") - else: - print("Did not find '{}' and a timestamp in the current branch '{}', not pushing spec diff".format(tool_name, current_branch)) - - if args.push_spec_diff_unprotected_by_conditional_groups and issue_keys: - for issue_key in issue_keys: - issue = util.get_dexreq_issue(issue_key) - is_bypassed = config.BYPASS_CHECK_CHANGES_NOT_BEHIND_CONDITIONAL_GROUPS in issue.fields.labels - m = re.search("^.*{}-(.*)$".format(tool_name), current_branch) - if m: - branch_suffix = m.group(1) - link = push_spec_diff(args.push_spec_diff_unprotected_by_conditional_groups, build_type, issue_key, branch_suffix, descriptions[issue_key] or "", - diff_branch_suffix="-unprotected", is_pr_enabled=False, - commit_issue_summary_template="{bypassed_text}Spec changes not protected by conditional groups for {issue_key}{description_text}".format( - issue_key=issue_key, bypassed_text="(Bypassed) " if is_bypassed else "", description_text=descriptions[issue_key]), - pr_title_template="{bypassed_text}Spec changes not protected by conditional groups for {issue_key}{description_text}".format( - issue_key=issue_key, bypassed_text="(Bypassed) " if is_bypassed else "", description_text=descriptions[issue_key]), - pr_description="These changes may be unwanted 'passengers' and cannot be filtered out, except by changing the spec itself. 
All spec changes should be protected by conditional groups.{bypassed_text}".format( - bypassed_text="\n\nThese changes were accepted using a bypass label in the {issue_key} ticket.".format(issue_key=issue_key))) - - if link: - if is_bypassed: - util.add_jira_comment( - issue_key, - config.BYPASSED_UNPROTECTED_CHANGES_MESSAGE_TEMPLATE.format( - pipeline=get_pipeline(build_type), - link=link - ), - comment_type=config.COMMENT_TYPE_INFO - ) - else: - util.add_jira_comment( - issue_key, - config.UNPROTECTED_CHANGES_MESSAGE_TEMPLATE.format( - pipeline=get_pipeline(build_type), - link=link - ), - comment_type=config.COMMENT_TYPE_INFO - ) - print("Adding '{}' label to: {}".format(config.CHANGES_NOT_BEHIND_CONDITIONAL_GROUPS_LABEL, issue.key)) - issue.add_field_value('labels', config.CHANGES_NOT_BEHIND_CONDITIONAL_GROUPS_LABEL) - else: - print("Did not find '{}' and a timestamp in the current branch '{}', not pushing spec diff for changes not protected by conditional groups".format(tool_name, current_branch)) - # This is important in the preview generation when there is a conflict between generated and extended code in Python - # CLI. In this case we comment the extended code and ignore the affected service tests. these ignored tests are - # saved in a temp file changed_services.txt in the Python CLI directory, We check here if the file exists and - # had data in it, if yes then we add the CLI-ManualChangesRequired label to the ticket to exclude it from the - # bulk preview. - try: - print('Checking if CLI-ManualChangesRequired label need to be added to the ticket.') - print('Looking for the file ' + IGNORED_TESTS_FILE_PATH) - f = open(IGNORED_TESTS_FILE_PATH, "r") - affected_services = f.read() - print('File found and the affected services are: ') - print(affected_services) - if not affected_services or not len(affected_services): - print('There is no services affected by a conflict between Generated and Extended code in Python CLI.') - elif issue_keys: - for issue_key in issue_keys: - if config.IS_DRY_RUN: - print("DRY-RUN: adding label {} to {}".format(config.CLI_REQUIRED_MANUAL_CHANGES_LABEL, issue_key)) - else: - issue = util.get_dexreq_issue(issue_key) - if config.CLI_REQUIRED_MANUAL_CHANGES_LABEL not in issue.fields.labels: - print("Adding CLI-ManualChangesRequired label to: " + issue.key) - issue.add_field_value('labels', config.CLI_REQUIRED_MANUAL_CHANGES_LABEL) - issue.update() - print("Added CLI-ManualChangesRequired label to: " + issue.key) - - # We delete the temp file since no need for it now - print("Removing the temp file: " + IGNORED_TESTS_FILE_PATH) - # We delete the temp file since no need for it now - os.remove(IGNORED_TESTS_FILE_PATH) - else: - print('Did not find any issue keys in commit message (text between double brackets: "[[issue-key]]"). 
Not able to add CLI-ManualChangesRequired label to the JIRA issue.')
-    except:  # noqa: ignore=E722
-        print('Could not find the file ' + IGNORED_TESTS_FILE_PATH)
diff --git a/scripts/auto_gen_utils/4_on_generation_complete.py b/scripts/auto_gen_utils/4_on_generation_complete.py
deleted file mode 100644
index 0726d57072..0000000000
--- a/scripts/auto_gen_utils/4_on_generation_complete.py
+++ /dev/null
@@ -1,444 +0,0 @@
-# - this script is run as the last build step of a job triggered on commits to SDK/CLI branches
-#   of the form auto-preview-* or auto-public-*
-# - at this point the pom.xml has been updated, and the new code has been generated
-# - this script reports back to JIRA whether or not the code generation was successful and commits / pushes the generated code
-
-import argparse
-import sys
-import util
-import config
-import re
-import shared.bitbucket_utils
-from datetime import datetime
-from shared.buildsvc_tc_compatibility import build_log_link, build_artifacts_link
-
-try:
-    from urllib import quote
-except ImportError:
-    from urllib.parse import quote
-
-PREVIEW_TC_LINK = "[Preview Auto-Generation](https://teamcity.oci.oraclecorp.com/project/Sdk_SelfService_Preview_AutoPreviewBuilds)"
-PUBLIC_TC_LINK = "[Public Release Auto-Generation](https://teamcity.oci.oraclecorp.com/project/Sdk_SelfService_PublicV2_AutoPublicBuilds)"
-
-PR_DESCRIPTION_TEMPLATE = """This {build_description} was generated by the {tc_link} jobs in TeamCity.
-
-For more information, see the {build_log_link}.
-
-This {build_description} includes changes for the following issues: {issue_keys}"""
-
-TRANSITIONED_TO_SERVICE_TEAM_WORK_REQUIRED_TEMPLATE = """Setting this ticket to '{service_team_work_required}' automatically, since the SDK status fields for all requested tools have been set to '{success}' or '{done}'.
-{bypassed_tools_message}
-Service team, please:
-
-1. If you had manual changes to the CLI code, cherry-pick the CLI recordings from [preview|https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/python-cli/browse?at=refs%2Fheads%2Fpreview] into this branch: [{generated_branch}|https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/python-cli/browse?at=refs%2Fheads%2F{generated_branch}].
-2. Make your feature API publicly available to all customers (un-whitelisted) in Prod, then flip the 'Feature API is publicly available & un-whitelisted in Prod' JIRA ticket field value to 'Yes'."""
-
-
-TRANSITIONED_TO_SERVICE_TEAM_REVIEW_REQUIRED_TEMPLATE = """Setting this ticket to '{service_team_review_required}' automatically, since the SDK status fields for all requested tools have been set to '{success}' or '{done}'.
-{bypassed_tools_message}
-{review_text}"""
-
-
-def get_preview_review_text(issue_key, generated_branch, tool_name):
-    text = "Service team, please review the resulting generated source diffs and transition the issue to 'Ready for Preview' if appropriate."
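The suffix extraction that follows depends on the generated branch name embedding the tool name; a minimal, self-contained sketch of that step, using assumed example values:

import re

tool_name = "GoSDK"  # assumed example
generated_branch = "generated-auto-preview-GoSDK-DEXREQ-673-2019-08-16-20-58-17"  # assumed example

m = re.search("^.*{}-(.*)$".format(tool_name), generated_branch)
if m:
    # Everything after "<tool_name>-" is the ticket-plus-timestamp suffix
    # shared with the dexreq spec-diff branches.
    print(m.group(1))  # DEXREQ-673-2019-08-16-20-58-17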
- - m = re.search("^.*{}-(.*)$".format(tool_name), generated_branch) - if not m: - print("Did not find '{}' and a timestamp in the branch: '{}'".format(tool_name, generated_branch)) - return text - - branch_suffix = m.group(1) - - print("Looking for branch with suffix: {}".format(branch_suffix)) - - pr = shared.bitbucket_utils.get_newest_pullrequest_matching_branch_suffix("SDK", config.DEXREQ_REPO_NAME, branch_suffix) - if pr: - hrefs = util.deep_get(pr, 'links.self') - if hrefs: - url = util.deep_get(hrefs[0], 'href') - text = "Service team, please review the spec diff in this pull request, and if it contains (1) your entire change, and (2) nothing else, approve the pull request and transition the issue to 'Ready for Preview':\n\n" + url - - return text - - -def check_all_tools_successful(issue_key, build_type, repo_name, generated_branch, running_tool_name): - issue = util.get_dexreq_issue(issue_key) - - bypassed_tools = [] - all_successful_or_done = True - # k -> Tool name (e.g., RubySDK), v -> Jira status field identifier - for tool_name, jira_field_id in util.get_jira_custom_field_ids_for_tool().items(): - status = getattr(issue.fields, jira_field_id) - if config.BYPASS_CHECK_GENERATION_PREFIX + tool_name in issue.fields.labels: - bypassed_tools.append(tool_name) - continue - - if not str(status) == config.CUSTOM_STATUS_SUCCESS and not str(status) == config.CUSTOM_STATUS_DONE: - all_successful_or_done = False - break - - bypassed_tools_message = '' - if bypassed_tools: - bypassed_tools_message = "\nThe following requested tools are ignored: {}\n".format(''.join(bypassed_tools)) - - print('All tools successful or done?: {}'.format(str(all_successful_or_done))) - if all_successful_or_done: - if build_type == config.BUILD_TYPE_INDIVIDUAL_PUBLIC: - cli_branch = get_cli_branch_text(generated_branch) - # All tool creation successful, in public builds, we set the overall ticket status to "Service Team Work Required" - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_SERVICE_TEAM_WORK_REQUIRED) - util.add_jira_comment( - issue, - TRANSITIONED_TO_SERVICE_TEAM_WORK_REQUIRED_TEMPLATE.format( - service_team_work_required=config.STATUS_SERVICE_TEAM_WORK_REQUIRED, - success=config.CUSTOM_STATUS_SUCCESS, - done=config.STATUS_DONE, - bypassed_tools_message=bypassed_tools_message, - generated_branch=cli_branch, - repo=repo_name - ) - ) - elif build_type == config.BUILD_TYPE_INDIVIDUAL_PREVIEW: - # All tool creation successful, in preview builds, we set the overall ticket status to "Service Team Review Required" - - review_text = get_preview_review_text(issue_key, generated_branch, running_tool_name) - - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_SERVICE_TEAM_REVIEW_REQUIRED) - util.add_jira_comment( - issue, - TRANSITIONED_TO_SERVICE_TEAM_REVIEW_REQUIRED_TEMPLATE.format( - service_team_review_required=config.STATUS_SERVICE_TEAM_REVIEW_REQUIRED, - success=config.CUSTOM_STATUS_SUCCESS, - done=config.CUSTOM_STATUS_DONE, - bypassed_tools_message=bypassed_tools_message, - review_text=review_text - ) - ) - elif build_type in config.BULK_BUILD_TYPES: - # All tool creation successful, in bulk builds, we set the overall ticket status to "DEX Bulk Review" - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_DEX_BULK_REVIEW) - - -def get_cli_branch_text(gen_branch): - for tool in config.TOOL_NAMES: - if tool in gen_branch: - return gen_branch.replace(tool, config.CLI_NAME) - return gen_branch - - -def get_message(repo, 
commit_message, build_pass): - message = commit_message.strip() - if 'nothing to commit' in repo.git.status(): - message = "{} (no change in generated code)".format(message) - - if not build_pass: - message = "FAILED: {}\n\nNote: This branch failed to build. It cannot be merged without manual changes. If necessary, you can use this branch as a starting point to fix the build (e.g. if you made a breaking change in preview and you now have to change tests or samples).".format(message) - - return message - - -def get_title(name, issue_keys, build_description, build_pass, build_type): - if build_type in config.BULK_BUILD_TYPES: - time_stamp = datetime.now() - title = 'Auto Generated {build_description} for {repo_name} {date_time}'.format( - repo_name=name, - date_time=time_stamp.strftime("%c"), - build_description=build_description - ) - else: - title = 'Auto Generated {build_description} for {repo_name} {issue_keys}'.format( - repo_name=name, - issue_keys=', '.join(issue_keys), - build_description=build_description - ) - - if not build_pass: - title = "FAILED: {}".format(title) - - return title - - -def get_jira_message(tool_name, links, build_description): - links_text = "\n".join(links) - branch_text = "this branch" if len(links) == 1 else "these branches" - - template = config.BUILD_PASS_JIRA_MESSAGE_TEMPLATE if build_pass else config.BUILD_FAIL_JIRA_MESSAGE_TEMPLATE - - message = template.format( - tool_name=tool_name, - repos=util.join(config.REPO_NAMES_FOR_TOOL[tool_name]), - build_log_link=build_log_link(build_id, "build log"), - build_artifacts_link=build_artifacts_link(build_id), - links=links_text, - build_description=build_description, - branch_text=branch_text - ) - - comment_type = config.COMMENT_TYPE_SUCCESS if build_pass else config.COMMENT_TYPE_ERROR - - return message, comment_type - - -def get_link_for_bulk_pr(repo_for_link, generated_branch, title, description, target_repo_id, name, target_branch): - pr_url = util.create_pull_request(repo_for_link, generated_branch, title, description, target_repo_id, name, target_branch) - print("Automatically generated pull request: {}".format(pr_url)) - return "- [Pull request for the {repo} changes|{link}]".format(repo=name, link=pr_url) - - -def get_link_for_individual_build_passed(repo_for_link, generated_branch, title, description, target_repo_id, name, target_branch, build_type=None): - # If you change this, also change PR_REQUEST_LINK_TEMPLATE in autogen_issue_advisor_public.py - pull_request_link = 'https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/{repo}/compare?targetBranch=refs%2Fheads%2F{target_branch}&sourceBranch=refs%2Fheads%2F{generated_branch}&title={title}&description={description}&targetRepoId={target_repo_id}'.format( - repo=repo_for_link, - generated_branch=generated_branch, - title=quote(title), - description=quote(description), - target_repo_id=target_repo_id, - target_branch=target_branch) - - if repo_for_link == 'python-cli' and build_type == config.BUILD_TYPE_INDIVIDUAL_PREVIEW: - # store PR link in preview-pr.txt file - it will be used in create_cli_design_review_ticket script. 
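The pull_request_link assembled above is a deep link into Bitbucket's pull-request creation form rather than an API call; a standalone sketch of the same URL-encoding step, with hypothetical branch and title values:

try:
    from urllib import quote  # Python 2
except ImportError:
    from urllib.parse import quote  # Python 3

title = "Auto Generated Preview for python-cli DEXREQ-123"  # hypothetical
generated_branch = "generated-auto-preview-CLI-DEXREQ-123"  # hypothetical

link = ('https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/python-cli/compare'
        '?targetBranch=refs%2Fheads%2Fpreview'
        '&sourceBranch=refs%2Fheads%2F{branch}'
        '&title={title}'.format(branch=generated_branch, title=quote(title)))
# quote() percent-encodes spaces and other reserved characters so the
# pre-filled PR form renders the title intact.
print(link)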
- with open('preview-pr.txt', 'w') as filehandle: - filehandle.write(pull_request_link) - - print('Link with diff for {} changes: {}'.format(name, pull_request_link)) - return "- [Link with diff for {repo} changes|{link}]".format(repo=name, link=pull_request_link) - - -def get_link_for_individual_build_failed(repo_for_link, generated_branch, title, description, target_repo_id, name, target_branch): - branch_link = 'https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/{repo}/browse?at=refs%2Fheads%2F{generated_branch}'.format( - repo=repo_for_link, - generated_branch=generated_branch) - - print('Link with failed branch for {}: {}'.format(name, branch_link)) - return "- [Link to failed branch for {repo} changes|{link}]".format(repo=name, link=branch_link) - - -if __name__ == "__main__": - # team city build step will have already run Maven so at this point generation will be done - # somehow determine if Python SDK / CLI build jobs succeeded or failed and report back to JIRA - # consider invoking build_preview.sh from this script - # previously we had two build steps, one for build.sh for the python SDK and one for build.sh for the CLI not sure if we need both of those or not - # but we do need to wait in between build SDK and build CLI - # right now those build steps handle publishing the artifacts so it is probably better if I leave those steps as is and just run this after - parser = argparse.ArgumentParser(description='Post completion updates to DEXREQ tickets.') - parser.add_argument('--build-id', - required=True, - help='The team city build id for the build that is running this script. This is used to update the relevant JIRA tickets with links to the team city build') - parser.add_argument('--tool', - default=config.CLI_NAME, - help='The tool for which to generate the preview. 
Accepted values: {}'.format(', '.join(config.TOOL_NAMES))) - parser.add_argument('--build-type', - default=config.BUILD_TYPE_INDIVIDUAL_PREVIEW, - help='The build type to use, can be one of the following: {}'.format(', '.join(config.VALID_BUILD_TYPES))) - parser.add_argument('--dry-run', - default=False, - action='store_true', - help='Perform a dry-run') - - args = parser.parse_args() - build_id = args.build_id - tool_name = args.tool - build_type = args.build_type - config.IS_DRY_RUN = args.dry_run - - shared.bitbucket_utils.setup_bitbucket(args) - - # Build type based configuration - if build_type == config.BUILD_TYPE_BULK_PENDING_MERGE_PUBLIC: - tc_link = PUBLIC_TC_LINK - build_description = "Bulk Public" - target_branch = "master" - elif build_type == config.BUILD_TYPE_INDIVIDUAL_PUBLIC: - tc_link = PUBLIC_TC_LINK - build_description = "Public Release" - target_branch = "master" - elif build_type == config.BUILD_TYPE_BULK_PENDING_MERGE_PREVIEW: - tc_link = PREVIEW_TC_LINK - build_description = "Bulk Preview" - target_branch = "preview" - elif build_type == config.BUILD_TYPE_INDIVIDUAL_PREVIEW: - tc_link = PREVIEW_TC_LINK - build_description = "Preview" - target_branch = "preview" - else: - raise ValueError("Unknown build type: '{}', must be one of: {}".format( - build_type, ', '.join(config.VALID_BUILD_TYPES))) - - # Determines if a given build_tool should report status back to the associated Jira issue - is_tool_jira_reportable = util.is_tool_jira_reportable(tool_name) - - # check if we should even run - generation_pass, build_pass = util.were_steps_successful(tool_name) - if not generation_pass: - print("Generation did not pass, not proceeding.") - sys.exit(0) # exit, but without failing here; the generation step already failed - - if not build_pass and build_type in config.BULK_BUILD_TYPES: - print("Build did not pass, and this was a bulk build ({}). Not proceeding.".format(build_type)) - sys.exit(0) - - # get current branches - current_branches = {} - for name, repo in zip(config.REPO_NAMES_FOR_TOOL[tool_name], config.REPOS_FOR_TOOL[tool_name]): - current_branches[name] = [branch.strip()[2:] for branch in repo.git.branch().split('\n') if branch.startswith('* ')][0] - - values = list(current_branches.values()) - if not all(x == values[0] for x in values): - sys.exit('Expected branches for {} to be in sync. Got the following: {}'.format(config.REPO_NAMES_FOR_TOOL[tool_name], current_branches)) - - if not all(x.startswith("auto-") for x in values): - sys.exit('Expected all branches for {} to start with "auto-". 
Got the following: {}'.format(config.REPO_NAMES_FOR_TOOL[tool_name], current_branches))
-
-    branch = current_branches[config.REPO_NAMES_FOR_TOOL[tool_name][0]]
-
-    # create new branches generated-{original auto-preview-* or auto-public-* branch name}
-    # this job is triggered by commits to auto-preview-* or auto-public-*, so we want to move to a new branch so we don't retrigger this job
-    prefix_to_use = config.GENERATION_BRANCH_PREFIX if build_pass else config.FAILED_BRANCH_PREFIX
-    generated_branch = '{generation_branch_prefix}-{branch}'.format(generation_branch_prefix=prefix_to_use, branch=branch)
-    print("Branch to push: {}".format(generated_branch))
-    for repo in config.REPOS_FOR_TOOL[tool_name]:
-        repo.git.checkout(b=generated_branch)
-
-    # TODO: parse last commit message to related DEXREQ issue
-    last_commit_message = util.get_last_commit_message(tool_name)
-
-    # will be of the form 'Updating pom.xml for DEXREQ-123: Preview for RQS'
-    print('Found last commit: {}'.format(last_commit_message))
-
-    issue_keys = util.parse_issue_keys_from_commit_message(last_commit_message)
-    if issue_keys:
-        commit_message = '{commit_prefix} [[{issue_keys}]]'.format(commit_prefix=config.GENERATION_COMMIT_MESSAGE_PREFIX, issue_keys=', '.join(issue_keys))
-    else:
-        commit_message = '{commit_prefix} "{last_commit_message}"'.format(commit_prefix=config.GENERATION_COMMIT_MESSAGE_PREFIX, last_commit_message=last_commit_message)
-
-    try:
-        # add all generated files and commit
-        links = []
-        for name, repo, repo_for_link, target_repo_id in zip(config.REPO_NAMES_FOR_TOOL[tool_name], config.REPOS_FOR_TOOL[tool_name], config.REPO_FOR_LINKS[tool_name], config.TARGET_REPO_IDS_FOR_LINKS[tool_name]):
-            repo.git.add(A=True)
-            message = get_message(repo, commit_message, build_pass)
-
-            print('Committing the following: {}'.format(message))
-
-            repo.git.commit("-m", message, "--allow-empty")
-
-            # Rebase/squash the last two commits together
-            last_two_log_message = repo.git.log(n=2, format='Author: %cn <%aE>%nDate: %aD%n%s%n%b')
-            repo.git.reset("--soft", "HEAD~2")
-            repo.git.commit("-m", last_two_log_message + "\n\n(automatically combined)", "--allow-empty")
-
-            if config.IS_DRY_RUN:
-                print('DRY-RUN: not pushing to origin HEAD')
-            else:
-                repo.git.push('-u','origin','HEAD')
-                # now that generated-* branch is pushed, we can delete the auto-preview or auto-public branch
-                util.safe_delete_branch(repo, branch)
-
-            title = get_title(name, issue_keys, build_description, build_pass, build_type)
-
-            description = PR_DESCRIPTION_TEMPLATE.format(
-                build_log_link=build_log_link(build_id, text="build results"),
-                issue_keys=', '.join(issue_keys),
-                build_description=build_description,
-                tc_link=tc_link
-            )
-
-            last_repo_name = config.REPO_NAMES_FOR_TOOL[tool_name][-1]
-            link = None
-            if build_type in config.BULK_BUILD_TYPES:
-                # For the Python CLI, we build both the Python SDK (first) and the Python CLI (last).
-                # We only want to send out a pull request for the last repo.
-                if name == last_repo_name:
-                    link = get_link_for_bulk_pr(repo_for_link, generated_branch, title, description, target_repo_id, name, target_branch)
-                else:
-                    print('Not sending out PR for {}, since it is not the last repo ({}) for tool {}'.format(name, last_repo_name, tool_name))
-            else:
-                if build_pass:
-                    # For the Python CLI, we build both the Python SDK (first) and the Python CLI (last).
-                    # We only want to include a link for the last repo.
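The two-commit squash in the loop above (soft reset, then recommit with the combined log) is worth seeing in isolation; a minimal sketch, assuming a GitPython Repo for a scratch repository whose current branch has at least two commits:

from git import Repo

repo = Repo(".")  # assumed: a scratch repository with >= 2 commits on the current branch

# Capture both messages, rewind HEAD two commits while keeping the tree staged,
# then commit once so the pom.xml update and the generated code land together.
last_two = repo.git.log(n=2, format='Author: %cn <%aE>%nDate: %aD%n%s%n%b')
repo.git.reset("--soft", "HEAD~2")
repo.git.commit("-m", last_two + "\n\n(automatically combined)", "--allow-empty")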
- if name == last_repo_name: - link = get_link_for_individual_build_passed(repo_for_link, generated_branch, title, description, target_repo_id, name, target_branch, build_type) - else: - print('Not including link for {}, since it is not the last repo ({}) for tool {}'.format(name, last_repo_name, tool_name)) - else: - # Build did not pass (even for the Python SDK repo of the Python CLI build) - link = get_link_for_individual_build_failed(repo_for_link, generated_branch, title, description, target_repo_id, name, target_branch) - if link: - links.append(link) - - if is_tool_jira_reportable: - if issue_keys: - for issue_key in issue_keys: - # post back to JIRA indicating that a build is ready and the change is Pending Merge by the SDK / CLI team - - message, comment_type = get_jira_message(tool_name, links, build_description) - - util.add_jira_comment( - issue_key, - message, - comment_type=comment_type - ) - - # update issue custom status for tool_name to 'Success' - issue = util.get_dexreq_issue(issue_key) - if build_pass: - util.transition_issue_per_tool_status(util.JIRA_CLIENT(), issue, config.CUSTOM_STATUS_SUCCESS, tool_name) - - # check if all status fields are not in 'Success' - check_all_tools_successful(issue_key, build_type, config.REPO_FOR_LINKS[tool_name][0], generated_branch, tool_name) - else: - util.transition_issue_per_tool_status(util.JIRA_CLIENT(), issue, config.CUSTOM_STATUS_FAILURE, tool_name) - # if an issue is already in 'DEX Support Required' based on failure for another tool, we do not want to overwrite that - util.transition_issue_overall_status_if_not_in_status(util.JIRA_CLIENT(), issue, desired_status=config.STATUS_SERVICE_TEAM_FAILURE_INVESTIGATION, blacklisted_status=config.STATUS_DEX_SUPPORT_REQUIRED) - - # refresh issue from server to make sure we have most up to date state - issue = util.get_dexreq_issue(issue_key) - - # this logic is a catch all to make sure if this is the last or only tool to complete - # that we never strand a ticket in 'Processing' - if issue.fields.status.name == config.STATUS_PROCESSING or issue.fields.status.name == config.STATUS_PROCESSING_BULK: - any_tools_processing = False - any_tools_failed = False - for jira_field_id in util.get_jira_custom_field_ids_for_tool().values(): - status = getattr(issue.fields, jira_field_id) - if str(status) == config.CUSTOM_STATUS_PROCESSING: - any_tools_processing = True - if str(status) == config.CUSTOM_STATUS_FAILURE: - any_tools_failed = True - - print('Any tools in Processing?: {}'.format(str(any_tools_processing))) - print('Any tools Failed?: {}'.format(str(any_tools_failed))) - if not any_tools_processing: - # ticket is still in 'Processing' global state but no individual tools are 'Processing' - # this can happen if the last job to be 'Processing' completes successfully but other tools are still in To Do / Failure / None - # for example, if 2 SDKs fail and we re-run a single one which succeeds, we will hit this case - # thus we look at the individual status fields and try to figure out which state to put the ticket in - if any_tools_failed: - util.transition_issue_overall_status_if_not_in_status(util.JIRA_CLIENT(), issue, desired_status=config.STATUS_SERVICE_TEAM_FAILURE_INVESTIGATION, blacklisted_status=config.STATUS_DEX_SUPPORT_REQUIRED) - else: - # this is a very rare case that should only happen when someone is manually tweaking SDK status fields - # so we put the ticket into backlog and the reporter will have to move it to a meaningful status - # this condition will be hit when the current 
job succeeded, no other jobs are in progress, and there are other status fields still in To Do or None - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_BACKLOG) - else: - print('Did not find any issue keys in commit message (text between double brackets: "[[issue-key]]"). Not updating any JIRA issues.') - except Exception as e: - print(e) - - if not is_tool_jira_reportable: - raise e - - # catch anything that went wrong and post it in the ticket so it never gets stuck in 'Processing' - if issue_keys: - for issue_key in issue_keys: - issue = util.get_dexreq_issue(issue_key) - util.transition_issue_per_tool_status(util.JIRA_CLIENT(), issue, config.CUSTOM_STATUS_FAILURE, tool_name) - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_DEX_SUPPORT_REQUIRED) - util.add_jira_comment( - issue.key, - config.SELF_SERVICE_BUG_TEMPLATE.format( - exception=str(e), - build_log_link=build_log_link(build_id) - ), - comment_type=config.COMMENT_TYPE_ERROR - ) diff --git a/scripts/auto_gen_utils/5_mark_preview_tickets_done_post_merge.py b/scripts/auto_gen_utils/5_mark_preview_tickets_done_post_merge.py deleted file mode 100644 index d9567db9da..0000000000 --- a/scripts/auto_gen_utils/5_mark_preview_tickets_done_post_merge.py +++ /dev/null @@ -1,292 +0,0 @@ -import argparse -import config -import util -import requests -import os -import re -import xml.etree.ElementTree as ET -from git.exc import GitCommandError -import urllib - -USERNAME = os.environ.get('TEAMCITY_USERNAME') -PASSWORD = os.environ.get('TEAMCITY_PASSWORD') -if USERNAME and PASSWORD: - TC_BASIC_AUTH = (USERNAME, PASSWORD) -else: - TC_BASIC_AUTH = None - - -def get_last_successful_build_id(build_conf_name, teamcity_branch): - branch_filter = "" - if teamcity_branch: - branch_filter = ",branch:{teamcity_branch}".format(teamcity_branch=urllib.quote_plus(teamcity_branch)) - - url = "https://teamcity.oci.oraclecorp.com/httpAuth/app/rest/builds/?locator=buildType:id:{build_conf_name},status:SUCCESS{branch_filter},count:1".format( - build_conf_name=build_conf_name, - branch_filter=branch_filter) - - print("get_last_successful_build_id url: {}".format(url)) - - response = requests.get(url, auth=TC_BASIC_AUTH, verify=False) - - root = ET.fromstring(response.text.encode('utf-8')) - build_nodes = root.findall("build") - if len(build_nodes) != 1: - return None - else: - id = build_nodes[0].attrib['id'] - print("Last successful build id for '{}': {}".format(build_conf_name, id)) - return id - - -def get_commits_of_build(build_id): - url = "https://teamcity.oci.oraclecorp.com/httpAuth/app/rest/builds/id:{build_id}".format( - build_id=build_id) - - response = requests.get(url, auth=TC_BASIC_AUTH, verify=False) - - root = ET.fromstring(response.text.encode('utf-8')) - - revision_nodes = root.findall(".//revision") - commits = [r.attrib['version'] for r in revision_nodes] - print("Commits of build {}: {}".format(build_id, commits)) - return commits - - -def get_commit_message_for_commit(repo, commit_hash): - return repo.git.log('-n 1', "--format='%s%n%b'", commit_hash) - - -def get_issue_keys_from_repo(repo, last_successful_commits, commit_lookback_range, issue_key_extractor): - delimiter = '++++++++' - last_commit_messages = [] - - # Find all the commit messages up to (excluding) the last successfully-built commit (up to commit_loopback_range) - for x in repo.git.log(n=commit_lookback_range, format='%H%n%s%n%b{}'.format(delimiter)).split(delimiter): - x = x.strip() - if not x: - continue # there's a 
-
-
-def get_issue_keys_from_repo(repo, last_successful_commits, commit_lookback_range, issue_key_extractor):
-    delimiter = '++++++++'
-    last_commit_messages = []
-
-    # Find all the commit messages up to (excluding) the last successfully-built commit (up to commit_lookback_range)
-    for x in repo.git.log(n=commit_lookback_range, format='%H%n%s%n%b{}'.format(delimiter)).split(delimiter):
-        x = x.strip()
-        if not x:
-            continue  # there's a blank one at the end
-
-        lines = x.split('\n')
-        hash = lines[0]
-        message = '\n'.join(lines[1:])
-        if hash in last_successful_commits:
-            print("Reached last successful commit: {}".format(hash))
-            break
-        last_commit_messages.append({'hash': hash, 'message': message})
-
-    # messages will be of the form 'Updating pom.xml for [[DEXREQ-123, DEXREQ-234, DEXREQ-456]]'
-    issue_keys = []
-
-    # go through the commit messages from oldest to newest (so later reverts can remove earlier commits)
-    for pair in reversed(last_commit_messages):
-        hash = pair['hash']
-        commit_message = pair['message'].encode('utf-8')
-
-        new_keys = issue_key_extractor(commit_message)
-
-        # this was probably a revert commit
-        if b'revert' in commit_message.lower():
-            print("Ignoring commit {} because it contains the string 'revert' -- removing keys: [{}]".format(hash, ", ".join(new_keys)))
-            # remove the keys found in this commit
-            for key in new_keys:
-                try:
-                    if key in issue_keys:
-                        issue_keys.remove(key)
-                except ValueError:
-                    pass
-
-            # find the commit hashes mentioned as being reverted
-            reverted_commits = re.findall(b'This reverts commit ([a-f0-9]*).', commit_message)
-
-            # look up the commit messages for the reverted commits
-            for rc in reverted_commits:
-                try:
-                    reverted_message = get_commit_message_for_commit(repo, rc)
-                    # and remove those issue keys from our list
-                    reverted_keys = issue_key_extractor(reverted_message)
-
-                    print("\tReverted commit {} -- removing keys: [{}]".format(rc, ", ".join(reverted_keys)))
-                    for key in reverted_keys:
-                        if key in issue_keys:
-                            issue_keys.remove(key)
-                except GitCommandError:
-                    print("Could not get commit message for what looked like a commit: {}".format(rc))
-        else:
-            print("Considering commit {} -- adding keys: [{}]\n{}".format(hash, ", ".join(new_keys), commit_message))
-            issue_keys.extend(new_keys)
-
-        print("==========")
-
-    return issue_keys
-
-
-def is_work_complete_for_tools(issue, tool_names):
-    work_complete = True
-    for tool in tool_names:
-        # Determines if a given build_tool should report status back to the associated Jira issue
-        if not util.is_tool_jira_reportable(tool):
-            print("Ignoring {} for checking for work completion: not a service-team-facing surface".format(tool))
-            continue
-
-        custom_field_id = config.CUSTOM_FIELD_ID_FOR_TOOL[tool]
-        custom_status = getattr(issue.fields, custom_field_id)
-
-        # Skip any tools that have been labeled to skip their generation
-        if config.BYPASS_CHECK_GENERATION_PREFIX + tool in issue.fields.labels:
-            print("Ignoring {} for checking for work completion due to label: {}".format(tool, config.BYPASS_CHECK_GENERATION_PREFIX + tool))
-            continue
-
-        if not custom_status or not custom_status.value or not custom_status.value == config.CUSTOM_STATUS_DONE:
-            work_complete = False
-            break
-
-    return work_complete
-
-
-def default_issue_key_extractor(commit_message):
-    try:
-        commit_message = commit_message.decode('utf-8')
-    except AttributeError:
-        pass
-    return util.parse_issue_keys_from_specific_commit_message(commit_message, "{} [[".format(config.GENERATION_COMMIT_MESSAGE_PREFIX))
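# Editor's note -- a worked example of the revert handling in
# get_issue_keys_from_repo above (hypothetical commit history, newest first):
#
#     c3  "Revert 'Generated: ... [[DEXREQ-2]]' This reverts commit c1."
#     c2  "Generated: ... [[DEXREQ-3]]"
#     c1  "Generated: ... [[DEXREQ-2]]"
#
# Commits are collected newest-first, then processed oldest to newest:
# c1 adds DEXREQ-2, c2 adds DEXREQ-3, and c3 contains 'revert', so DEXREQ-2 is
# removed again (once via the keys in c3's own message, and once via the
# message of the reverted commit c1). The result is ['DEXREQ-3'].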
-
-
-# - this script will run on commits to the preview or master branch of the different SDKs or the CLI
-# - for every trigger (merged commit), this job will look at recent commit messages to find if
-#   there are any preview or public DEXREQ tickets that need to be updated to SDK Status: Done
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description='Update SDK statuses to Done in DEXREQ tickets.')
-    parser.add_argument('--build-id',
-                        required=True,
-                        help='The TeamCity build id for the build that is running this script. This is used to update the relevant JIRA tickets with links to the TeamCity build')
-    parser.add_argument('--tool',
-                        default=config.CLI_NAME,
-                        help='The tool for which to generate the build. Accepted values: {}'.format(', '.join(config.TOOL_NAMES)))
-    parser.add_argument('--full-version',
-                        help='The full version of this SDK / CLI that was produced. This is used to generate the final comment with a link to the artifacts.')
-    parser.add_argument('--short-version',
-                        required=False,
-                        help='The short version of this SDK / CLI that was produced. This is used to generate the final comment with a link to the artifacts. Only used by Java (e.g. full version "1.2.44-preview1-20180806.212454-12", short version "1.2.44-preview1-SNAPSHOT")')
-    parser.add_argument('--artifactory-repo',
-                        required=False,
-                        help='The artifactory repository where the artifact was published. This is used to generate the final comment with a link to the artifacts. Only used by Java (e.g. "opc-public-sdk-snapshot-maven-local" or "opc-public-sdk-release-maven-local")')
-    parser.add_argument('--issue',
-                        action='append',
-                        help='By default, we will process DEXREQ tickets found in recent commits. This allows you to specify specific DEXREQ issue(s) to act on: --issue DEXREQ-123.')
-    parser.add_argument('--allow-transition-overall-issue-to-done',
-                        default=False,
-                        action='store_true',
-                        help='Allows transitioning overall issue status to "Done" if all CLI / SDK Status fields are "Done"')
-    parser.add_argument('--allow-transition-overall-issue-to-deploy',
-                        default=False,
-                        action="store_true",
-                        help='Allows transitioning the overall issue status to "To Deploy" if all CLI / SDK Status fields are "Done"')
-    parser.add_argument('--dry-run',
-                        default=False,
-                        action='store_true',
-                        help='Perform a dry-run')
-    parser.add_argument('--build-conf-name',
-                        required=True,
-                        help="TeamCity build configuration name.")
-    # Use this query and see if you get the right results:
-    # https://teamcity.oci.oraclecorp.com/httpAuth/app/rest/builds/?locator=buildType:id:{build_conf_name},status:SUCCESS,branch:{teamcity_branch},count:100
-    parser.add_argument('--teamcity-branch',
-                        required=False,
-                        help="Branch for TeamCity 'last successful build' query; examples: 'preview', 'master'")
-    parser.add_argument('--build-type',
-                        default=config.BUILD_TYPE_INDIVIDUAL_PREVIEW,
-                        help='The build type to use, can be one of the following: {}'.format(', '.join(config.VALID_BUILD_TYPES)))
-
-    args = parser.parse_args()
-    build_id = args.build_id
-    tool_name = args.tool
-    full_version = args.full_version
-    short_version = args.short_version
-    artifactory_repo = args.artifactory_repo
-    whitelisted_issues = args.issue
-    allow_transition_overall_issue_to_done = args.allow_transition_overall_issue_to_done
-    allow_transition_overall_issue_to_deploy = args.allow_transition_overall_issue_to_deploy
-    teamcity_buildconf_name = args.build_conf_name
-    teamcity_branch = args.teamcity_branch
-    build_type = args.build_type
-    config.IS_DRY_RUN = args.dry_run
-
-    if TC_BASIC_AUTH and teamcity_buildconf_name:
-        # get the commits of the last successful build
-        last_successful_build_id = get_last_successful_build_id(teamcity_buildconf_name, teamcity_branch)
-        last_successful_commits = get_commits_of_build(last_successful_build_id)
-        commit_lookback_range = 1000
-    else:
-        commit_lookback_range = 10
-        last_successful_commits = []
-        print("Using lookback range {}, since TEAMCITY_USERNAME,
TEAMCITY_PASSWORD, and --build-conf-name are not all set.".format(commit_lookback_range)) - - # get current branches - current_branches = {} - for name, repo in zip(config.REPO_NAMES_FOR_TOOL[tool_name], config.REPOS_FOR_TOOL[tool_name]): - b = repo.git.branch() - print('Branches in repo {}:\n{}'.format(repo, str(b))) - if not b: - current_branches[name] = [] - continue - current_branches[name] = [branch.strip()[2:] for branch in b.split('\n') if branch.startswith('* ')][0] - - print('Current branches per tool: {}'.format(str(current_branches))) - - branch = current_branches[config.REPO_NAMES_FOR_TOOL[tool_name][-1]] - - repo = config.REPOS_FOR_TOOL[tool_name][-1] - - issue_keys = get_issue_keys_from_repo(repo, last_successful_commits, commit_lookback_range, default_issue_key_extractor) - print("Found issue keys: {}".format(", ".join(issue_keys))) - - if issue_keys: - for issue_key in issue_keys: - if whitelisted_issues and issue_key not in whitelisted_issues: - print('Skipping processing ticket {} because it was not included in --issue filter'.format(issue_key)) - continue - - issue = util.get_dexreq_issue(issue_key) - - # don't try to mark fields on an old ticket type because the fields won't be available - if issue.fields.issuetype.id != config.PREVIEW_ISSUE_TYPE_ID and issue.fields.issuetype.id != config.PUBLIC_ISSUE_TYPE_ID: - print('Skipping issue {} of old ticket type: {}'.format(issue_key, issue.fields.issuetype.name)) - continue - - # update issue custom status for tool_name to 'Done' - status = getattr(issue.fields, config.CUSTOM_FIELD_ID_FOR_TOOL[tool_name]) - if issue.fields and issue.fields.status and issue.fields.status.name == config.STATUS_DONE: - print('Skipping marking ticket {issue_key} as Done for tool: {tool} because overall ticket status is Done'.format(issue_key=issue_key, tool=tool_name)) - elif status and status.value and status.value == config.CUSTOM_STATUS_DONE: - # if the ticket is already marked done then ignore it - print('Skipping re-marking ticket {issue_key} as Done for tool: {tool}'.format(issue_key=issue_key, tool=tool_name)) - else: - # only mark the ticket SDK status as done if we are able to add a comment with links to the artifact - comment_template = config.BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_FOR_TOOL.get(tool_name) - if not comment_template: - print('Skipping marking ticket {issue_key} as Done for tool: {tool} because no comment template is defined'.format(issue_key=issue_key, tool=tool_name)) - continue - - if not full_version: - print('Skipping marking ticket {issue_key} as Done for tool: {tool} because --full-version was not specified'.format(issue_key=issue_key, tool=tool_name)) - continue - - util.add_jira_comment( - issue_key, - comment_template.format( - full_version=full_version, - short_version=short_version, - artifactory_repo=artifactory_repo - ) - ) - - util.transition_issue_per_tool_status(util.JIRA_CLIENT(), issue, config.CUSTOM_STATUS_DONE, tool_name) - - if is_work_complete_for_tools(issue, util.get_jira_reportable_tool_names()): - if allow_transition_overall_issue_to_done: - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_DONE) - elif allow_transition_overall_issue_to_deploy: - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_TO_DEPLOY) - else: - print('Did not find any issue keys in commit message (text between double brackets: "[[issue-key]]"). 
Not updating any JIRA issues.')
diff --git a/scripts/auto_gen_utils/README.md b/scripts/auto_gen_utils/README.md
deleted file mode 100644
index 7f0134480f..0000000000
--- a/scripts/auto_gen_utils/README.md
+++ /dev/null
@@ -1,253 +0,0 @@
-Auto Generation Utils
-========================
-
-Note
------
-
-Every time you clone this repo, please set up the pre-commit hook; see (#Pre-Check-in-Validation): `git config core.hooksPath hooks`
-
-
-Description
-------------
-
-This package contains scripts related to our SDK/CLI self-service pipeline. Most of these scripts run as part of TeamCity builds.
-
-
-### Self-service Pipeline
-
-TODO: update for Build service
-
-Runs every hour:
-- Preview: https://teamcity.oci.oraclecorp.com/viewType.html?buildTypeId=Sdk_SelfService_Preview_ProcessJiraAutoPreviewQueue
-- Public: https://teamcity.oci.oraclecorp.com/viewType.html?buildTypeId=Sdk_SelfService_PublicV2_ProcessJiraAutoPublicQueue
-
-```
-1_process_preview_jira_queue.py
-```
-
-If there were tickets in 'Processing Requested', the above job will create new branches for the SDKs and the CLI and update the `pom.xml` file with the new data from the DEXREQ tickets (e.g. a spec artifact version change).
-
-The scripts that actually modify the `pom.xml` files are
-
-```
-add_or_update_scripts/*.py
-```
-
-This Git change will trigger generation builds for each of the SDKs and the CLI:
-
-TODO: update for Build service
-
-- Preview: https://teamcity.oci.oraclecorp.com/project.html?projectId=Sdk_SelfService_Preview_AutoPreviewBuilds&tab=projectOverview
-- Public: https://teamcity.oci.oraclecorp.com/project.html?projectId=Sdk_SelfService_PublicV2_AutoPublicBuilds&tab=projectOverview
-
-These builds will run, among other things, these scripts:
-
-```
-2_pre_generation_set_up.py
-3_report_generation_status.py
-4_on_generation_complete.py
-5_mark_preview_tickets_done_post_merge.py
-```
-
-The different SDKs have additional scripts that may be part of these generation builds in the `team_city_scripts` directory:
-
-```
-team_city_scripts/
-├── cli
-├── go
-├── java
-├── python_sdk
-└── ruby
-```
-
-
-### Advisor
-
-The "advisor" runs periodically, looks at DEXREQ JIRA tickets that have changed, and comments on their state. If there are actionable state changes, it also moves the tickets forward (or backward, if illegal manual changes were made) in the pipeline.
-
-TODO: update for Build service
-
-Runs every 10 minutes: https://teamcity.oci.oraclecorp.com/viewType.html?buildTypeId=Sdk_SelfService_RunTicketAdvisorV2
-
-```
-autogen_issue_advisor.py
-autogen_issue_advisor_preview.py
-autogen_issue_advisor_public.py
-autogen_issue_advisor_shared.py
-```
-
-
-### Branch Cleanup
-
-Runs daily; cleans up old branches and merges/declines old spec diff PRs.
-
-TODO: update for Build service
-
-- https://teamcity.oci.oraclecorp.com/viewType.html?buildTypeId=Sdk_SelfService_Preview_CleanUpAutoPreviewBranches&tab=buildTypeHistoryList&branch_Sdk_SelfService_Preview=__all_branches__
-
-```
-clean_auto_branches.py
-```
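For orientation: the `pom.xml` edits performed by the `add_or_update_scripts` described above largely boil down to namespace-aware property updates. A minimal sketch (the xpath is the one several of these scripts use; the function itself is illustrative, not an actual script from this package):

```
import xml.etree.ElementTree as ET

ns = {"ns": "http://maven.apache.org/POM/4.0.0"}

def bump_spec_version(pom_path, new_version):
    # keep the default Maven namespace so the output has no ns0: prefixes
    ET.register_namespace('', "http://maven.apache.org/POM/4.0.0")
    pom = ET.parse(pom_path)
    version_node = pom.findall(".//ns:properties//ns:codegen.artifactory.version", ns)[0]
    version_node.text = new_version
    pom.write(pom_path, encoding="UTF-8", xml_declaration=True)
```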
-
-
-### Other Scripts for TeamCity Builds
-
-There are several other scripts that run on TeamCity, e.g. as part of on-PR builds or triggered by timers.
-
-```
-team_city_scripts/
-├── api_review
-├── github_issues
-├── oci_testing_service
-├── orm
-└── spec_validator
-```
-
-
-### Libraries/Utilities
-
-There are a number of utility/library files:
-
-- `shared/bitbucket_utils.py` -- interfacing with Bitbucket PRs and repos
-- `shared/version_utils.py` -- dealing with Maven versions and checking if they are valid and increasing
-- `config.py` -- mostly names, repos, etc. for the different SDKs and the CLI
-- `util.py` -- interfacing with JIRA, among other things
-
-
-Further reading
-----------------
-
-- [Self-service Testing and Development](https://confluence.oci.oraclecorp.com/display/DEX/Self-Service+Testing+and+Development)
-- [Requesting a Public SDK](https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=43683000)
-- [Requesting a Preview SDK](https://confluence.oci.oraclecorp.com/display/DEX/Requesting+a+preview+SDK+CLI)
-- [Self-service Pipeline Limitations](https://confluence.oci.oraclecorp.com/display/DEX/Self-service+Pipeline+Limitations)
-- [Preview/Public Pipeline Ticket Advisor](https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=60740381)
-- [SDK Runbook - Exceptions in Self-Service Pipeline](https://confluence.oci.oraclecorp.com/display/DEX/SDK+Runbook+-+Exceptions+in+Self-Service+Pipeline)
-- [SDK Runbook - Running the Pipeline on Just One SDK or CLI](https://confluence.oci.oraclecorp.com/display/DEX/SDK+Runbook+-+Running+the+Pipeline+on+Just+One+SDK+or+CLI)
-- [Scripts to Add or Update Specs](https://confluence.oci.oraclecorp.com/display/DEX/Scripts+to+Add+or+Update+Specs)
-- [Self Service Preview Generation](https://confluence.oci.oraclecorp.com/display/DEX/Self+Service+Preview+Generation)
-- [Pull Request Validation Builds for the Testing Service](https://confluence.oci.oraclecorp.com/display/DEX/Pull+Request+Validation+Builds+for+the+Testing+Service)
-- [OCI Testing Service Tests Run In PRs](https://confluence.oci.oraclecorp.com/display/~mricken/OCI+Testing+Service+Tests+Run+In+PRs)
-
-
-Pre Check-in Validation
-------------------------
-Before checking in, run the verify script, which performs some basic validation:
-
-`./verify.sh`
-
-We recommend adding this as a pre-commit hook, which you can do by running the following command:
-`git config core.hooksPath hooks`
-
-
-Installing dependencies
-------------------------
-To install dependencies for this project, execute the following command:
-
-`pip install -r requirements.txt`
-
-
-Running scripts locally
--------------------------
-In order to run the scripts locally, you will need a JIRA token set in the JSESSIONID environment variable.
-
-To get this token, run the following script:
-
-`python get_jira_access_token.py`
-
-This will print out a token which you should set in your environment like so:
-
-`export JSESSIONID={TOKEN HERE}`
-
-If you get an error like the following, you haven't set JSESSIONID in your environment. `config.JSESSIONID` is the name
-of the TeamCity variable, but when running locally use JSESSIONID.
-
-    Could not authenticate with JIRA server. Must specify environment variables for either config.JSESSIONID or JIRA_USERNAME and JIRA_PASSWORD.
-
-Example of running 1_process_preview_jira_queue.py locally:
-
-    python 1_process_preview_jira_queue.py --dry-run --issue DEXREQ-354 --tool PythonSDK --allow-individual-tool-generation --for-any-status --build-type individual_public --base-branch master --build-id 645
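To make the fallback order concrete, here is a minimal sketch of the credential lookup described above (the helper name and return shape are illustrative assumptions, not the actual `util.py` API):

```
import os

def resolve_jira_auth():
    # prefer the JSESSIONID token produced by get_jira_access_token.py
    jsessionid = os.environ.get('JSESSIONID')
    if jsessionid:
        return {'cookie': 'JSESSIONID={}'.format(jsessionid)}
    username = os.environ.get('JIRA_USERNAME')
    password = os.environ.get('JIRA_PASSWORD')
    if username and password:
        return {'basic_auth': (username, password)}
    raise RuntimeError(
        'Could not authenticate with JIRA server. Must specify environment '
        'variables for either config.JSESSIONID or JIRA_USERNAME and JIRA_PASSWORD.')
```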
-
-Some scripts also need JIRA access. For that, please set the following environment variables:
-
-    JIRA_USERNAME=...
-    JIRA_PASSWORD=...
-
-Some scripts also need Bitbucket access. For that, please set the following environment variables:
-
-    BITBUCKET_USERNAME=...
-    BITBUCKET_PASSWORD=...
-
-If those aren't set, the scripts will fall back to using JIRA_USERNAME and JIRA_PASSWORD, for backward-compatibility reasons, even though the accounts are not linked anymore and the credentials may differ.
-
-
-Testing Jira bot changes locally
---------------------------------
-- Use the zsh shell for the following steps.
-- Set the env variables `PYTHONPATH` and `PYTHON_CLI_DIR` as shown below.
-- Activate the venv and install all requirements for the `python-cli` and `auto-gen-utils` repos.
-- Use Python 3.7 or higher.
-
-Example directory structure:
-```
-dir1
-├── cli-bot-env
-├── auto-gen-utils
-├── python-cli
-```
-**Downloading Python 3.7 using pyenv**
-```
-zsh
-echo "Creating a python virtual environment"
-curl -L https://raw.githubusercontent.com/yyuu/pyenv-installer/master/bin/pyenv-installer | bash
-export PATH="$HOME/.pyenv/bin:$PATH"
-eval "$(pyenv init -)"
-eval "$(pyenv init --path)"
-eval "$(pyenv virtualenv-init -)"
-# pyenv update
-pyenv install 3.7.0 -s
-pyenv shell 3.7.0
-
-```
-**Creating the virtual environment**
-```
-
-python3.7 -m venv cli-bot-env
-source cli-bot-env/bin/activate
-
-cd auto-gen-utils
-export PYTHONPATH=$(pwd):$PYTHONPATH
-pip3 install --pre --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt
-cd ..
-
-```
-**Clone `python-cli` under `dir1` (see the directory diagram above)**
-\
-**Now set up the `python-cli` development environment as follows**
-```
-
-cd python-cli
-export PYTHONPATH=$(pwd):$PYTHONPATH
-export PYTHON_CLI_DIR=$(pwd)
-
-../auto-gen-utils/python_cli/install_python_cli_local.sh
-
-```
-
-Refer to the example below for testing a command against a dummy DEX issue.
-Note: Always run the first step to clear any local changes and commits and to check out preview in the python-cli repo before running the script.
-```
-cd ../python-cli
-git restore . && git clean -fd && git checkout preview && git branch -D preview-DEXTEST-1234
-cd ..
- -cd auto-gen-utils -export issue="DEXTEST-1234" -export command="[~gear-dexreq-automation] Manual Changes Requested -[Rename Parameter] - -oci bds instance install-patch --patch-version -> --patch-testing-param" -python3 python_cli/generate_local_changes.py $issue $command -``` diff --git a/scripts/auto_gen_utils/__init__.py b/scripts/auto_gen_utils/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/scripts/auto_gen_utils/add_or_update_scripts/__init__.py b/scripts/auto_gen_utils/add_or_update_scripts/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/scripts/auto_gen_utils/add_or_update_scripts/add_or_update_spec_utils.py b/scripts/auto_gen_utils/add_or_update_scripts/add_or_update_spec_utils.py deleted file mode 100644 index eaf80426f5..0000000000 --- a/scripts/auto_gen_utils/add_or_update_scripts/add_or_update_spec_utils.py +++ /dev/null @@ -1,81 +0,0 @@ -# Add or update spec utilities - -import collections -import re -import xml.etree.ElementTree as ET - - -ns = {"ns": "http://maven.apache.org/POM/4.0.0"} - - -class CommentedTreeBuilder(ET.TreeBuilder): - def __init__(self, *args, **kwargs): - super(CommentedTreeBuilder, self).__init__(*args, **kwargs) - - def comment(self, data): - self.start(ET.Comment, {}) - self.data(data) - self.end(ET.Comment) - - -AddOrUpdateSpecResult = collections.namedtuple( - 'AddOrUpdateSpecResult', - 'updated existing ignored previous changed') - - -def compute_changed_settings(previous, current): - changed = [] - for key, value in previous.items(): - if key not in current: - changed.append(key) - elif previous[key] != current[key]: - changed.append(key) - - for key, value in current.items(): - if key not in previous: - changed.append(key) - - return changed - - -def indent(elem, level=0): - indent_str = " " - i = "\n" + level * indent_str - if len(elem): - if not elem.text or not elem.text.strip(): - elem.text = i + indent_str - for e in elem: - indent(e, level + 1) - if not e.tail or not e.tail.strip(): - e.tail = i + indent_str - if not e.tail or not e.tail.strip(): - e.tail = i - else: - if level and (not elem.tail or not elem.tail.strip()): - elem.tail = i - - -def add_spec_module_to_github_whitelist(spec_name, github_whitelist_location, github_whitelist_template): - if github_whitelist_location and github_whitelist_template: - with open(github_whitelist_location, 'a') as f: - f.write(github_whitelist_template.format(spec_name)) - - -def convert_camel_to_snake_case(name): - s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name) - return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() - - -def parse_pom(file_name): - return ET.parse(file_name, parser=ET.XMLParser(target=CommentedTreeBuilder())) - - -def write_xml(file_name, pom): - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -def find_pom_version(pom_location): - print("Parsing pom: {}".format(pom_location)) - pom = parse_pom(pom_location) - return pom.findall(".//ns:version", ns)[0].text diff --git a/scripts/auto_gen_utils/add_or_update_scripts/cli_add_or_update_spec.py b/scripts/auto_gen_utils/add_or_update_scripts/cli_add_or_update_spec.py deleted file mode 100644 index 2c726caab3..0000000000 --- a/scripts/auto_gen_utils/add_or_update_scripts/cli_add_or_update_spec.py +++ /dev/null @@ -1,253 +0,0 @@ -#!/usr/bin/env python - -# -# This script manipulates pom.xml to either add new specs or update the versions of existing specs. 
-# - -import click -import xml.etree.ElementTree as ET -import os -from .single_pom_file_add_or_update_spec import DEFAULT_POM_LOCATION -from .add_or_update_spec_utils import AddOrUpdateSpecResult, compute_changed_settings -from .add_or_update_spec_utils import write_xml -from shared.version_utils import is_version_increasing - - -XMLNS = {"ns":"http://maven.apache.org/POM/4.0.0"} - -# This is the template for a child pom. -POM_TEMPLATE = """ - - - com.oracle.bmc.sdk - python-cli - 1.0.0-SNAPSHOT - ../../pom.xml - - - com.oracle.bmc.sdk - 1.0.0-SNAPSHOT - 4.0.0 - {spec_name} - - ${project.basedir}/../../ - - {group_id} - {artifact_id} - {version} - {spec_file} - {spec_name} - ${project.basedir} - {root_group_override} - - - - ${cli-root-dir}/target - - - -""" - - -# Get the settings from an existing pom file. -def gather_settings(pom_file): - settings = {} - if not os.path.exists(pom_file): - return settings - - pom = parse_pom(pom_file) - - # Get the spec_name from the top most artifactId tag. - xpath = "./ns:artifactId" - artifactId = pom.findall(xpath, XMLNS) - if len(artifactId) < 1: - print("spec_name could not be found in existing pom file") - else: - settings["spec_name"] = artifactId[0].text - - add_property_setting("codegen.artifactory.groupId", "group_id", pom, settings) - add_property_setting("codegen.artifactory.artifactId", "artifact_id", pom, settings) - add_property_setting("codegen.artifactory.version", "version", pom, settings) - add_property_setting("codegen.spec.name", "relative_spec_path", pom, settings) - # Not everything will have a root_group_override - try: - add_property_setting("generate-rootCliGroupOverride", "root_group_override", pom, settings) - except: # noqa: ignore=E722 - pass - - # Check if the current pom is overriding the default build plugins. - # For example, key_management-pom.xml and streaming-pom.xml do this. 
- for child in pom.getroot(): - if "build" in child.tag: - for build_child in child: - if "plugins" in build_child.tag: - settings["plugins"] = build_child - - return settings - - -def add_property_setting(xpath_property, setting_name, pom, settings): - xpath = ".//ns:properties//ns:" + xpath_property - properties = pom.findall(xpath, XMLNS) - if len(properties) < 1: - raise Exception(setting_name + " not found in pom.xml") - else: - settings[setting_name] = properties[0].text - - -def determine_pom_location(artifact_id, spec_name, services_root_dir): - # spec_name may not be specified if this is an existing spec - # so we have to look through existing service directories to find a pom.xml with this artifact_id - for individual_service_directory in os.listdir(services_root_dir): - individual_service_directory = os.path.join(services_root_dir, individual_service_directory) - if os.path.isdir(individual_service_directory): - for f in os.listdir(individual_service_directory): - file_path = os.path.join(individual_service_directory, f) - if os.path.isfile(file_path) and file_path.endswith('pom.xml'): - pom = parse_pom(file_path) - - xpath = ".//ns:properties//ns:codegen.artifactory.artifactId" - properties = pom.findall(xpath, XMLNS) - if len(properties) < 1: - raise Exception("{} not found in {}".format(xpath, file_path)) - - artifact_id_from_pom = properties[0].text - if artifact_id_from_pom == artifact_id: - return file_path - - # there is no existing pom for this artifact id - # so return the new location where it should be based on spec_name - if not spec_name: - raise Exception("Spec name must be specified for new service") - - pom_path = os.path.join(services_root_dir, spec_name, "pom.xml") - - # create the new directory to contain this pom.xml - pom_dir = os.path.dirname(pom_path) - if not os.path.exists(pom_dir): - os.makedirs(pom_dir) - - return pom_path - - -def parse_pom(pom_file): - if not os.path.exists(pom_file): - return None - - pom = ET.parse(pom_file) - - # allow default namespace for output, dont print ns0: prefixes everywhere - ET.register_namespace('',"http://maven.apache.org/POM/4.0.0") - - return pom - - -def add_or_update_spec(artifact_id=None, group_id=None, spec_name=None, relative_spec_path=None, version=None, spec_generation_type=None, add_sub_groups=None, multiple_services_in_spec=None, pom_location=None, root_group_override=None): - found = None - ignored = [] - previous = {} - changed = [] - - if not os.path.exists(pom_location): - os.mkdir(pom_location) - - if not spec_generation_type: - spec_generation_type = 'PUBLIC' - - if artifact_id: - file_name = determine_pom_location(artifact_id, spec_name, pom_location) - previous = gather_settings(file_name) - - if 'artifact_id' in previous: - found = previous['artifact_id'] - - if group_id is None and 'group_id' in previous: - group_id = previous['group_id'] - if relative_spec_path is None and 'relative_spec_path' in previous: - relative_spec_path = previous['relative_spec_path'] - - # updating spec_name is not supported in self service so always use previous if it exists - # if we want to support this in the future, the Python SDK will need to support it as well - if 'spec_name' in previous: - if spec_name is not None: - ignored.append('spec_name') - - spec_name = previous['spec_name'] - - previous_version = None - if 'version' in previous: - previous_version = previous['version'] - new_version = None - if previous_version is None or previous_version == version or is_version_increasing(previous_version, version): - 
new_version = version
-    else:
-        new_version = previous_version
-        print(spec_name, ': The version was not updated to {}, because it was already at {}.'.format(version, previous_version))
-
-    pom_string = POM_TEMPLATE
-
-    root_group_override = ''
-    if not multiple_services_in_spec:
-        if 'root_group_override' in previous:
-            # This preserves the existing root_group_override already found in the pom
-            root_group_override = previous['root_group_override']
-        else:
-            root_group_override = spec_name.replace('_', '-')
-    else:
-        # Remove the root group override for multiple-service specs.
-        pom_string = pom_string.replace("{root_group_override}", "")
-
-    # Note: we use regular string replace instead of format so we don't have to double- and triple-escape things in the template
-    if artifact_id is not None:
-        pom_string = pom_string.replace("{artifact_id}", artifact_id)  # required in 1_process_preview_jira_queue
-    if new_version is not None:
-        pom_string = pom_string.replace("{version}", new_version)  # required in 1_process_preview_jira_queue
-    if spec_name is not None:
-        pom_string = pom_string.replace("{spec_name}", spec_name)
-    else:
-        print("Warning: Could not determine the spec_name from jira or previous pom.")
-    if group_id is not None:
-        pom_string = pom_string.replace("{group_id}", group_id)
-    else:
-        print("Warning: Could not determine the group_id from jira or previous pom.")
-    if relative_spec_path is not None:
-        pom_string = pom_string.replace("{spec_file}", relative_spec_path)
-    else:
-        print("Warning: Could not determine the spec_file from jira or previous pom.")
-    pom_string = pom_string.replace("{root_group_override}", root_group_override)
-
-    root = ET.fromstring(pom_string)
-    pom = ET.ElementTree(element=root)
-    # If our previous version of the pom was overriding the default build plugins,
-    # we need to preserve that.
-    if 'plugins' in previous:
-        build = root.find('{http://maven.apache.org/POM/4.0.0}build')
-        build.append(previous['plugins'])
-    write_xml(file_name, pom)
-    current = gather_settings(file_name)
-    changed = compute_changed_settings(previous, current)
-
-    return AddOrUpdateSpecResult(
-        updated=(not found) or changed != [],  # not found means it's a new spec; for an existing spec, changed needs to be non-empty
-        existing=found is not None,
-        ignored=ignored,
-        previous=previous,
-        changed=changed)
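# Editor's note -- an illustrative call (hypothetical values; the result is the
# AddOrUpdateSpecResult namedtuple defined in add_or_update_spec_utils.py):
#
#     result = add_or_update_spec(
#         artifact_id='coreservices-api-spec',
#         version='0.0.2-SNAPSHOT',
#         pom_location='services')
#     # -> AddOrUpdateSpecResult(updated=True, existing=True, ignored=[],
#     #                          previous={...}, changed=['version'])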
-
-
-@click.command()
-@click.option('--artifact-id', help='The artifact id for the spec artifact (e.g. coreservices-api-spec)')
-@click.option('--group-id', help='The group id for the spec artifact (e.g. com.oracle.pic.commons)')
-@click.option('--spec-name', help='The name of the spec. This will be used (e.g. core, identity, object_storage). This is also used as the module name.')
-@click.option('--relative-spec-path', help='The relative path of the spec within the artifact (e.g. coreservices-api-spec-20160918-external.yaml)')
-@click.option('--version', help='The version of the spec artifact (e.g. 0.0.1-SNAPSHOT)')
-@click.option('--spec-generation-type', help='The generation type: PUBLIC or PREVIEW')
-@click.option('--add-sub-groups', is_flag=True, help='For new specs this will always be true (without it, nothing from the new spec would be added). For existing specs, providing this value causes all commands in the spec to be automatically added to the CLI.')
-@click.option('--multiple-services-in-spec', is_flag=True, help='Provide this flag if the spec contains multiple services (e.g. the Core spec has Compute, Block Storage and Virtual Networking). This will disable behaviour such as root group overrides, as otherwise multiple services would try to use the same root group and commands would be suppressed')
-@click.option('--pom-location', type=click.Path(exists=True), default=DEFAULT_POM_LOCATION, help='Location of the pom.xml file to update')
-def add_or_update_spec_command(artifact_id, group_id, spec_name, relative_spec_path, version, spec_generation_type, add_sub_groups, multiple_services_in_spec, pom_location):
-    print(add_or_update_spec(artifact_id, group_id, spec_name, relative_spec_path, version, spec_generation_type, add_sub_groups, multiple_services_in_spec, pom_location))
-
-
-if __name__ == '__main__':
-    add_or_update_spec_command()
diff --git a/scripts/auto_gen_utils/add_or_update_scripts/datagen_add_or_update_spec.py b/scripts/auto_gen_utils/add_or_update_scripts/datagen_add_or_update_spec.py
deleted file mode 100644
index dc1836df19..0000000000
--- a/scripts/auto_gen_utils/add_or_update_scripts/datagen_add_or_update_spec.py
+++ /dev/null
@@ -1,147 +0,0 @@
-#!/usr/bin/env python
-
-#
-# This script manipulates the pom.xml tree to either add new specs or update the versions of existing specs.
-#
-
-import click
-
-from .module_pom_file_add_or_update_spec import DEFAULT_POM_LOCATION
-from .spec_updater_base import SpecUpdaterBase
-
-
-# The path for the modules is under "sdk-client-test-data/codegen"
-TEST_DATA_GEN_MODULE_LOCATION = "codegen"
-
-# Pom.xml template specific to sdk-client-test-data
-TEST_DATA_GEN_POM_FILE_TEMPLATE = """
-
-    4.0.0
-
-    com.oci.sdk
-    sdk-test-data-codegen-template
-    {sdk_version}
-    ..
-
-    sdk-test-data-{module_name}-codegen
-    OCI Generated Test Data - {service_friendly_name} Service Codegen
-    pom
-
-    {group_id}
-    {artifact_id}
-    {artifact_version}
-    {spec_path_relative_to_jar}
-    {module_name}
-    {subdomain}
-
-
-    org.codehaus.mojo
-    build-helper-maven-plugin
-
-    com.oracle.oci.sdk.utilities
-    dex-get-spec-artifact-plugin
-    ${{oci.get.spec.artifact.plugin.version}}
-
-    com.oracle.oci.sdk.utilities
-    spec-conditionals-preprocessor-plugin
-
-    com.oracle.bmc.sdk
-    bmc-sdk-swagger-maven-plugin
-
-    com.oracle.oci
-    sdk-client-test-data-generator-maven-plugin
-
-    org.apache.maven.plugins
-    maven-antrun-plugin
-
-    maven-clean-plugin
-
-"""
-
-
-# The test-data-gen-specific pom.xml template parameters for XML parsing
-TEST_DATA_GEN_SPEC_PARAMS_XML_PATH_DICT = {
-    'group_id': ".//ns:properties//ns:codegen.artifactory.groupId",
-    'artifact_id': ".//ns:properties//ns:codegen.artifactory.artifactId",
-    'version': ".//ns:properties//ns:codegen.artifactory.version",
-    'relative_spec_path': ".//ns:properties//ns:codegen.artifact.spec.path",
-    'service_name': ".//ns:properties//ns:codegen.service.name",
-    'subdomain': ".//ns:properties//ns:codegen.service.group.endpoint"
-}
-
-
-class TestDataGenSpecUpdater(SpecUpdaterBase):
-    # Override the spec name that is defined in the service pom.xml files, as the testing service
-    # references folders without snake_case (based on the Java SDK formatting).
-    def format_module_name_for_template(self, module_name):
-        return module_name.replace("-", "").replace("_", "").replace(" ", "")
-
-
-##################################################
-# Main
-##################################################
-@click.command()
-@click.option('--artifact-id', required=True, help='The artifact id for the spec artifact (e.g. coreservices-api-spec)')
-@click.option('--group-id', help='The group id for the spec artifact (e.g.
com.oracle.pic.commons)')
-@click.option('--spec-name', help='The name of the spec. This will be used (e.g. core, identity, object_storage). '
-                                  'This is also used as the module name.')
-@click.option('--relative-spec-path', help='The relative path of the spec within the artifact '
-                                           '(e.g. coreservices-api-spec-20160918-external.yaml)')
-@click.option('--endpoint', help='The base endpoint for the service (e.g. https://iaas.{domain}/20160918)')
-@click.option('--subdomain', help='The subdomain for the service (e.g. \'iaas\')')
-@click.option('--version', help='The version of the spec artifact (e.g. 0.0.1-SNAPSHOT)')
-@click.option('--spec-generation-type', help='The generation type: PUBLIC or PREVIEW (is ignored for the test data generator)')
-@click.option('--regional-sub-service-overrides', multiple=True, help="Is ignored for the test data generator.")
-@click.option('--non-regional-sub-service-overrides',
-              multiple=True,
-              help="Is ignored for the test data generator. Non-regional client overrides require a manual update to the service module's pom.xml")
-@click.option('--signing-strategy',
-              help='The signing strategy to use for the client. Is ignored for the test data generator. Requires a manual pom.xml update to override')
-@click.option('--pom-location',
-              type=click.Path(exists=True),
-              default=DEFAULT_POM_LOCATION,
-              help='Location of the root pom.xml file for sdk-client-test-data')
-@click.option('--module-location',
-              type=click.Path(exists=True),
-              help="Parent directory containing the module pom.xml files")
-def add_or_update_command(artifact_id,
-                          group_id,
-                          spec_name,
-                          relative_spec_path,
-                          endpoint,
-                          subdomain,
-                          signing_strategy,
-                          version,
-                          spec_generation_type,
-                          regional_sub_service_overrides,
-                          non_regional_sub_service_overrides,
-                          pom_location,
-                          module_location):
-    spec_updater = TestDataGenSpecUpdater(TEST_DATA_GEN_MODULE_LOCATION,
-                                          TEST_DATA_GEN_POM_FILE_TEMPLATE,
-                                          TEST_DATA_GEN_SPEC_PARAMS_XML_PATH_DICT)
-    print(spec_updater.add_or_update_spec(
-        artifact_id=artifact_id,
-        group_id=group_id,
-        spec_name=spec_name,
-        relative_spec_path=relative_spec_path,
-        endpoint=endpoint,
-        subdomain=subdomain,
-        version=version,
-        pom_location=pom_location,
-        module_location=module_location))
-
-
-if __name__ == '__main__':
-    add_or_update_command()
diff --git a/scripts/auto_gen_utils/add_or_update_scripts/dotnet_sdk_add_or_update_spec.py b/scripts/auto_gen_utils/add_or_update_scripts/dotnet_sdk_add_or_update_spec.py
deleted file mode 100755
index 397aa65e6a..0000000000
--- a/scripts/auto_gen_utils/add_or_update_scripts/dotnet_sdk_add_or_update_spec.py
+++ /dev/null
@@ -1,497 +0,0 @@
-#!/usr/bin/env python
-
-#
-# This script manipulates the pom.xml tree to either add new specs or update the versions of existing specs.
-# - - -from .spec_updater_base import SpecUpdaterBase -import xml.etree.ElementTree as ET -import re -import click -import os -import string -import uuid -from click.exceptions import UsageError, MissingParameter -from glob import glob - -from shared.version_utils import is_version_increasing -from .add_or_update_spec_utils import AddOrUpdateSpecResult, compute_changed_settings, indent -from .add_or_update_spec_utils import add_spec_module_to_github_whitelist, write_xml -from .add_or_update_spec_utils import CommentedTreeBuilder -from .add_or_update_spec_utils import parse_pom - - -DEFAULT_PARENT_POM_LOCATION = "pom.xml" -DEFAULT_GITHUB_WHITELIST_LOCATION = "github.whitelist" -ns = {"ns":"http://maven.apache.org/POM/4.0.0"} - -# The path for the modules is under "oci-dotnet-sdk/Codegen" -DOTNETSDK_CODEGEN_LOCATION = "Codegen" - - -# Template for include each module in pom.xml -MODULE_TEMPLATE = "{name}" -# Pom.xml template specific to DotNet SDK -DOTNETSDK_POM_FILE_TEMPLATE = """ - - 4.0.0 - - com.oracle.oci.sdk - oci-dotnet-sdk-codegen - {sdk_version} - .. - - oci-dotnet-sdk-{service_name}-codegen - Oracle Cloud Infrastructure SDK - {service_friendly_name} Service Codegen - This project contains the SDK used for Oracle Cloud Infrastructure {service_friendly_name} - - {group_id} - {artifact_id} - {artifact_version} - {spec_path_relative_to_jar} - {module_name} - {generate_waiters} - {generate_paginators} - {service_name} - - - - - org.commonjava.maven.plugins - directory-maven-plugin - - - org.codehaus.mojo - build-helper-maven-plugin - - - com.oracle.oci.sdk.utilities - dex-get-spec-artifact-plugin - - - com.oracle.oci.sdk.utilities - spec-conditionals-preprocessor-plugin - - - com.oracle.bmc.sdk - bmc-sdk-swagger-maven-plugin - - - com.mycila - license-maven-plugin - - - maven-antrun-plugin - - - - -""" - -MODULE_PROJECT_FILE_TEMPLATE = """ - - - - {{{guid}}} - Library - netstandard2.0 - 8.0 - OCI.DotNetSDK.{module_name} - Oci.{module_name}Service - Oracle Cloud Infrastructure - Oracle - Oracle;OCI;Oracle Cloud;OracleCloud;oci-sdk;OracleCloudInfrastructure;{module_name} - Oracle Cloud Infrastructure Cloud {service_friendly_name} Service - - - - - -""" - - -# The dotnet_sdk-specific pom.xml template parameters for XML parsing -DOTNETSDK_SPEC_PARAMS_XML_PATH_DICT = { - 'group_id': ".//ns:properties//ns:codegen.artifactory.groupId", - 'artifact_id': ".//ns:properties//ns:codegen.artifactory.artifactId", - 'version': ".//ns:properties//ns:codegen.artifactory.version", - 'relative_spec_path': ".//ns:properties//ns:codegen.artifact.spec.path", - 'service_name': ".//ns:properties//ns:codegen.service.name", - 'subdomain': ".//ns:properties//ns:codegen.service.group.endpoint" -} - - -MODULE_README_FORMAT = """ -# OCI .NET client for {service_friendly_name} Service - -This module enables you to write code to manage resources for {service_friendly_name} Service. - -## Requirements - -To use this module, you must have the following: - -- An Oracle Cloud Infrastructure account. -- A user created in that account, in a group with a policy that grants the desired permissions. This can be a user for yourself, or another person/system that needs to call the API. For an example of how to set up a new user, group, compartment, and policy, see [Adding Users](https://docs.cloud.oracle.com/en-us/iaas/Content/GSG/Tasks/addingusers.htm). For a list of typical policies you may want to use, see [Common Policies](https://docs.cloud.oracle.com/en-us/iaas/Content/Identity/Concepts/commonpolicies.htm). 
-- A key pair used for signing API requests, with the public key uploaded to Oracle. Only the user calling the API should be in possession of the private key. For more information, see [Configuring Credentials](https://docs.cloud.oracle.com/en-us/iaas/Content/API/SDKDocs/javasdkconfig.htm) - -## Installing - -Use the following command to install this module: - -``` -dotnet add package OCI.DotNetSDK.{module_name} -``` -""" - - -PROJECT_ENTRY_TEMPLATE = """Project("{{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}}") = "OCI.DotNetSDK.{module_name}", "{module_name}\\OCI.DotNetSDK.{module_name}.csproj", "{{{guid}}}" -EndProject -""" - - -PROJECT_PLATEFORM_TEMPLATE = """ {{{guid}}}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {{{guid}}}.Debug|Any CPU.Build.0 = Debug|Any CPU - {{{guid}}}.Debug|x64.ActiveCfg = Debug|Any CPU - {{{guid}}}.Debug|x64.Build.0 = Debug|Any CPU - {{{guid}}}.Debug|x86.ActiveCfg = Debug|Any CPU - {{{guid}}}.Debug|x86.Build.0 = Debug|Any CPU - {{{guid}}}.Release|Any CPU.ActiveCfg = Release|Any CPU - {{{guid}}}.Release|Any CPU.Build.0 = Release|Any CPU - {{{guid}}}.Release|x64.ActiveCfg = Release|Any CPU - {{{guid}}}.Release|x64.Build.0 = Release|Any CPU - {{{guid}}}.Release|x86.ActiveCfg = Release|Any CPU - {{{guid}}}.Release|x86.Build.0 = Release|Any CPU -""" - - -class DotNetSDKSpecUpdater(SpecUpdaterBase): - # Override the spec name that is defined in the service pom.xml files as the testing service - # references folders without snake-case (based on the .NET sdk formatting). - def format_module_name_for_template(self, module_name): - return module_name.replace("-", "").replace("_", "").replace(" ","") - - -def add_or_update_spec(artifact_id=None, group_id=None, spec_name=None, relative_spec_path=None, endpoint=None, subdomain=None, version=None, spec_generation_type=None, generate_waiters=True, generate_paginators=True, pom_location=None, github_whitelist_location=None): - sdk_dir = os.path.dirname(pom_location) - - found = find_existing_module(sdk_dir, artifact_id) - - ignored = [] - previous = {} - changed = [] - if found: - print('Artifact {} already exists in pom.xml. 
Updating specified fields...'.format(artifact_id)) - - previous = gather_settings(sdk_dir, found) - - if version: - newer_version = update_version_of_existing_spec(sdk_dir, found, version) - if newer_version: - print('The version was not updated to {}, because it was already at {}.'.format(version, newer_version)) - - if relative_spec_path: - update_relative_spec_path_of_existing_spec(sdk_dir, found, relative_spec_path) - - was_ignored = False - if endpoint: - subdomain = endpoint - subdomain = re.sub('^.*://', '', subdomain) # remove protocol and '://' - subdomain = re.sub(r'\.{domain}.*$', '', subdomain) # remove '.{domain}' and everything after it - was_changed, was_ignored = update_subdomain(sdk_dir, found, subdomain) - elif subdomain: - was_changed, was_ignored = update_subdomain(sdk_dir, found, subdomain) - - if was_ignored: - ignored.append('subdomain') - - if spec_name: - ignored.append('spec_name') - if group_id: - ignored.append('group_id') - - current = gather_settings(sdk_dir, found) - changed = compute_changed_settings(previous, current) - else: - missing_params = [] - if not spec_name: - missing_params.append('--spec-name') - - if not version: - missing_params.append('--version') - - if not group_id: - missing_params.append('--group-id') - - if not artifact_id: - missing_params.append('--artifact-id') - - if not relative_spec_path: - missing_params.append('--relative-spec-path') - - # not checking 'endpoint' anymore; can be specified either in ticket - # or in spec using 'x-obmcs-endpoint-template'. If neither is specified, - # this fails in the generator - - if missing_params: - raise MissingParameter('The following options must be specified for a new spec: {}'.format(', '.join(missing_params))) - - if endpoint and subdomain: - raise UsageError('Cannot specify both --endpoint and --subdomain') - - if endpoint: - print('Ignoring endpoint setting for new services; new services have to get it from the spec') - ignored.append('endpoint') - - if subdomain: - print('Ignoring subdomain setting for new services; new services have to get it from the spec') - ignored.append('subdomain') - - if spec_generation_type: - print('Note: --spec-generation-type is ignored for the .NET SDK, since it is set in the codegen/pom.xml file for all services') - - service_name = spec_name.lower().replace('_', '') # service_name is "newservice" - module_name = service_name.title() # module_name is "Newservice" - service_friendly_name = string.capwords(spec_name.replace('_', ' ')) # service_friendly_name is "New Service" - - # Find OCI DotNet SDK Version - pom = parse_pom(pom_location) - xpath = ".//ns:version" - property = pom.findall(xpath, ns)[0] - sdk_version = property.text - - guid = str(uuid.uuid4()).upper() - print('Project GUID: {}'.format(guid)) - - print('Module {} does not exist in pom.xml. 
Adding it...'.format(module_name)) - generate_child_codegen_pom(sdk_dir, service_name, module_name, service_friendly_name, sdk_version, group_id, artifact_id, version, relative_spec_path, spec_generation_type, generate_waiters, generate_paginators) - add_child_codegen_module(sdk_dir, service_name) - generate_child_project_and_readme(sdk_dir, module_name, service_friendly_name, guid) - update_sln_file(sdk_dir, module_name, guid) - add_spec_module_to_github_whitelist(module_name, github_whitelist_location, '\n^{}/.*') - - print('Success!') - - return AddOrUpdateSpecResult( - updated=(not found) or changed != [], # not found means it's a new spec, or if it is an existing spec, changed needs to be non-empty - existing=found is not None, - ignored=ignored, - previous=previous, - changed=changed) - - -def find_existing_module(sdk_dir, artifact_id): - codegen_dir = os.path.join(sdk_dir, DOTNETSDK_CODEGEN_LOCATION) - pom_files = [y for x in os.walk(codegen_dir) for y in glob(os.path.join(x[0], 'pom.xml'))] - for ldr_path in pom_files: - pom = parse_pom(ldr_path) - xpath = ".//ns:properties//ns:codegen.artifactory.artifactId" - properties = pom.findall(xpath, ns) - if len(properties) > 0 and artifact_id == properties[0].text: - codegen_artifact_id = pom.findall("./ns:artifactId", ns)[0].text - m = re.match("oci-dotnet-sdk-([^-]*)-codegen", codegen_artifact_id) - if m: - return m.group(1) - - return None - - -def gather_settings(sdk_dir, spec_name): - settings = {} - - file_name = os.path.join(sdk_dir, DOTNETSDK_CODEGEN_LOCATION, spec_name, "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.artifactory.groupId" - property = pom.findall(xpath, ns)[0] - settings["group_id"] = property.text - - xpath = ".//ns:properties//ns:codegen.artifactory.artifactId" - property = pom.findall(xpath, ns)[0] - settings["artifact_id"] = property.text - - xpath = ".//ns:properties//ns:codegen.artifactory.version" - property = pom.findall(xpath, ns)[0] - settings["version"] = property.text - - settings["module_name"] = spec_name - - xpath = ".//ns:properties//ns:codegen.spec.name" - property = pom.findall(xpath, ns)[0] - settings["relative_spec_path"] = property.text - - xpath = ".//ns:properties//ns:codegen.endpoint" - results = pom.findall(xpath, ns) - if results: - property = results[0] - settings["subdomain"] = property.text - - return settings - - -def update_version_of_existing_spec(sdk_dir, spec_name, version): - file_name = os.path.join(sdk_dir, DOTNETSDK_CODEGEN_LOCATION, spec_name, "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.artifactory.version" - property = pom.findall(xpath, ns)[0] - old_version = property.text - - if not is_version_increasing(old_version, version): - return old_version - - property.text = version - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - return None # the old version was lower - - -def update_relative_spec_path_of_existing_spec(sdk_dir, spec_name, relative_spec_path): - file_name = os.path.join(sdk_dir, DOTNETSDK_CODEGEN_LOCATION, spec_name, "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.spec.name" - property = pom.findall(xpath, ns)[0] - property.text = relative_spec_path - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -# Returns was_changed, was_ignored -def update_subdomain(sdk_dir, spec_name, subdomain): - file_name = os.path.join(sdk_dir, DOTNETSDK_CODEGEN_LOCATION, spec_name, "pom.xml") - pom = 
parse_pom(file_name)
-
-    xpath = ".//ns:properties//ns:codegen.endpoint"
-    results = pom.findall(xpath, ns)
-    if results:
-        property = results[0]
-        was_changed = property.text != subdomain
-        property.text = subdomain
-        indent(pom.getroot())
-        pom.write(file_name, encoding="UTF-8", xml_declaration=True)
-        return was_changed, False
-    else:
-        print('Ignored update to subdomain/endpoint -- service had no setting for this in pom.xml file and is getting that information from spec')
-        return False, True
-
-
-def generate_child_codegen_pom(sdk_dir, service_name, module_name, service_friendly_name, sdk_version, group_id, artifact_id, artifact_version, spec_path_relative_to_jar, spec_generation_type, generate_waiters, generate_paginators):
-    dir_name = os.path.join(sdk_dir, DOTNETSDK_CODEGEN_LOCATION, service_name)
-    os.mkdir(dir_name)
-    file_name = os.path.join(dir_name, "pom.xml")
-
-    content = DOTNETSDK_POM_FILE_TEMPLATE.format(
-        group_id=group_id,
-        artifact_id=artifact_id,
-        artifact_version=artifact_version,
-        sdk_version=sdk_version,
-        service_name=service_name,
-        service_friendly_name=service_friendly_name,
-        module_name=module_name,
-        spec_path_relative_to_jar=spec_path_relative_to_jar,
-        spec_generation_type=spec_generation_type,
-        generate_waiters=str(generate_waiters).lower(),
-        generate_paginators=str(generate_paginators).lower())
-
-    root = ET.fromstring(content, parser=ET.XMLParser(target=CommentedTreeBuilder()))
-    pom = ET.ElementTree(element=root)
-    write_xml(file_name, pom)
-
-
-def add_child_codegen_module(sdk_dir, service_name):
-    file_name = os.path.join(sdk_dir, DOTNETSDK_CODEGEN_LOCATION, "pom.xml")
-    pom = parse_pom(file_name)
-
-    content = MODULE_TEMPLATE.format(name=service_name)
-    module_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder()))
-
-    # find modules
-    modules = pom.findall("./ns:modules", ns)[0]
-    modules.append(module_element)
-
-    indent(pom.getroot())
-    pom.write(file_name, encoding="UTF-8", xml_declaration=True)
-
-
-def generate_child_project_and_readme(sdk_dir, module_name, service_friendly_name, guid):
-    dir_name = os.path.join(sdk_dir, module_name)
-    os.mkdir(dir_name)
-    file_name = os.path.join(dir_name, "OCI.DotNetSDK." + module_name + ".csproj")
-    readme = os.path.join(dir_name, "README.md")
-
-    content = MODULE_PROJECT_FILE_TEMPLATE.format(
-        guid=guid,
-        module_name=module_name,
-        service_friendly_name=service_friendly_name)
-
-    readme_content = MODULE_README_FORMAT.format(service_friendly_name=service_friendly_name,
-                                                 module_name=module_name)
-
-    # use context managers so the files are flushed and closed deterministically
-    with open(file_name, "w") as project_file:
-        project_file.write(content)
-    with open(readme, "w") as readme_file:
-        readme_file.write(readme_content)
-
-
-def update_sln_file(sdk_dir, module_name, guid):
-    project_added = False
-    project_platform_added = False
-
-    sln_file = os.path.join(sdk_dir, "oci-dotnet-sdk.sln")
-    with open(sln_file) as sln_file_read:
-        lines = sln_file_read.readlines()
-
-    locs = [i for i, val in enumerate(lines) if 'EndProject' in val]
-
-    # Ignore the case where a solution file does not contain any existing projects. This should never happen since we already have projects.
-    if len(locs) > 1:
-        print("Will add project")
-        new_project_entry = PROJECT_ENTRY_TEMPLATE.format(guid=guid, module_name=module_name)
-        lines.insert(locs[-1] + 1, new_project_entry)
-        project_added = True
-
-        project_platform_section_found = False
-        # First find the start of the ProjectConfigurationPlatforms section
-        for loc, line in enumerate(lines):
-            if 'ProjectConfigurationPlatforms' in line:
-                project_platform_section_found = True
-                continue
-            # Find the matching closing line for the ProjectConfigurationPlatforms section
-            if project_platform_section_found and 'EndGlobalSection' in line:
-                print("Will update platform")
-                new_project_platform = PROJECT_PLATEFORM_TEMPLATE.format(guid=guid)
-                lines.insert(loc, new_project_platform)
-                project_platform_added = True
-                break
-        if not project_platform_section_found:
-            print("Cannot find project platform section.")
-    else:
-        print("Cannot find existing projects.")
-
-    if project_added and project_platform_added:
-        with open(sln_file, 'w') as sln_file_write:
-            sln_file_write.write(''.join(lines))
-    else:
-        print("Unable to update sln file.")
-
-
-@click.command()
-@click.option('--artifact-id', required=True, help='The artifact id for the spec artifact (e.g. coreservices-api-spec)')
-@click.option('--group-id', help='The group id for the spec artifact (e.g. com.oracle.pic.commons)')
-@click.option('--spec-name', help='The name of the spec. This will be used (e.g. core, identity, objectstorage). This is also used as the module name (\'bmc-servicename\') and base package (\'com.oracle.bmc.servicename\'). Underscores are removed, everything is lower-cased.')
-@click.option('--relative-spec-path', help='The relative path of the spec within the artifact (e.g. coreservices-api-spec-20160918-external.yaml)')
-@click.option('--endpoint', help='The base endpoint for the service (e.g. https://iaas.{domain}/20160918)')
-@click.option('--subdomain', help='The subdomain for the service (e.g. if the endpoint is https://iaas.{domain}/20160918, the subdomain is "iaas")')
-@click.option('--version', help='The version of the spec artifact (e.g.
0.0.1-SNAPSHOT') -@click.option('--spec-generation-type', help='The generation type: PUBLIC or PREVIEW') -@click.option('--generate-waiters/--no-generate-waiters', default=True, help='Generate waiters') -@click.option('--generate-paginators/--no-generate-paginators', default=True, help='Generate paginators') -@click.option('--pom-location', type=click.Path(exists=True), default=DEFAULT_PARENT_POM_LOCATION, help='Location of the pom.xml file in the root directory of the OCI .NET SDK') -@click.option('--github-whitelist-location', type=click.Path(exists=True), default=DEFAULT_GITHUB_WHITELIST_LOCATION, help='Location of the github.whitelist file to update') -def add_or_update_spec_command(artifact_id, group_id, spec_name, relative_spec_path, endpoint, subdomain, version, spec_generation_type, generate_waiters, generate_paginators, pom_location, github_whitelist_location): - print(add_or_update_spec(artifact_id, group_id, spec_name, relative_spec_path, endpoint, subdomain, version, spec_generation_type, generate_waiters, generate_paginators, pom_location, github_whitelist_location)) - - -if __name__ == '__main__': - add_or_update_spec_command() diff --git a/scripts/auto_gen_utils/add_or_update_scripts/go_sdk_add_or_update_spec.py b/scripts/auto_gen_utils/add_or_update_scripts/go_sdk_add_or_update_spec.py deleted file mode 100644 index 9ced5233a6..0000000000 --- a/scripts/auto_gen_utils/add_or_update_scripts/go_sdk_add_or_update_spec.py +++ /dev/null @@ -1,173 +0,0 @@ -#!/usr/bin/env python - -# -# This script manipulates pom.xml to either add new specs or update the versions of existing specs. -# - - -import click -import re - -from .single_pom_file_add_or_update_spec import single_pom_file_add_or_update_spec -from .single_pom_file_add_or_update_spec import DEFAULT_POM_LOCATION -from .single_pom_file_add_or_update_spec import DEFAULT_GITHUB_WHITELIST_LOCATION -from .single_pom_file_add_or_update_spec import ns -from .single_pom_file_add_or_update_spec import check_args_for_new_service as base_check_args_for_new_service - - -DEFAULT_MAKE_FILE_LOCATION = "../Makefile" - - -GENERATE_EXECUTION_TEMPLATE = """ - - go-public-sdk-{artifact_id} - compile - - generate - - - oracle-go-sdk - ${{preprocessed-temp-dir}}/{artifact_id}/${{{artifact_id}-spec-file}} - ${{env.GOPATH}}/src/${{fullyQualifiedProjectName}} - {spec_name} - ${{generationType}} - - {spec_name} - ${{fullyQualifiedProjectName}} - {regional_non_regional_service_overrides} - - ${{feature-id-file}} - ${{feature-id-dir}} - ${{spec-temp-dir}}/serviceSlugNameMapping.yaml - - -""" - -CLEAN_ELEMENT_TEMPLATE = """ - - lib/oci/{spec_name} - - **/* - - -""" - -GITHUB_WHITELIST_TEMPLATE = '\n^{}/' - - -# Returns was_changed, was_ignored -def update_endpoint(pom, artifact_id, endpoint): - subdomain = endpoint - subdomain = re.sub('^.*://', '', subdomain) # remove protocol and '://' - subdomain = re.sub(r'\.{domain}.*$', '', subdomain) # remove '.{domain}' and everything after it - - results = pom.findall(".//ns:plugin[ns:artifactId='bmc-sdk-swagger-maven-plugin']/ns:executions/ns:execution[ns:id='go-public-sdk-{artifact_id}']//ns:additionalProperties/ns:serviceHostName".format(artifact_id=artifact_id), ns) - if results: - endpoint_node = results[0] - - if endpoint_node.text != subdomain: - endpoint_node.text = subdomain - return True, False - return False, False - - print('Ignored update to subdomain/endpoint -- service had no setting for this in pom.xml file and is getting that information from spec') - return False, True - - -def 
check_args_for_new_service(locals): - base_check_args_for_new_service(locals) - # not checking 'endpoint' anymore; can be specified either in ticket - # or in spec using 'x-obmcs-endpoint-template'. If neither is specified, - # this fails in the generator - - -def add_spec_name_to_make_file(spec_name, make_file_location): - specNameToken = '##SPECNAME##' - with open(make_file_location) as f: - newText = f.read().replace(specNameToken, "{} {}".format(spec_name, specNameToken)) - - with open(make_file_location, "w") as f: - f.write(newText) - - -def goify_specname(name): - return name.replace('_', '').lower() - - -def gather_settings(pom, artifact_id): - settings = {} - - xpath = ".//ns:dependencyManagement//ns:dependency[ns:artifactId='{artifact_id}']".format(artifact_id=artifact_id) - dependency = pom.findall(xpath, ns)[0] - settings["group_id"] = dependency.find('./ns:groupId', ns).text - settings["artifact_id"] = artifact_id - settings["version"] = dependency.find('./ns:version', ns).text - - xpath = ".//ns:properties/ns:{artifact_id}-spec-file".format(artifact_id=artifact_id) - spec_file_node = pom.findall(xpath, ns)[0] - settings["relative_spec_path"] = spec_file_node.text - - results = pom.findall(".//ns:plugin[ns:artifactId='bmc-sdk-swagger-maven-plugin']/ns:executions/ns:execution[ns:id='go-public-sdk-{artifact_id}']//ns:additionalProperties/ns:serviceHostName".format(artifact_id=artifact_id), ns) - if results: - settings["subdomain"] = results[0].text - - settings["spec_name"] = pom.findall(".//ns:plugin[ns:artifactId='bmc-sdk-swagger-maven-plugin']/ns:executions/ns:execution[ns:id='go-public-sdk-{artifact_id}']//ns:additionalProperties/ns:specName".format(artifact_id=artifact_id), ns)[0].text - - return settings - - -def add_or_update_spec(artifact_id=None, group_id=None, spec_name=None, relative_spec_path=None, endpoint=None, subdomain=None, version=None, spec_generation_type=None, regional_sub_service_overrides=None, non_regional_sub_service_overrides=None, pom_location=None, github_whitelist_location=None, makefile_location=None): - if not spec_generation_type: - spec_generation_type = 'PUBLIC' - - if endpoint and not subdomain: - subdomain = endpoint - subdomain = re.sub('^.*://', '', subdomain) # remove protocol and '://' - subdomain = re.sub(r'\.{domain}.*$', '', subdomain) # remove '.{domain}' and everything after it - - # force format of spec_name by removing underscore and lower case - if spec_name: - spec_name = goify_specname(spec_name) - - result = single_pom_file_add_or_update_spec(artifact_id, group_id, spec_name, relative_spec_path, None, subdomain, version, spec_generation_type, - regional_sub_service_overrides, non_regional_sub_service_overrides, pom_location, github_whitelist_location, GITHUB_WHITELIST_TEMPLATE, - GENERATE_EXECUTION_TEMPLATE, CLEAN_ELEMENT_TEMPLATE, update_endpoint, check_args_for_new_service, gather_settings) - - if result.updated and not result.existing: - # For new spec only - add_spec_name_to_make_file(spec_name, makefile_location) - - return result - - -@click.command() -@click.option('--artifact-id', help='The artifact id for the spec artifact (e.g. coreservices-api-spec') -@click.option('--group-id', help='The group id for the spec artifact (e.g. com.oracle.pic.commons)') -@click.option('--spec-name', help='The name of the spec. This will be used (e.g. core, identity, object_storage). This is also used as the module name.') -@click.option('--relative-spec-path', help='The relative path of the spec within the artifact (e.g. 
coreservices-api-spec-20160918-external.yaml)')
-@click.option('--endpoint', help='The base endpoint for the service (e.g. https://iaas.{domain}/20160918)')
-@click.option('--subdomain', help='The subdomain for the service (e.g. if the endpoint is https://iaas.{domain}/20160918), the subdomain is "iaas"')
-@click.option('--version', help='The version of the spec artifact (e.g. 0.0.1-SNAPSHOT)')
-@click.option('--spec-generation-type', help='The generation type: PUBLIC or PREVIEW')
-@click.option('--regional-sub-service-overrides', multiple=True, help="""For specs that contain multiple services
-(because there are operations with different tags in the spec), which of those services should be considered regional.
-Services are considered as regional by default.
-
-This should be the snake_cased name of the tag/service. For example kms_provisioning instead of kmsProvisioning.
-
-This parameter can be provided multiple times""")
-@click.option('--non-regional-sub-service-overrides', multiple=True, help="""For specs that contain multiple services
-(because there are operations with different tags in the spec), which of those services should be considered non-regional.
-
-This should be the snake_cased name of the tag/service. For example kms_provisioning instead of kmsProvisioning.
-
-This parameter can be provided multiple times""")
-@click.option('--pom-location', type=click.Path(exists=True), default=DEFAULT_POM_LOCATION, help='Location of the pom.xml file to update')
-@click.option('--github-whitelist-location', type=click.Path(exists=True), default=DEFAULT_GITHUB_WHITELIST_LOCATION, help='Location of the github.whitelist file to update')
-@click.option('--makefile-location', type=click.Path(exists=True), default=DEFAULT_MAKE_FILE_LOCATION, help='Location of the Makefile to update')
-def add_or_update_spec_command(artifact_id, group_id, spec_name, relative_spec_path, endpoint, subdomain, version, spec_generation_type, regional_sub_service_overrides, non_regional_sub_service_overrides, pom_location, github_whitelist_location, makefile_location):
-    print(add_or_update_spec(artifact_id, group_id, spec_name, relative_spec_path, endpoint, subdomain, version, spec_generation_type, regional_sub_service_overrides, non_regional_sub_service_overrides, pom_location, github_whitelist_location, makefile_location))
-
-
-if __name__ == '__main__':
-    add_or_update_spec_command()
diff --git a/scripts/auto_gen_utils/add_or_update_scripts/java_sdk_add_or_update_spec.py b/scripts/auto_gen_utils/add_or_update_scripts/java_sdk_add_or_update_spec.py
deleted file mode 100644
index 26fc75831f..0000000000
--- a/scripts/auto_gen_utils/add_or_update_scripts/java_sdk_add_or_update_spec.py
+++ /dev/null
@@ -1,721 +0,0 @@
-#!/usr/bin/env python
-
-#
-# This script manipulates pom.xml to either add new specs or update the versions of existing specs.
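-#
-# A hypothetical invocation, for illustration only (the flag names come from
-# the click options at the bottom of this file, and the example values from
-# their help texts; the exact entry point is an assumption, since the package
-# uses relative imports and is normally driven by the surrounding tooling):
-#
-#   python -m add_or_update_scripts.java_sdk_add_or_update_spec \
-#       --artifact-id coreservices-api-spec \
-#       --group-id com.oracle.pic.commons \
-#       --spec-name core \
-#       --relative-spec-path coreservices-api-spec-20160918-external.yaml \
-#       --version 0.0.1-SNAPSHOT \
-#       --pom-location pom.xml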
-# - -import datetime -import xml.etree.ElementTree as ET -import re -import click -import os -import parse -import string -from click.exceptions import UsageError, MissingParameter -from glob import glob - -from shared.version_utils import is_version_increasing -from .add_or_update_spec_utils import AddOrUpdateSpecResult, compute_changed_settings, indent -from .add_or_update_spec_utils import add_spec_module_to_github_whitelist, write_xml -from .add_or_update_spec_utils import CommentedTreeBuilder -from .add_or_update_spec_utils import parse_pom - - -DEFAULT_PARENT_POM_LOCATION = "pom.xml" -DEFAULT_GITHUB_WHITELIST_LOCATION = "github.whitelist" - -CODEGEN_POM_FILE_ARTIFACTORY_VERSION_COMMENT_TEMPLATE = "DEXREQ ticket requested version '{}', but version was already '{}' ('{}')." - -CODEGEN_POM_FILE_TEMPLATE = """ - - 4.0.0 - - - com.oracle.oci.sdk - oci-java-sdk-codegen - {sdk_version} - ../pom.xml - - - oci-java-sdk-{module_name}-codegen - Oracle Cloud Infrastructure SDK - {service_friendly_name} Codegen - This project contains the code generation spec and configuration for the {service_friendly_name} Service - https://docs.cloud.oracle.com/Content/API/SDKDocs/javasdk.htm - - - {group_id} - {artifact_id} - {artifact_version} - {spec_path_relative_to_jar} - - {module_name} - {generate_waiters} - {generate_paginators} - - - - - codegen - - - - org.commonjava.maven.plugins - directory-maven-plugin - - - org.codehaus.mojo - build-helper-maven-plugin - - - com.oracle.oci.sdk.utilities - dex-get-spec-artifact-plugin - - - com.oracle.oci.sdk.utilities - spec-conditionals-preprocessor-plugin - - - com.oracle.bmc.sdk - bmc-sdk-swagger-maven-plugin - - - com.mycila - license-maven-plugin - - - com.theoryinpractise - googleformatter-maven-plugin - - - maven-antrun-plugin - - - - - - specs - - - - org.apache.maven.plugins - maven-assembly-plugin - - true - - - - - - - - -""" - -MODULE_POM_FILE_TEMPLATE = """ - - 4.0.0 - - - com.oracle.oci.sdk - oci-java-sdk - {sdk_version} - ../pom.xml - - - oci-java-sdk-{module_name} - Oracle Cloud Infrastructure SDK - {service_friendly_name} - This project contains the SDK used for Oracle Cloud Infrastructure {service_friendly_name} - https://docs.cloud.oracle.com/Content/API/SDKDocs/javasdk.htm - - - - - sign-individual - - - - com.oracle.ccss - codesign-maven-plugin - - - - - - - - - org.codehaus.mojo - clirr-maven-plugin - 2.9.1-oracle-SNAPSHOT - - - clirr - verify - - check - - - ${{clirr.fail.on.error}} - ${{project.build.directory}}/clirr-{module_name}.txt - true - - .*preview.* - 1.23.4.*stream.* - .*experimental.* - - ${{clirr.comparison.version}} - - - - - **/internal/**/* - * - * - - - - com/oracle/bmc/{module_name}/*Async* - * - * - - - - com/oracle/bmc/{module_name}/*Client - * - * - - - - com/oracle/bmc/{module_name}/* - * - * - 7012 - - - - com/oracle/bmc/{module_name}/*/* - * - * - 7004 - - - - - com/oracle/bmc/{module_name}/*/* - * - * - 7005 - * - - - - com/oracle/bmc/{module_name}/*/*Builder - * - * - - - - com/oracle/bmc/{module_name}/*/* - boolean canEqual(java.lang.Object) - 7002 - - - - - - - - - org.apache.maven.plugins - maven-checkstyle-plugin - 3.1.1 - - ${{checkstyle-rules-file}} - UTF-8 - true - true - false - - - - verify - - check - - - - - - - - - - - com.oracle.oci.sdk - oci-java-sdk-common - {sdk_version} - - - - -""" - -MODULE_TEMPLATE = "{name}" - -BOM_DEPENDENCY_TEMPLATE = """ - - com.oracle.oci.sdk - oci-java-sdk-{module_name} - {sdk_version} - false - -""" - -FULL_JAVADOC_SOURCEPATH_TEMPLATE = os.linesep + " 
../bmc-{module_name}/src/main/java;" - -FULL_JAVADOC_GROUP_TEMPLATE = """ - - Oracle Cloud Infrastructure {service_friendly_name} - com.oracle.bmc.{module_name}* - -""" - -FULL_DEPENDENCY_TEMPLATE = """ - - com.oracle.oci.sdk - oci-java-sdk-{module_name} - -""" - -SHADED_DEPENDENCY_TEMPLATE = """ - - com.oracle.oci.sdk - oci-java-sdk-{module_name} - provided - -""" - -INTEGTESTS_DEPENDENCY_TEMPLATE = """ - - com.oracle.oci.sdk - oci-java-sdk-{module_name}{scope_text} - -""" - -ns = {"ns":"http://maven.apache.org/POM/4.0.0"} - -# allow default namespace for output, dont print ns0: prefixes everywhere -ET.register_namespace('',"http://maven.apache.org/POM/4.0.0") - - -def generate_child_codegen_pom(sdk_dir, module_name, service_friendly_name, sdk_version, group_id, artifact_id, artifact_version, spec_path_relative_to_jar, spec_generation_type, generate_waiters, generate_paginators): - dir_name = os.path.join(sdk_dir, "bmc-codegen", "bmc-" + module_name + "-codegen") - os.mkdir(dir_name) - file_name = os.path.join(dir_name, "pom.xml") - - content = CODEGEN_POM_FILE_TEMPLATE.format( - group_id=group_id, - artifact_id=artifact_id, - artifact_version=artifact_version, - sdk_version=sdk_version, - service_friendly_name=service_friendly_name, - module_name=module_name, - spec_path_relative_to_jar=spec_path_relative_to_jar, - spec_generation_type=spec_generation_type, - generate_waiters=str(generate_waiters).lower(), - generate_paginators=str(generate_paginators).lower()) - - root = ET.fromstring(content, parser=ET.XMLParser(target=CommentedTreeBuilder())) - pom = ET.ElementTree(element=root) - write_xml(file_name, pom) - - -def add_child_codegen_module(sdk_dir, module_name): - file_name = os.path.join(sdk_dir, "bmc-codegen", "pom.xml") - pom = parse_pom(file_name) - - content = MODULE_TEMPLATE.format(name="bmc-" + module_name + "-codegen") - module_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - - # find modules - modules = pom.findall("./ns:modules", ns)[0] - modules.append(module_element) - - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -def generate_child_module(sdk_dir, module_name, service_friendly_name, sdk_version): - dir_name = os.path.join(sdk_dir, "bmc-" + module_name) - os.mkdir(dir_name) - file_name = os.path.join(dir_name, "pom.xml") - - content = MODULE_POM_FILE_TEMPLATE.format( - sdk_version=sdk_version, - service_friendly_name=service_friendly_name, - module_name=module_name) - - root = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - pom = ET.ElementTree(element=root) - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - os.makedirs(os.path.join(dir_name, "src", "main", "java")) - - -def add_child_module(parent_pom_location, module_name): - pom = parse_pom(parent_pom_location) - - content = MODULE_TEMPLATE.format(name="bmc-" + module_name) - module_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - - # find - modules = pom.findall("./ns:modules", ns)[0] - - # we want to insert this just before the first "START: Delete Public GitHub" - index_to_insert = 0 - for index, child in enumerate(list(modules)): - index_to_insert = index - if "START: Delete Public GitHub" in child.text: - break - - modules.insert(index_to_insert, module_element) - - indent(pom.getroot()) - pom.write(parent_pom_location, encoding="UTF-8", xml_declaration=True) - - -def add_bom_dependency(sdk_dir, module_name, sdk_version): - file_name = 
os.path.join(sdk_dir, "bmc-bom", "pom.xml") - pom = parse_pom(file_name) - - content = BOM_DEPENDENCY_TEMPLATE.format(module_name=module_name, sdk_version=sdk_version) - dependency_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - - # find dependencies - dependencies = pom.findall("./ns:dependencyManagement/ns:dependencies", ns)[0] - dependencies.append(dependency_element) - - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -def add_full_dependency(sdk_dir, module_name, service_friendly_name, sdk_version): - file_name = os.path.join(sdk_dir, "bmc-full", "pom.xml") - pom = parse_pom(file_name) - - content = FULL_DEPENDENCY_TEMPLATE.format(module_name=module_name) - dependency_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - - # find dependencies - dependencies = pom.findall("./ns:dependencies", ns)[0] - dependencies.append(dependency_element) - - # find Javadoc sourcepath - sourcepath_node = pom.findall("./ns:build/ns:plugins/ns:plugin[ns:artifactId='maven-javadoc-plugin']/ns:configuration/ns:sourcepath", ns)[0] - sourcepath_node.text = sourcepath_node.text + FULL_JAVADOC_SOURCEPATH_TEMPLATE.format(module_name=module_name) - - # find Javadoc groups - groups = pom.findall("./ns:build/ns:plugins/ns:plugin[ns:artifactId='maven-javadoc-plugin']/ns:configuration/ns:groups", ns)[0] - - content = FULL_JAVADOC_GROUP_TEMPLATE.format(module_name=module_name, service_friendly_name=service_friendly_name) - group_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - groups.append(group_element) - - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -def add_dependency_to_other_pom_file(file_name, module_name, scope): - pom = parse_pom(file_name) - - content = INTEGTESTS_DEPENDENCY_TEMPLATE.format( - module_name=module_name, - scope_text="\n {}".format(scope) if scope else "") - dependency_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - - # find dependencies - dependencies = pom.findall("./ns:dependencies", ns)[0] - dependencies.append(dependency_element) - - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -def update_version_of_existing_spec(sdk_dir, spec_name, version): - file_name = os.path.join(sdk_dir, "bmc-codegen", "bmc-" + spec_name + "-codegen", "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.artifactory.version" - property = pom.findall(xpath, ns)[0] - old_version = property.text - - version_increased = is_version_increasing(old_version, version) - if version_increased: - property.text = version - - # remove old comments and find the place to insert the version comment, if necessary - parent = pom.findall(".//ns:properties", ns)[0] - comments_to_remove = [] - comment_insertion_index = 0 - for index, el in enumerate(parent.iter()): - if el.tag is ET.Comment and el.text: - result = parse.search(CODEGEN_POM_FILE_ARTIFACTORY_VERSION_COMMENT_TEMPLATE, el.text) - if result: - # matched the formats - comments_to_remove.append(el) - else: - if el.tag.endswith("codegen.artifactory.version"): - if index > 0: - comment_insertion_index = index - len(comments_to_remove) - 1 - - for el in comments_to_remove: - parent.remove(el) - - if not version_increased: - # insert version comment - comment = ET.Comment(CODEGEN_POM_FILE_ARTIFACTORY_VERSION_COMMENT_TEMPLATE.format(version, old_version, datetime.datetime.now())) - 
parent.insert(comment_insertion_index, comment) - - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - if not version_increased: - return old_version - - return None # the old version was lower - - -def update_relative_spec_path_of_existing_spec(sdk_dir, spec_name, relative_spec_path): - file_name = os.path.join(sdk_dir, "bmc-codegen", "bmc-" + spec_name + "-codegen", "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.spec.name" - property = pom.findall(xpath, ns)[0] - property.text = relative_spec_path - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -# Returns was_changed, was_ignored -def update_subdomain(sdk_dir, spec_name, subdomain): - file_name = os.path.join(sdk_dir, "bmc-codegen", "bmc-" + spec_name + "-codegen", "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.endpoint" - results = pom.findall(xpath, ns) - if results: - property = results[0] - was_changed = property.text != subdomain - property.text = subdomain - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - return was_changed, False - else: - print('Ignored update to subdomain/endpoint -- service had no setting for this in pom.xml file and is getting that information from spec') - return False, True - - -def add_dependency_for_shaded_fatjar(file_name, module_name, template): - pom = parse_pom(file_name) - - content = template.format(module_name=module_name) - dependency_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - - # find - dependencies = pom.findall("./ns:dependencies", ns)[0] - - # we want to insert this just before the first "START: Delete Public GitHub" - index_to_insert = 0 - for index, child in enumerate(list(dependencies)): - index_to_insert = index - if "END OF OCI SDK DEPENDENCIES" in child.text: - break - - dependencies.insert(index_to_insert, dependency_element) - - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -def find_existing_module(sdk_dir, artifact_id): - codegen_dir = os.path.join(sdk_dir, "bmc-codegen") - pom_files = [y for x in os.walk(codegen_dir) for y in glob(os.path.join(x[0], 'pom.xml'))] - for ldr_path in pom_files: - pom = parse_pom(ldr_path) - xpath = ".//ns:properties//ns:codegen.artifactory.artifactId" - properties = pom.findall(xpath, ns) - if len(properties) > 0 and artifact_id == properties[0].text: - codegen_artifact_id = pom.findall("./ns:artifactId", ns)[0].text - m = re.match("oci-java-sdk-([^-]*)-codegen", codegen_artifact_id) - if m: - return m.group(1) - - return None - - -def gather_settings(sdk_dir, spec_name): - settings = {} - - file_name = os.path.join(sdk_dir, "bmc-codegen", "bmc-" + spec_name + "-codegen", "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.artifactory.groupId" - property = pom.findall(xpath, ns)[0] - settings["group_id"] = property.text - - xpath = ".//ns:properties//ns:codegen.artifactory.artifactId" - property = pom.findall(xpath, ns)[0] - settings["artifact_id"] = property.text - - xpath = ".//ns:properties//ns:codegen.artifactory.version" - property = pom.findall(xpath, ns)[0] - settings["version"] = property.text - - settings["module_name"] = spec_name - - xpath = ".//ns:properties//ns:codegen.spec.name" - property = pom.findall(xpath, ns)[0] - settings["relative_spec_path"] = property.text - - xpath = ".//ns:properties//ns:codegen.endpoint" - results = pom.findall(xpath, 
ns) - if results: - property = results[0] - settings["subdomain"] = property.text - - return settings - - -def add_or_update_spec(artifact_id=None, group_id=None, spec_name=None, relative_spec_path=None, endpoint=None, subdomain=None, version=None, spec_generation_type=None, generate_waiters=True, generate_paginators=True, pom_location=None, github_whitelist_location=None): - sdk_dir = os.path.dirname(pom_location) - - found = find_existing_module(sdk_dir, artifact_id) - - ignored = [] - previous = {} - changed = [] - if found: - print('Artifact {} already exists in pom.xml. Updating specified fields...'.format(artifact_id)) - - previous = gather_settings(sdk_dir, found) - - if version: - newer_version = update_version_of_existing_spec(sdk_dir, found, version) - if newer_version: - print('The version was not updated to {}, because it was already at {}.'.format(version, newer_version)) - - if relative_spec_path: - update_relative_spec_path_of_existing_spec(sdk_dir, found, relative_spec_path) - - was_ignored = False - if endpoint: - subdomain = endpoint - subdomain = re.sub('^.*://', '', subdomain) # remove protocol and '://' - subdomain = re.sub(r'\.{domain}.*$', '', subdomain) # remove '.{domain}' and everything after it - was_changed, was_ignored = update_subdomain(sdk_dir, found, subdomain) - elif subdomain: - was_changed, was_ignored = update_subdomain(sdk_dir, found, subdomain) - - if was_ignored: - ignored.append('subdomain') - - if spec_name: - ignored.append('spec_name') - if group_id: - ignored.append('group_id') - - current = gather_settings(sdk_dir, found) - changed = compute_changed_settings(previous, current) - else: - missing_params = [] - if not spec_name: - missing_params.append('--spec-name') - - if not version: - missing_params.append('--version') - - if not group_id: - missing_params.append('--group-id') - - if not artifact_id: - missing_params.append('--artifact-id') - - if not relative_spec_path: - missing_params.append('--relative-spec-path') - - # not checking 'endpoint' anymore; can be specified either in ticket - # or in spec using 'x-obmcs-endpoint-template'. If neither is specified, - # this fails in the generator - - if missing_params: - raise MissingParameter('The following options must be specified for a new spec: {}'.format(', '.join(missing_params))) - - if endpoint and subdomain: - raise UsageError('Cannot specify both --endpoint and --subdomain') - - if endpoint: - print('Ignoring endpoint setting for new services; new services have to get it from the spec') - ignored.append('endpoint') - - if subdomain: - print('Ignoring subdomain setting for new services; new services have to get it from the spec') - ignored.append('subdomain') - - if spec_generation_type: - print('Note: --spec-generation-type is ignored for the Java SDK, since it is set in the bmc-codegen/pom.xml file for all modules') - - module_name = spec_name.lower().replace('_', '') # module_name is "newservice" - service_friendly_name = string.capwords(spec_name.replace('_', ' ')) # service_friendly_name is "New Service" - - # Find OCI Java SDK Version - pom = parse_pom(pom_location) - xpath = ".//ns:version" - property = pom.findall(xpath, ns)[0] - sdk_version = property.text - - print('Module {} does not exist in pom.xml. 
Adding it...'.format(module_name)) - generate_child_codegen_pom(sdk_dir, module_name, service_friendly_name, sdk_version, group_id, artifact_id, version, relative_spec_path, spec_generation_type, generate_waiters, generate_paginators) - add_child_codegen_module(sdk_dir, module_name) - generate_child_module(sdk_dir, module_name, service_friendly_name, sdk_version) - add_child_module(pom_location, module_name) - add_bom_dependency(sdk_dir, module_name, sdk_version) - add_full_dependency(sdk_dir, module_name, service_friendly_name, sdk_version) - add_dependency_to_other_pom_file(os.path.join(sdk_dir, "bmc-integtests", "pom.xml"), module_name, "test") - add_dependency_to_other_pom_file(os.path.join(sdk_dir, "bmc-smoketests", "pom.xml"), module_name, "test") - add_dependency_to_other_pom_file(os.path.join(sdk_dir, "bmc-examples", "pom.xml"), module_name, None) - add_dependency_for_shaded_fatjar(os.path.join(sdk_dir, "bmc-shaded", "bmc-shaded-internal-fatjar", "pom.xml"), module_name, FULL_DEPENDENCY_TEMPLATE) - add_dependency_for_shaded_fatjar(os.path.join(sdk_dir, "bmc-shaded", "bmc-shaded-full", "pom.xml"), module_name, SHADED_DEPENDENCY_TEMPLATE) - add_spec_module_to_github_whitelist(module_name, github_whitelist_location, '\n^bmc-{}/.*') - - print('Success!') - - return AddOrUpdateSpecResult( - updated=(not found) or changed != [], # not found means it's a new spec, or if it is an existing spec, changed needs to be non-empty - existing=found is not None, - ignored=ignored, - previous=previous, - changed=changed) - - -@click.command() -@click.option('--artifact-id', required=True, help='The artifact id for the spec artifact (e.g. coreservices-api-spec') -@click.option('--group-id', help='The group id for the spec artifact (e.g. com.oracle.pic.commons)') -@click.option('--spec-name', help='The name of the spec. This will be used (e.g. core, identity, objectstorage). This is also used as the module name (\'bmc-servicename\') and base package (\'com.oracle.bmc.servicename\'). Underscores are removed, everything is lower-cased.') -@click.option('--relative-spec-path', help='The relative path of the spec within the artifact (e.g. coreservices-api-spec-20160918-external.yaml)') -@click.option('--endpoint', help='The base endpoint for the service (e.g. https://iaas.{domain}/20160918)') -@click.option('--subdomain', help='The subdomain for the service (e.g. if the endpoint is https://iaas.{domain}/20160918), the subdomain is "iaas"') -@click.option('--version', help='The version of the spec artifact (e.g. 
0.0.1-SNAPSHOT') -@click.option('--spec-generation-type', help='The generation type: PUBLIC or PREVIEW') -@click.option('--generate-waiters/--no-generate-waiters', default=True, help='Generate waiters') -@click.option('--generate-paginators/--no-generate-paginators', default=True, help='Generate paginators') -@click.option('--pom-location', type=click.Path(exists=True), default=DEFAULT_PARENT_POM_LOCATION, help='Location of the pom.xml file in the root directory of the OCI Java SDK') -@click.option('--github-whitelist-location', type=click.Path(exists=True), default=DEFAULT_GITHUB_WHITELIST_LOCATION, help='Location of the github.whitelist file to update') -def add_or_update_spec_command(artifact_id, group_id, spec_name, relative_spec_path, endpoint, subdomain, version, spec_generation_type, generate_waiters, generate_paginators, pom_location, github_whitelist_location): - print(add_or_update_spec(artifact_id, group_id, spec_name, relative_spec_path, endpoint, subdomain, version, spec_generation_type, generate_waiters, generate_paginators, pom_location, github_whitelist_location)) - - -if __name__ == '__main__': - add_or_update_spec_command() diff --git a/scripts/auto_gen_utils/add_or_update_scripts/legacy_java_sdk_add_or_update_spec.py b/scripts/auto_gen_utils/add_or_update_scripts/legacy_java_sdk_add_or_update_spec.py deleted file mode 100644 index 75948b0b53..0000000000 --- a/scripts/auto_gen_utils/add_or_update_scripts/legacy_java_sdk_add_or_update_spec.py +++ /dev/null @@ -1,773 +0,0 @@ -#!/usr/bin/env python - -# -# This script manipulates pom.xml to either add new specs or update the versions of existing specs. -# - -import datetime -import xml.etree.ElementTree as ET -import re -import click -import os -import parse -import string -from click.exceptions import UsageError, MissingParameter -from glob import glob - -from shared.version_utils import is_version_increasing -from .add_or_update_spec_utils import AddOrUpdateSpecResult, compute_changed_settings, indent -from .add_or_update_spec_utils import add_spec_module_to_github_whitelist, write_xml -from .add_or_update_spec_utils import CommentedTreeBuilder -from .add_or_update_spec_utils import parse_pom - -DEFAULT_PARENT_POM_LOCATION = "pom.xml" -DEFAULT_GITHUB_WHITELIST_LOCATION = "github.whitelist" - -CODEGEN_POM_FILE_ARTIFACTORY_VERSION_COMMENT_TEMPLATE = "DEXREQ ticket requested version '{}', but version was already '{}' ('{}')." 
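-
-# Illustration only (not part of the original script): the comment template
-# above is used both to write a marker comment into the codegen pom.xml and,
-# in update_version_of_existing_spec below, to recognize such comments again
-# with parse.search so stale ones can be removed before a new one is inserted:
-#
-#   import parse
-#   text = CODEGEN_POM_FILE_ARTIFACTORY_VERSION_COMMENT_TEMPLATE.format(
-#       '1.2.3', '1.2.4', '2024-06-04 00:00:00')
-#   assert parse.search(CODEGEN_POM_FILE_ARTIFACTORY_VERSION_COMMENT_TEMPLATE, text) is not None
-#
-# Any pom.xml comment matching the template is treated as machine-generated.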
- -CODEGEN_POM_FILE_TEMPLATE = """ - - 4.0.0 - - - com.oracle.oci.sdk - oci-java-sdk-codegen - {sdk_version} - ../pom.xml - - - oci-java-sdk-{module_name}-codegen - Oracle Cloud Infrastructure SDK - {service_friendly_name} Codegen - This project contains the code generation spec and configuration for the {service_friendly_name} Service - https://docs.cloud.oracle.com/Content/API/SDKDocs/javasdk.htm - - - {group_id} - {artifact_id} - {artifact_version} - {spec_path_relative_to_jar} - - {module_name} - {generate_waiters} - {generate_paginators} - - - - - codegen - - - - org.commonjava.maven.plugins - directory-maven-plugin - - - org.codehaus.mojo - build-helper-maven-plugin - - - com.oracle.oci.sdk.utilities - dex-get-spec-artifact-plugin - - - com.oracle.oci.sdk.utilities - spec-conditionals-preprocessor-plugin - - - com.oracle.bmc.sdk - bmc-sdk-swagger-maven-plugin - - - com.mycila - license-maven-plugin - - - com.theoryinpractise - googleformatter-maven-plugin - - - maven-antrun-plugin - - - - - - ${{codegen.artifactory.groupId}} - ${{codegen.artifactory.artifactId}} - - - - - specs - - - - org.apache.maven.plugins - maven-assembly-plugin - - true - - - - - - - - - - ${{codegen.artifactory.groupId}} - ${{codegen.artifactory.artifactId}} - ${{codegen.artifactory.version}} - compile - - - - -""" - -MODULE_POM_FILE_TEMPLATE = """ - - 4.0.0 - - - com.oracle.oci.sdk - oci-java-sdk - {sdk_version} - ../pom.xml - - - oci-java-sdk-{module_name} - Oracle Cloud Infrastructure SDK - {service_friendly_name} - This project contains the SDK used for Oracle Cloud Infrastructure {service_friendly_name} - https://docs.cloud.oracle.com/Content/API/SDKDocs/javasdk.htm - - - - - - org.codehaus.mojo - clirr-maven-plugin - 2.9.1-oracle-SNAPSHOT - - - clirr - verify - - check - - - ${{clirr.fail.on.error}} - ${{project.build.directory}}/clirr-{module_name}.txt - true - - .*preview.* - 1.23.4.*stream.* - .*experimental.* - - ${{clirr.comparison.version}} - - - - - **/internal/**/* - * - * - - - - com/oracle/bmc/{module_name}/*Async* - * - * - - - - com/oracle/bmc/{module_name}/*Client - * - * - - - - com/oracle/bmc/{module_name}/* - * - * - 7012 - - - - com/oracle/bmc/{module_name}/*/* - * - * - 7004 - - - - - com/oracle/bmc/{module_name}/*/* - * - * - 7005 - * - - - - com/oracle/bmc/{module_name}/*/*Builder - * - * - - - - com/oracle/bmc/{module_name}/*/* - boolean canEqual(java.lang.Object) - 7002 - - - - - - - - - org.apache.maven.plugins - maven-checkstyle-plugin - 3.1.1 - - ${{checkstyle-rules-file}} - UTF-8 - true - true - false - - - - verify - - check - - - - - - - - - - - com.oracle.oci.sdk - oci-java-sdk-common - {sdk_version} - - - - -""" - -MODULE_TEMPLATE = "{name}" - -BOM_DEPENDENCY_TEMPLATE = """ - - com.oracle.oci.sdk - oci-java-sdk-{module_name} - {sdk_version} - false - -""" - -GRAALVM_METADATA_TEMPLATE = """ -package com.oracle.bmc.graalvm; - -@SdkClientPackages({\n%s\n}) -final class SdkAutomaticFeatureMetadata {} -""" - -FULL_JAVADOC_SOURCEPATH_TEMPLATE = os.linesep + " ../bmc-{module_name}/src/main/java;" - -FULL_JAVADOC_GROUP_TEMPLATE = """ - - Oracle Cloud Infrastructure {service_friendly_name} - com.oracle.bmc.{module_name}* - -""" - -FULL_DEPENDENCY_TEMPLATE = """ - - com.oracle.oci.sdk - oci-java-sdk-{module_name} - -""" - -SHADED_DEPENDENCY_TEMPLATE = """ - - com.oracle.oci.sdk - oci-java-sdk-{module_name} - provided - -""" - -INTEGTESTS_DEPENDENCY_TEMPLATE = """ - - com.oracle.oci.sdk - oci-java-sdk-{module_name}{scope_text} - -""" - -ns = {"ns": "http://maven.apache.org/POM/4.0.0"} 
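-
-# Illustration only (not part of the original script): the prefix map above is
-# what lets the ElementTree XPath queries in this file address elements that
-# live in the Maven POM default namespace:
-#
-#   import xml.etree.ElementTree as ET
-#   root = ET.fromstring(
-#       '<project xmlns="http://maven.apache.org/POM/4.0.0">'
-#       '<version>1.0.0</version>'
-#       '</project>')
-#   assert root.findall("./ns:version", ns)[0].text == '1.0.0'
-#
-# A plain findall("./version") would match nothing, because every element is
-# parsed under the {http://maven.apache.org/POM/4.0.0} namespace.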
- -# allow default namespace for output, dont print ns0: prefixes everywhere -ET.register_namespace('', "http://maven.apache.org/POM/4.0.0") - - -def generate_child_codegen_pom(sdk_dir, module_name, service_friendly_name, sdk_version, group_id, artifact_id, - artifact_version, spec_path_relative_to_jar, spec_generation_type, generate_waiters, - generate_paginators): - dir_name = os.path.join(sdk_dir, "bmc-codegen", "bmc-" + module_name + "-codegen") - os.mkdir(dir_name) - file_name = os.path.join(dir_name, "pom.xml") - - content = CODEGEN_POM_FILE_TEMPLATE.format( - group_id=group_id, - artifact_id=artifact_id, - artifact_version=artifact_version, - sdk_version=sdk_version, - service_friendly_name=service_friendly_name, - module_name=module_name, - spec_path_relative_to_jar=spec_path_relative_to_jar, - spec_generation_type=spec_generation_type, - generate_waiters=str(generate_waiters).lower(), - generate_paginators=str(generate_paginators).lower()) - - root = ET.fromstring(content, parser=ET.XMLParser(target=CommentedTreeBuilder())) - pom = ET.ElementTree(element=root) - write_xml(file_name, pom) - - -def add_child_codegen_module(sdk_dir, module_name): - file_name = os.path.join(sdk_dir, "bmc-codegen", "pom.xml") - pom = parse_pom(file_name) - - content = MODULE_TEMPLATE.format(name="bmc-" + module_name + "-codegen") - module_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - - # find modules - modules = pom.findall("./ns:modules", ns)[0] - modules.append(module_element) - - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -def generate_child_module(sdk_dir, module_name, service_friendly_name, sdk_version): - dir_name = os.path.join(sdk_dir, "bmc-" + module_name) - os.mkdir(dir_name) - file_name = os.path.join(dir_name, "pom.xml") - - content = MODULE_POM_FILE_TEMPLATE.format( - sdk_version=sdk_version, - service_friendly_name=service_friendly_name, - module_name=module_name) - - root = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - pom = ET.ElementTree(element=root) - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - os.makedirs(os.path.join(dir_name, "src", "main", "java")) - - -def add_child_module(parent_pom_location, module_name): - pom = parse_pom(parent_pom_location) - - content = MODULE_TEMPLATE.format(name="bmc-" + module_name) - module_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - - # find - modules = pom.findall("./ns:modules", ns)[0] - - # we want to insert this just before the first "START: Delete Public GitHub" - index_to_insert = 0 - for index, child in enumerate(list(modules)): - index_to_insert = index - if "START: Delete Public GitHub" in child.text: - break - - modules.insert(index_to_insert, module_element) - - indent(pom.getroot()) - pom.write(parent_pom_location, encoding="UTF-8", xml_declaration=True) - - -def add_bom_dependency(sdk_dir, module_name, sdk_version): - file_name = os.path.join(sdk_dir, "bmc-bom", "pom.xml") - pom = parse_pom(file_name) - - content = BOM_DEPENDENCY_TEMPLATE.format(module_name=module_name, sdk_version=sdk_version) - dependency_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - - # find dependencies - dependencies = pom.findall("./ns:dependencyManagement/ns:dependencies", ns)[0] - dependencies.append(dependency_element) - - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -def add_full_dependency(sdk_dir, 
module_name, service_friendly_name, sdk_version): - file_name = os.path.join(sdk_dir, "bmc-full", "pom.xml") - pom = parse_pom(file_name) - - content = FULL_DEPENDENCY_TEMPLATE.format(module_name=module_name) - dependency_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - - # find dependencies - dependencies = pom.findall("./ns:dependencies", ns)[0] - dependencies.append(dependency_element) - - # find Javadoc sourcepath - sourcepath_node = pom.findall("./ns:build/ns:plugins/ns:plugin[ns:artifactId='maven-javadoc-plugin']/ns:configuration/ns:sourcepath",ns)[0] - sourcepath_node.text = sourcepath_node.text + FULL_JAVADOC_SOURCEPATH_TEMPLATE.format(module_name=module_name) - - # find Javadoc groups - groups = pom.findall("./ns:build/ns:plugins/ns:plugin[ns:artifactId='maven-javadoc-plugin']/ns:configuration/ns:groups", ns)[0] - - content = FULL_JAVADOC_GROUP_TEMPLATE.format(module_name=module_name, service_friendly_name=service_friendly_name) - group_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - groups.append(group_element) - - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -def add_dependency_to_other_pom_file(file_name, module_name, scope): - pom = parse_pom(file_name) - - content = INTEGTESTS_DEPENDENCY_TEMPLATE.format( - module_name=module_name, - scope_text="\n {}".format(scope) if scope else "") - dependency_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - - # find dependencies - dependencies = pom.findall("./ns:dependencies", ns)[0] - dependencies.append(dependency_element) - - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -def update_version_of_existing_spec(sdk_dir, spec_name, version): - file_name = os.path.join(sdk_dir, "bmc-codegen", "bmc-" + spec_name + "-codegen", "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.artifactory.version" - property = pom.findall(xpath, ns)[0] - old_version = property.text - - version_increased = is_version_increasing(old_version, version) - if version_increased: - property.text = version - - # remove old comments and find the place to insert the version comment, if necessary - parent = pom.findall(".//ns:properties", ns)[0] - comments_to_remove = [] - comment_insertion_index = 0 - for index, el in enumerate(parent.iter()): - if el.tag is ET.Comment and el.text: - result = parse.search(CODEGEN_POM_FILE_ARTIFACTORY_VERSION_COMMENT_TEMPLATE, el.text) - if result: - # matched the formats - comments_to_remove.append(el) - else: - if el.tag.endswith("codegen.artifactory.version"): - if index > 0: - comment_insertion_index = index - len(comments_to_remove) - 1 - - for el in comments_to_remove: - parent.remove(el) - - if not version_increased: - # insert version comment - comment = ET.Comment( - CODEGEN_POM_FILE_ARTIFACTORY_VERSION_COMMENT_TEMPLATE.format(version, old_version, datetime.datetime.now())) - parent.insert(comment_insertion_index, comment) - - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - if not version_increased: - return old_version - - return None # the old version was lower - - -def update_relative_spec_path_of_existing_spec(sdk_dir, spec_name, relative_spec_path): - file_name = os.path.join(sdk_dir, "bmc-codegen", "bmc-" + spec_name + "-codegen", "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.spec.name" - property = pom.findall(xpath, ns)[0] - property.text 
= relative_spec_path - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -# Returns was_changed, was_ignored -def update_subdomain(sdk_dir, spec_name, subdomain): - file_name = os.path.join(sdk_dir, "bmc-codegen", "bmc-" + spec_name + "-codegen", "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.endpoint" - results = pom.findall(xpath, ns) - if results: - property = results[0] - was_changed = property.text != subdomain - property.text = subdomain - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - return was_changed, False - else: - print( - 'Ignored update to subdomain/endpoint -- service had no setting for this in pom.xml file and is getting that information from spec') - return False, True - - -def add_dependency_for_shaded_fatjar(file_name, module_name, template): - pom = parse_pom(file_name) - - content = template.format(module_name=module_name) - dependency_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - - # find - dependencies = pom.findall("./ns:dependencies", ns)[0] - - # we want to insert this just before the first "START: Delete Public GitHub" - index_to_insert = 0 - for index, child in enumerate(list(dependencies)): - index_to_insert = index - if "END OF OCI SDK DEPENDENCIES" in child.text: - break - - dependencies.insert(index_to_insert, dependency_element) - - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -def add_sdk_clients_metadata_for_graalvm(sdk_dir, module_name): - sdk_automatic_feature_metadata_file_name = os.path.join(sdk_dir, "bmc-addons", "bmc-graalvm-addon", "src", "main", - "java", "com", "oracle", "bmc", "graalvm", - "SdkAutomaticFeatureMetadata.java") - new_sdk_clients_metadata_classname_template = " \"com.oracle.bmc.{}.SdkClientsMetadata\"," - existingSdkClientsMetadataClasses = [] - - with open(sdk_automatic_feature_metadata_file_name) as metadata_file: - for line in metadata_file: - if line.strip().startswith("\"com.oracle.bmc"): - if not line.strip().endswith(","): - line += "," - existingSdkClientsMetadataClasses.append(line.replace("\n", "")) - existingSdkClientsMetadataClasses.append(new_sdk_clients_metadata_classname_template.format(module_name)) - existingSdkClientsMetadataClasses.sort() - allSdkClientsMetadataClasses = "\n".join(existingSdkClientsMetadataClasses)[:-1] - - with open(sdk_automatic_feature_metadata_file_name, 'w') as metadata_file: - metadata_file.write(GRAALVM_METADATA_TEMPLATE % allSdkClientsMetadataClasses) - - -def find_existing_module(sdk_dir, artifact_id): - codegen_dir = os.path.join(sdk_dir, "bmc-codegen") - pom_files = [y for x in os.walk(codegen_dir) for y in glob(os.path.join(x[0], 'pom.xml'))] - for ldr_path in pom_files: - pom = parse_pom(ldr_path) - xpath = ".//ns:properties//ns:codegen.artifactory.artifactId" - properties = pom.findall(xpath, ns) - if len(properties) > 0 and artifact_id == properties[0].text: - codegen_artifact_id = pom.findall("./ns:artifactId", ns)[0].text - m = re.match("oci-java-sdk-([^-]*)-codegen", codegen_artifact_id) - if m: - return m.group(1) - - return None - - -def gather_settings(sdk_dir, spec_name): - settings = {} - - file_name = os.path.join(sdk_dir, "bmc-codegen", "bmc-" + spec_name + "-codegen", "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.artifactory.groupId" - property = pom.findall(xpath, ns)[0] - settings["group_id"] = property.text - - xpath = 
".//ns:properties//ns:codegen.artifactory.artifactId" - property = pom.findall(xpath, ns)[0] - settings["artifact_id"] = property.text - - xpath = ".//ns:properties//ns:codegen.artifactory.version" - property = pom.findall(xpath, ns)[0] - settings["version"] = property.text - - settings["module_name"] = spec_name - - xpath = ".//ns:properties//ns:codegen.spec.name" - property = pom.findall(xpath, ns)[0] - settings["relative_spec_path"] = property.text - - xpath = ".//ns:properties//ns:codegen.endpoint" - results = pom.findall(xpath, ns) - if results: - property = results[0] - settings["subdomain"] = property.text - - return settings - - -def add_or_update_spec(artifact_id=None, group_id=None, spec_name=None, relative_spec_path=None, endpoint=None, - subdomain=None, version=None, spec_generation_type=None, generate_waiters=True, - generate_paginators=True, pom_location=None, github_whitelist_location=None): - sdk_dir = os.path.dirname(pom_location) - - found = find_existing_module(sdk_dir, artifact_id) - - ignored = [] - previous = {} - changed = [] - if found: - print('Artifact {} already exists in pom.xml. Updating specified fields...'.format(artifact_id)) - - previous = gather_settings(sdk_dir, found) - - if version: - newer_version = update_version_of_existing_spec(sdk_dir, found, version) - if newer_version: - print('The version was not updated to {}, because it was already at {}.'.format(version, newer_version)) - - if relative_spec_path: - update_relative_spec_path_of_existing_spec(sdk_dir, found, relative_spec_path) - - was_ignored = False - if endpoint: - subdomain = endpoint - subdomain = re.sub('^.*://', '', subdomain) # remove protocol and '://' - subdomain = re.sub(r'\.{domain}.*$', '', subdomain) # remove '.{domain}' and everything after it - was_changed, was_ignored = update_subdomain(sdk_dir, found, subdomain) - elif subdomain: - was_changed, was_ignored = update_subdomain(sdk_dir, found, subdomain) - - if was_ignored: - ignored.append('subdomain') - - if spec_name: - ignored.append('spec_name') - if group_id: - ignored.append('group_id') - - current = gather_settings(sdk_dir, found) - changed = compute_changed_settings(previous, current) - else: - missing_params = [] - if not spec_name: - missing_params.append('--spec-name') - - if not version: - missing_params.append('--version') - - if not group_id: - missing_params.append('--group-id') - - if not artifact_id: - missing_params.append('--artifact-id') - - if not relative_spec_path: - missing_params.append('--relative-spec-path') - - # not checking 'endpoint' anymore; can be specified either in ticket - # or in spec using 'x-obmcs-endpoint-template'. 
If neither is specified, - # this fails in the generator - - if missing_params: - raise MissingParameter( - 'The following options must be specified for a new spec: {}'.format(', '.join(missing_params))) - - if endpoint and subdomain: - raise UsageError('Cannot specify both --endpoint and --subdomain') - - if endpoint: - print('Ignoring endpoint setting for new services; new services have to get it from the spec') - ignored.append('endpoint') - - if subdomain: - print('Ignoring subdomain setting for new services; new services have to get it from the spec') - ignored.append('subdomain') - - if spec_generation_type: - print( - 'Note: --spec-generation-type is ignored for the Java SDK, since it is set in the bmc-codegen/pom.xml file for all modules') - - module_name = spec_name.lower().replace('_', '') # module_name is "newservice" - service_friendly_name = string.capwords(spec_name.replace('_', ' ')) # service_friendly_name is "New Service" - - # Find OCI Java SDK Version - pom = parse_pom(pom_location) - xpath = ".//ns:version" - property = pom.findall(xpath, ns)[0] - sdk_version = property.text - - print('Module {} does not exist in pom.xml. Adding it...'.format(module_name)) - generate_child_codegen_pom(sdk_dir, module_name, service_friendly_name, sdk_version, group_id, artifact_id, - version, relative_spec_path, spec_generation_type, generate_waiters, - generate_paginators) - add_child_codegen_module(sdk_dir, module_name) - generate_child_module(sdk_dir, module_name, service_friendly_name, sdk_version) - add_child_module(pom_location, module_name) - add_bom_dependency(sdk_dir, module_name, sdk_version) - add_full_dependency(sdk_dir, module_name, service_friendly_name, sdk_version) - add_dependency_to_other_pom_file(os.path.join(sdk_dir, "bmc-integtests", "pom.xml"), module_name, "test") - add_dependency_to_other_pom_file(os.path.join(sdk_dir, "bmc-smoketests", "pom.xml"), module_name, "test") - add_dependency_to_other_pom_file(os.path.join(sdk_dir, "bmc-examples", "pom.xml"), module_name, None) - add_dependency_for_shaded_fatjar(os.path.join(sdk_dir, "bmc-shaded", "bmc-shaded-internal-fatjar", "pom.xml"), - module_name, FULL_DEPENDENCY_TEMPLATE) - add_dependency_for_shaded_fatjar(os.path.join(sdk_dir, "bmc-shaded", "bmc-shaded-full", "pom.xml"), module_name, - SHADED_DEPENDENCY_TEMPLATE) - add_spec_module_to_github_whitelist(module_name, github_whitelist_location, '\n^bmc-{}/.*') - add_sdk_clients_metadata_for_graalvm(sdk_dir, module_name) - - print('Success!') - - return AddOrUpdateSpecResult( - updated=(not found) or changed != [], - # not found means it's a new spec, or if it is an existing spec, changed needs to be non-empty - existing=found is not None, - ignored=ignored, - previous=previous, - changed=changed) - - -@click.command() -@click.option('--artifact-id', required=True, help='The artifact id for the spec artifact (e.g. coreservices-api-spec') -@click.option('--group-id', help='The group id for the spec artifact (e.g. com.oracle.pic.commons)') -@click.option('--spec-name', - help='The name of the spec. This will be used (e.g. core, identity, objectstorage). This is also used as the module name (\'bmc-servicename\') and base package (\'com.oracle.bmc.servicename\'). Underscores are removed, everything is lower-cased.') -@click.option('--relative-spec-path', - help='The relative path of the spec within the artifact (e.g. coreservices-api-spec-20160918-external.yaml)') -@click.option('--endpoint', help='The base endpoint for the service (e.g. 
https://iaas.{domain}/20160918)')
-@click.option('--subdomain',
-              help='The subdomain for the service (e.g. if the endpoint is https://iaas.{domain}/20160918), the subdomain is "iaas"')
-@click.option('--version', help='The version of the spec artifact (e.g. 0.0.1-SNAPSHOT)')
-@click.option('--spec-generation-type', help='The generation type: PUBLIC or PREVIEW')
-@click.option('--generate-waiters/--no-generate-waiters', default=True, help='Generate waiters')
-@click.option('--generate-paginators/--no-generate-paginators', default=True, help='Generate paginators')
-@click.option('--pom-location', type=click.Path(exists=True), default=DEFAULT_PARENT_POM_LOCATION,
-              help='Location of the pom.xml file in the root directory of the OCI Java SDK')
-@click.option('--github-whitelist-location', type=click.Path(exists=True), default=DEFAULT_GITHUB_WHITELIST_LOCATION,
-              help='Location of the github.whitelist file to update')
-def add_or_update_spec_command(artifact_id, group_id, spec_name, relative_spec_path, endpoint, subdomain, version,
-                               spec_generation_type, generate_waiters, generate_paginators, pom_location,
-                               github_whitelist_location):
-    print(add_or_update_spec(artifact_id, group_id, spec_name, relative_spec_path, endpoint, subdomain, version,
-                             spec_generation_type, generate_waiters, generate_paginators, pom_location,
-                             github_whitelist_location))
-
-
-if __name__ == '__main__':
-    add_or_update_spec_command()
diff --git a/scripts/auto_gen_utils/add_or_update_scripts/module_pom_file_add_or_update_spec.py b/scripts/auto_gen_utils/add_or_update_scripts/module_pom_file_add_or_update_spec.py
deleted file mode 100644
index fdb4dba04f..0000000000
--- a/scripts/auto_gen_utils/add_or_update_scripts/module_pom_file_add_or_update_spec.py
+++ /dev/null
@@ -1,382 +0,0 @@
-#!/usr/bin/env python
-
-#
-# This is a library for manipulating a single pom.xml file to either
-# add new specs or update the versions of existing specs.
-# -# Can be used by Python SDK, CLI, Ruby SDK and Go SDK - -import os -import xml.etree.ElementTree as ET -from click.exceptions import MissingParameter, UsageError - -from .add_or_update_spec_utils import parse_pom, write_xml, AddOrUpdateSpecResult -from .add_or_update_spec_utils import compute_changed_settings -from .add_or_update_spec_utils import add_spec_module_to_github_whitelist -from shared.version_utils import is_version_increasing - -DEFAULT_POM_LOCATION = "pom.xml" -DEFAULT_GITHUB_WHITELIST_LOCATION = "github.whitelist" - -SPEC_FILE_PROPERTY_TEMPLATE = """ -<{artifact_id}-spec-file>{spec_path_relative_to_jar} -""" - -UNPACK_EXECUTION_TEMPLATE = """ - - unpack-{artifact_id} - initialize - - unpack - - - - - {group_id} - {artifact_id} - jar - **/* - ${{spec-temp-dir}}/{artifact_id} - - - - -""" - -PREFER_EXECUTION_TEMPLATE = """ - - spec-conditionals-prefer-{artifact_id} - initialize - - prefer - - - - - ${{spec-temp-dir}}/{artifact_id}/source/${{{artifact_id}-spec-file}} - - ${{spec-temp-dir}}/{artifact_id}/${{{artifact_id}-spec-file}} - - ${{preferred-temp-dir}}/{artifact_id}/${{{artifact_id}-spec-file}} - - -""" - -PREPROCESS_EXECUTION_TEMPLATE = """ - - spec-conditionals-preprocess-{artifact_id} - initialize - - preprocess - - - ${{preferred-temp-dir}}/{artifact_id}/${{{artifact_id}-spec-file}} - ${{preprocessed-temp-dir}}/{artifact_id}/${{{artifact_id}-spec-file}} - ${{enabled-groups-file}} - ${{enabled-groups-dir}} - - -""" - - -DEPENDENCY_MANAGEMENT_TEMPLATE = """ - - {group_id} - {artifact_id} - {version} - -""" - -MODULE_ARTIFACT_ID_TEMPLATE = """{parent_artifact_id}-{spec_name}-codegen""" - -ns = {"ns": "http://maven.apache.org/POM/4.0.0"} - -# allow default namespace for output, dont print ns0: prefixes everywhere -ET.register_namespace('', "http://maven.apache.org/POM/4.0.0") - - -def add_artifact_id_to_module(pom, parent_artifact_id, spec_name): - # Search for an artifactId node in the root of pom. 
- xpath = "ns:artifactId" - artfact_node = pom.findall(xpath, ns)[0] - artfact_node.text = MODULE_ARTIFACT_ID_TEMPLATE.format(parent_artifact_id=parent_artifact_id, - spec_name=spec_name) - - -def get_artifact_id(pom): - xpath = ".//ns:artifactId" - artfact_id = pom.findall(xpath, ns)[0].text - return artfact_id - - -def generate_and_add_property_element(pom, artifact_id, spec_path_relative_to_jar): - content = SPEC_FILE_PROPERTY_TEMPLATE.format( - artifact_id=artifact_id, - spec_path_relative_to_jar=spec_path_relative_to_jar) - - property_element = ET.fromstring(content) - - xpath = ".//ns:properties" - properties = pom.findall(xpath, ns)[0] - properties.append(property_element) - - -def add_module_to_parent_pom(pom, module_entry): - module_element = ET.fromstring(module_entry) - xpath = ".//ns:modules" - properties = pom.findall(xpath, ns)[0] - properties.insert(0, module_element) - - -def update_relative_spec_path(pom, artifact_id, spec_path_relative_to_jar): - xpath = ".//ns:properties/ns:{artifact_id}-spec-file".format(artifact_id=artifact_id) - spec_file_node = pom.findall(xpath, ns)[0] - if spec_file_node.text != spec_path_relative_to_jar: - spec_file_node.text = spec_path_relative_to_jar - return True - return False - - -def generate_and_add_unpack_element(pom, group_id, artifact_id, spec_path_relative_to_jar): - content = UNPACK_EXECUTION_TEMPLATE.format( - group_id=group_id, - artifact_id=artifact_id, - spec_path_relative_to_jar=spec_path_relative_to_jar) - - unpack_element = ET.fromstring(content) - - # find dex-get-spec-artifact-plugin where unpacking happens - unpack_plugin_executions = pom.findall(".//ns:plugin[ns:artifactId='dex-get-spec-artifact-plugin']/ns:executions", ns)[0] - unpack_plugin_executions.append(unpack_element) - - -def generate_and_add_prefer_element(pom, group_id, artifact_id, spec_path_relative_to_jar): - content = PREFER_EXECUTION_TEMPLATE.format( - group_id=group_id, - artifact_id=artifact_id, - spec_path_relative_to_jar=spec_path_relative_to_jar) - - unpack_element = ET.fromstring(content) - - # find spec-conditionals-preprocessor-plugin where unpacking happens - unpack_plugin_executions = pom.findall(".//ns:plugin[ns:artifactId='spec-conditionals-preprocessor-plugin']/ns:executions", ns)[0] - unpack_plugin_executions.append(unpack_element) - - -def generate_and_add_preprocess_element(pom, group_id, artifact_id, spec_path_relative_to_jar): - content = PREPROCESS_EXECUTION_TEMPLATE.format( - group_id=group_id, - artifact_id=artifact_id, - spec_path_relative_to_jar=spec_path_relative_to_jar) - - unpack_element = ET.fromstring(content) - - # find spec-conditionals-preprocessor-plugin where unpacking happens - unpack_plugin_executions = pom.findall(".//ns:plugin[ns:artifactId='spec-conditionals-preprocessor-plugin']/ns:executions", ns)[0] - unpack_plugin_executions.append(unpack_element) - - -def generate_and_add_generate_section(pom, spec_name, artifact_id, spec_path_relative_to_jar, spec_generation_type, regional_sub_service_overrides, non_regional_sub_service_overrides, generate_execution_template): - regional_non_regional_service_overrides_content = '' - if regional_sub_service_overrides or non_regional_sub_service_overrides: - if regional_sub_service_overrides: - for override in regional_sub_service_overrides: - regional_non_regional_service_overrides_content += 'true\n'.format(service_name=override) - - if non_regional_sub_service_overrides: - for override in non_regional_sub_service_overrides: - regional_non_regional_service_overrides_content += 
'false\n'.format(service_name=override) - - content = generate_execution_template.format( - artifact_id=artifact_id, - spec_name=spec_name, - spec_path_relative_to_jar=spec_path_relative_to_jar, - spec_generation_type=spec_generation_type, - regional_non_regional_service_overrides=regional_non_regional_service_overrides_content) - - generate_element = ET.fromstring(content) - - # find bmc-sdk-swagger-maven-plugin where generation happens - generate_plugin_executions = pom.findall(".//ns:plugin[ns:artifactId='bmc-sdk-swagger-maven-plugin']/ns:executions", ns)[0] - generate_plugin_executions.append(generate_element) - - -def generate_and_add_clean_section(pom, spec_name, clean_element_template): - if not clean_element_template: - return - - content = clean_element_template.format( - spec_name=spec_name) - - clean_element = ET.fromstring(content) - - # find filesetes where clean directory goes - filesets = pom.findall(".//ns:plugin[ns:artifactId='maven-clean-plugin']//ns:filesets", ns)[0] - filesets.append(clean_element) - - -def generate_and_add_dependency_management_section(pom, group_id, artifact_id, version): - content = DEPENDENCY_MANAGEMENT_TEMPLATE.format( - group_id=group_id, - artifact_id=artifact_id, - version=version) - - dep_mgt_element = ET.fromstring(content) - - # find dependencies where version is specified - dependencies = pom.findall(".//ns:dependencyManagement/ns:dependencies", ns)[0] - dependencies.append(dep_mgt_element) - - -def update_version_of_existing_spec(pom, artifact_id, version): - xpath = ".//ns:dependencyManagement//ns:dependency[ns:artifactId='{artifact_id}']".format(artifact_id=artifact_id) - dependency = pom.findall(xpath, ns)[0] - old_version = dependency.find('./ns:version', ns).text - - if not is_version_increasing(old_version, version): - return old_version - - dependency.find('./ns:version', ns).text = version - return None # the old version was lower - - -def check_args_for_new_service(locals): - if not locals['version']: - raise MissingParameter('Must specify --version for new spec') - - if not locals['group_id']: - raise MissingParameter('Must specify --group-id for new spec') - - if not locals['spec_name']: - raise MissingParameter('Must specify --spec-name for new spec') - - if not locals['relative_spec_path']: - raise MissingParameter('Must specify --relative-spec-path for new spec') - - -def module_pom_file_add_or_update_spec(artifact_id=None, group_id=None, spec_name=None, - relative_spec_path=None, endpoint=None, subdomain=None, version=None, - spec_generation_type=None, regional_sub_service_overrides=None, - non_regional_sub_service_overrides=None, pom_location=None, - github_whitelist_location=None, github_whitelist_template=None, generate_execution_template=None, - clean_element_template=None, update_endpoint_function=None, - check_args_for_new_service_function=check_args_for_new_service, gather_settings=None, - module_pom=None, module_pom_path=None, module_template=None): - - found = False - - if not generate_execution_template: - raise ValueError('Must supply generate_execution_template') - - if not artifact_id: - raise MissingParameter(param_type='option', param_hint='--artifact-id', message='Artifact id parameter is required') - - if subdomain and endpoint: - raise UsageError('Cannot specify both --endpoint and --subdomain') - - # Parent pom - pom = parse_pom(pom_location) - - updated_spec = False - - ignored = [] - previous = {} - changed = [] - - # determine if this artifact is already in the spec - # 
xpath_for_spec_dependency_declaration = ".//ns:dependency[ns:artifactId='{artifact_id}']".format(artifact_id=artifact_id) - if module_pom: - print('Artifact {} already exists in module pom. Updating specified fields...'.format(artifact_id)) - - found = True - - previous = gather_settings(module_pom, artifact_id) - - if version: - # Todo: Determine if we need to find the version again in update_version_of_existing_spec. It should - # already be in `previous["version"]` - newer_version = update_version_of_existing_spec(module_pom, artifact_id, version) - if newer_version: - print('The version was not updated to {}, because it was already at {}.'.format(version, newer_version)) - else: - updated_spec |= True - - if relative_spec_path: - updated_spec |= update_relative_spec_path(module_pom, artifact_id, relative_spec_path) - - if update_endpoint_function: - was_ignored = False - if endpoint: - was_changed, was_ignored = update_endpoint_function(module_pom, artifact_id, endpoint) - updated_spec |= was_changed - elif subdomain: - was_changed, was_ignored = update_endpoint_function(module_pom, artifact_id, 'https://{}.{{domain}}'.format(subdomain)) - updated_spec |= was_changed - - if was_ignored: - ignored.append('subdomain') - - if spec_name: - ignored.append('spec_name') - if group_id: - ignored.append('group_id') - - current = gather_settings(module_pom, artifact_id) - changed = compute_changed_settings(previous, current) - else: - print(os.getcwd()) - module_pom = parse_pom("add_or_update_scripts/templates/pom-template.xml") - - if endpoint: - print('Ignoring endpoint setting for new services; new services have to get it from the spec') - ignored.append('endpoint') - - if subdomain: - print('Ignoring subdomain setting for new services; new services have to get it from the spec') - ignored.append('subdomain') - - if subdomain and not endpoint: - endpoint = 'https://{}.{{domain}}'.format(subdomain) - - check_args_for_new_service_function(locals()) - - if not spec_generation_type: - spec_generation_type = 'PUBLIC' - - print('Artifact {} does not exist. Adding it...'.format(spec_name)) - parent_artifact_id = get_artifact_id(pom) - print("Parent_artifact_id: {}".format(parent_artifact_id)) - print("spec_name: {}".format(spec_name)) - add_artifact_id_to_module(module_pom, parent_artifact_id, spec_name) - generate_and_add_property_element(module_pom, artifact_id, relative_spec_path) - generate_and_add_unpack_element(module_pom, group_id, artifact_id, relative_spec_path) - generate_and_add_prefer_element(module_pom, group_id, artifact_id, relative_spec_path) - generate_and_add_preprocess_element(module_pom, group_id, artifact_id, relative_spec_path) - generate_and_add_generate_section(module_pom, spec_name, artifact_id, relative_spec_path, spec_generation_type, regional_sub_service_overrides, non_regional_sub_service_overrides, generate_execution_template) - generate_and_add_clean_section(module_pom, spec_name, clean_element_template) - generate_and_add_dependency_management_section(module_pom, group_id, artifact_id, version) - add_spec_module_to_github_whitelist(spec_name, github_whitelist_location, github_whitelist_template) - add_module_to_parent_pom(pom, module_template.format(spec_name)) - - updated_spec = True - - if updated_spec: - write_xml(pom_location, pom) - write_xml(module_pom_path, module_pom) - - print('====== Success! ======') - print(""" -Next Steps ----------- -1. 
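
Every entry point in these scripts reports its outcome as an AddOrUpdateSpecResult, whose definition lives in add_or_update_spec_utils and is not part of this hunk. A plausible minimal equivalent, shown here only to make the return statements below readable (the real definition may differ):

    from collections import namedtuple

    # Hypothetical stand-in for the type defined in add_or_update_spec_utils.
    AddOrUpdateSpecResult = namedtuple(
        'AddOrUpdateSpecResult',
        ['updated', 'existing', 'ignored', 'previous', 'changed'])

    # updated: anything was written; existing: the spec was already present;
    # ignored: options that were accepted but had no effect.
    result = AddOrUpdateSpecResult(
        updated=True, existing=False, ignored=[], previous={}, changed=[])
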
Run mvn clean install to update generated code -""") - else: - print('===== Spec was not updated =====') - - return AddOrUpdateSpecResult( - updated=updated_spec, - existing=found, - ignored=ignored, - previous=previous, - changed=changed - ) diff --git a/scripts/auto_gen_utils/add_or_update_scripts/powershell_add_or_update_spec.py b/scripts/auto_gen_utils/add_or_update_scripts/powershell_add_or_update_spec.py deleted file mode 100644 index fe1389f440..0000000000 --- a/scripts/auto_gen_utils/add_or_update_scripts/powershell_add_or_update_spec.py +++ /dev/null @@ -1,437 +0,0 @@ -#!/usr/bin/env python - -# -# This script manipulates the pom.xml tree to either add new specs or update the versions of existing specs. -# - -from .spec_updater_base import SpecUpdaterBase -import xml.etree.ElementTree as ET -import re -import click -import os -import string -import uuid -from click.exceptions import UsageError, MissingParameter -from glob import glob - -from shared.version_utils import is_version_increasing -from .add_or_update_spec_utils import AddOrUpdateSpecResult, compute_changed_settings, indent -from .add_or_update_spec_utils import write_xml -from .add_or_update_spec_utils import CommentedTreeBuilder -from .add_or_update_spec_utils import parse_pom - -DEFAULT_PARENT_POM_LOCATION = "pom.xml" -DEFAULT_GITHUB_WHITELIST_LOCATION = "github.whitelist" -ns = {"ns":"http://maven.apache.org/POM/4.0.0"} - -POWERSHELL_CODEGEN_LOCATION = "codegen" -# Template for include each module in pom.xml -MODULE_TEMPLATE = "{name}" -# Pom.xml template specific to PowerShell -POWERSHELL_POM_FILE_TEMPLATE = """ - - 4.0.0 - - com.oracle.oci.sdk - oci-powershell-codegen - {sdk_version} - .. - - oci-powershell-{service_name}-codegen - Oracle Cloud Infrastructure SDK - {service_friendly_name} Service Codegen - This project contains the code generation spec and configuration for the {service_friendly_name} - - {group_id} - {artifact_id} - {artifact_version} - {spec_path_relative_to_jar} - {module_name} - {generate_waiters} - {generate_paginators} - - - - - org.commonjava.maven.plugins - directory-maven-plugin - - - org.codehaus.mojo - build-helper-maven-plugin - - - com.oracle.oci.sdk.utilities - dex-get-spec-artifact-plugin - - - com.oracle.oci.sdk.utilities - spec-conditionals-preprocessor-plugin - - - com.oracle.bmc.sdk - bmc-sdk-swagger-maven-plugin - - - com.mycila - license-maven-plugin - - - maven-antrun-plugin - - - - -""" - -PS_GITHUB_WHITELIST_TEMPLATE = """\n^{spec_name}/Cmdlets/.*$ -^{spec_name}/OCI.PSModules.{spec_name}.csproj$ -^{spec_name}/OCI.PSModules.{spec_name}.psd1$""" - -# The powershell-specific pom.xml template parameters for XML parsing -POWERSHELL_SPEC_PARAMS_XML_PATH_DICT = { - 'group_id': ".//ns:properties//ns:codegen.artifactory.groupId", - 'artifact_id': ".//ns:properties//ns:codegen.artifactory.artifactId", - 'version': ".//ns:properties//ns:codegen.artifactory.version", - 'relative_spec_path': ".//ns:properties//ns:codegen.artifact.spec.path", - 'service_name': ".//ns:properties//ns:codegen.service.name", - 'subdomain': ".//ns:properties//ns:codegen.service.group.endpoint" -} - -PROJECT_ENTRY_TEMPLATE = """Project("{{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}}") = "OCI.PSModules.{module_name}", "{module_name}\\OCI.PSModules.{module_name}.csproj", "{{{guid}}}" -EndProject -""" - -PROJECT_PLATEFORM_TEMPLATE = """ {{{guid}}}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {{{guid}}}.Debug|Any CPU.Build.0 = Debug|Any CPU - {{{guid}}}.Debug|x64.ActiveCfg = Debug|Any CPU - {{{guid}}}.Debug|x64.Build.0 = 
Debug|Any CPU - {{{guid}}}.Debug|x86.ActiveCfg = Debug|Any CPU - {{{guid}}}.Debug|x86.Build.0 = Debug|Any CPU - {{{guid}}}.Release|Any CPU.ActiveCfg = Release|Any CPU - {{{guid}}}.Release|Any CPU.Build.0 = Release|Any CPU - {{{guid}}}.Release|x64.ActiveCfg = Release|Any CPU - {{{guid}}}.Release|x64.Build.0 = Release|Any CPU - {{{guid}}}.Release|x86.ActiveCfg = Release|Any CPU - {{{guid}}}.Release|x86.Build.0 = Release|Any CPU -""" - - -class PowerShellSpecUpdater(SpecUpdaterBase): - # Override the spec name that is defined in the service pom.xml files as the testing service - # references folders without snake-case (based on the .NET sdk formatting). - def format_module_name_for_template(self, module_name): - return module_name.replace("-", "").replace("_", "").replace(" ","") - - -def add_or_update_spec(artifact_id=None, group_id=None, spec_name=None, relative_spec_path=None, endpoint=None, subdomain=None, version=None, spec_generation_type=None, generate_waiters=True, generate_paginators=True, pom_location=None, github_whitelist_location=None): - sdk_dir = os.path.dirname(pom_location) - - found = find_existing_module(sdk_dir, artifact_id) - ignored = [] - previous = {} - changed = [] - - if found: - print('Artifact {} already exists in pom.xml. Updating specified fields...'.format(artifact_id)) - - previous = gather_settings(sdk_dir, found) - - if version: - newer_version = update_version_of_existing_spec(sdk_dir, found, version) - if newer_version: - print('The version was not updated to {}, because it was already at {}.'.format(version, newer_version)) - - if relative_spec_path: - update_relative_spec_path_of_existing_spec(sdk_dir, found, relative_spec_path) - - was_ignored = False - if endpoint: - subdomain = endpoint - subdomain = re.sub('^.*://', '', subdomain) # remove protocol and '://' - subdomain = re.sub(r'\.{domain}.*$', '', subdomain) # remove '.{domain}' and everything after it - was_changed, was_ignored = update_subdomain(sdk_dir, found, subdomain) - elif subdomain: - was_changed, was_ignored = update_subdomain(sdk_dir, found, subdomain) - - if was_ignored: - ignored.append('subdomain') - - if spec_name: - ignored.append('spec_name') - if group_id: - ignored.append('group_id') - - current = gather_settings(sdk_dir, found) - changed = compute_changed_settings(previous, current) - - else: - missing_params = [] - if not spec_name: - missing_params.append('--spec-name') - - if not version: - missing_params.append('--version') - - if not group_id: - missing_params.append('--group-id') - - if not artifact_id: - missing_params.append('--artifact-id') - - if not relative_spec_path: - missing_params.append('--relative-spec-path') - - # not checking 'endpoint' anymore; can be specified either in ticket - # or in spec using 'x-obmcs-endpoint-template'. 
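
The endpoint-to-subdomain conversion above is just two regular-expression passes; '{domain}' has no special meaning to the regex engine (an incomplete quantifier is treated literally), so the second pass cuts at the literal '.{domain}'. A standalone check using the endpoint format the option help documents:

    import re

    endpoint = 'https://iaas.{domain}/20160918'
    subdomain = re.sub('^.*://', '', endpoint)           # drop protocol -> 'iaas.{domain}/20160918'
    subdomain = re.sub(r'\.{domain}.*$', '', subdomain)  # drop '.{domain}' and everything after it
    assert subdomain == 'iaas'
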
If neither is specified, - # this fails in the generator - - if missing_params: - raise MissingParameter('The following options must be specified for a new spec: {}'.format(', '.join(missing_params))) - - if endpoint and subdomain: - raise UsageError('Cannot specify both --endpoint and --subdomain') - - if endpoint: - print('Ignoring endpoint setting for new services; new services have to get it from the spec') - ignored.append('endpoint') - - if subdomain: - print('Ignoring subdomain setting for new services; new services have to get it from the spec') - ignored.append('subdomain') - - if spec_generation_type: - print('Note: --spec-generation-type is ignored for the PowerShell, since it is set in the codegen/pom.xml file for all services') - - service_name = spec_name.lower().replace('_', '') # service_name is "newservice" - module_name = service_name.title() # module_name is "Newservice" - service_friendly_name = string.capwords(spec_name.replace('_', ' ')) # service_friendly_name is "New Service" - - # Find OCI DotNet SDK Version - pom = parse_pom(pom_location) - xpath = ".//ns:version" - property = pom.findall(xpath, ns)[0] - sdk_version = property.text - - guid = str(uuid.uuid4()).upper() - print('Project GUID: {}'.format(guid)) - - print('Module {} does not exist in pom.xml. Adding it...'.format(module_name)) - generate_child_codegen_pom(sdk_dir, service_name, module_name, service_friendly_name, sdk_version, group_id, - artifact_id, version, relative_spec_path, spec_generation_type, generate_waiters, - generate_paginators) - - add_child_codegen_module(sdk_dir, service_name) - update_sln_file(sdk_dir, module_name, guid) - add_spec_module_to_powershell_github_whitelist(module_name, github_whitelist_location, PS_GITHUB_WHITELIST_TEMPLATE) - - print('Success!') - - return AddOrUpdateSpecResult( - updated=(not found) or changed != [], # not found means it's a new spec, or if it is an existing spec, changed needs to be non-empty - existing=found is not None, - ignored=ignored, - previous=previous, - changed=changed) - - -def find_existing_module(sdk_dir, artifact_id): - codegen_dir = os.path.join(sdk_dir, POWERSHELL_CODEGEN_LOCATION) - pom_files = [y for x in os.walk(codegen_dir) for y in glob(os.path.join(x[0], 'pom.xml'))] - for ldr_path in pom_files: - pom = parse_pom(ldr_path) - xpath = ".//ns:properties//ns:codegen.artifactory.artifactId" - properties = pom.findall(xpath, ns) - if len(properties) > 0 and artifact_id == properties[0].text: - codegen_artifact_id = pom.findall("./ns:artifactId", ns)[0].text - m = re.match("oci-powershell-([^-]*)-codegen", codegen_artifact_id) - if m: - return m.group(1) - - return None - - -def gather_settings(sdk_dir, spec_name): - settings = {} - - file_name = os.path.join(sdk_dir, POWERSHELL_CODEGEN_LOCATION, spec_name, "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.artifactory.groupId" - property = pom.findall(xpath, ns)[0] - settings["group_id"] = property.text - - xpath = ".//ns:properties//ns:codegen.artifactory.artifactId" - property = pom.findall(xpath, ns)[0] - settings["artifact_id"] = property.text - - xpath = ".//ns:properties//ns:codegen.artifactory.version" - property = pom.findall(xpath, ns)[0] - settings["version"] = property.text - - settings["module_name"] = spec_name - - xpath = ".//ns:properties//ns:codegen.spec.name" - property = pom.findall(xpath, ns)[0] - settings["relative_spec_path"] = property.text - - xpath = ".//ns:properties//ns:codegen.endpoint" - results = pom.findall(xpath, ns) - if 
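
The three name forms above are derived purely by string manipulation from the spec name. For spec_name 'new_service', matching the inline comments:

    import string

    spec_name = 'new_service'
    service_name = spec_name.lower().replace('_', '')    # 'newservice'
    module_name = service_name.title()                   # 'Newservice'
    service_friendly_name = string.capwords(
        spec_name.replace('_', ' '))                     # 'New Service'
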
results: - property = results[0] - settings["subdomain"] = property.text - - return settings - - -def update_version_of_existing_spec(sdk_dir, spec_name, version): - file_name = os.path.join(sdk_dir, POWERSHELL_CODEGEN_LOCATION, spec_name, "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.artifactory.version" - property = pom.findall(xpath, ns)[0] - old_version = property.text - - if not is_version_increasing(old_version, version): - return old_version - - property.text = version - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - return None # the old version was lower - - -def update_relative_spec_path_of_existing_spec(sdk_dir, spec_name, relative_spec_path): - file_name = os.path.join(sdk_dir, POWERSHELL_CODEGEN_LOCATION, spec_name, "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.spec.name" - property = pom.findall(xpath, ns)[0] - property.text = relative_spec_path - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -# Returns was_changed, was_ignored -def update_subdomain(sdk_dir, spec_name, subdomain): - file_name = os.path.join(sdk_dir, POWERSHELL_CODEGEN_LOCATION, spec_name, "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.endpoint" - results = pom.findall(xpath, ns) - if results: - property = results[0] - was_changed = property.text != subdomain - property.text = subdomain - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - return was_changed, False - else: - print('Ignored update to subdomain/endpoint -- service had no setting for this in pom.xml file and is getting that information from spec') - return False, True - - -def generate_child_codegen_pom(sdk_dir, service_name, module_name, service_friendly_name, sdk_version, group_id, artifact_id, artifact_version, spec_path_relative_to_jar, spec_generation_type, generate_waiters, generate_paginators): - dir_name = os.path.join(sdk_dir, POWERSHELL_CODEGEN_LOCATION, service_name) - os.mkdir(dir_name) - file_name = os.path.join(dir_name, "pom.xml") - - content = POWERSHELL_POM_FILE_TEMPLATE.format( - group_id=group_id, - artifact_id=artifact_id, - artifact_version=artifact_version, - sdk_version=sdk_version, - service_name=service_name, - service_friendly_name=service_friendly_name, - module_name=module_name, - spec_path_relative_to_jar=spec_path_relative_to_jar, - spec_generation_type=spec_generation_type, - generate_waiters=str(generate_waiters).lower(), - generate_paginators=str(generate_paginators).lower()) - - root = ET.fromstring(content, parser=ET.XMLParser(target=CommentedTreeBuilder())) - pom = ET.ElementTree(element=root) - write_xml(file_name, pom) - - -def add_spec_module_to_powershell_github_whitelist(spec_name, github_whitelist_location, github_whitelist_template): - if github_whitelist_location and github_whitelist_template: - with open(github_whitelist_location, 'a') as f: - f.write(github_whitelist_template.format(spec_name=spec_name)) - - -def add_child_codegen_module(sdk_dir, service_name): - file_name = os.path.join(sdk_dir, POWERSHELL_CODEGEN_LOCATION, "pom.xml") - pom = parse_pom(file_name) - - content = MODULE_TEMPLATE.format(name=service_name) - module_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - - # find modules - modules = pom.findall("./ns:modules", ns)[0] - modules.append(module_element) - - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", 
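
Version bumps above are guarded by is_version_increasing from shared.version_utils (removed elsewhere in this change), so a new version is only written when it moves forward. The guard's contract, as every caller uses it, in sketch form (assuming the helper returns True only when new_version sorts after old_version):

    from shared.version_utils import is_version_increasing  # module deleted in this same patch

    def apply_version_bump(version_node, new_version):
        # Mirrors update_version_of_existing_spec: returning the old version
        # signals 'not updated'; returning None signals the bump was written.
        old_version = version_node.text
        if not is_version_increasing(old_version, new_version):
            return old_version
        version_node.text = new_version
        return None
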
xml_declaration=True) - - -def update_sln_file(sdk_dir, module_name, guid): - project_added = False - project_platform_added = False - - sln_file = os.path.join(sdk_dir, "oci-powershell-modules.sln") - with open(sln_file) as sln_file_read: - lines = sln_file_read.readlines() - - locs = [i for i, val in enumerate(lines) if 'EndProject' in val] - - # Ignoring case when a solution file does not contain any existing projects.This should never happen since we already have projects. - if len(locs) > 1: - print("Will add project") - new_project_entry = PROJECT_ENTRY_TEMPLATE.format(guid=guid, module_name=module_name) - lines.insert(locs[-1] + 1, new_project_entry) - project_added = True - - project_platform_section_found = False - # First find the start of ProjectConfigurationPlatforms section - for loc, line in enumerate(lines): - if 'ProjectConfigurationPlatforms' in line: - project_platform_section_found = True - continue - # Find the matching closing line for ProjectConfigurationPlatforms section - if project_platform_section_found and 'EndGlobalSection' in line: - print("Will update platform") - new_project_platform = PROJECT_PLATEFORM_TEMPLATE.format(guid=guid) - lines.insert(loc, new_project_platform) - project_platform_added = True - break - if not project_platform_section_found: - print("Cannot find project platform section.") - else: - print("Cannot find existing projects.") - - if project_added and project_platform_added: - with open(sln_file, 'w') as sln_file_write: - sln_file_write.write(''.join(lines)) - else: - print("Unable to update sln file.") - - -@click.command() -@click.option('--artifact-id', required=True, help='The artifact id for the spec artifact (e.g. coreservices-api-spec') -@click.option('--group-id', help='The group id for the spec artifact (e.g. com.oracle.pic.commons)') -@click.option('--spec-name', help='The name of the spec. This will be used (e.g. core, identity, objectstorage). This is also used as the module name (\'bmc-servicename\') and base package (\'com.oracle.bmc.servicename\'). Underscores are removed, everything is lower-cased.') -@click.option('--relative-spec-path', help='The relative path of the spec within the artifact (e.g. coreservices-api-spec-20160918-external.yaml)') -@click.option('--endpoint', help='The base endpoint for the service (e.g. https://iaas.{domain}/20160918)') -@click.option('--subdomain', help='The subdomain for the service (e.g. if the endpoint is https://iaas.{domain}/20160918), the subdomain is "iaas"') -@click.option('--version', help='The version of the spec artifact (e.g. 
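
update_sln_file above edits the solution file by plain line scanning rather than with a parser: the last 'EndProject' line marks where new project entries go, and the 'EndGlobalSection' closing ProjectConfigurationPlatforms marks where platform lines go. In miniature, with abridged, illustrative .sln content:

    lines = [
        'Project("{FAE0...}") = "OCI.PSModules.Common", ...',
        'EndProject',
        'Global',
        'GlobalSection(ProjectConfigurationPlatforms) = postSolution',
        'EndGlobalSection',
        'EndGlobal',
    ]
    # Same scan as above: the last 'EndProject' marks the insertion point.
    locs = [i for i, val in enumerate(lines) if 'EndProject' in val]
    lines.insert(locs[-1] + 1, 'Project(...) = "OCI.PSModules.Newservice", ...')
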
0.0.1-SNAPSHOT') -@click.option('--spec-generation-type', help='The generation type: PUBLIC or PREVIEW') -@click.option('--generate-waiters/--no-generate-waiters', default=True, help='Generate waiters') -@click.option('--generate-paginators/--no-generate-paginators', default=True, help='Generate paginators') -@click.option('--pom-location', type=click.Path(exists=True), default=DEFAULT_PARENT_POM_LOCATION, help='Location of the pom.xml file in the root directory of the OCI .NET SDK') -@click.option('--github-whitelist-location', type=click.Path(exists=True), default=DEFAULT_GITHUB_WHITELIST_LOCATION, help='Location of the github.whitelist file to update') -def add_or_update_spec_command(artifact_id, group_id, spec_name, relative_spec_path, endpoint, subdomain, version, spec_generation_type, generate_waiters, generate_paginators, pom_location, github_whitelist_location): - print(add_or_update_spec(artifact_id, group_id, spec_name, relative_spec_path, endpoint, subdomain, version, spec_generation_type, generate_waiters, generate_paginators, pom_location, github_whitelist_location)) - - -if __name__ == '__main__': - add_or_update_spec_command() diff --git a/scripts/auto_gen_utils/add_or_update_scripts/python_sdk_add_or_update_spec.py b/scripts/auto_gen_utils/add_or_update_scripts/python_sdk_add_or_update_spec.py deleted file mode 100644 index d8e51097cf..0000000000 --- a/scripts/auto_gen_utils/add_or_update_scripts/python_sdk_add_or_update_spec.py +++ /dev/null @@ -1,225 +0,0 @@ -#!/usr/bin/env python - -# -# This script manipulates pom.xml to either add new specs or update the versions of existing specs. -# - -import os -import re -import click -from click.exceptions import MissingParameter - -from .add_or_update_spec_utils import convert_camel_to_snake_case -from .add_or_update_spec_utils import parse_pom -from .module_pom_file_add_or_update_spec import module_pom_file_add_or_update_spec -from .module_pom_file_add_or_update_spec import DEFAULT_POM_LOCATION -from .module_pom_file_add_or_update_spec import DEFAULT_GITHUB_WHITELIST_LOCATION -from .module_pom_file_add_or_update_spec import ns -from .module_pom_file_add_or_update_spec import check_args_for_new_service as base_check_args_for_new_service - -DEFAULT_MODULE_LOCATION = "poms" - -GENERATE_EXECUTION_TEMPLATE = """ - - python-public-sdk-{artifact_id} - compile - - generate - - - ${{codegen-language}} - ${{preprocessed-temp-dir}}/{artifact_id}/${{{artifact_id}-spec-file}} - src/oci - OCI - {spec_generation_type} - - {spec_name} - true - {regional_non_regional_service_overrides} - - ${{feature-id-file}} - ${{feature-id-dir}} - ${{spec-temp-dir}}/serviceSlugNameMapping.yaml - true - - -""" - - -CLEAN_ELEMENT_TEMPLATE = """ - - src/oci/{spec_name} - - **/* - - -""" - -MODULE_TEMPLATE = 'poms/{}/pom.xml' - -GITHUB_WHITELIST_TEMPLATE = '\n^src/oci/{}/.*' + r'\.py$' - - -# Returns was_changed, was_ignored -def update_endpoint(pom, artifact_id, endpoint): - results = pom.findall(".//ns:plugin[ns:artifactId='bmc-sdk-swagger-maven-plugin']/ns:executions/ns:execution[ns:id='python-public-sdk-{artifact_id}']//ns:additionalProperties/ns:endpoint".format(artifact_id=artifact_id), ns) - if results: - # Only allow update if the pom.xml file has the endpoint - endpoint_node = results[0] - if endpoint_node.text != endpoint: - endpoint_node.text = endpoint - return True, False - else: - return False, False - - print('Ignored update to subdomain/endpoint -- service had no setting for this in pom.xml file and is getting that information from spec') - return 
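
The endpoint lookup above leans on ElementTree's bracketed child predicates ([ns:artifactId='...']), which select a parent element by the text of one of its children; the namespaces mapping applies inside the predicate as well. A compact illustration of the same query shape, on a simplified document:

    import xml.etree.ElementTree as ET

    ns = {"ns": "http://maven.apache.org/POM/4.0.0"}
    doc = ET.fromstring(
        '<project xmlns="http://maven.apache.org/POM/4.0.0"><build><plugins>'
        '<plugin><artifactId>bmc-sdk-swagger-maven-plugin</artifactId>'
        '<executions/></plugin></plugins></build></project>')

    # Selects the <executions> of the one plugin whose artifactId matches.
    hits = doc.findall(
        ".//ns:plugin[ns:artifactId='bmc-sdk-swagger-maven-plugin']"
        "/ns:executions", ns)
    assert len(hits) == 1
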
False, True - - -def check_args_for_new_service(locals): - base_check_args_for_new_service(locals) - # not checking 'endpoint' anymore; can be specified either in ticket - # or in spec using 'x-obmcs-endpoint-template'. If neither is specified, - # this fails in the generator - - -def gather_settings(pom, artifact_id): - settings = {} - - xpath = ".//ns:dependencyManagement//ns:dependency[ns:artifactId='{artifact_id}']".format(artifact_id=artifact_id) - dependency = pom.findall(xpath, ns)[0] - settings["group_id"] = dependency.find('./ns:groupId', ns).text - settings["artifact_id"] = artifact_id - settings["version"] = dependency.find('./ns:version', ns).text - - xpath = ".//ns:properties/ns:{artifact_id}-spec-file".format(artifact_id=artifact_id) - spec_file_node = pom.findall(xpath, ns)[0] - settings["relative_spec_path"] = spec_file_node.text - - results = pom.findall(".//ns:plugin[ns:artifactId='bmc-sdk-swagger-maven-plugin']/ns:executions/ns:execution[ns:id='python-public-sdk-{artifact_id}']//ns:additionalProperties/ns:endpoint".format(artifact_id=artifact_id), ns) - if results: - endpoint_node = results[0] - subdomain = endpoint_node.text - subdomain = re.sub('^.*://', '', subdomain) # remove protocol and '://' - subdomain = re.sub(r'\.{domain}.*$', '', subdomain) # remove '.{domain}' and everything after it - settings["subdomain"] = subdomain - - settings["spec_name"] = get_spec_name(pom) - - return settings - - -def get_spec_name(pom): - xpath_for_spec_name = "./ns:artifactId" - spec_name = pom.find(xpath_for_spec_name, ns).text - - return spec_name - - -def existing_spec(source_path, artifact_id): - xpath_for_spec_dependency_declaration = ".//ns:dependency[ns:artifactId='{artifact_id}']".format(artifact_id=artifact_id) - for item in os.listdir(source_path): - path = os.path.join(source_path, item) - if os.path.isdir(path): - pom_location = os.path.join(path, "pom.xml") - if os.path.exists(pom_location): - pom = parse_pom(pom_location) - if (pom.findall(xpath_for_spec_dependency_declaration, ns)): - print("Existing service") - return pom, pom_location - - return None, None - - -def add_or_update_spec(artifact_id=None, - group_id=None, - spec_name=None, - relative_spec_path=None, - endpoint=None, - subdomain=None, - version=None, - spec_generation_type=None, - regional_sub_service_overrides=None, - non_regional_sub_service_overrides=None, - pom_location=None, - github_whitelist_location=None, - module_location=None): - - if pom_location and not module_location: - module_location = os.path.join(os.path.dirname(pom_location), DEFAULT_MODULE_LOCATION) - print("Location of pom modules: {}".format(module_location)) - - # force format of spec_name to (lower) snake case for consistency with standards of python SDK and CLI - if spec_name: - spec_name = convert_camel_to_snake_case(spec_name) - - service_pom, service_pom_path = existing_spec(module_location, artifact_id) - print("service_pom='{}', service_pom_path='{}'".format(service_pom, service_pom_path)) - - if not service_pom_path: - if not spec_name: - raise MissingParameter('Must specify --spec-name for new spec') - service_pom_path = os.path.join(module_location, spec_name, "pom.xml") - os.makedirs(os.path.join(module_location, spec_name)) - print("New module pom path: {}".format(service_pom_path)) - - return module_pom_file_add_or_update_spec(artifact_id, group_id, spec_name, relative_spec_path, endpoint, subdomain, - version, spec_generation_type, regional_sub_service_overrides, - non_regional_sub_service_overrides, pom_location, - 
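
convert_camel_to_snake_case comes from add_or_update_spec_utils, which is outside this hunk. A conventional implementation matching the behaviour the option help describes (kmsProvisioning -> kms_provisioning) would be the following; this is a hypothetical equivalent, and the real helper may handle more edge cases:

    import re

    def convert_camel_to_snake_case(name):
        # Insert underscores at word boundaries, then lower-case everything.
        s1 = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
        return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', s1).lower()

    assert convert_camel_to_snake_case('kmsProvisioning') == 'kms_provisioning'
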
github_whitelist_location, GITHUB_WHITELIST_TEMPLATE, - GENERATE_EXECUTION_TEMPLATE, CLEAN_ELEMENT_TEMPLATE, update_endpoint, - check_args_for_new_service, gather_settings, module_pom=service_pom, - module_pom_path=service_pom_path, module_template=MODULE_TEMPLATE) - - -@click.command() -@click.option('--artifact-id', help='The artifact id for the spec artifact (e.g. coreservices-api-spec') -@click.option('--group-id', help='The group id for the spec artifact (e.g. com.oracle.pic.commons)') -@click.option('--spec-name', help='The name of the spec. This will be used (e.g. core, identity, object_storage). This is also used as the module name.') -@click.option('--relative-spec-path', help='The relative path of the spec within the artifact (e.g. coreservices-api-spec-20160918-external.yaml)') -@click.option('--endpoint', help='The base endpoint for the service (e.g. https://iaas.{domain}/20160918)') -@click.option('--subdomain', help='The subdomain for the service (e.g. if the endpoint is https://iaas.{domain}/20160918), the subdomain is "iaas"') -@click.option('--version', help='The version of the spec artifact (e.g. 0.0.1-SNAPSHOT') -@click.option('--spec-generation-type', help='The generation type: PUBLIC or PREVIEW') -@click.option('--regional-sub-service-overrides', multiple=True, help="""For specs that contain multiple services -(because there are operations with different tags in the spec), which of those services should be considered regional. -Services are considered as regional by default. - -This should be the snake_cased name of the tag/service. For example kms_provisioning instead of kmsProvisioning. - -This parameter can be provided multiple times""") -@click.option('--non-regional-sub-service-overrides', multiple=True, help="""For specs that contain multiple services -(because there are operations with different tags in the spec), which of those services should be considered non-regional. - -This should be the snake_cased name of the tag/service. For example kms_provisioning instead of kmsProvisioning. 
- -This parameter can be provided multiple times""") -@click.option('--pom-location', - type=click.Path(exists=True), - default=DEFAULT_POM_LOCATION, - help='Location of the pom.xml file to update') -@click.option('--github-whitelist-location', - type=click.Path(exists=True), - default=DEFAULT_GITHUB_WHITELIST_LOCATION, - help='Location of the github.whitelist file to update') -@click.option('--module-location', - type=click.Path(exists=True), - default=DEFAULT_MODULE_LOCATION, - help="Parent directory containing the module pom.xml files") -def add_or_update_spec_command(artifact_id, group_id, spec_name, relative_spec_path, endpoint, subdomain, version, - spec_generation_type, regional_sub_service_overrides, non_regional_sub_service_overrides, - pom_location, github_whitelist_location, module_location): - - if not artifact_id: - raise click.exceptions.MissingParameter(param_type='option', param_hint='--artifact-id', message='Artifact id parameter is required') - - if subdomain and endpoint: - raise click.exceptions.UsageError('Cannot specify both --endpoint and --subdomain') - - print(add_or_update_spec(artifact_id, group_id, spec_name, relative_spec_path, endpoint, subdomain, version, - spec_generation_type, regional_sub_service_overrides, non_regional_sub_service_overrides, - pom_location, github_whitelist_location, module_location)) - - -if __name__ == '__main__': - add_or_update_spec_command() diff --git a/scripts/auto_gen_utils/add_or_update_scripts/ruby_sdk_add_or_update_spec.py b/scripts/auto_gen_utils/add_or_update_scripts/ruby_sdk_add_or_update_spec.py deleted file mode 100644 index b83b2c16a0..0000000000 --- a/scripts/auto_gen_utils/add_or_update_scripts/ruby_sdk_add_or_update_spec.py +++ /dev/null @@ -1,188 +0,0 @@ -#!/usr/bin/env python - -# -# This script manipulates the pom.xml tree to either add new specs or update the versions of existing specs. -# - -import os -import click - -from datetime import datetime -from .module_pom_file_add_or_update_spec import DEFAULT_POM_LOCATION -from .spec_updater_base import SpecUpdaterBase - - -# The path for the modules is under "ruby-sdk/codegen" -RUBY_MODULE_LOCATION = "codegen" - -# The service pom.xml template for the ruby sdk -RUBY_POM_FILE_TEMPLATE = """ - - 4.0.0 - - com.oci.sdk - ruby-sdk-codegen-template - {sdk_version} - .. 
- - ruby-sdk-{module_name}-codegen - OCI Ruby SDK - {service_friendly_name} Service Codegen - pom - - {group_id} - {artifact_id} - {artifact_version} - {spec_path_relative_to_jar} - {module_name} - {subdomain} - true - true - - - - - org.codehaus.mojo - build-helper-maven-plugin - - - com.oracle.oci.sdk.utilities - dex-get-spec-artifact-plugin - - - com.oracle.oci.sdk.utilities - spec-conditionals-preprocessor-plugin - - - com.oracle.bmc.sdk - bmc-sdk-swagger-maven-plugin - - - com.mycila - license-maven-plugin - - - exec-maven-plugin - org.codehaus.mojo - - - org.apache.maven.plugins - maven-antrun-plugin - - - maven-clean-plugin - - - - -""" - - -# The ruby-specific pom.xml template parameters for XML parsing -RUBY_SPEC_PARAMS_XML_PATH_DICT = { - 'group_id': ".//ns:properties//ns:codegen.artifactory.groupId", - 'artifact_id': ".//ns:properties//ns:codegen.artifactory.artifactId", - 'version': ".//ns:properties//ns:codegen.artifactory.version", - 'relative_spec_path': ".//ns:properties//ns:codegen.artifact.spec.path", - 'service_name': ".//ns:properties//ns:codegen.service.name", - 'subdomain': ".//ns:properties//ns:codegen.endpoint.prefix" -} - - -class RubySpecUpdater(SpecUpdaterBase): - # Required for new services to reference from the generated composite operations - # Generates an empty util.rb file as ruby-sdk/lib/oci/module_name/util.rb - def generate_util_rb(self, sdk_dir, module_name): - gen_path = os.path.join(sdk_dir, 'lib', 'oci', module_name) - util_file_path = os.path.join(gen_path, 'util.rb') - - if os.path.isfile(util_file_path): - print("{} already exists. Not overwriting".format(util_file_path)) - return - - if not os.path.exists(gen_path): - os.makedirs(gen_path) - - with open(util_file_path, 'w') as util_file: - util_contents = "# Copyright (c) 2016, {}, Oracle and/or its affiliates. All rights reserved.\n\n" \ - .format(datetime.now().year) - util_file.write(util_contents) - print("Created {}".format(util_file_path)) - - # Override add_spec to generate the util.rb file that is required for the ruby sdk - def add_spec(self, - sdk_dir, - module_location, - spec_name, - group_id, - artifact_id, - version, - relative_spec_path, - subdomain): - result = SpecUpdaterBase.add_spec(self, - sdk_dir, - module_location, - spec_name, - group_id, - artifact_id, - version, - relative_spec_path, - subdomain) - self.generate_util_rb(sdk_dir, spec_name) - return result - - -################################################## -# Main -################################################## -@click.command() -@click.option('--artifact-id', required=True, help='The artifact id for the spec artifact (e.g. coreservices-api-spec') -@click.option('--group-id', help='The group id for the spec artifact (e.g. com.oracle.pic.commons)') -@click.option('--spec-name', help='The name of the spec. This will be used (e.g. core, identity, object_storage). ' - 'This is also used as the module name.') -@click.option('--relative-spec-path', help='The relative path of the spec within the artifact ' - '(e.g. coreservices-api-spec-20160918-external.yaml)') -@click.option('--endpoint', help='The base endpoint for the service (e.g. https://iaas.{domain}/20160918)') -@click.option('--subdomain', help='The subdomain for the service (e.g. \'iaas\')') -@click.option('--version', help='The version of the spec artifact (e.g. 
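
generate_util_rb above seeds each new Ruby module with a bare util.rb whose only content is a copyright line built from the current year; for a run in 2024 the produced header reads as follows:

    from datetime import datetime

    util_contents = "# Copyright (c) 2016, {}, Oracle and/or its affiliates. All rights reserved.\n\n" \
        .format(datetime.now().year)
    # => '# Copyright (c) 2016, 2024, Oracle and/or its affiliates. All rights reserved.\n\n'
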
0.0.1-SNAPSHOT') -@click.option('--spec-generation-type', help='The generation type: PUBLIC or PREVIEW (is ignored for Ruby)') -@click.option('--regional-sub-service-overrides', multiple=True, help="Is ignored for Ruby SDK.") -@click.option('--non-regional-sub-service-overrides', - multiple=True, - help="Is ignored for Ruby SDK. Non-regional client overrides require manual update to the service module's pom.xml") -@click.option('--signing-strategy', - help='The signing strategy to use for the client. Is ignored for Ruby. Requires manual pom.xml update to override') -@click.option('--pom-location', - type=click.Path(exists=True), - default=DEFAULT_POM_LOCATION, - help='Location of the root pom.xml file for the Ruby SDK') -@click.option('--module-location', - type=click.Path(exists=True), - help="Parent directory containing the module pom.xml files") -def add_or_update_command(artifact_id, - group_id, - spec_name, - relative_spec_path, - endpoint, - subdomain, - signing_strategy, - version, - spec_generation_type, - regional_sub_service_overrides, - non_regional_sub_service_overrides, - pom_location, - module_location): - spec_updater = RubySpecUpdater(RUBY_MODULE_LOCATION, RUBY_POM_FILE_TEMPLATE, RUBY_SPEC_PARAMS_XML_PATH_DICT) - print(spec_updater.add_or_update_spec( - artifact_id=artifact_id, - group_id=group_id, - spec_name=spec_name, - relative_spec_path=relative_spec_path, - endpoint=endpoint, - subdomain=subdomain, - version=version, - pom_location=pom_location, - module_location=module_location)) - - -if __name__ == '__main__': - add_or_update_command() diff --git a/scripts/auto_gen_utils/add_or_update_scripts/single_pom_file_add_or_update_spec.py b/scripts/auto_gen_utils/add_or_update_scripts/single_pom_file_add_or_update_spec.py deleted file mode 100644 index b802058e7d..0000000000 --- a/scripts/auto_gen_utils/add_or_update_scripts/single_pom_file_add_or_update_spec.py +++ /dev/null @@ -1,346 +0,0 @@ -#!/usr/bin/env python - -# -# This is a library for manipulating a single pom.xml file to either -# add new specs or update the versions of existing specs. 
-# -# Can be used by Python SDK, CLI, Ruby SDK and Go SDK - -import xml.etree.ElementTree as ET -import click -from click.exceptions import UsageError, MissingParameter - -from .add_or_update_spec_utils import parse_pom, write_xml, AddOrUpdateSpecResult -from .add_or_update_spec_utils import compute_changed_settings -from .add_or_update_spec_utils import add_spec_module_to_github_whitelist -from shared.version_utils import is_version_increasing - -DEFAULT_POM_LOCATION = "pom.xml" -DEFAULT_GITHUB_WHITELIST_LOCATION = "github.whitelist" - -SPEC_FILE_PROPERTY_TEMPLATE = """ -<{artifact_id}-spec-file>{spec_path_relative_to_jar} -""" - -UNPACK_EXECUTION_TEMPLATE = """ - - unpack-{artifact_id} - initialize - - unpack - - - - - {group_id} - {artifact_id} - jar - **/* - ${{spec-temp-dir}}/{artifact_id} - - - - -""" - -PREFER_EXECUTION_TEMPLATE = """ - - spec-conditionals-prefer-{artifact_id} - initialize - - prefer - - - - - ${{spec-temp-dir}}/{artifact_id}/source/${{{artifact_id}-spec-file}} - - ${{spec-temp-dir}}/{artifact_id}/${{{artifact_id}-spec-file}} - - ${{preferred-temp-dir}}/{artifact_id}/${{{artifact_id}-spec-file}} - - -""" - -PREPROCESS_EXECUTION_TEMPLATE = """ - - spec-conditionals-preprocess-{artifact_id} - initialize - - preprocess - - - ${{preferred-temp-dir}}/{artifact_id}/${{{artifact_id}-spec-file}} - ${{preprocessed-temp-dir}}/{artifact_id}/${{{artifact_id}-spec-file}} - ${{enabled-groups-file}} - ${{enabled-groups-dir}} - - -""" - - -DEPENDENCY_MANAGEMENT_TEMPLATE = """ - - {group_id} - {artifact_id} - {version} - -""" - -ns = {"ns": "http://maven.apache.org/POM/4.0.0"} - -# allow default namespace for output, dont print ns0: prefixes everywhere -ET.register_namespace('', "http://maven.apache.org/POM/4.0.0") - - -def generate_and_add_property_element(pom, artifact_id, spec_path_relative_to_jar): - content = SPEC_FILE_PROPERTY_TEMPLATE.format( - artifact_id=artifact_id, - spec_path_relative_to_jar=spec_path_relative_to_jar) - - property_element = ET.fromstring(content) - - xpath = ".//ns:properties" - properties = pom.findall(xpath, ns)[0] - properties.append(property_element) - - -def update_relative_spec_path(pom, artifact_id, spec_path_relative_to_jar): - xpath = ".//ns:properties/ns:{artifact_id}-spec-file".format(artifact_id=artifact_id) - spec_file_node = pom.findall(xpath, ns)[0] - if spec_file_node.text != spec_path_relative_to_jar: - spec_file_node.text = spec_path_relative_to_jar - return True - return False - - -def generate_and_add_unpack_element(pom, group_id, artifact_id, spec_path_relative_to_jar): - content = UNPACK_EXECUTION_TEMPLATE.format( - group_id=group_id, - artifact_id=artifact_id, - spec_path_relative_to_jar=spec_path_relative_to_jar) - - unpack_element = ET.fromstring(content) - - # find dex-get-spec-artifact-plugin where unpacking happens - unpack_plugin_executions = pom.findall(".//ns:plugin[ns:artifactId='dex-get-spec-artifact-plugin']/ns:executions", ns)[0] - unpack_plugin_executions.append(unpack_element) - - -def generate_and_add_prefer_element(pom, group_id, artifact_id, spec_path_relative_to_jar): - content = PREFER_EXECUTION_TEMPLATE.format( - group_id=group_id, - artifact_id=artifact_id, - spec_path_relative_to_jar=spec_path_relative_to_jar) - - unpack_element = ET.fromstring(content) - - # find spec-conditionals-preprocessor-plugin where unpacking happens - unpack_plugin_executions = pom.findall(".//ns:plugin[ns:artifactId='spec-conditionals-preprocessor-plugin']/ns:executions", ns)[0] - unpack_plugin_executions.append(unpack_element) - - 
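
The register_namespace('') call above is what keeps the rewritten pom.xml files clean: without it, ElementTree invents an ns0: prefix for every element it serializes in the POM namespace. A quick demonstration:

    import xml.etree.ElementTree as ET

    elem = ET.Element('{http://maven.apache.org/POM/4.0.0}project')
    print(ET.tostring(elem).decode())
    # <ns0:project xmlns:ns0="http://maven.apache.org/POM/4.0.0" />

    ET.register_namespace('', 'http://maven.apache.org/POM/4.0.0')
    print(ET.tostring(elem).decode())
    # <project xmlns="http://maven.apache.org/POM/4.0.0" />
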
-def generate_and_add_preprocess_element(pom, group_id, artifact_id, spec_path_relative_to_jar):
-    content = PREPROCESS_EXECUTION_TEMPLATE.format(
-        group_id=group_id,
-        artifact_id=artifact_id,
-        spec_path_relative_to_jar=spec_path_relative_to_jar)
-
-    unpack_element = ET.fromstring(content)
-
-    # find spec-conditionals-preprocessor-plugin where unpacking happens
-    unpack_plugin_executions = pom.findall(".//ns:plugin[ns:artifactId='spec-conditionals-preprocessor-plugin']/ns:executions", ns)[0]
-    unpack_plugin_executions.append(unpack_element)
-
-
-def generate_and_add_generate_section(pom, spec_name, artifact_id, spec_path_relative_to_jar, spec_generation_type, regional_sub_service_overrides, non_regional_sub_service_overrides, generate_execution_template):
-    regional_non_regional_service_overrides_content = ''
-    if regional_sub_service_overrides or non_regional_sub_service_overrides:
-        if regional_sub_service_overrides:
-            for override in regional_sub_service_overrides:
-                regional_non_regional_service_overrides_content += '<{service_name}>true</{service_name}>\n'.format(service_name=override)
-
-        if non_regional_sub_service_overrides:
-            for override in non_regional_sub_service_overrides:
-                regional_non_regional_service_overrides_content += '<{service_name}>false</{service_name}>\n'.format(service_name=override)
-
-    content = generate_execution_template.format(
-        artifact_id=artifact_id,
-        spec_name=spec_name,
-        spec_path_relative_to_jar=spec_path_relative_to_jar,
-        spec_generation_type=spec_generation_type,
-        regional_non_regional_service_overrides=regional_non_regional_service_overrides_content)
-
-    generate_element = ET.fromstring(content)
-
-    # find bmc-sdk-swagger-maven-plugin where generation happens
-    generate_plugin_executions = pom.findall(".//ns:plugin[ns:artifactId='bmc-sdk-swagger-maven-plugin']/ns:executions", ns)[0]
-    generate_plugin_executions.append(generate_element)
-
-
-def generate_and_add_clean_section(pom, spec_name, clean_element_template):
-    if not clean_element_template:
-        return
-
-    content = clean_element_template.format(
-        spec_name=spec_name)
-
-    clean_element = ET.fromstring(content)
-
-    # find filesetes where clean directory goes
-    filesets = pom.findall(".//ns:plugin[ns:artifactId='maven-clean-plugin']//ns:filesets", ns)[0]
-    filesets.append(clean_element)
-
-
-def generate_and_add_dependency_management_section(pom, group_id, artifact_id, version):
-    content = DEPENDENCY_MANAGEMENT_TEMPLATE.format(
-        group_id=group_id,
-        artifact_id=artifact_id,
-        version=version)
-
-    dep_mgt_element = ET.fromstring(content)
-
-    # find dependencies where version is specified
-    dependencies = pom.findall(".//ns:dependencyManagement/ns:dependencies", ns)[0]
-    dependencies.append(dep_mgt_element)
-
-
-def update_version_of_existing_spec(pom, artifact_id, version):
-    xpath = ".//ns:dependencyManagement//ns:dependency[ns:artifactId='{artifact_id}']".format(artifact_id=artifact_id)
-    dependency = pom.findall(xpath, ns)[0]
-    old_version = dependency.find('./ns:version', ns).text
-
-    if not is_version_increasing(old_version, version):
-        return old_version
-
-    dependency.find('./ns:version', ns).text = version
-    return None  # the old version was lower
-
-
-def check_args_for_new_service(locals):
-    if not locals['version']:
-        raise MissingParameter('Must specify --version for new spec')
-
-    if not locals['group_id']:
-        raise MissingParameter('Must specify --group-id for new spec')
-
-    if not locals['spec_name']:
-        raise MissingParameter('Must specify --spec-name for new spec')
-
-    if not locals['relative_spec_path']:
-        raise MissingParameter('Must 
specify --relative-spec-path for new spec') - - -def single_pom_file_add_or_update_spec(artifact_id=None, group_id=None, spec_name=None, - relative_spec_path=None, endpoint=None, subdomain=None, version=None, - spec_generation_type=None, regional_sub_service_overrides=None, - non_regional_sub_service_overrides=None, pom_location=None, - github_whitelist_location=None, github_whitelist_template=None, generate_execution_template=None, - clean_element_template=None, update_endpoint_function=None, - check_args_for_new_service_function=check_args_for_new_service, gather_settings=None): - - found = False - - if not generate_execution_template: - raise ValueError('Must supply generate_execution_template') - - if not artifact_id: - raise click.exceptions.MissingParameter(param_type='option', param_hint='--artifact-id', message='Artifact id parameter is required') - - if subdomain and endpoint: - raise UsageError('Cannot specify both --endpoint and --subdomain') - - pom = parse_pom(pom_location) - - updated_spec = False - - ignored = [] - previous = {} - changed = [] - - # determine if this artifact is already in the spec - xpath_for_spec_dependency_declaration = ".//ns:dependency[ns:artifactId='{artifact_id}']".format(artifact_id=artifact_id) - if (pom.findall(xpath_for_spec_dependency_declaration, ns)): - print('Artifact {} already exists in pom.xml. Updating specified fields...'.format(artifact_id)) - - found = True - - previous = gather_settings(pom, artifact_id) - - if version: - newer_version = update_version_of_existing_spec(pom, artifact_id, version) - if newer_version: - print('The version was not updated to {}, because it was already at {}.'.format(version, newer_version)) - else: - updated_spec |= True - - if relative_spec_path: - updated_spec |= update_relative_spec_path(pom, artifact_id, relative_spec_path) - - if update_endpoint_function: - was_ignored = False - if endpoint: - was_changed, was_ignored = update_endpoint_function(pom, artifact_id, endpoint) - updated_spec |= was_changed - elif subdomain: - was_changed, was_ignored = update_endpoint_function(pom, artifact_id, 'https://{}.{{domain}}'.format(subdomain)) - updated_spec |= was_changed - - if was_ignored: - ignored.append('subdomain') - - if spec_name: - ignored.append('spec_name') - if group_id: - ignored.append('group_id') - - current = gather_settings(pom, artifact_id) - changed = compute_changed_settings(previous, current) - else: - if endpoint: - print('Ignoring endpoint setting for new services; new services have to get it from the spec') - ignored.append('endpoint') - - if subdomain: - print('Ignoring subdomain setting for new services; new services have to get it from the spec') - ignored.append('subdomain') - - if subdomain and not endpoint: - endpoint = 'https://{}.{{domain}}'.format(subdomain) - - check_args_for_new_service_function(locals()) - - if not spec_generation_type: - spec_generation_type = 'PUBLIC' - - print('Artifact {} does not exist in pom.xml. 
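
The previous and current settings are snapshotted around the edits and diffed with compute_changed_settings, defined in the deleted add_or_update_spec_utils module. A plausible minimal equivalent (the real helper may differ):

    def compute_changed_settings(previous, current):
        # Report every key whose value differs between the two snapshots.
        return sorted(k for k in set(previous) | set(current)
                      if previous.get(k) != current.get(k))

    assert compute_changed_settings(
        {'version': '1.0'}, {'version': '1.1'}) == ['version']
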
Adding it...'.format(artifact_id)) - generate_and_add_property_element(pom, artifact_id, relative_spec_path) - generate_and_add_unpack_element(pom, group_id, artifact_id, relative_spec_path) - generate_and_add_prefer_element(pom, group_id, artifact_id, relative_spec_path) - generate_and_add_preprocess_element(pom, group_id, artifact_id, relative_spec_path) - generate_and_add_generate_section(pom, spec_name, artifact_id, relative_spec_path, spec_generation_type, regional_sub_service_overrides, non_regional_sub_service_overrides, generate_execution_template) - generate_and_add_clean_section(pom, spec_name, clean_element_template) - generate_and_add_dependency_management_section(pom, group_id, artifact_id, version) - add_spec_module_to_github_whitelist(spec_name, github_whitelist_location, github_whitelist_template) - - updated_spec = True - - if updated_spec: - write_xml(pom_location, pom) - - print('====== Success! ======') - print(""" -Next Steps ----------- -1. Run mvn clean install to update generated code -""") - else: - print('===== Spec was not updated =====') - - return AddOrUpdateSpecResult( - updated=updated_spec, - existing=found, - ignored=ignored, - previous=previous, - changed=changed - ) diff --git a/scripts/auto_gen_utils/add_or_update_scripts/spec_updater_base.py b/scripts/auto_gen_utils/add_or_update_scripts/spec_updater_base.py deleted file mode 100644 index 138f74b6a4..0000000000 --- a/scripts/auto_gen_utils/add_or_update_scripts/spec_updater_base.py +++ /dev/null @@ -1,377 +0,0 @@ -import os -import re -import string -import xml.etree.ElementTree as ET - -from click.exceptions import MissingParameter -from shared.version_utils import is_version_increasing -from .add_or_update_spec_utils import CommentedTreeBuilder, ns, indent, write_xml, parse_pom, find_pom_version -from .add_or_update_spec_utils import AddOrUpdateSpecResult, compute_changed_settings, convert_camel_to_snake_case -from .java_sdk_add_or_update_spec import MODULE_TEMPLATE - -# Example dictionary for defining the paths in the XML for the templates: -# SPEC_PARAMS_XML_PATH_DICT = { -# 'group_id': ".//ns:properties//ns:codegen.artifactory.groupId", -# 'artifact_id': ".//ns:properties//ns:codegen.artifactory.artifactId", -# 'version': ".//ns:properties//ns:codegen.artifactory.version", -# 'relative_spec_path': ".//ns:properties//ns:codegen.artifact.spec.path", -# 'service_name': ".//ns:properties//ns:codegen.service.name", -# 'subdomain': ".//ns:properties//ns:codegen.endpoint.prefix" -# } - - -class SpecUpdaterBase: - ET.register_namespace('', "http://maven.apache.org/POM/4.0.0") - - def __init__(self, pom_module_location, service_pom_file_template, spec_params_xml_path_dict): - self.pom_module_location = pom_module_location - self.service_pom_file_template = service_pom_file_template - self.spec_params_xml_path_dict = spec_params_xml_path_dict - - # Creates a new pom file as ruby-sdk/codegen/module_name/pom.xml - def generate_child_codegen_pom(self, - service_pom_path, - module_name, - service_friendly_name, - sdk_version, - group_id, - artifact_id, - artifact_version, - spec_path_relative_to_jar, - subdomain): - if not subdomain: - subdomain = "None" - - content = self.service_pom_file_template.format( - group_id=group_id, - artifact_id=artifact_id, - artifact_version=artifact_version, - sdk_version=sdk_version, - service_friendly_name=service_friendly_name, - module_name=self.format_module_name_for_template(module_name), - spec_path_relative_to_jar=spec_path_relative_to_jar, - subdomain=subdomain) - - root 
= ET.fromstring(content, parser=ET.XMLParser(target=CommentedTreeBuilder())) - pom = ET.ElementTree(element=root) - write_xml(service_pom_path, pom) - - def format_module_name_for_template(self, module_name): - return module_name - - # Updates the submodule reference under ruby-sdk/codegen/pom.xml - def add_child_codegen_module(self, sdk_dir, module_location, module_name): - file_name = os.path.join(sdk_dir, module_location, "pom.xml") - pom = parse_pom(file_name) - - content = MODULE_TEMPLATE.format(name=module_name) - module_element = ET.fromstring(content, parser=ET.XMLParser(target=CommentedTreeBuilder())) - - # find modules - modules = pom.findall("./ns:modules", ns)[0] - modules.append(module_element) - - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - # Verifies required parameters for adding a new service spec. - def verify_params_for_add(self, spec_name, group_id, artifact_id, version, relative_spec_path): - # Check input parameters - missing_params = [] - if not spec_name: - missing_params.append('--spec-name') - - if not group_id: - missing_params.append('--group-id') - - if not artifact_id: - missing_params.append('--artifact-id') - - if not version: - missing_params.append('--version') - - if not relative_spec_path: - missing_params.append('--relative-spec-path') - - if missing_params: - raise MissingParameter( - 'The following options must be specified for a new spec: {}'.format(', '.join(missing_params))) - - # Main business logic for adding a new service spec - def add_spec(self, - sdk_dir, - module_location, - spec_name, - group_id, - artifact_id, - version, - relative_spec_path, - subdomain): - self.verify_params_for_add(spec_name, group_id, artifact_id, version, relative_spec_path) - - # Create the pom.xml file for writing (e.g., ruby-sdk/codegen/new_service/pom.xml) - service_pom_file = os.path.join(module_location, spec_name, "pom.xml") - service_dir = os.path.join(module_location, spec_name) - if not os.path.exists(service_dir): - os.makedirs(service_dir) - print('Module {} does not exist. 
Adding it as {}...'.format(spec_name, service_pom_file)) - - sdk_version = find_pom_version(os.path.join(sdk_dir, "pom.xml")) - service_friendly_name = string.capwords(spec_name.replace('_', ' ')) - - print("SDK VERSION: {}, Friendly Name: {}".format(sdk_version, service_friendly_name)) - self.generate_child_codegen_pom( - service_pom_file, - spec_name, # module_name == spec_name for Ruby - service_friendly_name, - sdk_version, - group_id, - artifact_id, - version, - relative_spec_path, - subdomain) - self.add_child_codegen_module(sdk_dir, module_location, spec_name) - - return AddOrUpdateSpecResult( - updated=False, - existing=False, - ignored=[], - previous={}, - changed=[]) - - ################################################## - # Update - ################################################## - - def gather_settings(self, module_name, service_pom_file): - settings = {} - - pom = parse_pom(service_pom_file) - - xpath = self.spec_params_xml_path_dict['group_id'] - xml_property = pom.findall(xpath, ns)[0] - settings["group_id"] = xml_property.text - - xpath = self.spec_params_xml_path_dict['artifact_id'] - xml_property = pom.findall(xpath, ns)[0] - settings["artifact_id"] = xml_property.text - - xpath = self.spec_params_xml_path_dict['version'] - xml_property = pom.findall(xpath, ns)[0] - settings["version"] = xml_property.text - - settings["module_name"] = module_name - - xpath = self.spec_params_xml_path_dict['relative_spec_path'] - xml_property = pom.findall(xpath, ns)[0] - settings["relative_spec_path"] = xml_property.text - - xpath = self.spec_params_xml_path_dict['subdomain'] - xml_property = pom.findall(xpath, ns) - if xml_property: - settings["subdomain"] = xml_property[0].text - - return settings - - def resolve_sub_domain(self, endpoint, subdomain): - if subdomain: - return subdomain - - if endpoint and not subdomain: - subdomain = endpoint - subdomain = re.sub('^.*://', '', subdomain) # remove protocol and '://' - subdomain = re.sub(r'\.{domain}.*$', '', subdomain) # remove '.{domain}' and everything after it - else: - subdomain = None - - return subdomain - - def update_version_of_existing_spec(self, pom_file, version): - pom = parse_pom(pom_file) - - xpath = self.spec_params_xml_path_dict['version'] - xml_property = pom.findall(xpath, ns)[0] - old_version = xml_property.text - - if not is_version_increasing(old_version, version): - return old_version - - xml_property.text = version - indent(pom.getroot()) - pom.write(pom_file, encoding="UTF-8", xml_declaration=True) - return None # the old version was lower - - def update_relative_spec_path_of_existing_spec(self, pom_file, relative_spec_path): - pom = parse_pom(pom_file) - - xpath = self.spec_params_xml_path_dict['relative_spec_path'] - xml_property = pom.findall(xpath, ns)[0] - xml_property.text = relative_spec_path - indent(pom.getroot()) - pom.write(pom_file, encoding="UTF-8", xml_declaration=True) - - def update_subdomain(self, pom_file, subdomain): - pom = parse_pom(pom_file) - - xpath = self.spec_params_xml_path_dict['subdomain'] - xml_property = pom.findall(xpath, ns)[0] - xml_property.text = subdomain - indent(pom.getroot()) - pom.write(pom_file, encoding="UTF-8", xml_declaration=True) - - def update_spec(self, - sdk_dir, - service_pom_file, - spec_name, - group_id, - artifact_id, - artifact_version, - relative_spec_path, - subdomain): - print('Artifact {} already exists in pom.xml. 
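
gather_settings above is table-driven: each logical setting name maps to an XPath in spec_params_xml_path_dict, so each language subclass only supplies its own dict (the Ruby and PowerShell dicts appear earlier in this patch). The hand-unrolled lookups are roughly equivalent to this loop, given a pom tree and ns mapping as in the earlier sketches:

    paths = {'version': ".//ns:properties//ns:codegen.artifactory.version"}
    settings = {}
    for key, xpath in paths.items():
        hits = pom.findall(xpath, ns)
        if hits:  # required keys index [0] directly in the original
            settings[key] = hits[0].text
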
Updating specified fields...'.format(artifact_id)) - - ignored_settings = [] - previous_settings = self.gather_settings(spec_name, service_pom_file) - changed_settings = [] - - if artifact_version: - updated_version = self.update_version_of_existing_spec(service_pom_file, artifact_version) - if updated_version: - print('The version was not updated to {}, because it was already at {}.'.format(artifact_version, - updated_version)) - - if relative_spec_path: - self.update_relative_spec_path_of_existing_spec(service_pom_file, relative_spec_path) - - if subdomain: - self.update_subdomain(service_pom_file, subdomain) - - if spec_name: - ignored_settings.append('subdomain') - - if group_id: - ignored_settings.append('group_id') - - current_settings = self.gather_settings(spec_name, service_pom_file) - changed_settings = changed = compute_changed_settings(previous_settings, current_settings) - - return AddOrUpdateSpecResult( - updated=changed != [], - # not found means it's a new spec, or if it is an existing spec, changed needs to be non-empty - existing=True, - ignored=ignored_settings, - previous=previous_settings, - changed=changed_settings) - - def find_existing_spec_pom(self, source_path, spec_name, artifact_id): - # Search for module pom.xml based on artifact_id - pom_location_from_artifact_id = None - for item in os.listdir(source_path): - path = os.path.join(source_path, item) - if not os.path.isdir(path): - continue - - pom_location = os.path.join(path, "pom.xml") - if not os.path.exists(pom_location): - continue - - pom = parse_pom(pom_location) - properties = pom.findall(".//ns:properties//ns:codegen.artifactory.artifactId", ns) - if len(properties) > 0 and artifact_id == properties[0].text: - pom_location_from_artifact_id = pom_location - - # Search for module pom.xml based on the spec_name - pom_location_from_spec_name = None - if spec_name: - pom_location_from_spec_name = os.path.join(source_path, spec_name, "pom.xml") - - does_pom_from_spec_name_exist = pom_location_from_spec_name and os.path.exists(pom_location_from_spec_name) - does_pom_from_artifact_id_exist = pom_location_from_artifact_id and os.path.exists( - pom_location_from_artifact_id) - - # Service pom was found from the spec name - if does_pom_from_spec_name_exist and not does_pom_from_artifact_id_exist: - return pom_location_from_spec_name - - # Spec name was not provided, but the service pom was found via artifact id - if not does_pom_from_spec_name_exist and does_pom_from_artifact_id_exist: - return pom_location_from_artifact_id - - # This is an update to an already matching spec with the same artifact_id and spec_name. - if does_pom_from_spec_name_exist and does_pom_from_artifact_id_exist and pom_location_from_spec_name == pom_location_from_artifact_id: - return pom_location_from_spec_name - - # This is a new service spec. Return None. - if not does_pom_from_spec_name_exist and not does_pom_from_artifact_id_exist: - return None - - # At this point, both a pom was resolved from the spec_name as well from a matching artifact_id. Favor the spec_name. - if does_pom_from_spec_name_exist: - # For Ruby, favor the spec_name in order to update the artifact_id. If the spec_name is to be renamed, - # it will require a manual update to remove the existing service spec from the Ruby SDK. - print( - "artifact_id [{}] already exists under [{}]. 
Returning pom based on spec name [{}]".format(artifact_id, - pom_location_from_artifact_id, - pom_location_from_spec_name)) - return pom_location_from_spec_name - - raise EnvironmentError("Unable to determine path for service pom.xml") - - def add_or_update_spec(self, - artifact_id=None, - group_id=None, - spec_name=None, - relative_spec_path=None, - endpoint=None, - subdomain=None, - signing_strategy=None, # Not used - version=None, - spec_generation_type=None, # Not used - regional_sub_service_overrides=None, # Not used - non_regional_sub_service_overrides=None, # Not used - pom_location=None, - module_location=None): - if signing_strategy: - print("signing_strategy is ignored and requires a manual update") - - if spec_generation_type: - print("spec_generation_type is ignored") - - if regional_sub_service_overrides: - print("regional_sub_service_overrides is ignored") - - if non_regional_sub_service_overrides: - print("non_regional_sub_service_overrides is ignored and requires a manual update") - - sdk_dir = os.path.dirname(pom_location) - - if spec_name: - spec_name = convert_camel_to_snake_case(spec_name) - - if pom_location and not module_location: - module_location = os.path.join(os.path.dirname(pom_location), self.pom_module_location) - print("Location of pom modules: {}".format(module_location)) - - service_pom_path = self.find_existing_spec_pom(module_location, spec_name, artifact_id) - subdomain = self.resolve_sub_domain(endpoint, subdomain) - print("Resolved subdomain: {}".format(subdomain)) - print("Service Pom.xml path: {}".format(service_pom_path)) - - if service_pom_path: - return self.update_spec(sdk_dir, - service_pom_path, - spec_name, - group_id, - artifact_id, - version, - relative_spec_path, - subdomain) - else: - return self.add_spec(sdk_dir, - module_location, - spec_name, - group_id, - artifact_id, - version, - relative_spec_path, - subdomain) diff --git a/scripts/auto_gen_utils/add_or_update_scripts/templates/pom-template.xml b/scripts/auto_gen_utils/add_or_update_scripts/templates/pom-template.xml deleted file mode 100644 index 2bc36a1251..0000000000 --- a/scripts/auto_gen_utils/add_or_update_scripts/templates/pom-template.xml +++ /dev/null @@ -1,103 +0,0 @@ - - - - com.oracle.bmc.sdk - python-sdk - 1.0.0-SNAPSHOT - ../../pom.xml - - - 4.0.0 - com.oracle.bmc.sdk - - 1.0.0-SNAPSHOT - - - ${project.basedir}/../.. 
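
Both the Ruby updater above and the TypeScript updater later in this patch locate codegen settings by running namespace-qualified XPath queries against each service pom.xml (the spec_params_xml_path_dict entries). A minimal, self-contained sketch of that pattern, with an invented sample pom and only the standard library:

    import xml.etree.ElementTree as ET

    # Maven poms declare a default namespace, so every XPath step needs the "ns:" prefix.
    ns = {"ns": "http://maven.apache.org/POM/4.0.0"}

    SAMPLE_POM = """<project xmlns="http://maven.apache.org/POM/4.0.0">
      <properties>
        <codegen.artifactory.artifactId>coreservices-api-spec</codegen.artifactory.artifactId>
        <codegen.artifactory.version>0.0.1-SNAPSHOT</codegen.artifactory.version>
      </properties>
    </project>"""

    root = ET.fromstring(SAMPLE_POM)
    # findall returns a list: the updaters index [0] for required properties and
    # guard with "if results:" for optional ones such as the subdomain.
    artifact_id = root.findall(".//ns:properties//ns:codegen.artifactory.artifactId", ns)[0].text
    version = root.findall(".//ns:properties//ns:codegen.artifactory.version", ns)[0].text
    print("{} {}".format(artifact_id, version))  # coreservices-api-spec 0.0.1-SNAPSHOT
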
- ${project.build.directory}/preprocessed - ${project.build.directory}/swagger - ${project.build.directory}/preferred - ${sdk-root-dir}/codegenConfig/enabledGroups - ${sdk-root-dir}/featureId.yaml - ${sdk-root-dir}/codegenConfig/featureIds - - - - - ${sdk-root-dir}/target - - - com.oracle.oci.sdk.utilities - dex-get-spec-artifact-plugin - ${oci.get.spec.artifact.plugin.version} - - - - - - - com.oracle.oci.sdk.utilities - spec-conditionals-preprocessor-plugin - ${preprocessor-version} - - - - - - - com.oracle.bmc.sdk - bmc-sdk-swagger-maven-plugin - ${codegen-version} - - false - - - - - - - - maven-clean-plugin - 3.0.0 - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/scripts/auto_gen_utils/add_or_update_scripts/typescript_sdk_add_or_update_spec.py b/scripts/auto_gen_utils/add_or_update_scripts/typescript_sdk_add_or_update_spec.py deleted file mode 100644 index a0c2c921b1..0000000000 --- a/scripts/auto_gen_utils/add_or_update_scripts/typescript_sdk_add_or_update_spec.py +++ /dev/null @@ -1,454 +0,0 @@ -#!/usr/bin/env python - -# -# This script manipulates the pom.xml tree to either add new specs or update the versions of existing specs. -# - -from .spec_updater_base import SpecUpdaterBase -import xml.etree.ElementTree as ET -import re -import click -import os -import string -from click.exceptions import UsageError, MissingParameter -from glob import glob - -from shared.version_utils import is_version_increasing -from .add_or_update_spec_utils import AddOrUpdateSpecResult, compute_changed_settings, indent -from .add_or_update_spec_utils import write_xml -from .add_or_update_spec_utils import CommentedTreeBuilder -from .add_or_update_spec_utils import parse_pom - -DEFAULT_PARENT_POM_LOCATION = "pom.xml" -DEFAULT_GITHUB_WHITELIST_LOCATION = "github.whitelist" -ns = {"ns": "http://maven.apache.org/POM/4.0.0"} - -# The path for the modules is under "oci-typescript-sdk/codegen" -TYPESCRIPT_MODULE_LOCATION = "codegen" - - -# Template for include each module in pom.xml -MODULE_TEMPLATE = "{name}" -# Pom.xml template specific to Typescript SDK -TYPESCRIPT_POM_FILE_TEMPLATE = """ - - 4.0.0 - - com.oracle.oci.sdk - oci-typescript-sdk-codegen - {sdk_version} - ../pom.xml - - oci-typescript-sdk-{module_name}-codegen - Oracle Cloud Infrastructure SDK - {service_friendly_name} Service Codegen - This project contains the SDK used for Oracle Cloud Infrastructure {service_friendly_name} - - {group_id} - {artifact_id} - {artifact_version} - {spec_path_relative_to_jar} - {module_name} - {generate_waiters} - {generate_paginators} - - - - codegen - - - - org.commonjava.maven.plugins - directory-maven-plugin - - - org.codehaus.mojo - build-helper-maven-plugin - - - com.oracle.oci.sdk.utilities - dex-get-spec-artifact-plugin - - - com.oracle.oci.sdk.utilities - spec-conditionals-preprocessor-plugin - - - com.oracle.bmc.sdk - bmc-sdk-swagger-maven-plugin - - - com.mycila - license-maven-plugin - - - com.mycila - license-maven-plugin - - - maven-antrun-plugin - - - org.codehaus.mojo - exec-maven-plugin - - - - - - -""" - -MODULE_PACKAGE_JSON_FILE_TEMPLATE = """ -{ - "name": "oci-%s", - "version": "%s", - "description": "OCI NodeJS client for %s Service", - "repository": { - "type": "git", - "url": "https://github.com/oracle/oci-typescript-sdk" - }, - "main": "../../target/lib/%s/index.js", - "typings": "../../target/lib/%s/index.d.ts", - "scripts": {}, - "author": { - "name": "Oracle Cloud Infrastructure", - "email": "" - }, - "license": "(UPL-1.0 OR Apache-2.0)", - "dependencies": { - 
"oci-common": "../common" - }, - "publishConfig": { - "registry": "https://registry.npmjs.org" - }, - "contributors": [ - "Jyoti Saini ", - "Joe Levy ", - "Walt Tran " - ] -} -""" - - -# The typescript-specific pom.xml template parameters for XML parsing -TYPESCRIPT_SPEC_PARAMS_XML_PATH_DICT = { - 'group_id': ".//ns:properties//ns:codegen.artifactory.groupId", - 'artifact_id': ".//ns:properties//ns:codegen.artifactory.artifactId", - 'version': ".//ns:properties//ns:codegen.artifactory.version", - 'relative_spec_path': ".//ns:properties//ns:codegen.artifact.spec.path", - 'service_name': ".//ns:properties//ns:codegen.service.name", - 'subdomain': ".//ns:properties//ns:codegen.service.group.endpoint" -} - - -MODULE_README_FORMAT = """ -# OCI NodeJS client for {service_friendly_name} Service - -This module enables you to write code to manage resources for {service_friendly_name} Service. - -## Requirements - -To use this module, you must have the following: - -- An Oracle Cloud Infrastructure account. -- A user created in that account, in a group with a policy that grants the desired permissions. This can be a user for yourself, or another person/system that needs to call the API. For an example of how to set up a new user, group, compartment, and policy, see [Adding Users](https://docs.cloud.oracle.com/en-us/iaas/Content/GSG/Tasks/addingusers.htm). For a list of typical policies you may want to use, see [Common Policies](https://docs.cloud.oracle.com/en-us/iaas/Content/Identity/Concepts/commonpolicies.htm). -- A key pair used for signing API requests, with the public key uploaded to Oracle. Only the user calling the API should be in possession of the private key. For more information, see [Configuring Credentials](https://docs.cloud.oracle.com/en-us/iaas/Content/API/SDKDocs/typescriptsdkgettingstarted.htm#Configure) - -## Installing - -Use the following command to install this module: - -``` -npm install oci-{module_name} -``` - -Alternatively you can git clone this repo. -""" - - -class TypescriptSpecUpdater(SpecUpdaterBase): - # Override the spec name that is defined in the service pom.xml files as the testing service - # references folders without snake-case (based on the typescript sdk formatting). - def format_module_name_for_template(self, module_name): - return module_name.replace("-", "").replace("_", "").replace(" ","") - - -def add_or_update_spec(artifact_id=None, group_id=None, spec_name=None, relative_spec_path=None, endpoint=None, subdomain=None, version=None, spec_generation_type=None, generate_waiters=True, generate_paginators=True, pom_location=None, github_whitelist_location=None): - sdk_dir = os.path.dirname(pom_location) - - found = find_existing_module(sdk_dir, artifact_id) - - ignored = [] - previous = {} - changed = [] - if found: - print('Artifact {} already exists in pom.xml. 
Updating specified fields...'.format(artifact_id)) - - previous = gather_settings(sdk_dir, found) - - if version: - newer_version = update_version_of_existing_spec(sdk_dir, found, version) - if newer_version: - print('The version was not updated to {}, because it was already at {}.'.format(version, newer_version)) - - if relative_spec_path: - update_relative_spec_path_of_existing_spec(sdk_dir, found, relative_spec_path) - - was_ignored = False - if endpoint: - subdomain = endpoint - subdomain = re.sub('^.*://', '', subdomain) # remove protocol and '://' - subdomain = re.sub(r'\.{domain}.*$', '', subdomain) # remove '.{domain}' and everything after it - was_changed, was_ignored = update_subdomain(sdk_dir, found, subdomain) - elif subdomain: - was_changed, was_ignored = update_subdomain(sdk_dir, found, subdomain) - - if was_ignored: - ignored.append('subdomain') - - if spec_name: - ignored.append('spec_name') - if group_id: - ignored.append('group_id') - - current = gather_settings(sdk_dir, found) - changed = compute_changed_settings(previous, current) - else: - missing_params = [] - if not spec_name: - missing_params.append('--spec-name') - - if not version: - missing_params.append('--version') - - if not group_id: - missing_params.append('--group-id') - - if not artifact_id: - missing_params.append('--artifact-id') - - if not relative_spec_path: - missing_params.append('--relative-spec-path') - - # not checking 'endpoint' anymore; can be specified either in ticket - # or in spec using 'x-obmcs-endpoint-template'. If neither is specified, - # this fails in the generator - - if missing_params: - raise MissingParameter('The following options must be specified for a new spec: {}'.format(', '.join(missing_params))) - - if endpoint and subdomain: - raise UsageError('Cannot specify both --endpoint and --subdomain') - - if endpoint: - print('Ignoring endpoint setting for new services; new services have to get it from the spec') - ignored.append('endpoint') - - if subdomain: - print('Ignoring subdomain setting for new services; new services have to get it from the spec') - ignored.append('subdomain') - - if spec_generation_type: - print('Note: --spec-generation-type is ignored for the Typescript SDK, since it is set in the bmc-codegen/pom.xml file for all modules') - - module_name = spec_name.lower().replace('_', '') # module_name is "newservice" - service_friendly_name = string.capwords(spec_name.replace('_', ' ')) # service_friendly_name is "New Service" - - # Find OCI Typescript SDK Version - sdk_version = get_package_version(pom_location) - - print('Module {} does not exist in pom.xml. 
Adding it...'.format(module_name)) - generate_child_codegen_pom(sdk_dir, module_name, service_friendly_name, sdk_version, group_id, artifact_id, version, relative_spec_path, spec_generation_type, generate_waiters, generate_paginators) - add_child_codegen_module(sdk_dir, module_name) - generate_child_package_json_and_readme(sdk_dir, module_name, service_friendly_name, sdk_version) - - print('Success!') - - return AddOrUpdateSpecResult( - updated=(not found) or changed != [], # not found means it's a new spec, or if it is an existing spec, changed needs to be non-empty - existing=found is not None, - ignored=ignored, - previous=previous, - changed=changed) - - -def find_existing_module(sdk_dir, artifact_id): - codegen_dir = os.path.join(sdk_dir, "codegen") - pom_files = [y for x in os.walk(codegen_dir) for y in glob(os.path.join(x[0], 'pom.xml'))] - for ldr_path in pom_files: - pom = parse_pom(ldr_path) - xpath = ".//ns:properties//ns:codegen.artifactory.artifactId" - properties = pom.findall(xpath, ns) - if len(properties) > 0 and artifact_id == properties[0].text: - codegen_artifact_id = pom.findall("./ns:artifactId", ns)[0].text - m = re.match("oci-typescript-sdk-([^-]*)-codegen", codegen_artifact_id) - if m: - return m.group(1) - - return None - - -def gather_settings(sdk_dir, spec_name): - settings = {} - - file_name = os.path.join(sdk_dir, "codegen", spec_name + "-codegen", "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.artifactory.groupId" - property = pom.findall(xpath, ns)[0] - settings["group_id"] = property.text - - xpath = ".//ns:properties//ns:codegen.artifactory.artifactId" - property = pom.findall(xpath, ns)[0] - settings["artifact_id"] = property.text - - xpath = ".//ns:properties//ns:codegen.artifactory.version" - property = pom.findall(xpath, ns)[0] - settings["version"] = property.text - - settings["module_name"] = spec_name - - xpath = ".//ns:properties//ns:codegen.spec.name" - property = pom.findall(xpath, ns)[0] - settings["relative_spec_path"] = property.text - - xpath = ".//ns:properties//ns:codegen.endpoint" - results = pom.findall(xpath, ns) - if results: - property = results[0] - settings["subdomain"] = property.text - - return settings - - -def update_version_of_existing_spec(sdk_dir, spec_name, version): - file_name = os.path.join(sdk_dir, "codegen", spec_name + "-codegen", "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.artifactory.version" - property = pom.findall(xpath, ns)[0] - old_version = property.text - - if not is_version_increasing(old_version, version): - return old_version - - property.text = version - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - return None # the old version was lower - - -def update_relative_spec_path_of_existing_spec(sdk_dir, spec_name, relative_spec_path): - file_name = os.path.join(sdk_dir, "codegen", spec_name + "-codegen", "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.spec.name" - property = pom.findall(xpath, ns)[0] - property.text = relative_spec_path - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -# Returns was_changed, was_ignored -def update_subdomain(sdk_dir, spec_name, subdomain): - file_name = os.path.join(sdk_dir, "codegen", spec_name + "-codegen", "pom.xml") - pom = parse_pom(file_name) - - xpath = ".//ns:properties//ns:codegen.endpoint" - results = pom.findall(xpath, ns) - if results: - property = results[0] - was_changed = 
property.text != subdomain - property.text = subdomain - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - return was_changed, False - else: - print('Ignored update to subdomain/endpoint -- service had no setting for this in pom.xml file and is getting that information from spec') - return False, True - - -def generate_child_codegen_pom(sdk_dir, module_name, service_friendly_name, sdk_version, group_id, artifact_id, artifact_version, spec_path_relative_to_jar, spec_generation_type, generate_waiters, generate_paginators): - dir_name = os.path.join(sdk_dir, "codegen", module_name + "-codegen") - os.mkdir(dir_name) - file_name = os.path.join(dir_name, "pom.xml") - - content = TYPESCRIPT_POM_FILE_TEMPLATE.format( - group_id=group_id, - artifact_id=artifact_id, - artifact_version=artifact_version, - sdk_version=sdk_version, - service_friendly_name=service_friendly_name, - module_name=module_name, - spec_path_relative_to_jar=spec_path_relative_to_jar, - spec_generation_type=spec_generation_type, - generate_waiters=str(generate_waiters).lower(), - generate_paginators=str(generate_paginators).lower()) - - root = ET.fromstring(content, parser=ET.XMLParser(target=CommentedTreeBuilder())) - pom = ET.ElementTree(element=root) - write_xml(file_name, pom) - - -def add_child_codegen_module(sdk_dir, module_name): - file_name = os.path.join(sdk_dir, "codegen", "pom.xml") - pom = parse_pom(file_name) - - content = MODULE_TEMPLATE.format(name=module_name + "-codegen") - module_element = ET.fromstring(content, ET.XMLParser(target=CommentedTreeBuilder())) - - # find modules - modules = pom.findall("./ns:modules", ns)[0] - modules.append(module_element) - - indent(pom.getroot()) - pom.write(file_name, encoding="UTF-8", xml_declaration=True) - - -def generate_child_package_json_and_readme(sdk_dir, module_name, service_friendly_name, sdk_version): - dir_name = os.path.join(sdk_dir, 'lib', module_name) - os.mkdir(dir_name) - file_name = os.path.join(dir_name, "package.json") - readme = os.path.join(dir_name, "README.md") - - content = MODULE_PACKAGE_JSON_FILE_TEMPLATE % ( - module_name, - sdk_version, - service_friendly_name, - module_name, - module_name) - - readme_content = MODULE_README_FORMAT.format(service_friendly_name=service_friendly_name, - module_name=module_name) - - file = open(file_name, "w") - file.write(content) - readme_file = open(readme, "w") - readme_file.write(readme_content) - - -def get_package_version(file_name): - with open(file_name, "r") as f: - data = f.read().replace('\n', '') - return data - - -@click.command() -@click.option('--artifact-id', required=True, help='The artifact id for the spec artifact (e.g. coreservices-api-spec') -@click.option('--group-id', help='The group id for the spec artifact (e.g. com.oracle.pic.commons)') -@click.option('--spec-name', help='The name of the spec. This will be used (e.g. core, identity, objectstorage). This is also used as the module name (\'bmc-servicename\') and base package (\'com.oracle.bmc.servicename\'). Underscores are removed, everything is lower-cased.') -@click.option('--relative-spec-path', help='The relative path of the spec within the artifact (e.g. coreservices-api-spec-20160918-external.yaml)') -@click.option('--endpoint', help='The base endpoint for the service (e.g. https://iaas.{domain}/20160918)') -@click.option('--subdomain', help='The subdomain for the service (e.g. 
if the endpoint is https://iaas.{domain}/20160918), the subdomain is "iaas"') -@click.option('--version', help='The version of the spec artifact (e.g. 0.0.1-SNAPSHOT') -@click.option('--spec-generation-type', help='The generation type: PUBLIC or PREVIEW') -@click.option('--generate-waiters/--no-generate-waiters', default=True, help='Generate waiters') -@click.option('--generate-paginators/--no-generate-paginators', default=True, help='Generate paginators') -@click.option('--pom-location', type=click.Path(exists=True), default=DEFAULT_PARENT_POM_LOCATION, help='Location of the pom.xml file in the root directory of the OCI Typescript SDK') -@click.option('--github-whitelist-location', type=click.Path(exists=True), default=DEFAULT_GITHUB_WHITELIST_LOCATION, help='Location of the github.whitelist file to update') -def add_or_update_spec_command(artifact_id, group_id, spec_name, relative_spec_path, endpoint, subdomain, version, spec_generation_type, generate_waiters, generate_paginators, pom_location, github_whitelist_location): - print(add_or_update_spec(artifact_id, group_id, spec_name, relative_spec_path, endpoint, subdomain, version, spec_generation_type, generate_waiters, generate_paginators, pom_location, github_whitelist_location)) - - -if __name__ == '__main__': - add_or_update_spec_command() diff --git a/scripts/auto_gen_utils/autogen_issue_advisor.py b/scripts/auto_gen_utils/autogen_issue_advisor.py deleted file mode 100644 index 8e961c3954..0000000000 --- a/scripts/auto_gen_utils/autogen_issue_advisor.py +++ /dev/null @@ -1,801 +0,0 @@ -import argparse -import textwrap -import datetime -import sys -import traceback -import time -import json -import six -from string import maketrans -from dotmap import DotMap -from jira.resources import Status - -import config -import util - -from config import PREVIEW_ISSUE_TYPE_ID, PREVIEW_ISSUE_TYPE_NAME -from config import PUBLIC_ISSUE_TYPE_ID, PUBLIC_ISSUE_TYPE_NAME - -import shared.bitbucket_utils - -import autogen_issue_advisor_shared - -from autogen_issue_advisor_shared import printv -from autogen_issue_advisor_shared import check_should_update -from autogen_issue_advisor_shared import ERROR_CHOOSE_PIPELINE_STATE -from autogen_issue_advisor_shared import QUIET_TIME_MINUTES -from autogen_issue_advisor_shared import DEFAULT_JIRA_ISSUE_FIELDS, CUSTOM_JIRA_ISSUE_FIELDS -from autogen_issue_advisor_shared import DEXREQ_AUTOMATION_NAME -from autogen_issue_advisor_shared import TICKET_STATE_ADVISORY_TEXT -from autogen_issue_advisor_shared import ERROR_STATES -from autogen_issue_advisor_shared import PIPELINE_NAMES -from autogen_issue_advisor_shared import process_last_builds - -import autogen_issue_advisor_preview -from autogen_issue_advisor_preview import get_preview_state -from autogen_issue_advisor_preview import PREVIEW_STATES -from autogen_issue_advisor_preview import advise_on_preview_issue -from autogen_issue_advisor_preview import handle_automated_preview_transitions - -import autogen_issue_advisor_public -from autogen_issue_advisor_public import get_public_state -from autogen_issue_advisor_public import PUBLIC_STATES -from autogen_issue_advisor_public import advise_on_public_issue -from autogen_issue_advisor_public import handle_automated_public_transitions -from create_cli_design_review_ticket import get_cli_design_review_issues_for_udx -from create_cli_design_review_ticket import is_design_ticket_in_non_terminal_state -from dexreq_migration import process_preview_jira_queue - - -# Spot testing: -# -# I've tested this is through spot 
testing: Picking out a DEXREQ ticket from one of the tables, then seeing if it gives the right output. -# -# python autogen_issue_advisor.py --dry-run --issue DEXREQ-123 -# -# -# Testing against all issues: -# -# And then I've run it against all issues, regardless of timestamp etc., and made sure it doesn't throw an exception for any of them. -# -# python autogen_issue_advisor.py --dry-run --force -# -# JIRA testing: -# -# I've tested the JIRA portion (without --dry-run) using the "spot testing" procedure. - -ignore_wrong_pipeline = False -IGNORE_CHANGES_AFTER = None - - -def process_changelog(issue): - # Simplify the changelog - changelog = issue.changelog - changelog_list = [] - for history in changelog.histories: - history_record = DotMap() - history_record.author = str(history.author) - history_record.created = history.created - items_list = [] - for item in history.items: - item_record = DotMap() - item_record.field = str(item.field) - item_record.old = item.fromString - item_record.new = item.toString - items_list.append(item_record) - history_record.changed_items = items_list - changelog_list.append(history_record) - return changelog_list - - -# returns statuses, all, any -def process_statuses(issue): - sdk_statuses = {} - for tool_name, jira_field_id in util.get_jira_custom_field_ids_for_tool().items(): - if util.is_tool_jira_reportable(tool_name): - status = getattr(issue.fields, jira_field_id) - sdk_statuses[tool_name] = str(status) - - all_sdks = { - config.CUSTOM_STATUS_TODO: True, - config.CUSTOM_STATUS_PROCESSING: True, - config.CUSTOM_STATUS_FAILURE: True, - config.CUSTOM_STATUS_SUCCESS: True, - config.CUSTOM_STATUS_DONE: True, - "None": True - } - any_sdks = { - config.CUSTOM_STATUS_TODO: False, - config.CUSTOM_STATUS_PROCESSING: False, - config.CUSTOM_STATUS_FAILURE: False, - config.CUSTOM_STATUS_SUCCESS: False, - config.CUSTOM_STATUS_DONE: False, - "None": False - } - for k,status in sdk_statuses.items(): - any_sdks[status] = True - for status_type in all_sdks.keys(): - if not status_type == status: - all_sdks[status_type] = False - - return sdk_statuses, all_sdks, any_sdks - - -def process_pipeline(issue): - pipeline = None - ticket_type_id = issue.fields.issuetype.id - if ticket_type_id == PREVIEW_ISSUE_TYPE_ID: - pipeline = PREVIEW_ISSUE_TYPE_NAME - if ticket_type_id == PUBLIC_ISSUE_TYPE_ID: - pipeline = PUBLIC_ISSUE_TYPE_NAME - - return pipeline - - -def process_comments(issue): - comments = [] - if "comment" in issue.raw["fields"] and "comments" in issue.raw["fields"]["comment"]: - for entry in issue.raw["fields"]["comment"]["comments"]: - item = DotMap() - item.author = entry["author"]["displayName"] - item.created = entry["created"] - item.url = entry["self"] - item.text = entry["body"] - comments.append(item) - - return comments - - -def process_dates(issue, summary): - if summary.jira.changelog: - summary.dates.last_changelog = summary.jira.changelog[-1] - summary.dates.last.created = summary.dates.last_changelog.created - summary.dates.last.author = summary.dates.last_changelog.author - if summary.jira.comments: - summary.dates.last_comment = summary.jira.comments[-1] - summary.dates.last.created = summary.dates.last_comment.created - summary.dates.last.author = summary.dates.last_comment.author - - if summary.dates.last_changelog.created and summary.dates.last_comment.created: - # both set - if summary.dates.last_changelog.created > summary.dates.last_comment.created: - summary.dates.last.created = summary.dates.last_changelog.created - 
summary.dates.last.author = summary.dates.last_changelog.author - else: - summary.dates.last.created = summary.dates.last_comment.created - summary.dates.last.author = summary.dates.last_comment.author - - last_issue_advisory = None - for comment in summary.jira.comments: - if (autogen_issue_advisor_shared.PROCESS_COMMENTS_BY_ANYONE or comment.author == DEXREQ_AUTOMATION_NAME) and comment.text and TICKET_STATE_ADVISORY_TEXT in comment.text.split("\n")[0]: - # occurred in first line of comment - last_issue_advisory = comment - - if last_issue_advisory: - summary.dates.last_issue_advisory = last_issue_advisory - - -def process_reporter(issue): - reporter_record = getattr(issue.fields, "reporter") - reporter = getattr(reporter_record, "key") - return reporter - - -def process_bypass_labels(issue, summary): - bypass_labels = [] - - for l in issue.fields.labels: - if l in config.BYPASS_LABELS: - bypass_labels.append(l.encode('utf-8')) - - summary.checks.bypass = bypass_labels - - return bypass_labels - - -def accept_non_bulk_prs(pr): - name = None - if 'name' in pr: - name = pr['name'] - source_branch = None - if 'source' in pr and 'branch' in pr['source']: - source_branch = pr['source']['branch'] - - if not name or not source_branch: - return True - - if name.startswith("Auto Generated Bulk Preview for") and source_branch.startswith(config.GENERATION_BRANCH_PREFIX + "-" + config.BULK_PREVIEW_BRANCH_PREFIX + "-"): - printv("Rejecting PR because it is a bulk preview PR: {}".format(pr['url'])) - return False - - return True - - -def process_pull_requests(issue, summary): - tool_names = util.get_jira_reportable_tool_names() - - # Note: the 'last_update' (which consists of the PR changes and the validation builds) - # does not currently take 'ignore_changes_after' into account. 
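
Worth noting about process_dates above: it orders events by comparing JIRA's 'created' strings directly. That is sound because JIRA renders timestamps as zero-padded ISO-8601 with a fixed +0000 offset, so lexicographic order matches chronological order. A tiny illustration (timestamp values invented):

    # JIRA-style timestamps, all with the same fixed +0000 offset (values invented).
    changelog_created = "2018-10-04T08:45:48.000+0000"
    comment_created = "2018-10-04T09:12:03.000+0000"

    # Zero-padded ISO-8601 strings with a uniform offset sort chronologically,
    # so plain string comparison picks the later event without any parsing.
    latest = max(changelog_created, comment_created)
    assert latest == comment_created
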
- printv("Getting PR status from JIRA for issue {}".format(issue.key), flush=True) - t0 = time.time() - pr_status = util.get_pr_status_for_tools(util.JIRA_CLIENT(), issue, tool_names, target_branch_filter='master') - t1 = time.time() - printv("JIRA returned PR status for issue {}, took {:.3f} seconds".format(issue.key, t1 - t0), flush=True) - - # TODO: replace this with results from just looking at 'dexreq' and 'python-cli' repos - summary.pull_requests.last_update = pr_status.last_update - summary.dates.last_pr_change = pr_status.last_update - - on_service_team = pr_status - on_service_team.tools = DotMap(on_service_team.tools) - - summary.pull_requests.on_service_team.master = on_service_team - - return on_service_team - - -def ignore_changes_after(issue): - if not IGNORE_CHANGES_AFTER: - return issue - - if "comment" in issue.raw["fields"] and "comments" in issue.raw["fields"]["comment"]: - to_be_removed = [] - for entry in issue.raw["fields"]["comment"]["comments"]: - datetime = entry["created"] - if datetime > IGNORE_CHANGES_AFTER: - to_be_removed.append(entry) - - for entry in to_be_removed: - if entry in issue.raw["fields"]["comment"]["comments"]: - issue.raw["fields"]["comment"]["comments"].remove(entry) - printv("Ignoring {}".format(entry["self"])) - - already_changed_items = [] - - if issue.changelog and issue.changelog.histories: - to_be_removed = [] - for entry in issue.changelog.histories: - datetime = entry.created - if datetime > IGNORE_CHANGES_AFTER: - to_be_removed.append(entry) - printv("Ignoring changelog history change at {}".format(entry.created)) - for item in entry.items: - if item.field not in already_changed_items: - # This is the first time we're seeing a field change after changes - # are being ignored; set the field back to the old value - field = str(item.field) - already_changed_items.append(item.field) - if item.field == "status": - setattr(issue.fields, "status", Status(None, None, raw={'name': item.fromString})) - issue.raw['fields']["status"] = Status(None, None, raw={'name': item.fromString}) - printv("Fixing {} to '{}'".format(field, str(item.fromString))) - if item.field == "labels": - setattr(issue.fields, "labels", item.fromString) - issue.raw['fields']["labels"] = item.fromString - printv("Fixing {} to '{}'".format(field, str(item.fromString))) - if item.field in config.CUSTOM_FIELD_NAME_FOR_ID.values(): - for k, v in config.CUSTOM_FIELD_NAME_FOR_ID.items(): - if item.field == v: - setattr(issue.fields, k, item.fromString) - issue.raw['fields'][k] = item.fromString - printv("Fixing {} to '{}'".format(field, str(item.fromString))) - break - - for entry in to_be_removed: - if entry in issue.changelog.histories: - issue.changelog.histories.remove(entry) - - return issue - - -# Returns summary, issue -def summarize_issue(issue_key=None): - dexreq_public_errors = [] - dexreq_public_warnings = [] - - printv("Querying JIRA for issue {}".format(issue_key), flush=True) - t0 = time.time() - issue = util.get_dexreq_issue(issue_key, fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS), expand=["changelog"], - errors=dexreq_public_errors, warnings=dexreq_public_warnings) - t1 = time.time() - printv("JIRA returned issue {}, took {:.3f} seconds".format(issue_key, t1 - t0), flush=True) - - ignore_changes_after(issue) - - summary = DotMap() - - summary.errors = [] - summary.errors.extend(dexreq_public_errors) - summary.errors.extend("Warning: {}".format(w) for w in dexreq_public_warnings) - - pipeline = process_pipeline(issue) - summary.pipeline = pipeline - 
- summary.jira.reporter = process_reporter(issue) - summary.jira.changelog = process_changelog(issue) - summary.jira.comments = process_comments(issue) - - process_dates(issue, summary) - - t0 = time.time() - process_last_builds(issue, summary) - t1 = time.time() - printv("process_last_builds took {:.3f} seconds".format(t1 - t0)) - - process_bypass_labels(issue, summary) - - sdk_statuses, all_sdks, any_sdks = process_statuses(issue) - - # TODO: collect latest branches for each SDK - - summary.sdks.statuses = sdk_statuses - summary.sdks.any = any_sdks - summary.sdks.all = all_sdks - - summary.jira.status = str(getattr(issue.fields, "status")) - - value = getattr(issue.fields, config.CUSTOM_FIELD_ID_PREVIEW_ISSUE) - if value: - summary.preview_issues = ",".join(util.get_dexreq_issue_keys(str(value))) - - for field_name in issue.raw['fields']: - field_value = issue.raw['fields'][field_name] - if field_name in config.CUSTOM_FIELD_NAME_FOR_ID: - field_name = config.CUSTOM_FIELD_NAME_FOR_ID[field_name] - - # this deals with the custom status fields, which look like this in the raw: - # {u'self': u'https://jira.oci.oraclecorp.com/rest/api/2/customFieldOption/14502', u'id': u'14502', u'value': u'Pending Merge'} - if type(field_value) is dict and "self" in field_value and "id" in field_value and ("value" in field_value or "name" in field_value): - field_value = field_value["value"] if "value" in field_value else field_value["name"] - - encoded_field_name = field_name.encode('utf-8').lower().translate(maketrans(' /', '__'), '()-').replace('_&_', '_and_').replace('&', '_and_') - if field_name not in ['comment'] and encoded_field_name not in summary.jira: - summary.jira[encoded_field_name] = field_value - - process_pull_requests(issue, summary) - - if pipeline is None: - state = ERROR_CHOOSE_PIPELINE_STATE - elif pipeline is PREVIEW_ISSUE_TYPE_NAME: - state = get_preview_state(issue, summary, sdk_statuses, all_sdks, any_sdks) - elif pipeline is PUBLIC_ISSUE_TYPE_NAME: - state = get_public_state(issue, summary, sdk_statuses, all_sdks, any_sdks) - else: - state = None - - summary.state = state - - if summary.pipeline == config.PUBLIC_ISSUE_TYPE_NAME: - if update_summary_with_pending_cli_design_review_tickets(issue, summary): - issue = util.get_dexreq_issue(issue.key, fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS + process_preview_jira_queue.CUSTOM_JIRA_ISSUE_FIELDS_PER_BUILD_TYPE[config.BUILD_TYPE_INDIVIDUAL_PUBLIC])) - - return summary, issue - - -def update_summary_with_pending_cli_design_review_tickets(issue, summary): - udx_issue_keys = util.get_udx_issue_keys(getattr(issue.fields, config.CUSTOM_FIELD_ID_UDX_TICKET)) - printv("Checking for CLI Design Review issues for: {}".format(", ".join(udx_issue_keys))) - design_review_tickets = get_cli_design_review_issues_for_udx(udx_issue_keys) - - pending_design_review_tickets = get_pending_cli_design_review_tickets(design_review_tickets) - summary.cli.pending_design_reviews = [i.key for i in pending_design_review_tickets] - - printv("Checking for manual changes:") - requires_manual_changes = are_cli_manual_changes_required(design_review_tickets) - - if requires_manual_changes and config.CLI_REQUIRED_MANUAL_CHANGES_LABEL not in issue.fields.labels: - printv("Adding CLI-ManualChangesRequired label to: " + issue.key) - issue.add_field_value('labels', config.CLI_REQUIRED_MANUAL_CHANGES_LABEL) - # True indicates issue needs to be refreshed. 
- return True - - # No need to update the issue field - return False - - -def get_pending_cli_design_review_tickets(design_review_tickets): - pending_design_review_tickets = filter(lambda issue: is_design_ticket_in_non_terminal_state(issue), design_review_tickets) - printv("Pending CLI Design Reviews: " + ', '.join(issue.key for issue in pending_design_review_tickets)) - - return pending_design_review_tickets - - -def are_cli_manual_changes_required(design_review_tickets): - for issue in design_review_tickets: - for label in issue.fields.labels: - if label in config.CLI_MANUAL_CHANGES_LABELS: - printv('Found manual changes for CLI Design review issue: {}'.format(issue.key)) - return True - return False - - -CHOOSE_PIPELINE_ERROR_TEMPLATE = """\ -[~{reporter}], the ticket does not specify if this is for the preview pipeline or the public pipeline. - -If this is for the preview pipeline (LA), choose the '{preview_type}' ticket type. - -If this is for the public pipeline (GA), choose the '{public_type}' ticket type. - -In order to change ticket types, select the 'Move' action in the 'More' menu of the JIRA issue. - -([Preview wiki|https://confluence.oci.oraclecorp.com/display/DEX/Requesting+a+preview+SDK+CLI#RequestingapreviewSDKCLI-A)Opentheticket] and [public wiki|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=43683000#RequestingapublicSDK/CLI-Step3:Createaticketinthe%22DEX-REQUEST%22JIRA])""" - - -def for_choose_pipeline_error(issue_key, summary): - global ignore_wrong_pipeline - - text = "" - - if not summary.pipeline: - if ignore_wrong_pipeline: - print("Ignoring {}, even though it has not chosen a correct ticket type.".format(issue_key)) - else: - text = CHOOSE_PIPELINE_ERROR_TEMPLATE.format( - preview_type=config.PREVIEW_ISSUE_TYPE_NAME, - public_type=config.PUBLIC_ISSUE_TYPE_NAME, - reporter=summary.jira.reporter) - - return text - - -def handle_automated_transitions(issue_key, issue, summary): - text = None - transitioned = False - if summary.pipeline == config.PREVIEW_ISSUE_TYPE_NAME: - transitioned, text = handle_automated_preview_transitions(issue_key, issue, summary) - elif summary.pipeline == config.PUBLIC_ISSUE_TYPE_NAME: - transitioned, text = handle_automated_public_transitions(issue_key, issue, summary) - if transitioned: - printv("Automated transition:\n{}".format(text)) - else: - printv("No automated transition.{}".format((" Text:\n" + text) if text else "")) - - return transitioned, text - - -class CustomEncoder(json.JSONEncoder): - def default(self, obj): - if (isinstance(obj, autogen_issue_advisor_shared.ServiceTeamMasterPrs) or # noqa: ignore=W504 - isinstance(obj, autogen_issue_advisor_shared.BitbucketBuilds) or # noqa: ignore=W504 - isinstance(obj, autogen_issue_advisor_shared.BitbucketBuildChecks) or # noqa: ignore=W504 - isinstance(obj, util.PrStatusForTools) or # noqa: ignore=W504 - isinstance(obj, util.PrsPerTool) or # noqa: ignore=W504 - isinstance(obj, util.PrAndUrl)): # noqa: ignore=W504 - return dict(obj.__dict__) - return json.JSONEncoder.default(self, obj) - - -# Returns text, summary -def advise_on_issue(issue_key, force): - summary, issue = summarize_issue(issue_key=issue_key) - - # Flush, so we make sure the output of the summarize_issue function is already there - # NOTE: This is to help debug for DEX-6382 - sys.stdout.flush() - - printv("summary:") - if autogen_issue_advisor_shared.IS_VERBOSE: - print(json.dumps(summary, cls=CustomEncoder, sort_keys=True, indent=2)) - - should_update = check_should_update(summary) - - transitioned, 
transition_text = handle_automated_transitions(issue_key, issue, summary) - - if transitioned: - # Always update after a transition - should_update = True - - # Get the latest post-transition summary - new_summary, issue = summarize_issue(issue_key=issue_key) - new_summary.transition_from_state = summary.state - summary = new_summary - - if not should_update: - print("Should not update") - if not force: - return None, summary - else: - print("Forcing update...") - - text = None - - if summary.state == ERROR_CHOOSE_PIPELINE_STATE: - text = for_choose_pipeline_error(issue_key, summary) - elif summary.pipeline == config.PREVIEW_ISSUE_TYPE_NAME: - text = advise_on_preview_issue(issue_key, issue, summary) - else: - text = advise_on_public_issue(issue_key, issue, summary) - - if transition_text and text: - text = transition_text + "\n" + text - elif transition_text: - text = transition_text - - if text: - text = "{}\n\n{}".format(TICKET_STATE_ADVISORY_TEXT, text) - - return text, summary - - -def initialize_state_statistics(only_pipeline): - state_statistics = {} - if not only_pipeline or only_pipeline == PREVIEW_ISSUE_TYPE_NAME: - for state in PREVIEW_STATES: - state_statistics[state] = 0 - state_statistics["TRANSITION_FROM_{}".format(state)] = 0 - if not only_pipeline or only_pipeline == PUBLIC_ISSUE_TYPE_NAME: - for state in PUBLIC_STATES: - state_statistics[state] = 0 - state_statistics["TRANSITION_FROM_{}".format(state)] = 0 - for state in ERROR_STATES: - state_statistics[state] = 0 - - return state_statistics - - -def print_state_statistics(state_statistics, states): - for state in states: - if state in state_statistics: - count = state_statistics[state] - print("##teamcity[buildStatisticValue key='{}_count' value='{}']".format(state, count)) - - -def query_all_issues(only_pipeline): - # Query all unresolved issues, plus those that were set to 'Done' in the last 4 days (to make it easier to span weekends, in case automation fails) - query = 'project = {JIRA_PROJECT} AND (resolution = Unresolved OR (status changed to Done AFTER "-4d"))'.format( - JIRA_PROJECT=config.JIRA_PROJECT) - - if only_pipeline: - query = query + ' AND issuetype = "{}"'.format(only_pipeline) - - # Since here, we're only interested in issue key and summary, let's not use - # the util.search_dexreq_issues function that also looks at the linked preview - # ticket in the case of public tickets - all_issues = util.jira_search_issues(query, fields=['summary, created']) - - for issue in all_issues: - try: - print('{} - {}'.format(issue.key, issue.fields.summary.encode('utf-8'))) - except Exception as error: - print('Problem with {}'.format(issue.key)) - exception_string = traceback.format_exc() - print("Unexpected error: {}\n{}".format(type(error), exception_string)) - - return all_issues - - -def post_comment_if_new(issue_key, summary, text, force, commented_issues): - should_comment = True - if not force and "{color:" + config.COMMENT_TYPE_TO_COLOR[config.COMMENT_TYPE_INFO] + "}" + text + "{color}" == summary.dates.last_issue_advisory.text: - printv("Not commenting, because the last issue advisory on {} had the same text.".format(summary.dates.last_issue_advisory.created)) - should_comment = False - - if should_comment: - # comment on issue - util.add_jira_comment( - issue_key, - text, - comment_type=config.COMMENT_TYPE_INFO - ) - commented_issues.append(issue_key) - - return should_comment - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Ticket advisor (preview and public).') - 
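
The date-override options registered below share a small mini-language: a comma-separated list where '+YYYY-MM-DD' adds a date, '-YYYY-MM-DD' removes one, and '=YYYY-MM-DD' (optionally '=YYYY-MM-DD@GA') resets the cadence base. The real parsing lives in autogen_issue_advisor_shared.process_date_override; a simplified sketch of what such a parser has to do, assuming well-formed input:

    import datetime

    def parse_overrides(spec):
        """Split e.g. '=2018-10-11@123,+2018-10-24,-2018-10-25' into added dates,
        removed dates, and an optional (base_date, ga_number) pair."""
        added, removed, base, ga = [], [], None, None
        for part in spec.split(","):
            op, value = part[0], part[1:]
            if op == "=" and "@" in value:
                value, ga = value.split("@", 1)
            date = datetime.datetime.strptime(value, "%Y-%m-%d").date()
            if op == "=":
                base = date
            elif op == "+":
                added.append(date)
            elif op == "-":
                removed.append(date)
        return added, removed, base, ga

    print(parse_overrides("=2018-10-11@123,+2018-10-24,-2018-10-25"))
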
parser.add_argument('--issue', - action='append', - help='By default, we query JIRA. This allows you to specify a DEXREQ issue to process instead: --issue DEXREQ-123') - parser.add_argument('--bulk-preview-date-overrides', - help='''Date overrides for bulk preview. Comma-separated list, with each part being either "+YYYY-MM-DD" or "-YYYY-MM-DD". - Example "+2018-10-04,-2018-10-05" means "do a bulk preview on October 4th (Thursday), but not on October 5th (Friday)"''') - parser.add_argument('--public-release-date-overrides', - help='''Date overrides for public release. Comma-separated list, with each part being either "+YYYY-MM-DD", "-YYYY-MM-DD", or "=YYYY-MM-DD@GA". - Starting with "=" sets the base date of the two-week cadence, and "@GA" indicates that this should be the GA with the number "GA". If not set, it's {}. - Example "=2018-10-11@123,+2018-10-24,-2018-10-25" means "do releases every two weeks, starting with GA 123 on 2018-10-11, but don't do one on October 25th, do it on October 24th instead"'''.format(autogen_issue_advisor_public.PUBLIC_RELEASE_DATE_CADENCE_START_DATE)) - parser.add_argument('--public-release-requested-cut-off-date-overrides', - help='''Date overrides for public 'release requested' cut-off date. Comma-separated list, with each part being either "+YYYY-MM-DD", "-YYYY-MM-DD", or "=YYYY-MM-DD". - Starting with "=" sets the base date of the two-week cadence. If not set, it's {}. - Example "=2018-10-05,+2018-10-18,-2018-10-19" means "do RR cut-off every two weeks, starting with 2018-10-05, but don't do one on October 19th, do it on October 18th instead"'''.format(autogen_issue_advisor_public.RELEASE_REQUESTED_CUT_OFF_DATE_CADENCE_START_DATE)) - parser.add_argument('--dry-run', - default=False, - action='store_true', - help='Perform a dry-run') - parser.add_argument('--force', - default=False, - action='store_true', - help='Force an update, even if the issue has not been quiet for {} minutes and there have been no updates since the last advisory.'.format(QUIET_TIME_MINUTES)) - parser.add_argument('--verbose', - default=False, - action='store_true', - help='Verbose logging') - parser.add_argument('--pipeline', - help='Limit to pipeline, one of {}. Default is both.'.format(", ".join(['"{}"'.format(x) for x in PIPELINE_NAMES]))) - parser.add_argument('--show-ga-calendar', - default=False, - action='store_true', - help='''Show a GA calendar (number of future releases specified using --show-ga-calendar-count).''') - parser.add_argument('--show-ga-calendar-count', - default=10, - help='''When showing a GA calendar, show the specified number of future releases.''') - parser.add_argument('--show-preview-calendar', - default=False, - action='store_true', - help='''Show a Preview calendar (number of future preview releases specified using --show-preview-calendar-count).''') - parser.add_argument('--show-preview-calendar-count', - default=10, - help='''When showing a preview calendar, show the specified number of future preview releases.''') - parser.add_argument('--ignore-wrong-pipeline', - default=False, - action='store_true', - help='Ignore wrong pipeline (i.e. 
not "{}" or "{}" issue type)'.format(config.PREVIEW_ISSUE_TYPE_NAME, config.PUBLIC_ISSUE_TYPE_NAME)) - parser.add_argument('--process-comments-by-anyone', - default=False, - action='store_true', - help='Process comments by anyone, not just by "{}"'.format(DEXREQ_AUTOMATION_NAME)) - parser.add_argument('--ignore-changes-after', - help='Ignore changes after the specified datetime YYYY-MM-DDThh:mm:ss') - parser.add_argument('--disable_date_check', - default=False, - action='store_true', - help='Check if the provided GA date is a public release date. Leave a warning if not') - - failed = [] - - args = parser.parse_args() - autogen_issue_advisor_shared.IS_VERBOSE = args.verbose - util.IS_VERBOSE = args.verbose - shared.bitbucket_utils.verbose = args.verbose - - autogen_issue_advisor_shared.PROCESS_COMMENTS_BY_ANYONE = args.process_comments_by_anyone - - config.IS_DRY_RUN = args.dry_run - shared.bitbucket_utils.dry_run = args.dry_run - - config.DISABLE_COMMENT_INCORRECT_DATES = args.disable_date_check - - only_pipeline = args.pipeline - force = args.force - - # date overrides - cut_off_date_or = args.public_release_requested_cut_off_date_overrides - release_date_or = args.public_release_date_overrides - - # If one of the overrides contains '=', the other should as well - if release_date_or and cut_off_date_or: - if ('=' in release_date_or and '=' not in cut_off_date_or) or ('=' not in release_date_or and '=' in cut_off_date_or): - raise ValueError("\'=\' found in either public_release_requested_cut_off_date_overrides or public_release_date_overrides, but not both.") - - # Check that base dates are within 3 weeks of each other - base_cut_off_date, ga_number, cut_off_overrides = autogen_issue_advisor_shared.process_date_override(cut_off_date_or, - autogen_issue_advisor_public.RELEASE_REQUESTED_CUT_OFF_DATE_CADENCE_START_DATE, None, '--public-to-deploy-cut-off-date-overrides') - base_release_date, base_release_ga_number, release_overrides = autogen_issue_advisor_shared.process_date_override(release_date_or, - autogen_issue_advisor_public.PUBLIC_RELEASE_DATE_CADENCE_START_DATE, autogen_issue_advisor_public.PUBLIC_RELEASE_DATE_GA_NUMBER, '--public-release-date-overrides') - - delta = base_cut_off_date - base_release_date - delta = abs(delta.days) - - if (delta > 21): - raise ValueError("public_release_requested_cut_off_date_overrides and public_release_date_overrides should be within 3 weeks of each other") - - autogen_issue_advisor_preview.BULK_PREVIEW_DATE_OVERRIDE = args.bulk_preview_date_overrides - autogen_issue_advisor_public.PUBLIC_RELEASE_DATE_OVERRIDE = release_date_or - autogen_issue_advisor_public.RELEASE_REQUESTED_CUT_OFF_DATE_OVERRIDE = cut_off_date_or - - IGNORE_CHANGES_AFTER = args.ignore_changes_after - - if args.show_ga_calendar: - autogen_issue_advisor_public.show_ga_calendar(int(args.show_ga_calendar_count)) - sys.exit(0) - - if args.show_preview_calendar: - autogen_issue_advisor_preview.show_preview_calendar(int(args.show_preview_calendar_count)) - sys.exit(0) - - shared.bitbucket_utils.setup_bitbucket(args) - - if args.pipeline and args.pipeline not in PIPELINE_NAMES: - raise ValueError("Pipeline, if specified, must be one of: {}".format(", ".join(['"{}"'.format(x) for x in PIPELINE_NAMES]))) - - if args.issue: - if only_pipeline: - raise ValueError("Cannot use --issue with --pipeline.") - issues = [util.JIRA_CLIENT().issue(issue_key, fields=['summary, created']) for issue_key in args.issue] - else: - issues = query_all_issues(only_pipeline) - - # Find the oldest issue - 
oldest_created_date = None - oldest_ticket = None - for issue in issues: - created_date = getattr(issue.fields, 'created') - if not oldest_created_date or created_date < oldest_created_date: - oldest_created_date = created_date - oldest_ticket = issue.key - - print("Oldest ticket was created {} ({})".format(oldest_created_date, oldest_ticket)) - - # Prime the caches - all_repo_names = [config.DEXREQ_REPO_NAME] - for tool, repo_names in six.iteritems(config.REPO_NAMES_FOR_TOOL): - all_repo_names.extend(repo_names) - - for repo_name in set(all_repo_names): - printv("Priming PR cache for repo {}".format(repo_name)) - shared.bitbucket_utils.get_all_pullrequest_with_string_after('SDK', repo_name, '', oldest_created_date) - - if args.ignore_wrong_pipeline: - ignore_wrong_pipeline = True - - print("##teamcity[testSuiteStarted name='autogen_issue_advisor']") - - state_statistics = initialize_state_statistics(only_pipeline) - - commented_issues = [] - for issue_key in [issue.key for issue in issues]: - if config.should_ignore_issue(issue_key): - print(textwrap.dedent("""\ - ======================================== - Issue: {} - being ignored per env var {}""").format(issue_key, config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME)) - print("========================================") - continue - - print(textwrap.dedent("""\ - ======================================== - Issue: {}""").format(issue_key)) - issue_start = datetime.datetime.now() - print("##teamcity[testStarted name='{}' captureStandardOutput='true']".format(issue_key)) - try: - # Flush, so we make sure the output of the issue key is already visible - # NOTE: This is to help debug for DEX-6382 - sys.stdout.flush() - text, summary = advise_on_issue(issue_key, force) - - if summary.state: - state_statistics[summary.state] += 1 - - if summary.transition_from_state: - printv("summary.transition_from_state: {}".format(summary.transition_from_state)) - state_statistics["TRANSITION_FROM_{}".format(summary.transition_from_state)] += 1 - - if text: - if summary.errors: - text = text + "\n\nErrors the advisor noticed:\n{}".format("\n".join("- {}".format(e) for e in summary.errors)) - - post_comment_if_new(issue_key, summary, text, force, commented_issues) - except Exception as error: - exception_string = traceback.format_exc() - print("Unexpected error: {}\n{}".format(type(error), exception_string)) - failed.append(issue_key) - print("##teamcity[testFailed name='{}' message='{}' details='{}']".format(issue_key, type(error), exception_string)) - finally: - issue_end = datetime.datetime.now() - - print("##teamcity[testFinished name='{}' duration='{}']".format(issue_key, int((issue_end - issue_start).total_seconds() * 1000))) - - print("##teamcity[testSuiteFinished name='autogen_issue_advisor']") - print("========================================") - - print("##teamcity[buildStatisticValue key='{}' value='{}']".format("comment_count", len(commented_issues))) - print("##teamcity[buildStatisticValue key='{}' value='{}']".format("error_count", len(failed))) - transition_count = sum([state_statistics["TRANSITION_FROM_{}".format(state)] for state in PREVIEW_STATES + PUBLIC_STATES]) - print("##teamcity[buildStatisticValue key='{}' value='{}']".format("transition_count", transition_count)) - - print_state_statistics(state_statistics, PREVIEW_STATES) - print_state_statistics(state_statistics, PUBLIC_STATES) - print_state_statistics(state_statistics, ERROR_STATES) - print_state_statistics(state_statistics, ["TRANSITION_FROM_{}".format(state) for state in PREVIEW_STATES]) 
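
The ##teamcity[...] lines emitted throughout this run (including the remaining statistics call just below) are TeamCity service messages: any stdout line in that shape is interpreted by the build agent, which is how the advisor reports per-state counts and per-issue test results. A minimal emitter, including the escaping TeamCity requires inside attribute values (key and value here are illustrative):

    def tc_escape(value):
        # TeamCity service-message escaping: '|' first, then quotes, brackets, newlines.
        for char, repl in (("|", "||"), ("'", "|'"), ("[", "|["), ("]", "|]"), ("\n", "|n")):
            value = value.replace(char, repl)
        return value

    def tc_build_statistic(key, value):
        print("##teamcity[buildStatisticValue key='{}' value='{}']".format(tc_escape(key), value))

    tc_build_statistic("PREVIEW_DONE_STATE_count", 4)
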
- print_state_statistics(state_statistics, ["TRANSITION_FROM_{}".format(state) for state in PUBLIC_STATES]) - - if config.IS_DRY_RUN: - print("DRY-RUN: Would have left {} comment(s)".format(len(commented_issues))) - else: - print("Left {} comment(s)".format(len(commented_issues))) - - if commented_issues: - print("Commented on the following issues:\n{}".format("\n".join(commented_issues))) - - if failed: - print("The following issues failed:\n{}".format("\n".join(failed))) - sys.exit(1) diff --git a/scripts/auto_gen_utils/autogen_issue_advisor_preview.py b/scripts/auto_gen_utils/autogen_issue_advisor_preview.py deleted file mode 100644 index 082cb6e1db..0000000000 --- a/scripts/auto_gen_utils/autogen_issue_advisor_preview.py +++ /dev/null @@ -1,486 +0,0 @@ -import datetime -import pytz -import sys - -import config -import util - -from autogen_issue_advisor_shared import PACIFIC_TIME_ZONE -from autogen_issue_advisor_shared import get_successful_pull_requests_text -from autogen_issue_advisor_shared import get_failed_links_text -from autogen_issue_advisor_shared import handle_transition_for_processing -from autogen_issue_advisor_shared import execute_appropriate_transition_handler -from autogen_issue_advisor_shared import handle_transition_for_spec_review_pr -from autogen_issue_advisor_shared import get_spec_review_pr -from autogen_issue_advisor_shared import process_all_spec_change_prs -from autogen_issue_advisor_shared import process_date_override, find_next_matching_date - -# Format: comma-separated, with each part being either "+YYYY-MM-DD" or "-YYYY-MM-DD" -# Example "+2018-10-04,-2018-10-05" means "do a bulk preview on October 4th (Thursday), but not on October 5th (Friday)" -BULK_PREVIEW_DATE_OVERRIDE = None - -# Format: comma-separated, with each part being either "+YYYY-MM-DD", "-YYYY-MM-DD", or "=YYYY-MM-DD". -# Starting with "=" sets the base date of the one-week cadence. If not set, it's 2019-01-08. -# Example "=2018-10-11,+2018-10-24,-2018-10-25" means -# "do releases every week, starting with 2018-10-11, but don't do one on October 25th, do it on October 24th instead" -PREVIEW_RELEASE_DATE_OVERRIDE = None -PREVIEW_RELEASE_DATE_CADENCE_START_DATE = "2022-06-08" -PREVIEW_RELEASE_DATE_CADENCE_IN_DAYS = 7 - - -PREVIEW_NON_ACTIONABLE_STATE = "PREVIEW_NON_ACTIONABLE_STATE" -PREVIEW_PROCESSING_REQUESTED_STATE = "PREVIEW_PROCESSING_REQUESTED_STATE" -PREVIEW_PROCESSING_STATE = "PREVIEW_PROCESSING_STATE" -PREVIEW_SERVICE_TEAM_FAILURE_INVESTIGATION_STATE = "PREVIEW_SERVICE_TEAM_FAILURE_INVESTIGATION_STATE" -PREVIEW_DEX_SUPPORT_REQUIRED_STATE = "PREVIEW_DEX_SUPPORT_REQUIRED_STATE" -PREVIEW_SERVICE_TEAM_REVIEW_REQUIRED_STATE = "PREVIEW_SERVICE_TEAM_REVIEW_REQUIRED_STATE" -PREVIEW_READY_FOR_PREVIEW_STATE = "PREVIEW_READY_FOR_PREVIEW_STATE" -PREVIEW_PROCESSING_BULK_STATE = "PREVIEW_PROCESSING_BULK_STATE" -PREVIEW_DEX_BULK_REVIEW_STATE = "PREVIEW_DEX_BULK_REVIEW_STATE" -PREVIEW_DONE_STATE = "PREVIEW_DONE_STATE" - -PREVIEW_STATES = [ - PREVIEW_NON_ACTIONABLE_STATE, - PREVIEW_PROCESSING_REQUESTED_STATE, - PREVIEW_PROCESSING_STATE, - PREVIEW_SERVICE_TEAM_FAILURE_INVESTIGATION_STATE, - PREVIEW_DEX_SUPPORT_REQUIRED_STATE, - PREVIEW_SERVICE_TEAM_REVIEW_REQUIRED_STATE, - PREVIEW_READY_FOR_PREVIEW_STATE, - PREVIEW_PROCESSING_BULK_STATE, - PREVIEW_DEX_BULK_REVIEW_STATE, - PREVIEW_DONE_STATE, -] - - -NON_ACTIONABLE_BECAUSE_OF_BACKLOG_TEMPLATE = """\ -[~{reporter}], the ticket status is set to '{backlog_status}'. For processing to begin, please set the ticket status to '{requested_status}'. 
-""" - - -def for_non_actionable(issue_key, issue, summary): - text = "" - - if summary.jira.status == config.STATUS_BACKLOG: - if text: - text += "\n\n" - - text = text + NON_ACTIONABLE_BECAUSE_OF_BACKLOG_TEMPLATE.format( - reporter=summary.jira.reporter, - backlog_status=config.STATUS_BACKLOG, - requested_status=config.STATUS_PROCESSING_REQUESTED) - - return text - - -SERVICE_TEAM_FAILURE_INVESTIGATION_TEMPLATE = """\ -[~{reporter}], the status of the ticket is set to '{ticket_status}', because generation of {an_sdk_several_sdks} has failed, as indicated by the SDK status {field_fields} set to '{sdk_state}'. For {this_sdk_these_sdks}, please look into why the generation failed by examining the build log. - -If you determine that something was wrong in your spec, please change your spec and generate a new spec artifact. Then update the spec version in this ticket and set the ticket status back to '{requested_status}'. - -If you think the error is unrelated to your change, or if you need help figuring this out, set the ticket status to '{dex_support_status}', and our on-call engineer will look at it within a day. - -The failed {sdk_is_sdks_are}: - -{fields}{failed_builds_text} - -([wiki|https://confluence.oci.oraclecorp.com/display/DEX/Requesting+a+preview+SDK+CLI#RequestingapreviewSDKCLI-Step4:Monitorticketstatusforautomatedupdatesandtakeactionifnecessary])""" - - -def for_service_team_failure_investigation(issue_key, issue, summary): - text = "" - - failed = [(language, value) for language, value in summary.sdks.statuses.items() if value == config.CUSTOM_STATUS_FAILURE] - fields = "\n".join(["- {}".format(language) for language, value in failed]) - - failed_builds_text = get_failed_links_text(summary) or "" - - text = SERVICE_TEAM_FAILURE_INVESTIGATION_TEMPLATE.format( - reporter=summary.jira.reporter, - fields=fields, - ticket_status=config.STATUS_SERVICE_TEAM_FAILURE_INVESTIGATION, - sdk_state=config.CUSTOM_STATUS_FAILURE, - dex_support_status=config.STATUS_DEX_SUPPORT_REQUIRED, - requested_status=config.STATUS_PROCESSING_REQUESTED, - an_sdk_several_sdks="an SDK" if len(failed) == 1 else "several SDKs", - field_fields="field" if len(failed) == 1 else "fields", - this_sdk_these_sdks="this SDK" if len(failed) == 1 else "these SDKs", - sdk_is_sdks_are="SDK is" if len(failed) == 1 else "SDKs are", - failed_builds_text=failed_builds_text) - - return text - - -SERVICE_TEAM_REVIEW_REQUIRED_TEMPLATE = """\ -The preview SDKs have been generated. The service team has to review the output and approve it before the ticket can move on. - -[~{reporter}], {review_text}{pr_text} -([wiki|https://confluence.oci.oraclecorp.com/display/DEX/Requesting+a+preview+SDK+CLI#RequestingapreviewSDKCLI-Step5:Verifygeneratedchanges])""" - - -REVIEW_GENERATED_CODE_TEXT = """\ -check that the generated SDKs contain (1) your entire change, and (2) nothing unwanted. - -If you are satisfied, then set the '{ready_status}' status for this ticket. - -If you find that something is missing from your change or there is something that should not be there, then please revise your spec. Generate a new spec artifact, then update the spec version in this ticket and set the ticket status back to '{requested_status}'.""" - - -REVIEW_SPEC_DIFF_TEXT = """\ -review the [spec change pull request|{pr_url}] to make sure it contains (1) your entire change, and (2) nothing unwanted. - -If you are satisfied, then approve the spec pull request and set the '{ready_status}' status for this ticket. 
- -If you find that something is missing from your change or there is something that should not be there, then please set the spec pull request to "Needs Work" and revise your spec. Generate a new spec artifact, then update the spec version in this ticket and set the ticket status back to '{requested_status}'. - -Spec change pull request to review: {pr_url}""" - - -def get_preview_review_text(issue_key, reporter): - dexreq_pr_url = None - dexreq_pr = get_spec_review_pr(issue_key) - - if dexreq_pr: - hrefs = util.deep_get(dexreq_pr, 'links.self') - if hrefs: - dexreq_pr_url = util.deep_get(hrefs[0], 'href') - - if dexreq_pr_url: - try: - dexreq_pr_id = dexreq_pr['id'] - util.add_reviewer_to_pull_request(dexreq_pr_id, config.DEXREQ_REPO_NAME, [reporter]) - except: # noqa: ignore=E722 - print("Could not add '{}' as reviewer".format(reporter)) - print('Error: {}. {}, line: {}'.format(sys.exc_info()[0], - sys.exc_info()[1], - sys.exc_info()[2].tb_lineno)) - return REVIEW_SPEC_DIFF_TEXT.format(ready_status=config.STATUS_READY_FOR_PREVIEW, - requested_status=config.STATUS_PROCESSING_REQUESTED, - pr_url=dexreq_pr_url), dexreq_pr, dexreq_pr_url - - return REVIEW_GENERATED_CODE_TEXT.format(ready_status=config.STATUS_READY_FOR_PREVIEW, - requested_status=config.STATUS_PROCESSING_REQUESTED), None, None - - -def for_service_team_review_required(issue_key, issue, summary): - review_text, dexreq_pr, dexreq_pr_url = get_preview_review_text(issue_key, summary.jira.reporter) - pr_text = get_successful_pull_requests_text(summary) - if pr_text: - if dexreq_pr_url: - pr_text = "\n\nThe links below allow you to examine the effect of your change on the generated source. All you have to do is review the above [spec change pull request|{pr_url}]. The SDK/CLI team will do all the work to include the change in the weekly bulk preview.\n\n".format(pr_url=dexreq_pr_url) + pr_text - else: - pr_text = "\n\nThe links below allow you to examine the effects of your change on the generated source. Just look at the diff and verify that the generated SDKs contain the change you expect. The SDK/CLI team will do all the work to include the change in the weekly bulk preview.\n\n" + pr_text - else: - pr_text = "\n" - - text = SERVICE_TEAM_REVIEW_REQUIRED_TEMPLATE.format( - reporter=summary.jira.reporter, - pr_text=pr_text, - review_text=review_text) - - return text - - -PROCESSING_TEMPLATE = """\ -The ticket is being processed. Please wait for the automation to produce results. -""" - -PARTIALLY_PROCESSED_TEMPLATE = """\ -The ticket is being processed, and some SDKs have already been generated. - -Already done: -{fields}""" - - -def for_processing(issue_key, issue, summary): - fields = "\n".join(["- {}".format(language) for language, value in summary.sdks.statuses.items() - if value in [config.CUSTOM_STATUS_FAILURE, config.CUSTOM_STATUS_SUCCESS, config.CUSTOM_STATUS_DONE]]) - if fields: - return PARTIALLY_PROCESSED_TEMPLATE.format(fields=fields, reporter=summary.jira.reporter) - else: - return PROCESSING_TEMPLATE.format(reporter=summary.jira.reporter) - - -PROCESSING_BULK_TEMPLATE = """\ -The ticket is being processed for the bulk preview. Please wait for the automation to produce results. -""" - -PARTIALLY_PROCESSED_BULK_TEMPLATE = """\ -The ticket is being processed for the bulk preview, and some SDKs have already been generated. 
- -Already done: -{fields}""" - - -def for_processing_bulk(issue_key, issue, summary): - fields = "\n".join(["- {}".format(language) for language, value in summary.sdks.statuses.items() - if value in [config.CUSTOM_STATUS_FAILURE, config.CUSTOM_STATUS_SUCCESS, config.CUSTOM_STATUS_DONE]]) - if fields: - return PARTIALLY_PROCESSED_BULK_TEMPLATE.format(fields=fields, reporter=summary.jira.reporter) - else: - return PROCESSING_BULK_TEMPLATE.format(reporter=summary.jira.reporter) - - -def for_processing_requested(issue_key, issue, summary): - return "This ticket is ready for automated processing. Please wait for the automation to start." - - -def get_preview_review_complete_date(issue_key, summary): - # Find timestamp when "Ready for Preview" was last set - preview_review_complete_date = None - if summary.jira and summary.jira.changelog: - for cl in summary.jira.changelog: - for item in cl.changed_items: - if item.field == "status" and item.new == config.STATUS_READY_FOR_PREVIEW: - # status was changed to config.STATUS_READY_FOR_PREVIEW - preview_review_complete_datetime = pytz.utc.localize(datetime.datetime.strptime(cl.created, '%Y-%m-%dT%H:%M:%S.%f+0000')) - - # change this to Pacific time - - preview_review_complete_datetime_pacific = preview_review_complete_datetime.astimezone(PACIFIC_TIME_ZONE) - - preview_review_complete_date = preview_review_complete_datetime_pacific.date() - - return preview_review_complete_date - - -def get_next_bulk_preview_date(issue_key, summary): - # Start by finding the "Ready for Preview" timestamp - preview_review_complete_date = get_preview_review_complete_date(issue_key, summary) - print("Status '{}' was set for issue '{}' on: {}".format(config.STATUS_READY_FOR_PREVIEW, issue_key, preview_review_complete_date)) - - if not preview_review_complete_date: - # Couldn't figure out when label was added, decline to give a date - return None - - base_release_date, base_release_ga_number, release_overrides = \ - process_date_override(BULK_PREVIEW_DATE_OVERRIDE, PREVIEW_RELEASE_DATE_CADENCE_START_DATE, 140, - '--bulk-preview-date-overrides') - - bulk_preview_date = base_release_date - - # Find the next Wednesday after this date (if added on a Wednesday, go to next Wednesday) - while True: - bulk_preview_date = find_next_matching_date(bulk_preview_date, base_release_date, - release_overrides, PREVIEW_RELEASE_DATE_CADENCE_IN_DAYS, - "preview release") - print("Finding next bulk preview date, preview_review_complete_date='{}', candidate='{}'".format(preview_review_complete_date, bulk_preview_date)) - - if bulk_preview_date > preview_review_complete_date: - break - - bulk_preview_date += datetime.timedelta(1) - - print("Next bulk preview for issue '{}' is scheduled for: {}".format(issue_key, bulk_preview_date)) - - return bulk_preview_date - - -def for_ready_for_bulk(issue_key, issue, summary): - text = "This ticket is ready for the next bulk preview SDK generation." 
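-    # Note: get_next_bulk_preview_date() below returns None when the
-    # 'Ready for Preview' timestamp cannot be recovered from the issue changelog;
-    # in that case the scheduling sentence is simply omitted from the comment.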
-
-    date = get_next_bulk_preview_date(issue_key, summary)
-    if date:
-        preview_review_complete_date = get_preview_review_complete_date(issue_key, summary)
-        text = text + "\n\nSince the '{}' status was set on {} (Seattle time), the next bulk preview generation that this change can be included in is scheduled for {}.".format(
-            config.STATUS_READY_FOR_PREVIEW, preview_review_complete_date.strftime('%A, %d %B %Y'), date.strftime('%A, %d %B %Y'))
-
-    text = text + "\n\n" + "If the ticket ends up going into '{}', please ignore that and do not change the ticket status - SDK/CLI team will handle it.".format(config.STATUS_SERVICE_TEAM_FAILURE_INVESTIGATION)
-
-    text = text + "\n\n" + "([wiki|https://confluence.oci.oraclecorp.com/display/DEX/Requesting+a+preview+SDK+CLI#RequestingapreviewSDKCLI-RequestsreadytobeincludedinweeklypreviewSDK(ReadyforPreview)])"
-
-    return text
-
-
-DONE_TEMPLATE = """\
-This ticket is done. All SDKs have been successfully generated and merged.
-
-Next, start working on your samples and tests and get them into the preview branches.
-
-See [Testing of feature support|https://confluence.oci.oraclecorp.com/display/DEX/Self-Service+Testing+and+Development] and [Code samples of feature support|https://confluence.oci.oraclecorp.com/display/DEX/Self-Service+Testing+and+Development].
-
-Testing of generated CLI commands is the Service team's responsibility. For CLI testing, install the latest generated [preview build|https://confluence.oci.oraclecorp.com/pages/viewpage.action?spaceKey=DEX&title=CLI+FAQs#CLIFAQs-WherecanIfindthelatestversionofthepreviewCLI?]. Then, to set up the development environment, [refer to the development setup instructions|https://confluence.oci.oraclecorp.com/display/DEX/OCI+CLI+Development+Setup#OCICLIDevelopmentSetup-1.Settingupthedevelopmentenvironment]. To run tests in your tenancy, please follow the [steps mentioned here|https://confluence.oci.oraclecorp.com/display/DEX/OCI+CLI+Development+Setup#OCICLIDevelopmentSetup-Runningtestsagainstanothertenancy(RecommendedforServiceTeam)]. Test all the commands/parameters that were added, modified, or deleted in this DEXREQ process. Please do end-to-end testing, which involves getting a response from the service and validating it.
-"""
-
-
-def for_done(issue_key, issue, summary):
-    warnings = process_all_spec_change_prs(issue)
-
-    text = DONE_TEMPLATE
-
-    if warnings:
-        text = text + "\n\nWarnings:\n" + warnings
-
-    return text
-
-
-PARTIAL_DEX_SUPPORT_REQUIRED_FAILED_SDKS_TEMPLATE = """\
-
-{sdk_sdks} that {requires_require} the on-call's attention:
-{fields}"""
-
-
-DEX_SUPPORT_REQUIRED_TEMPLATE = """\
-The service team has asked the SDK/CLI team for help. The on-call engineer will respond to this ticket once a day.
-{failed_sdk_text}
-"""
-
-
-def for_dex_support_required(issue_key, issue, summary):
-    failed = [(language, value) for language, value in summary.sdks.statuses.items() if value == config.CUSTOM_STATUS_FAILURE]
-
-    failed_sdk_text = ""
-    if failed:
-        fields = "\n".join(["- {}".format(language) for language, value in failed])
-        failed_sdk_text = PARTIAL_DEX_SUPPORT_REQUIRED_FAILED_SDKS_TEMPLATE.format(
-            fields=fields,
-            sdk_sdks="SDK" if len(failed) == 1 else "SDKs",
-            requires_require="requires" if len(failed) == 1 else "require")
-
-    return DEX_SUPPORT_REQUIRED_TEMPLATE.format(
-        reporter=summary.jira.reporter,
-        failed_sdk_text=failed_sdk_text)
-
-
-DEX_BULK_REVIEW_REQUIRED_TEMPLATE = """\
-The bulk preview SDKs have been generated.
The SDK/CLI team's on-call engineer will review the pull requests and merge them.{pr_text} - -([wiki|https://confluence.oci.oraclecorp.com/display/DEX/Requesting+a+preview+SDK+CLI#RequestingapreviewSDKCLI-Step5:Verifygeneratedchanges])""" - - -def for_dex_bulk_review(issue_key, issue, summary): - # TODO: get PR links - pr_text = None - if pr_text: - pr_text = "\n\nPull requests to review and merge:\n\n" + pr_text - else: - pr_text = "" - - text = DEX_BULK_REVIEW_REQUIRED_TEMPLATE.format( - reporter=summary.jira.reporter, - ready_status=config.STATUS_READY_FOR_PREVIEW, - requested_status=config.STATUS_PROCESSING_REQUESTED, - pr_text=pr_text) - - return text - - -def additional_checks(issue_key, issue, summary): - return "" - - -PREVIEW_HANDLERS = { - PREVIEW_NON_ACTIONABLE_STATE: for_non_actionable, - PREVIEW_PROCESSING_REQUESTED_STATE: for_processing_requested, - PREVIEW_PROCESSING_STATE: for_processing, - PREVIEW_SERVICE_TEAM_FAILURE_INVESTIGATION_STATE: for_service_team_failure_investigation, - PREVIEW_DEX_SUPPORT_REQUIRED_STATE: for_dex_support_required, - PREVIEW_SERVICE_TEAM_REVIEW_REQUIRED_STATE: for_service_team_review_required, - PREVIEW_READY_FOR_PREVIEW_STATE: for_ready_for_bulk, - PREVIEW_PROCESSING_BULK_STATE: for_processing_bulk, - PREVIEW_DEX_BULK_REVIEW_STATE: for_dex_bulk_review, - PREVIEW_DONE_STATE: for_done -} - - -def get_preview_state(issue, summary, sdk_statuses, all_sdks, any_sdks): - if summary.jira.status == config.STATUS_PROCESSING_REQUESTED: - state = PREVIEW_PROCESSING_REQUESTED_STATE - - elif summary.jira.status == config.STATUS_PROCESSING: - state = PREVIEW_PROCESSING_STATE - - elif summary.jira.status == config.STATUS_SERVICE_TEAM_FAILURE_INVESTIGATION: - state = PREVIEW_SERVICE_TEAM_FAILURE_INVESTIGATION_STATE - - elif summary.jira.status == config.STATUS_DEX_SUPPORT_REQUIRED: - state = PREVIEW_DEX_SUPPORT_REQUIRED_STATE - - elif summary.jira.status == config.STATUS_SERVICE_TEAM_REVIEW_REQUIRED: - state = PREVIEW_SERVICE_TEAM_REVIEW_REQUIRED_STATE - - elif summary.jira.status == config.STATUS_READY_FOR_PREVIEW: - state = PREVIEW_READY_FOR_PREVIEW_STATE - - elif summary.jira.status == config.STATUS_PROCESSING_BULK: - state = PREVIEW_PROCESSING_BULK_STATE - - elif summary.jira.status == config.STATUS_DEX_BULK_REVIEW: - state = PREVIEW_DEX_BULK_REVIEW_STATE - - elif summary.jira.status == config.STATUS_DONE: - state = PREVIEW_DONE_STATE - - else: - state = PREVIEW_NON_ACTIONABLE_STATE - - return state - - -def advise_on_preview_issue(issue_key, issue, summary): - text = "" - if summary.state in PREVIEW_HANDLERS and PREVIEW_HANDLERS[summary.state]: - handler = PREVIEW_HANDLERS[summary.state] - text = handler(issue_key, issue, summary) - - text = text + additional_checks(issue_key, issue, summary) - - return text - - -TRANSITION_TO_BACKLOG_SINCE_SPEC_PR_DECLINED = """\ -Automatically transitioned status back to '{status}'. The [spec diff pull request|{pr_url}] was {action}. Please revise your spec and generate a new spec artifact. Then update the spec version in your DEXREQ ticket and set the ticket status back to '{processing_requested_state}'. -""" - -TRANSITION_TO_READY_FOR_PREVIEW_SINCE_SPEC_PR_MERGED = """\ -Automatically transitioned status to '{status}'. The [spec diff pull request|{pr_url}] was {action}. 
-""" - - -def handle_transition_for_service_team_review_required_preconditions_check(issue_key, issue, summary): - return True, "" - - -def handle_transition_for_service_team_review_required(issue_key, issue, summary): - return handle_transition_for_spec_review_pr(issue_key, issue, summary, config.STATUS_READY_FOR_PREVIEW, TRANSITION_TO_READY_FOR_PREVIEW_SINCE_SPEC_PR_MERGED, - TRANSITION_TO_BACKLOG_SINCE_SPEC_PR_DECLINED, handle_transition_for_service_team_review_required_preconditions_check) - - -PREVIEW_TRANSITION_HANDLERS = { - PREVIEW_PROCESSING_STATE: handle_transition_for_processing, - PREVIEW_SERVICE_TEAM_REVIEW_REQUIRED_STATE: handle_transition_for_service_team_review_required, - PREVIEW_PROCESSING_BULK_STATE: handle_transition_for_processing -} - - -def handle_automated_preview_transitions(issue_key, issue, summary): - return execute_appropriate_transition_handler(PREVIEW_TRANSITION_HANDLERS, issue_key, issue, summary) - - -# -# Show Preview Calendar -# -def show_preview_calendar(count=10): - base_release_date, base_release_ga_number, release_overrides = \ - process_date_override(BULK_PREVIEW_DATE_OVERRIDE, PREVIEW_RELEASE_DATE_CADENCE_START_DATE, 140, - '--bulk-preview-date-overrides') - - # since preview releases are independent of GA date , base_release_ga_number is not used anywhere. - now_datetime_utc = pytz.utc.localize(datetime.datetime.utcnow()) - now_datetime = now_datetime_utc.astimezone(PACIFIC_TIME_ZONE) - now = now_datetime.date() - release_date = base_release_date - - print("| SDK/CLI Preview Release Date |") - print("| ---------------------------- |") - - while count > 0: - release_date = find_next_matching_date(release_date, base_release_date, - release_overrides, PREVIEW_RELEASE_DATE_CADENCE_IN_DAYS, - "preview release") - - if release_date >= now: - print("| {} |".format(release_date)) - count -= 1 - - release_date += datetime.timedelta(1) diff --git a/scripts/auto_gen_utils/autogen_issue_advisor_public.py b/scripts/auto_gen_utils/autogen_issue_advisor_public.py deleted file mode 100644 index 2132855376..0000000000 --- a/scripts/auto_gen_utils/autogen_issue_advisor_public.py +++ /dev/null @@ -1,1342 +0,0 @@ -import datetime -import pytz -import six -import sys - -import config -import util -import parse - -import shared.bitbucket_utils - -from autogen_issue_advisor_shared import PACIFIC_TIME_ZONE -from autogen_issue_advisor_shared import get_successful_pull_requests_text -from autogen_issue_advisor_shared import printv -from autogen_issue_advisor_shared import PR_LINK_TYPE -from autogen_issue_advisor_shared import get_failed_links_text -from autogen_issue_advisor_shared import DEFAULT_JIRA_ISSUE_FIELDS, CUSTOM_JIRA_ISSUE_FIELDS -from autogen_issue_advisor_shared import handle_transition_for_processing -from autogen_issue_advisor_shared import execute_appropriate_handler -from autogen_issue_advisor_shared import execute_appropriate_transition_handler -from autogen_issue_advisor_shared import handle_transition_for_spec_review_pr -from autogen_issue_advisor_shared import process_all_spec_change_prs -from autogen_issue_advisor_shared import get_spec_review_pr -from autogen_issue_advisor_shared import BitbucketBuilds -from autogen_issue_advisor_shared import BitbucketBuildChecks -from autogen_issue_advisor_shared import ServiceTeamMasterPrs -from autogen_issue_advisor_shared import find_next_matching_date, process_date_override - -# Format: comma-separated, with each part being either "+YYYY-MM-DD", "-YYYY-MM-DD", or "=YYYY-MM-DD". 
-# Starting with "=" sets the base date of the one-week cadence. If not set, it's 2022-06-14. -# Example "=2018-10-11,+2018-10-24,-2018-10-25" means -# "do releases every week, starting with 2018-10-11, but don't do one on October 25th, do it on October 24th instead" -# Release dates are usually on Tuesday. -PUBLIC_RELEASE_DATE_OVERRIDE = None -PUBLIC_RELEASE_DATE_CADENCE_START_DATE = "2022-08-09" -PUBLIC_RELEASE_DATE_CADENCE_IN_DAYS = 7 -PUBLIC_RELEASE_DATE_GA_NUMBER = 218 # GA 218 on 2022-08-09 - -# Format: comma-separated, with each part being either "+YYYY-MM-DD", "-YYYY-MM-DD", or "=YYYY-MM-DD". -# Starting with "=" sets the base date of the one-week cadence. If not set, it's 2022-06-07. -# Example "=2018-10-05,+2018-10-18,-2018-10-19" means -# "do RR cut-off every week, starting with 2018-10-05, but don't do one on October 19th, do it on October 18th instead" -# Cut-off dates are usually Monday (EOD), 8 days before the corresponding release date. -RELEASE_REQUESTED_CUT_OFF_DATE_OVERRIDE = None -RELEASE_REQUESTED_CUT_OFF_DATE_CADENCE_START_DATE = "2022-08-01" -RELEASE_REQUESTED_CUT_OFF_DATE_CADENCE_IN_DAYS = 7 - -# See https://confluence.oci.oraclecorp.com/plugins/gliffy/viewer.action?inline=false&pageId=60737311&attachmentId=64632788&name=DEXREQ%20Public%20Ticket%20Lifecycle&ceoid=64632759&key=~michross&lastPage=%2Fpages%2Fviewpage.action%3FpageId%3D60737311&imageUrl=%2Fdownload%2Fattachments%2F64632759%2FDEXREQ%2520Public%2520Ticket%2520Lifecycle.png%3Fversion%3D13%26modificationDate%3D1547155991054%26api%3Dv2&gonUrl=%2Fdownload%2Fattachments%2F64632759%2FDEXREQ%2520Public%2520Ticket%2520Lifecycle%3Fversion%3D13%26modificationDate%3D1547155990982%26api%3Dv2 - -PUBLIC_NON_ACTIONABLE_STATE = "PUBLIC_NON_ACTIONABLE_STATE" -PUBLIC_PROCESSING_REQUESTED_STATE = "PUBLIC_PROCESSING_REQUESTED_STATE" -PUBLIC_PROCESSING_STATE = "PUBLIC_PROCESSING_STATE" -PUBLIC_SERVICE_TEAM_FAILURE_INVESTIGATION_STATE = "PUBLIC_SERVICE_TEAM_FAILURE_INVESTIGATION_STATE" -PUBLIC_DEX_SUPPORT_REQUIRED_STATE = "PUBLIC_DEX_SUPPORT_REQUIRED_STATE" -PUBLIC_SERVICE_TEAM_WORK_REQUIRED_STATE = "PUBLIC_SERVICE_TEAM_WORK_REQUIRED_STATE" -PUBLIC_RELEASE_REQUESTED_STATE = "PUBLIC_RELEASE_REQUESTED_STATE" -PUBLIC_RELEASE_APPROVED_STATE = "PUBLIC_RELEASE_APPROVED_STATE" -PUBLIC_PROCESSING_BULK_STATE = "PUBLIC_PROCESSING_BULK_STATE" -PUBLIC_DEX_BULK_REVIEW_STATE = "PUBLIC_DEX_BULK_REVIEW_STATE" -PUBLIC_TO_DEPLOY_STATE = "PUBLIC_TO_DEPLOY_STATE" -PUBLIC_DONE_STATE = "PUBLIC_DONE_STATE" - -JIRA_URL_TEMPLATE = "https://jira.oci.oraclecorp.com/browse/{ticket}" - -PUBLIC_STATES = [ - PUBLIC_NON_ACTIONABLE_STATE, - PUBLIC_PROCESSING_REQUESTED_STATE, - PUBLIC_PROCESSING_STATE, - PUBLIC_SERVICE_TEAM_FAILURE_INVESTIGATION_STATE, - PUBLIC_DEX_SUPPORT_REQUIRED_STATE, - PUBLIC_SERVICE_TEAM_WORK_REQUIRED_STATE, - PUBLIC_RELEASE_REQUESTED_STATE, - PUBLIC_RELEASE_APPROVED_STATE, - PUBLIC_PROCESSING_BULK_STATE, - PUBLIC_DEX_BULK_REVIEW_STATE, - PUBLIC_TO_DEPLOY_STATE, - PUBLIC_DONE_STATE, -] - - -NON_ACTIONABLE_BECAUSE_OF_BACKLOG_TEMPLATE = """\ -[~{reporter}], the ticket status is set to '{backlog_status}'. For processing to begin, please set the ticket status to '{requested_status}'. 
-""" - - -def for_non_actionable(issue_key, issue, summary): - text = is_requested_ga_date_valid_comment(summary) - - if summary.jira.status == config.STATUS_BACKLOG: - - text = text + NON_ACTIONABLE_BECAUSE_OF_BACKLOG_TEMPLATE.format( - reporter=summary.jira.reporter, - backlog_status=config.STATUS_BACKLOG, - requested_status=config.STATUS_PROCESSING_REQUESTED) - - return text - - -SERVICE_TEAM_FAILURE_INVESTIGATION_TEMPLATE = """\ -[~{reporter}], the status of the ticket is set to '{ticket_status}', because generation of {an_sdk_several_sdks} has failed, as indicated by the SDK status {field_fields} set to '{sdk_state}'. For {this_sdk_these_sdks}, please look into why the generation failed by examining the build log. - -If you determine that something was wrong in your spec, please change your spec and generate a new spec artifact. Then update the spec version in this ticket and set the ticket status back to '{requested_status}'. - -If you think the error is unrelated to your change, or if you need help figuring this out, set the ticket status to '{dex_support_status}', and our on-call engineer will look at it within a day. - -The failed {sdk_is_sdks_are}: - -{fields}{failed_builds_text} - -([wiki|https://confluence.oci.oraclecorp.com/display/DEX/Requesting+a+preview+SDK+CLI#RequestingapreviewSDKCLI-Step4:Monitorticketstatusforautomatedupdatesandtakeactionifnecessary])""" - - -def for_service_team_failure_investigation(issue_key, issue, summary): - - # Check if provided GA date is a release date. Leave a comment if not - text = is_requested_ga_date_valid_comment(summary) - - failed = [(language, value) for language, value in summary.sdks.statuses.items() if value == config.CUSTOM_STATUS_FAILURE] - fields = "\n".join(["- {}".format(language) for language, value in failed]) - - failed_builds_text = get_failed_links_text(summary) or "" - - text = SERVICE_TEAM_FAILURE_INVESTIGATION_TEMPLATE.format( - reporter=summary.jira.reporter, - fields=fields, - ticket_status=config.STATUS_SERVICE_TEAM_FAILURE_INVESTIGATION, - sdk_state=config.CUSTOM_STATUS_FAILURE, - dex_support_status=config.STATUS_DEX_SUPPORT_REQUIRED, - requested_status=config.STATUS_PROCESSING_REQUESTED, - an_sdk_several_sdks="an SDK" if len(failed) == 1 else "several SDKs", - field_fields="field" if len(failed) == 1 else "fields", - this_sdk_these_sdks="this SDK" if len(failed) == 1 else "these SDKs", - sdk_is_sdks_are="SDK is" if len(failed) == 1 else "SDKs are", - failed_builds_text=failed_builds_text) - - return text - - -def get_service_team_master_prs(issue, summary): - prs_per_tool = summary.pull_requests.on_service_team.master.prs_per_tool - - master_prs = ServiceTeamMasterPrs( - merged=[], - approved=[], - approved_could_be_bypassed=[], - opened=[], - opened_could_be_bypassed=[], - missing=[], - bypassed=[] - ) - - if util.is_cli_pr_required(issue): - master_prs.missing = [config.CLI_NAME] - - # Bypassed tools are not missing - for tool_name in master_prs.missing: - if config.BYPASS_CHECK_PR_MASTER_PREFIX + tool_name in summary.checks.bypass: - master_prs.bypassed.append(tool_name) - - for tool_name in master_prs.bypassed: - if tool_name in master_prs.missing: - master_prs.missing.remove(tool_name) - - for tool_name, pr_by_status in six.iteritems(prs_per_tool): - for pr_and_url in pr_by_status.open: - if config.BYPASS_CHECK_PR_MASTER_PREFIX + tool_name in summary.checks.bypass: - master_prs.opened_could_be_bypassed.append(pr_and_url) - else: - master_prs.opened.append(pr_and_url) - if tool_name in master_prs.missing: - 
master_prs.missing.remove(tool_name)
-        for pr_and_url in pr_by_status.approved:
-            if config.BYPASS_CHECK_PR_MASTER_PREFIX + tool_name in summary.checks.bypass:
-                master_prs.approved_could_be_bypassed.append(pr_and_url)
-            else:
-                master_prs.approved.append(pr_and_url)
-                if tool_name in master_prs.missing:
-                    master_prs.missing.remove(tool_name)
-        for pr_and_url in pr_by_status.merged:
-            master_prs.merged.append(pr_and_url)
-            if tool_name in master_prs.missing:
-                master_prs.missing.remove(tool_name)
-
-    # Bypassed tools are not opened
-    # (use the PR's own tool name here; 'tool_name' still holds the last value
-    # from the per-tool loop above)
-    for pr_and_url in master_prs.opened:
-        if config.BYPASS_CHECK_PR_MASTER_PREFIX + pr_and_url.tool_name in summary.checks.bypass:
-            master_prs.bypassed.append(pr_and_url.tool_name)
-
-    for tool_name in master_prs.bypassed:
-        master_prs.opened = [pr_and_url for pr_and_url in master_prs.opened if pr_and_url.tool_name != tool_name]
-
-    return master_prs
-
-
-def get_bitbucket_builds(approved_master_prs):
-    builds = BitbucketBuilds(
-        failed_master_pr_builds={},
-        successful_master_pr_builds={},
-        in_progress_master_pr_builds={}
-    )
-
-    for tool in util.get_jira_reportable_tool_names():
-        builds.failed_master_pr_builds[tool] = []
-        builds.successful_master_pr_builds[tool] = []
-        builds.in_progress_master_pr_builds[tool] = []
-
-    for pr_and_url in approved_master_prs:
-        printv("Checking for successful {} builds in {}".format(pr_and_url.tool_name, pr_and_url.url))
-        pr = pr_and_url.pr
-        build_status = shared.bitbucket_utils.get_bitbucket_build_status_for_pr(pr)
-        build_status['pr'] = pr_and_url.url
-        printv(build_status)
-        for build in build_status['values']:
-            build_state = build['state']
-            printv("\t{} - {}".format(build_state, build['url']))
-            if build_state == "SUCCESSFUL":
-                builds.successful_master_pr_builds[pr_and_url.tool_name].append(build_status)
-            elif build_state == "FAILED":
-                builds.failed_master_pr_builds[pr_and_url.tool_name].append(build_status)
-            elif build_state == "INPROGRESS":
-                builds.in_progress_master_pr_builds[pr_and_url.tool_name].append(build_status)
-
-    return builds
-
-
-def check_bitbuckets_builds(summary, builds, master_prs):
-    tools_to_check = []
-    tools_to_check.extend(master_prs.missing)
-    tools_to_check.extend([pr_and_url.tool_name for pr_and_url in master_prs.approved])
-    tools_to_check.extend([pr_and_url.tool_name for pr_and_url in master_prs.opened])
-
-    checks = BitbucketBuildChecks(
-        all_required_prs_have_successful_builds=True,
-        all_required_prs_have_no_failed_builds=True,
-    )
-
-    printv("Looking for builds for tools {}".format(tools_to_check))
-
-    for tool_name in tools_to_check:
-        if config.BYPASS_CHECK_PR_MASTER_PREFIX + tool_name in summary.checks.bypass:
-            # Bypassed -- if we don't need a PR at all, we don't need passing builds
-            continue
-        if config.BYPASS_CHECK_PR_BUILD_MASTER_PREFIX + tool_name in summary.checks.bypass:
-            # Bypassed
-            continue
-        if not builds.successful_master_pr_builds[tool_name]:
-            # No successful builds
-            printv("successful_master_pr_builds[{}] == false".format(tool_name))
-            checks.all_required_prs_have_successful_builds = False
-            continue
-
-        if builds.failed_master_pr_builds[tool_name]:
-            # Some builds failed
-            checks.all_required_prs_have_no_failed_builds = False
-
-    return checks
-
-
-def zero_or_more_string(text):
-    return text
-
-
-zero_or_more_string.pattern = r".*"
-PR_REQUEST_LINK_TEMPLATE =
'https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/{repo}/compare?targetBranch=refs%2Fheads%2F{target_branch}&sourceBranch=refs%2Fheads%2F{generated_branch}&title={title}&description={description}&targetRepoId={target_repo_id:z}' - - -def get_cherry_pick_target_text(summary, tool_name): - last_build = summary.builds.last[tool_name] - if last_build: - if last_build.successful and last_build.generation_successful and last_build.build_successful: - if last_build.links: - for l in last_build.links: - if l.link_type == PR_LINK_TYPE: - url = l.url - - result = parse.parse(PR_REQUEST_LINK_TEMPLATE, url, {"z": zero_or_more_string}) - - if result and result["generated_branch"] and result["repo"]: - return " into this branch: [{generated_branch}|https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/{repo}/browse?at=refs%2Fheads%2F{generated_branch}]".format( - generated_branch=result["generated_branch"], - repo=result["repo"]) - - return " to master" - - -SERVICE_TEAM_WORK_REQUIRED_STATE_TEMPLATE = """\ -The SDKs and the CLI have been generated. It is now the job of the service team to review the output, as well as cherry-pick the recordings for this feature from the preview branch of the CLI into the generated public CLI branch linked below. - -[~{reporter}], please do the following: - -1. If you had manual changes to the CLI code, [cherry-pick|https://confluence.oci.oraclecorp.com/pages/viewpage.action?spaceKey=DEX&title=How+to+cherry-pick+changes+from+preview+branch+to+master+branch] the CLI changes from [preview|https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/python-cli/browse?at=refs%2Fheads%2Fpreview]{cli_branch_text}. Create a pull request from your CLI branch into the CLI master branch. You don't have to create a pull request if you didn't have manual changes. It is expected that the service team would have tested the generated commands in the preview generation and design review process. - -2. Please update CLI ChangeLog Entry field. More info about CLI change log can be found here: [CLI ChangeLog|https://confluence.oci.oraclecorp.com/pages/viewpage.action?spaceKey=DEX&title=Directives+for+writing+CHANGELOG+for+OCI+CLI] - -3. {review_text} -""" - - -REVIEW_GENERATED_CODE_TEXT = """\ -Check that the generated SDKs contain (1) your entire change, and (2) nothing unwanted. If you are satisfied, then set the '{ready_status}' status for this ticket. If you find that something is missing from your change or there is something that should not be there, then please revise your spec. Generate a new spec artifact, then update the spec version in this ticket and set the ticket status back to '{requested_status}'.""" - - -REVIEW_SPEC_DIFF_TEXT = """\ -Review the [spec change pull request|{pr_url}] to make sure it contains (1) your entire change, and (2) nothing unwanted. If you are satisfied, then approve the spec pull request and set the '{ready_status}' status for this ticket. If you find that something is missing from your change or there is something that should not be there, then please set the spec pull request to 'Needs Work' and revise your spec. Generate a new spec artifact, then update the spec version in this ticket and set the ticket status back to '{requested_status}'. 
-
-Spec change pull request to review: {pr_url}"""
-
-
-def get_public_review_text(issue_key, reporter):
-    dexreq_pr_url = None
-    dexreq_pr = get_spec_review_pr(issue_key)
-
-    if dexreq_pr:
-        hrefs = util.deep_get(dexreq_pr, 'links.self')
-        if hrefs:
-            dexreq_pr_url = util.deep_get(hrefs[0], 'href')
-
-    if dexreq_pr_url:
-        try:
-            dexreq_pr_id = dexreq_pr['id']
-            util.add_reviewer_to_pull_request(dexreq_pr_id, config.DEXREQ_REPO_NAME, [reporter])
-        except:  # noqa: ignore=E722
-            print("Could not add '{}' as reviewer".format(reporter))
-            print('Error: {}. {}, line: {}'.format(sys.exc_info()[0],
-                                                   sys.exc_info()[1],
-                                                   sys.exc_info()[2].tb_lineno))
-        return REVIEW_SPEC_DIFF_TEXT.format(ready_status=config.STATUS_RELEASE_REQUESTED,
-                                            requested_status=config.STATUS_PROCESSING_REQUESTED,
-                                            pr_url=dexreq_pr_url), dexreq_pr, dexreq_pr_url
-
-    return REVIEW_GENERATED_CODE_TEXT.format(ready_status=config.STATUS_RELEASE_REQUESTED,
-                                             requested_status=config.STATUS_PROCESSING_REQUESTED), None, None
-
-
-SERVICE_TEAM_WORK_REQUIRED_STATE_DATE_TEMPLATE = """\
-According to the '{field}' field of this ticket, you are targeting a public SDK/CLI release of {ga_date}. If that is not correct, please update the '{field}' field with the right release date. In order to release on that date, as per the [Self-Service Testing and Development Calendar|https://confluence.oci.oraclecorp.com/display/DEX/Self-Service+Testing+and+Development#Self-ServiceTestingandDevelopment-Calendar], you need to complete the steps above and have the ticket in '{requested_state}' by end of day {release_cutoff_date} (Seattle time)."""

-SERVICE_TEAM_WORK_REQUIRED_STATE_APPROVED_PRS_TEMPLATE = """\
-The following pull {request_requests_sop} to master {is_are} currently open for this ticket, and since {it_they} {has_have} passed validation and {has_have} at least one approval{optional_each}, {it_they} {is_are} ready for the SDK/CLI team's review once the ticket transitions to '{status}':
-
-{links}"""
-
-SERVICE_TEAM_WORK_REQUIRED_STATE_BUILD_PROBLEM_PRS_TEMPLATE = """\
-The following pull {request_requests_sop} to master {is_are} currently open for this ticket, but either {doesnt_dont} have{a_opt} successful validation {build_builds_sop} yet, or {its_their} validation {build_builds_sop} failed:
-
-{links}"""
-
-
-SERVICE_TEAM_WORK_REQUIRED_STATE_OPEN_PRS_TEMPLATE = """\
-The following pull {request_requests_sop} to master {is_are} currently open for this ticket, but {has_have} not yet received approval from a service team member:
-
-{links}"""
-
-
-SERVICE_TEAM_WORK_REQUIRED_STATE_OPEN_PRS_COULD_BE_BYPASSED_TEMPLATE = """\
-The following pull {request_requests_sop} to master {is_are} currently open for this ticket, but {has_have} not yet received approval from a service team member. {cap_It_They} could be bypassed because of {its_their} bypass label, but since you opened {this_these} pull {request_requests_sop}, we will review {it_them} after one of your team members has approved {it_them}. If you don't want to have {this_these} pull {request_requests_sop} merged, please decline {it_them}.
-
-{links}"""
-
-
-SERVICE_TEAM_WORK_REQUIRED_STATE_APPROVED_PRS_COULD_BE_BYPASSED_TEMPLATE = """\
-The following pull {request_requests_sop} to master {is_are} currently open for this ticket, and {has_have} received approval from a service team member. {cap_It_They} could be bypassed because of {its_their} bypass label, but since you opened {this_these} pull {request_requests_sop}, we will review {it_them} now.
If you don't want to have {this_these} pull {request_requests_sop} merged, please decline {it_them}.
-
-{links}"""
-
-
-SERVICE_TEAM_WORK_REQUIRED_STATE_MISSING_PRS_TEMPLATE = """\
-Still missing {is_an_are} open pull {request_requests_sop} to master for:
-
-{missing}"""
-
-
-SERVICE_TEAM_WORK_REQUIRED_STATE_BYPASSED_PRS_TEMPLATE = """\
-The following {tool_tools} {does_do} not require an open pull request to master because of {its_their} bypass {label_labels}:
-
-{bypassed}"""
-
-CLI_DESIGN_REVIEW_REQUIRED_TEMPLATE = """\
-The following CLI Design Review {ticket_tickets_sop} currently open for this ticket; please work with CLI Support to resolve the ticket.
-
-{links}"""
-
-CLI_CHANGELOG_REQUIRED_TEXT = """\
-The CLI ChangeLog Entry field is empty. Please update the ticket with a valid [CLI ChangeLog|https://confluence.oci.oraclecorp.com/pages/viewpage.action?spaceKey=DEX&title=Directives+for+writing+CHANGELOG+for+OCI+CLI].
-"""
-
-
-def parse_date(date):
-    try:
-        return datetime.datetime.strptime(date, '%Y-%m-%d').date()
-    except ValueError:
-        pass
-    try:
-        return datetime.datetime.strptime(date, '%m/%d/%Y').date()
-    except ValueError:
-        pass
-    try:
-        return datetime.datetime.strptime(date, '%d/%b/%Y').date()
-    except ValueError:
-        pass
-    try:
-        return datetime.datetime.strptime(date, '%b/%d/%Y').date()
-    except ValueError:
-        pass
-
-    # 2-digit years
-    try:
-        return datetime.datetime.strptime(date, '%y-%m-%d').date()
-    except ValueError:
-        pass
-    try:
-        return datetime.datetime.strptime(date, '%m/%d/%y').date()
-    except ValueError:
-        pass
-    try:
-        return datetime.datetime.strptime(date, '%d/%b/%y').date()
-    except ValueError:
-        pass
-    try:
-        return datetime.datetime.strptime(date, '%b/%d/%y').date()
-    except ValueError:
-        pass
-
-    raise ValueError('SDK/CLI GA Date "{}" was not in format "YYYY-MM-DD" (preferred), "MM/DD/YYYY", "DD/MON/YYYY", or "MON/DD/YYYY"'.format(date))
-
-
-FEATURE_API_IS_PUBLICLY_AVAILABLE_ACKNOWLEDGED = """\
-
-
-You have acknowledged that the feature API is publicly available and not behind a whitelist, meaning once the SDKs and the CLI are publicly released, it is immediately ready to receive traffic from all customers.
-"""
-
-FEATURE_API_IS_PUBLICLY_AVAILABLE_NOT_ACKNOWLEDGED = """\
-
-
-You have not acknowledged that the feature API is publicly available and not behind a whitelist, meaning once the SDKs and the CLI are publicly released, it is immediately ready to receive traffic from all customers. We do not release features that are not publicly available or that are behind whitelists.
-
-[~{reporter}], this release cannot proceed until you set the value of the '{field}' field to 'Yes'.
-"""
-
-FEATURE_API_IS_PUBLICLY_AVAILABLE_NOT_ACKNOWLEDGED_SKIPPED = """\
-
-
-You have not acknowledged that the feature API is publicly available and not behind a whitelist by setting the '{field}' field to 'Yes', meaning once the SDKs and the CLI are publicly released, it is immediately ready to receive traffic from all customers. We do not release features that are not publicly available or that are behind whitelists.
-
-We are skipping this check, though, because of the '{label}' label and will proceed with the release anyway once all other requirements have been met.
-""" - - -def for_service_team_work_required(issue_key, issue, summary): - review_text, dexreq_pr, dexreq_pr_url = get_public_review_text(issue_key, summary.jira.reporter) - pr_text = get_successful_pull_requests_text(summary) - if pr_text: - if dexreq_pr_url: - pr_text = "\nThe links below allow you to examine the effect of your change on the generated source. All you have to do is review the above [spec change pull request|{pr_url}]. The SDK/CLI team will do all the work to include the change in the weekly bulk public release.\n\n".format(pr_url=dexreq_pr_url) + pr_text - else: - pr_text = "\nThe links below allow you to examine the effects of your change on the generated source. Just look at the diff and verify that the generated SDKs contain the change you expect. The SDK/CLI team will do all the work to include the change in the weekly bulk public release.\n\n" + pr_text - else: - pr_text = "\n" - - cli_branch_text = get_cherry_pick_target_text(summary, config.CLI_NAME) - - # Check if provided GA date is a release date. Leave a comment if not - text = is_requested_ga_date_valid_comment(summary) - - text += SERVICE_TEAM_WORK_REQUIRED_STATE_TEMPLATE.format( - status=config.STATUS_RELEASE_REQUESTED, - reporter=summary.jira.reporter, - cli_branch_text=cli_branch_text, - review_text=review_text) - - if pr_text: - text = text + "\n\n" + pr_text - - if summary.jira.sdk_cli_ga_date: - ga_date = parse_date(summary.jira.sdk_cli_ga_date) - - release_cutoff_date = get_previous_cut_off_date_before_public_release(issue_key, ga_date) - - text = text + "\n\n" + SERVICE_TEAM_WORK_REQUIRED_STATE_DATE_TEMPLATE.format(field=config.CUSTOM_FIELD_NAME_FOR_ID[config.CUSTOM_FIELD_ID_SDK_CLI_GA_DATE], - ga_date=ga_date.strftime('%A, %d %B %Y'), release_cutoff_date=release_cutoff_date.strftime('%A, %d %B %Y'), requested_state=config.STATUS_RELEASE_REQUESTED) - - if summary.pull_requests.on_service_team.master.prs_per_tool: - master_prs = get_service_team_master_prs(issue, summary) - - builds = get_bitbucket_builds(master_prs.approved) - - checks = check_bitbuckets_builds(summary, builds, master_prs) - - build_problem_pr = [] - build_problem_pr_bypassed = [] - build_problem_pr_unnecessary = [] - if not checks.all_required_prs_have_successful_builds or not checks.all_required_prs_have_no_failed_builds: - printv("Checking which approved builds have build problems") - for pr_and_url in master_prs.approved + master_prs.approved_could_be_bypassed: - build_failed = False - for b in builds.failed_master_pr_builds[pr_and_url.tool_name]: - if b['pr'] == pr_and_url.url: - build_failed = True - break - - build_succeeded = False - for b in builds.successful_master_pr_builds[pr_and_url.tool_name]: - if b['pr'] == pr_and_url.url: - build_succeeded = True - break - - if build_failed or not build_succeeded: - if (config.BYPASS_CHECK_PR_BUILD_MASTER_PREFIX + pr_and_url.tool_name) in summary.checks.bypass: - build_problem_pr_bypassed.append(pr_and_url) - elif (config.BYPASS_CHECK_PR_MASTER_PREFIX + pr_and_url.tool_name) in summary.checks.bypass: - build_problem_pr_unnecessary.append(pr_and_url) - else: - build_problem_pr.append(pr_and_url) - - # Remove builds with a problem from the approved builds - for pr_and_url in build_problem_pr: - if pr_and_url in master_prs.approved: - master_prs.approved.remove(pr_and_url) - - if master_prs.approved or build_problem_pr or master_prs.opened or master_prs.missing or master_prs.bypassed or master_prs.opened_could_be_bypassed or master_prs.approved_could_be_bypassed: - text = text + 
"\n\n{panel:title=Master PR Status}" - if master_prs.approved: - text = text + "\n\n" + SERVICE_TEAM_WORK_REQUIRED_STATE_APPROVED_PRS_TEMPLATE.format( - status=config.STATUS_RELEASE_REQUESTED, - request_requests_sop="request" if len(master_prs.approved) == 1 else "requests", - is_are="is" if len(master_prs.approved) == 1 else "are", - has_have="has" if len(master_prs.approved) == 1 else "have", - it_they="it" if len(master_prs.approved) == 1 else "they", - optional_each="" if len(master_prs.approved) == 1 else " each", - links="\n".join(["- {}: {}{}".format(pr_and_url.tool_name, pr_and_url.url, " (failed, but the check was bypassed)" if pr_and_url in build_problem_pr_bypassed else "") for pr_and_url in master_prs.approved]), - ) - - if master_prs.approved_could_be_bypassed: - text = text + "\n\n" + SERVICE_TEAM_WORK_REQUIRED_STATE_APPROVED_PRS_COULD_BE_BYPASSED_TEMPLATE.format( - request_requests_sop="request" if len(master_prs.approved_could_be_bypassed) == 1 else "requests", - is_are="is" if len(master_prs.approved_could_be_bypassed) == 1 else "are", - has_have="has" if len(master_prs.approved_could_be_bypassed) == 1 else "have", - cap_It_They="It" if len(master_prs.approved_could_be_bypassed) == 1 else "They", - this_these="this" if len(master_prs.approved_could_be_bypassed) == 1 else "these", - it_them="it" if len(master_prs.approved_could_be_bypassed) == 1 else "them", - its_their="its" if len(master_prs.approved_could_be_bypassed) == 1 else "their", - links="\n".join(["- {}: {}{}{}".format(pr_and_url.tool_name, pr_and_url.url, - " (failed, but the check was bypassed)" if pr_and_url in build_problem_pr_bypassed else "", - " (build failed)" if pr_and_url in build_problem_pr + build_problem_pr_unnecessary else "") for pr_and_url in master_prs.approved_could_be_bypassed]), - ) - - if build_problem_pr: - text = text + "\n\n" + SERVICE_TEAM_WORK_REQUIRED_STATE_BUILD_PROBLEM_PRS_TEMPLATE.format( - request_requests_sop="request" if len(build_problem_pr) == 1 else "requests", - is_are="is" if len(build_problem_pr) == 1 else "are", - doesnt_dont="doesn't" if len(build_problem_pr) == 1 else "don't", - a_opt=" a" if len(build_problem_pr) == 1 else "", - build_builds_sop="build" if len(build_problem_pr) == 1 else "builds", - its_their="its" if len(build_problem_pr) == 1 else "their", - links="\n".join(["- {}: {}".format(pr_and_url.tool_name, pr_and_url.url) for pr_and_url in build_problem_pr]), - ) - - if master_prs.opened: - text = text + "\n\n" + SERVICE_TEAM_WORK_REQUIRED_STATE_OPEN_PRS_TEMPLATE.format( - request_requests_sop="request" if len(master_prs.opened) == 1 else "requests", - is_are="is" if len(master_prs.opened) == 1 else "are", - has_have="has" if len(master_prs.opened) == 1 else "have", - links="\n".join(["- {}: {}".format(pr_and_url.tool_name, pr_and_url.url) for pr_and_url in master_prs.opened]), - ) - - if master_prs.opened_could_be_bypassed: - text = text + "\n\n" + SERVICE_TEAM_WORK_REQUIRED_STATE_OPEN_PRS_COULD_BE_BYPASSED_TEMPLATE.format( - request_requests_sop="request" if len(master_prs.opened_could_be_bypassed) == 1 else "requests", - is_are="is" if len(master_prs.opened_could_be_bypassed) == 1 else "are", - has_have="has" if len(master_prs.opened_could_be_bypassed) == 1 else "have", - cap_It_They="It" if len(master_prs.opened_could_be_bypassed) == 1 else "They", - this_these="this" if len(master_prs.opened_could_be_bypassed) == 1 else "these", - it_them="it" if len(master_prs.opened_could_be_bypassed) == 1 else "them", - its_their="its" if 
len(master_prs.opened_could_be_bypassed) == 1 else "their", - links="\n".join(["- {}: {}".format(pr_and_url.tool_name, pr_and_url.url) for pr_and_url in master_prs.opened_could_be_bypassed]), - ) - - if master_prs.missing: - text = text + "\n\n" + SERVICE_TEAM_WORK_REQUIRED_STATE_MISSING_PRS_TEMPLATE.format( - request_requests_sop="request" if len(master_prs.missing) == 1 else "requests", - is_an_are="is an" if len(master_prs.missing) == 1 else "are", - missing="\n".join(["- {}".format(tool_name) for tool_name in master_prs.missing]) - ) - - if master_prs.bypassed: - text = text + "\n\n" + SERVICE_TEAM_WORK_REQUIRED_STATE_BYPASSED_PRS_TEMPLATE.format( - tool_tools="tool" if len(master_prs.bypassed) == 1 else "tools", - does_do="does" if len(master_prs.bypassed) == 1 else "do", - its_their="its" if len(master_prs.bypassed) == 1 else "their", - label_labels="label" if len(master_prs.bypassed) == 1 else "labels", - bypassed="\n".join(["- {} (label {}{})".format(tool_name, config.BYPASS_CHECK_PR_MASTER_PREFIX, tool_name) for tool_name in master_prs.bypassed]) - ) - text = text + "{panel}" - - if summary.jira.feature_api_is_publicly_available_and_unwhitelisted_in_prod == "Yes": - text = text + FEATURE_API_IS_PUBLICLY_AVAILABLE_ACKNOWLEDGED - else: - if config.BYPASS_CHECK_API_PUBLICLY_AVAILABLE in summary.checks.bypass: - text = text + FEATURE_API_IS_PUBLICLY_AVAILABLE_NOT_ACKNOWLEDGED_SKIPPED.format( - label=config.BYPASS_CHECK_API_PUBLICLY_AVAILABLE, - field=config.CUSTOM_FIELD_NAME_FOR_ID[config.CUSTOM_FIELD_ID_FEATURE_API_IS_PUBLICLY_AVAILABLE]) - else: - text = text + FEATURE_API_IS_PUBLICLY_AVAILABLE_NOT_ACKNOWLEDGED.format(reporter=summary.jira.reporter, - field=config.CUSTOM_FIELD_NAME_FOR_ID[config.CUSTOM_FIELD_ID_FEATURE_API_IS_PUBLICLY_AVAILABLE]) - - if summary.cli.pending_design_reviews: - if config.BYPASS_CLI_DESIGN_REVIEW_CHECK not in summary.checks.bypass: - text = text + "\n\n" + CLI_DESIGN_REVIEW_REQUIRED_TEMPLATE.format( - ticket_tickets_sop="ticket is" if len(summary.cli.pending_design_reviews) == 1 else "tickets are", - links="\n".join(JIRA_URL_TEMPLATE.format(ticket=key) for key in summary.cli.pending_design_reviews) - ) - else: - printv("Bypassing CLI Design review check for: " + issue.key) - else: - printv("No pending CLI Design review for: " + issue.key) - - return text - - -PROCESSING_TEMPLATE = """\ -The ticket is being processed. Please wait for the automation to produce results. -""" - -PARTIALLY_PROCESSED_TEMPLATE = """\ -The ticket is being processed, and some SDKs have already been generated. - -Already done: -{fields}""" - - -def for_processing(issue_key, issue, summary): - - # Check if provided GA date is a release date. Leave a comment if not - text = is_requested_ga_date_valid_comment(summary) - - fields = "\n".join(["- {}".format(language) for language, value in summary.sdks.statuses.items() - if value in [config.CUSTOM_STATUS_FAILURE, config.CUSTOM_STATUS_SUCCESS, config.CUSTOM_STATUS_DONE]]) - if fields: - return text + PARTIALLY_PROCESSED_TEMPLATE.format(fields=fields, reporter=summary.jira.reporter) - else: - return text + PROCESSING_TEMPLATE.format(reporter=summary.jira.reporter) - - -PARTIALLY_MERGED_TEMPLATE = """\ -The ticket is being processed, and some SDKs have already been merged. - -Already merged: -{fields}""" - - -def for_processing_requested(issue_key, issue, summary): - - # Move ticket to Service Team Work Required and leave a comment if the provided GA date is incorrect. 
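-    # is_requested_ga_date_valid_comment() returns an empty string when the
-    # requested GA date falls on a valid release date, or when commenting on
-    # incorrect dates is disabled via config.DISABLE_COMMENT_INCORRECT_DATES.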
-    comment = is_requested_ga_date_valid_comment(summary)
-    if len(comment) > 0:
-        util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_SERVICE_TEAM_WORK_REQUIRED)
-        return comment
-
-    return "This ticket is ready for automated processing. Please wait for the automation to start."
-
-
-def get_release_requested_status_date(issue_key, summary):
-    # Find timestamp when status "Release Requested" was added
-    release_requested_datetime_pacific = None
-    if summary.jira and summary.jira.changelog:
-        for cl in summary.jira.changelog:
-            for item in cl.changed_items:
-                if item.field != "status":
-                    continue
-
-                # There was no old status, or "Release Requested" wasn't it
-                was_not_signed_off = not item.old or config.STATUS_RELEASE_REQUESTED != item.old
-                # AND
-                # There is a new status, and "Release Requested" is it
-                is_now_signed_off = item.new and config.STATUS_RELEASE_REQUESTED == item.new
-                if was_not_signed_off and is_now_signed_off:
-                    # config.STATUS_RELEASE_REQUESTED was added
-                    release_requested_datetime = pytz.utc.localize(datetime.datetime.strptime(cl.created, '%Y-%m-%dT%H:%M:%S.%f+0000'))
-
-                    # change this to Pacific time
-                    release_requested_datetime_pacific = release_requested_datetime.astimezone(PACIFIC_TIME_ZONE)
-
-    release_requested_date = None
-    if release_requested_datetime_pacific:
-        release_requested_date = release_requested_datetime_pacific.date()
-
-    printv("get_release_requested_status_date returns {}, based on Seattle time {}".format(release_requested_date, release_requested_datetime_pacific))
-
-    return release_requested_date
-
-
-def find_previous_matching_date(start_date, base_date, overrides, cadence_in_days, date_name):
-    date = start_date
-    while True:
-        printv("{}: {}".format(date_name, date))
-        date_string = date.isoformat()
-        if (date - base_date).days % cadence_in_days == 0:
-            if date_string in overrides and not overrides[date_string]:
-                # NOT on this day
-                printv("Elected to not have regular {} scheduled on: {}".format(date_name, date_string))
-                date -= datetime.timedelta(1)
-                continue
-            else:
-                # either no override set for this date, or it's a positive override
-                printv("Having regular {} scheduled on: {}".format(date_name, date_string))
-                break
-        if date_string in overrides and overrides[date_string]:
-            # cut-off on this day
-            printv("Elected to have special {} scheduled on: {}".format(date_name, date_string))
-            break
-
-        date -= datetime.timedelta(1)
-
-    return date
-
-
-def get_next_release_requested_cut_off_date(release_requested_date):
-    base_cut_off_date, ga_number, cut_off_overrides = process_date_override(RELEASE_REQUESTED_CUT_OFF_DATE_OVERRIDE,
-        RELEASE_REQUESTED_CUT_OFF_DATE_CADENCE_START_DATE, None, '--public-to-deploy-cut-off-date-overrides')
-
-    # Find the Monday after this date (if added on a Monday, that's ok).
-    # (We're requiring them to be in 'Release Requested' by end of Monday.)
-    release_requested_cut_off_date = find_next_matching_date(release_requested_date,
-        base_cut_off_date, cut_off_overrides, RELEASE_REQUESTED_CUT_OFF_DATE_CADENCE_IN_DAYS,
-        "'{}' cut-off".format(config.STATUS_RELEASE_REQUESTED))
-
-    return release_requested_cut_off_date
-
-
-def get_next_public_release_date_after_cut_off(release_requested_cut_off_date):
-    # Now release_requested_cut_off_date is the next 'Release Requested' cut-off date
-    # Add two days, because we'll never have the release day be the same as the
And our cut-off is EOD Monday, but the matching release day is not the - # next Tuesday, but the Tuesday after that. - release_date = release_requested_cut_off_date + datetime.timedelta(2) - - base_release_date, base_release_ga_number, release_overrides = process_date_override(PUBLIC_RELEASE_DATE_OVERRIDE, - PUBLIC_RELEASE_DATE_CADENCE_START_DATE, PUBLIC_RELEASE_DATE_GA_NUMBER, '--public-release-date-overrides') - - # Find the next release day after that cut-off. - release_date = find_next_matching_date(release_date, - base_release_date, release_overrides, PUBLIC_RELEASE_DATE_CADENCE_IN_DAYS, - "public release") - - print("Next public release after {} is scheduled for: {}".format(release_requested_cut_off_date, release_date)) - - return release_date - - -COMMENT_TEMPLATE_INCORRECT_DATES = """\ - -[~{reporter}] The provided SDK/CLI GA Date of {requested_date} is not a release date. The next closest release date would be {next_release_date}. Please update this field accordingly.""" - - -# Determines the next release date after the given date, inclusive. -# If the requested date is a release date, it should return it -def get_next_public_release_date_after_ga(requested_ga_date): - base_release_date, base_release_ga_number, release_overrides = process_date_override(PUBLIC_RELEASE_DATE_OVERRIDE, - PUBLIC_RELEASE_DATE_CADENCE_START_DATE, PUBLIC_RELEASE_DATE_GA_NUMBER, '--public-release-date-overrides') - - release_date = find_next_matching_date(requested_ga_date, - base_release_date, release_overrides, PUBLIC_RELEASE_DATE_CADENCE_IN_DAYS, - "Release Date") - - return release_date - - -def is_requested_ga_date_valid_comment(summary): - comment = "" - if summary.jira.sdk_cli_ga_date: - requested_ga_date = parse_date(summary.jira.sdk_cli_ga_date) - release_after_req_ga = get_next_public_release_date_after_ga(requested_ga_date) - - if release_after_req_ga != requested_ga_date: - comment = COMMENT_TEMPLATE_INCORRECT_DATES.format(reporter=summary.jira.reporter, requested_date=requested_ga_date, next_release_date=release_after_req_ga) + "\n\n" - - if config.DISABLE_COMMENT_INCORRECT_DATES: - printv("Disabled commenting on incorrect GA dates. Not Leaving comment: \n" + comment) - return "" - - return comment - - -def get_next_public_release_date_after_cut_off_for_issue(issue_key, summary): - # Find timestamp when "Release Requested" was set - release_requested_date = get_release_requested_status_date(issue_key, summary) - print("Status '{}' was set on issue '{}' on: {}".format(config.STATUS_RELEASE_REQUESTED, issue_key, release_requested_date)) - - if not release_requested_date: - # Couldn't figure out when status was set, decline to give a date - return None - - # Find the Monday after this date (if added on a Monday, that's ok). - # (We're requiring them to be in 'Release Requested' by end of Monday.) 
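-    # Worked example (assuming the default cadences above and no overrides):
-    # a ticket that entered 'Release Requested' on Tuesday 2022-08-02 hits the
-    # Monday 2022-08-08 cut-off, whose matching release date is Tuesday 2022-08-16.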
- release_requested_cut_off_date = get_next_release_requested_cut_off_date(release_requested_date) - - return get_next_public_release_date_after_cut_off(release_requested_cut_off_date) - - -def get_next_public_release_date(): - base_release_date, base_release_ga_number, release_overrides = process_date_override(PUBLIC_RELEASE_DATE_OVERRIDE, - PUBLIC_RELEASE_DATE_CADENCE_START_DATE, PUBLIC_RELEASE_DATE_GA_NUMBER, '--public-release-date-overrides') - - now_datetime_utc = pytz.utc.localize(datetime.datetime.utcnow()) - now_datetime = now_datetime_utc.astimezone(PACIFIC_TIME_ZONE) - now = now_datetime.date() - - release_date = find_next_matching_date(now, - base_release_date, release_overrides, PUBLIC_RELEASE_DATE_CADENCE_IN_DAYS, - "Release Date") - - return release_date - - -def get_previous_cut_off_date_before_public_release(issue_key, ga_date): - print("SDK/CLI GA Date field for issue '{}' is set to: '{}'".format(issue_key, ga_date)) - - base_release_date, base_release_ga_number, release_overrides = process_date_override(PUBLIC_RELEASE_DATE_OVERRIDE, - PUBLIC_RELEASE_DATE_CADENCE_START_DATE, PUBLIC_RELEASE_DATE_GA_NUMBER, '--public-release-date-overrides') - base_cut_off_date, ga_number, cut_off_overrides = process_date_override(RELEASE_REQUESTED_CUT_OFF_DATE_OVERRIDE, - RELEASE_REQUESTED_CUT_OFF_DATE_CADENCE_START_DATE, None, '--public-release-requested-cut-off-date-overrides') - - release_date = base_release_date - release_requested_cut_off_date = base_cut_off_date - ga_number = base_release_ga_number - - while release_date < ga_date: - release_date = find_next_matching_date(release_date, - base_release_date, release_overrides, PUBLIC_RELEASE_DATE_CADENCE_IN_DAYS, - "public release") - release_requested_cut_off_date = find_next_matching_date(release_requested_cut_off_date, - base_cut_off_date, cut_off_overrides, RELEASE_REQUESTED_CUT_OFF_DATE_CADENCE_IN_DAYS, - "public release requested cut-off") - - if release_date >= ga_date: - break - - release_date += datetime.timedelta(1) - release_requested_cut_off_date += datetime.timedelta(1) - ga_number += 1 - - release_requested_cut_off_date += datetime.timedelta(-1) - - print("{} cut-off day before public release on '{}' is scheduled for: '{}'".format(config.STATUS_RELEASE_REQUESTED, ga_date, release_requested_cut_off_date)) - - return release_requested_cut_off_date - - -def get_next_release_requested_cut_off_date_for_ga_date(release_requested_date, requested_ga_date): - while True: - printv("Find cut-off date for {}".format(release_requested_date)) - cut_off_date = get_next_release_requested_cut_off_date(release_requested_date) - printv("Cut-off date for {} is {}".format(release_requested_date, cut_off_date)) - printv("GA date for cut-off date {}".format(cut_off_date)) - ga_date = get_next_public_release_date_after_cut_off(cut_off_date) - printv("GA date for cut-off date {} is {}".format(cut_off_date, ga_date)) - - if ga_date >= requested_ga_date: - break - - release_requested_date = cut_off_date + datetime.timedelta(1) - - printv("Next cut-off date for desired GA {} is {} (actual GA {})".format(requested_ga_date, cut_off_date, ga_date)) - - return cut_off_date - - -RELEASE_REQUESTED_OPEN_PRS_TEMPLATE = """\ -The following pull {request_requests_sop} to master {is_are} currently open for this ticket and should now be reviewed by the SDK/CLI team: - -{links}""" - -RELEASE_REQUESTED_MERGED_PRS_TEMPLATE = """\ -The following pull {request_requests_sop} to master {has_have} already been merged: - -{links}""" - 
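-
-
-# Editor's note: illustrative sketch only, not part of the original module. The
-# cadence helpers used above (find_next_matching_date and friends) reduce to
-# modular date arithmetic once the "+YYYY-MM-DD"/"-YYYY-MM-DD"/"=YYYY-MM-DD"
-# override strings are stripped away; this hypothetical helper shows the
-# forward search with no overrides applied.
-def _next_cadence_date_no_overrides(start, base, cadence_in_days=7):
-    # First date on or after 'start' that falls on the cadence anchored at 'base',
-    # e.g. start=2022-08-10 with base=2022-08-09 (a Tuesday) yields 2022-08-16.
-    candidate = start
-    while (candidate - base).days % cadence_in_days != 0:
-        candidate += datetime.timedelta(1)
-    return candidate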
-RELEASE_REQUESTED_BYPASSED_PRS_TEMPLATE = """\ -The following {tool_tools} {does_do} not require a merged pull request to master because of {its_their} bypass {label_labels}: - -{links}""" - -RELEASE_REQUESTED_TEMPLATE = """\ -The service team has indicated that the change is ready for release. The SDK/CLI team will review any open pull requests. - -The next step is the UDX Go/No-Go meeting, after which the SDK/CLI team will merge pull requests and initiate the bulk public release.""" - - -RELEASE_REQUESTED_TEMPLATE_DATES_TEMPLATE = """\ - - -The public release that the changes will be included in is scheduled for {release_date}. The '{status}' cut-off for this release date is 11:59 PM {cut_off_date} (Seattle time). The '{status}' status was set on {status_date} (Seattle time). This ticket meets the '{status}' cut-off. If corresponding UDX and ORM tickets are approved, then the ticket will be released as part of the release on {release_date}.""" - - -RELEASE_REQUESTED_TEMPLATE_MISSED_CUT_OFF_WARNING_TEMPLATE = """\ - - -*[~{reporter}], you have missed the '{status}' cut-off for releasing on {requested_release_date}*, which was the date you requested using the '{field}' field in this ticket. The '{status}' cut-off date for this ticket was 11:59 PM {release_cutoff_date} (Seattle time), however the '{status}' status was set on {status_date}. Hence, the changes associated with this ticket cannot be released as requested.""" - - -RELEASE_REQUESTED_TEMPLATE_MISSED_CUT_OFF_WARNING_EXCEPTION_PROCESS_TEMPLATE = """\ - - -If you believe you have reasons for an exception to the cut-off date, in the hopes of still releasing on {requested_release_date}, please urgently look at the [Exception Process for the SDK/CLI|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=132774928#SDK/CLISelf-ServiceFrequentlyAskedQuestions-WhatistheexceptionprocessifIwanttoskipsomepartofSDK/CLIself-service,orGAwithoutSDK/CLI?].""" - - -def for_release_requested(issue_key, issue, summary): - text = RELEASE_REQUESTED_TEMPLATE - - next_ga_date = get_next_public_release_date_after_cut_off_for_issue(issue_key, summary) - - if summary.jira.sdk_cli_ga_date: - requested_ga_date = parse_date(summary.jira.sdk_cli_ga_date) - release_cutoff_date = get_previous_cut_off_date_before_public_release(issue_key, requested_ga_date) - else: - # Just assume they want the next GA. - requested_ga_date = next_ga_date - - ga_date_comment = is_requested_ga_date_valid_comment(summary) - - # Only leave these comments if requested date is a release date. Otherwise comments will not make sense. - # If the requested date is not a release date, a different comment has already been left. 
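# [Editor's note, added commentary] The branch below chooses exactly one message path:
# a non-empty ga_date_comment means the requested GA date is not a release date, so only
# the "please correct the field" comment is posted; otherwise, if the computed next_ga_date
# is later than the requested one, the cut-off was missed and the text gains a warning plus
# the exception-process pointer; failing that, the text simply confirms the upcoming
# release date.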
- if len(ga_date_comment) > 0: - text = ga_date_comment - elif next_ga_date: - release_requested_date = get_release_requested_status_date(issue_key, summary) - release_requested_cut_off_date = get_next_release_requested_cut_off_date_for_ga_date(release_requested_date, requested_ga_date) - - if next_ga_date > requested_ga_date: - text = text + RELEASE_REQUESTED_TEMPLATE_MISSED_CUT_OFF_WARNING_TEMPLATE.format( - reporter=summary.jira.reporter, - field=config.CUSTOM_FIELD_NAME_FOR_ID[config.CUSTOM_FIELD_ID_SDK_CLI_GA_DATE], - status=config.STATUS_RELEASE_REQUESTED, - requested_release_date=requested_ga_date.strftime('%A, %d %B %Y'), - release_cutoff_date=release_cutoff_date.strftime('%A, %d %B %Y'), - status_date=release_requested_date.strftime('%A, %d %B %Y')) - - text = text + RELEASE_REQUESTED_TEMPLATE_DATES_TEMPLATE.format( - status=config.STATUS_RELEASE_REQUESTED, - status_date=release_requested_date.strftime('%A, %d %B %Y'), - cut_off_date=release_requested_cut_off_date.strftime('%A, %d %B %Y'), - release_date=(next_ga_date if next_ga_date > requested_ga_date else requested_ga_date).strftime('%A, %d %B %Y')) - - if next_ga_date > requested_ga_date: - text = text + RELEASE_REQUESTED_TEMPLATE_MISSED_CUT_OFF_WARNING_EXCEPTION_PROCESS_TEMPLATE.format( - requested_release_date=requested_ga_date.strftime('%A, %d %B %Y')) - - master_prs = get_service_team_master_prs(issue, summary) - - # treat all opened PRs the same - master_prs.opened.extend(master_prs.opened_could_be_bypassed) - master_prs.opened.extend(master_prs.approved) - - if master_prs.merged or master_prs.opened or master_prs.bypassed: - text = text + "\n\n{panel:title=Master PR Status}" - - if master_prs.opened: - text = text + "\n\n" + RELEASE_REQUESTED_OPEN_PRS_TEMPLATE.format( - request_requests_sop="request" if len(master_prs.opened) == 1 else "requests", - is_are="is" if len(master_prs.opened) == 1 else "are", - links="\n".join(["- {}: {}".format(pr_and_url.tool_name, pr_and_url.url) for pr_and_url in master_prs.opened]), - ) - - if master_prs.merged: - text = text + "\n\n" + RELEASE_REQUESTED_MERGED_PRS_TEMPLATE.format( - request_requests_sop="request" if len(master_prs.merged) == 1 else "requests", - has_have="has" if len(master_prs.merged) == 1 else "have", - links="\n".join(["- {}: {}".format(pr_and_url.tool_name, pr_and_url.url) for pr_and_url in master_prs.merged]), - ) - - if master_prs.bypassed: - text = text + "\n\n" + RELEASE_REQUESTED_BYPASSED_PRS_TEMPLATE.format( - tool_tools="tool" if len(master_prs.bypassed) == 1 else "tools", - does_do="does" if len(master_prs.bypassed) == 1 else "do", - its_their="its" if len(master_prs.bypassed) == 1 else "their", - label_labels="label" if len(master_prs.bypassed) == 1 else "labels", - links="\n".join(["- {} (label {}{})".format(tool_name, config.BYPASS_CHECK_PR_MASTER_PREFIX, tool_name) for tool_name in master_prs.bypassed]) - ) - text = text + "{panel}" - - return text - - -def for_release_approved(issue_key, issue, summary): - return "This ticket has been approved for release and is ready for the next bulk public SDK generation." + "\n\n" + "If the ticket ends up going into '{}', please ignore that and do not change the ticket status - SDK/CLI team will handle it.".format(config.STATUS_SERVICE_TEAM_FAILURE_INVESTIGATION) - - -PROCESSING_BULK_TEMPLATE = """\ -The ticket is being processed for the bulk public build. Please wait for the automation to produce results. 
- -([wiki|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=43683000#RequestingapublicSDK/CLI-Step4:Notifications])""" - -PARTIALLY_PROCESSED_BULK_TEMPLATE = """\ -The ticket is being processed for the bulk public build, and some SDKs have already been generated. - -Already done: -{fields} - -([wiki|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=43683000#RequestingapublicSDK/CLI-Step4:Notifications])""" - - -def for_processing_bulk(issue_key, issue, summary): - fields = "\n".join(["- {}".format(language) for language, value in summary.sdks.statuses.items() - if value in [config.CUSTOM_STATUS_FAILURE, config.CUSTOM_STATUS_SUCCESS, - config.CUSTOM_STATUS_DONE]]) - if fields: - return PARTIALLY_PROCESSED_BULK_TEMPLATE.format(fields=fields) - else: - return PROCESSING_BULK_TEMPLATE - - -def for_dex_bulk_review(issue_key, issue, summary): - return """\ -The bulk public SDKs have been generated. The SDK/CLI team's on-call engineer will review the pull requests and merge them. - -([wiki|https://confluence.oci.oraclecorp.com/display/DEX/Requesting+a+preview+SDK+CLI#RequestingapreviewSDKCLI-Step5:Verifygeneratedchanges])""" - - -TO_DEPLOY_TEMPLATE = """\ -This feature has been successfully generated and merged for all SDKs/CLI. It is now pending public release.""" - -TO_DEPLOY_DATE_TEMPLATE = """\ - - -The change will be publicly released on {release_date}.""" - - -def for_to_deploy(issue_key, issue, summary): - text = TO_DEPLOY_TEMPLATE - - date = get_next_public_release_date() - if date: - text = text + TO_DEPLOY_DATE_TEMPLATE.format( - release_date=date.strftime('%A, %d %B %Y')) - - return text - - -def for_done(issue_key, issue, summary): - warnings = process_all_spec_change_prs(issue) - - text = "This ticket is done. All SDKs have been successfully generated, merged, and publicly released." - - if warnings: - text = text + "\n\nWarnings:\n" + warnings - - return text - - -PARTIAL_DEX_SUPPORT_REQUIRED_FAILED_SDKS_TEMPLATE = """\ - -{sdk_sdks} that {requires_require} the on-call's attention: -{fields}""" - - -DEX_SUPPORT_REQUIRED_TEMPLATE = """\ -The service team has asked the SDK/CLI team for help. The on-call engineer will respond to this ticket once a day. -{failed_sdk_text} -""" - - -def for_dex_support_required(issue_key, issue, summary): - - # Check if provided GA date is a release date. 
Leave a comment if not - text = is_requested_ga_date_valid_comment(summary) - - failed = [(language, value) for language, value in summary.sdks.statuses.items() if value == config.CUSTOM_STATUS_FAILURE] - - failed_sdk_text = "" - if failed: - fields = "\n".join(["- {}".format(language) for language, value in failed]) - failed_sdk_text = PARTIAL_DEX_SUPPORT_REQUIRED_FAILED_SDKS_TEMPLATE.format( - fields=fields, - sdk_sdks="SDK" if len(failed) == 1 else "SDKs", - requires_require="requires" if len(failed) == 1 else "require") - - return text + DEX_SUPPORT_REQUIRED_TEMPLATE.format( - reporter=summary.jira.reporter, - failed_sdk_text=failed_sdk_text) - - -def additional_checks(issue_key, issue, summary): - text = "" - - return text - - -PUBLIC_HANDLERS = { - PUBLIC_NON_ACTIONABLE_STATE: for_non_actionable, - PUBLIC_PROCESSING_REQUESTED_STATE: for_processing_requested, - PUBLIC_PROCESSING_STATE: for_processing, - PUBLIC_SERVICE_TEAM_FAILURE_INVESTIGATION_STATE: for_service_team_failure_investigation, - PUBLIC_DEX_SUPPORT_REQUIRED_STATE: for_dex_support_required, - PUBLIC_SERVICE_TEAM_WORK_REQUIRED_STATE: for_service_team_work_required, - PUBLIC_RELEASE_REQUESTED_STATE: for_release_requested, - PUBLIC_RELEASE_APPROVED_STATE: for_release_approved, - PUBLIC_PROCESSING_BULK_STATE: for_processing_bulk, - PUBLIC_DEX_BULK_REVIEW_STATE: for_dex_bulk_review, - PUBLIC_TO_DEPLOY_STATE: for_to_deploy, - PUBLIC_DONE_STATE: for_done -} - - -def get_public_state(issue, summary, sdk_statuses, all_sdks, any_sdks): - if summary.jira.status == config.STATUS_PROCESSING_REQUESTED: - state = PUBLIC_PROCESSING_REQUESTED_STATE - - elif summary.jira.status == config.STATUS_PROCESSING: - state = PUBLIC_PROCESSING_STATE - - elif summary.jira.status == config.STATUS_SERVICE_TEAM_FAILURE_INVESTIGATION: - state = PUBLIC_SERVICE_TEAM_FAILURE_INVESTIGATION_STATE - - elif summary.jira.status == config.STATUS_DEX_SUPPORT_REQUIRED: - state = PUBLIC_DEX_SUPPORT_REQUIRED_STATE - - elif summary.jira.status == config.STATUS_SERVICE_TEAM_WORK_REQUIRED: - state = PUBLIC_SERVICE_TEAM_WORK_REQUIRED_STATE - - elif summary.jira.status == config.STATUS_RELEASE_REQUESTED: - state = PUBLIC_RELEASE_REQUESTED_STATE - - elif summary.jira.status == config.STATUS_RELEASE_APPROVED: - state = PUBLIC_RELEASE_APPROVED_STATE - - elif summary.jira.status == config.STATUS_PROCESSING_BULK: - state = PUBLIC_PROCESSING_BULK_STATE - - elif summary.jira.status == config.STATUS_DEX_BULK_REVIEW: - state = PUBLIC_DEX_BULK_REVIEW_STATE - - elif summary.jira.status == config.STATUS_TO_DEPLOY: - state = PUBLIC_TO_DEPLOY_STATE - - elif summary.jira.status == config.STATUS_DONE: - state = PUBLIC_DONE_STATE - - else: - state = PUBLIC_NON_ACTIONABLE_STATE - - return state - - -def advise_on_public_issue(issue_key, issue, summary): - text = execute_appropriate_handler(PUBLIC_HANDLERS, issue_key, issue, summary) - - text = text + additional_checks(issue_key, issue, summary) - - return text - - -# -# Automated transitions -# - - -TRANSITION_TO_SERVICE_TEAM_WORK_REQUIRED_TEMPLATE = """\ -Automatically transitioning status back to '{status}'. 
The service team's work is not complete yet.{problem_text} -""" - - -def ready_for_release_requested_status(issue_key, issue, summary): - printv("Checking for unauthorized transition from '{}' to '{}'...".format(config.STATUS_SERVICE_TEAM_WORK_REQUIRED, config.STATUS_RELEASE_REQUESTED)) - - master_prs = get_service_team_master_prs(issue, summary) - printv("master_prs: {}".format(master_prs)) - - builds = get_bitbucket_builds(master_prs.approved) - printv("builds: {}".format(builds)) - - checks = check_bitbuckets_builds(summary, builds, master_prs) - printv("checks: {}".format(checks)) - - # No missing master PRs - # No open PRs that aren't approved - # No open PRs that could be bypassed (if they can be bypassed, why are they open?) - is_master_ok = not master_prs.missing and not master_prs.opened and not master_prs.opened_could_be_bypassed - - # All PRs have successful builds - # No PRs have failed builds - are_builds_ok = checks.all_required_prs_have_successful_builds and checks.all_required_prs_have_no_failed_builds - - feature_api_publicly_available_ok = config.BYPASS_CHECK_API_PUBLICLY_AVAILABLE in summary.checks.bypass or summary.jira.feature_api_is_publicly_available_and_unwhitelisted_in_prod == "Yes" - - design_review_ok = config.BYPASS_CLI_DESIGN_REVIEW_CHECK in summary.checks.bypass or not summary.cli.pending_design_reviews - - cli_changelog_ok = summary.jira.cli_changelog_entry and summary.jira.cli_changelog_entry.strip() and len(summary.jira.cli_changelog_entry) > 0 - - print("is_master_ok: {}, are_builds_ok: {}, feature_api_publicly_available_ok: {}, design_review_ok: {}, cli_changelog_ok: {}".format(is_master_ok, are_builds_ok, feature_api_publicly_available_ok, design_review_ok, cli_changelog_ok)) - - problems = [] - if not is_master_ok: - problems.append("The master pull requests are not done yet.") - if not are_builds_ok: - problems.append("The validation builds for the master pull requests are not all passing.") - if not feature_api_publicly_available_ok: - problems.append("The feature API is not publicly available yet.") - if not design_review_ok: - problems.append("The CLI design review is not done yet.") - if not cli_changelog_ok: - problems.append("CLI ChangeLog Entry field is empty.") - - return (is_master_ok and are_builds_ok and feature_api_publicly_available_ok and design_review_ok and cli_changelog_ok), "\n".join(problems) - - -def handle_transition_for_release_requested(issue_key, issue, summary): - transitioned = False - text = None - - ready, problem_text = ready_for_release_requested_status(issue_key, issue, summary) - if not ready: - transitioned = True - summary.transition_from_state = summary.state - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_SERVICE_TEAM_WORK_REQUIRED) - - # Refresh issue after transition - issue = util.get_dexreq_issue(issue_key, fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS), expand=["changelog"]) - - text = TRANSITION_TO_SERVICE_TEAM_WORK_REQUIRED_TEMPLATE.format( - status=config.STATUS_SERVICE_TEAM_WORK_REQUIRED, - problem_text="" if not problem_text else "\n\n{}".format(problem_text)) - else: - print("Issue {} can stay in status '{}' because service team work appears complete.".format(issue_key, config.STATUS_RELEASE_REQUESTED)) - - return transitioned, text - - -TRANSITION_TO_RELEASE_APPROVED_UNDONE = """\ -Automatically transitioned status back to '{status}'. Please do not update the status to '{release_approved_status}' unless you are a member of the SDK/CLI team. 
-""" - - -def handle_transition_for_release_approved(issue_key, issue, summary): - transitioned = False - text = None - - if config.BYPASS_CHECK_PREVENT_MANUAL_STATUS_CHANGES not in summary.checks.bypass: - printv("Checking if transition to '{}' was done by an unauthorized person".format(config.STATUS_RELEASE_APPROVED)) - unauthorized_change = False - for cl in reversed(summary.jira.changelog): - is_status_change = False - for ci in cl.changed_items: - if ci.field == "status": - # This is the last status change, after this, we stop looking - is_status_change = True - if ci.new == config.STATUS_RELEASE_APPROVED and cl.author not in config.APPROVED_DEX_TEAM_MEMBERS: - # This was an unauthorized manual change to 'Release Approved' - unauthorized_change = ci.old - break - if is_status_change: - # We've reached the last status change, stop looking - break - if unauthorized_change: - printv("Undoing the unauthorized change to '{}', since it was not done by the SDK/CLI team. Switching back to '{}'".format( - config.STATUS_RELEASE_APPROVED, - unauthorized_change)) - transitioned = True - summary.transition_from_state = summary.state - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, unauthorized_change) - - # Refresh issue after transition - issue = util.get_dexreq_issue(issue_key, fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS), expand=["changelog"]) - - text = TRANSITION_TO_RELEASE_APPROVED_UNDONE.format( - status=unauthorized_change, - release_approved_status=config.STATUS_RELEASE_APPROVED) - - return transitioned, text - - -TRANSITION_TO_BACKLOG_SINCE_SPEC_PR_DECLINED = """\ -Automatically transitioned status back to '{status}'. The [spec diff pull request|{pr_url}] was {action}. Please revise your spec and generate a new spec artifact. Since you rejected this spec change in the public pipeline, you have to restart the entire process, beginning with preview. Create a new preview DEXREQ ticket with the updated artifact version and set that ticket's status to '{processing_requested_state}'. -""" - -TRANSITION_TO_RELEASE_REQUESTED_SINCE_SPEC_PR_MERGED = """\ -Automatically transitioned status to '{status}'. The [spec diff pull request|{pr_url}] was {action}. 
-""" - - -def handle_transition_for_service_team_work_required_preconditions_check(issue_key, issue, summary): - ready, problem_text = ready_for_release_requested_status(issue_key, issue, summary) - - if ready: - return True, "" - - return ready, "Not transitioning to '{status}' yet, the service team work is not done yet.{problem_text}\n\n".format(status=config.STATUS_RELEASE_REQUESTED, - problem_text="" if not problem_text else "\n\n" + problem_text) - - -def handle_transition_for_service_team_work_required(issue_key, issue, summary): - return handle_transition_for_spec_review_pr(issue_key, issue, summary, config.STATUS_RELEASE_REQUESTED, TRANSITION_TO_RELEASE_REQUESTED_SINCE_SPEC_PR_MERGED, - TRANSITION_TO_BACKLOG_SINCE_SPEC_PR_DECLINED, handle_transition_for_service_team_work_required_preconditions_check) - - -PUBLIC_TRANSITION_HANDLERS = { - PUBLIC_SERVICE_TEAM_WORK_REQUIRED_STATE: handle_transition_for_service_team_work_required, - PUBLIC_RELEASE_REQUESTED_STATE: handle_transition_for_release_requested, - PUBLIC_RELEASE_APPROVED_STATE: handle_transition_for_release_approved, - PUBLIC_PROCESSING_STATE: handle_transition_for_processing -} - - -def handle_automated_public_transitions(issue_key, issue, summary): - return execute_appropriate_transition_handler(PUBLIC_TRANSITION_HANDLERS, issue_key, issue, summary) - - -# -# GA calendar -# - -def show_ga_calendar(count=10): - base_release_date, base_release_ga_number, release_overrides = process_date_override(PUBLIC_RELEASE_DATE_OVERRIDE, - PUBLIC_RELEASE_DATE_CADENCE_START_DATE, PUBLIC_RELEASE_DATE_GA_NUMBER, '--public-release-date-overrides') - base_cut_off_date, ga_number, cut_off_overrides = process_date_override(RELEASE_REQUESTED_CUT_OFF_DATE_OVERRIDE, - RELEASE_REQUESTED_CUT_OFF_DATE_CADENCE_START_DATE, None, '--public-release-requested-cut-off-date-overrides') - - now_datetime_utc = pytz.utc.localize(datetime.datetime.utcnow()) - now_datetime = now_datetime_utc.astimezone(PACIFIC_TIME_ZONE) - now = now_datetime.date() - release_date = base_release_date - release_requested_cut_off_date = base_cut_off_date - ga_number = base_release_ga_number - - print("| GA | SDK/CLI Public Release Date | {} Cut-Off Date |".format(config.STATUS_RELEASE_REQUESTED)) - print("|-----|------------|------------|") - - while count > 0: - release_date = find_next_matching_date(release_date, - base_release_date, release_overrides, PUBLIC_RELEASE_DATE_CADENCE_IN_DAYS, - "public release") - release_requested_cut_off_date = find_next_matching_date(release_requested_cut_off_date, - base_cut_off_date, cut_off_overrides, RELEASE_REQUESTED_CUT_OFF_DATE_CADENCE_IN_DAYS, - "public release requested cut-off") - - if release_date >= now: - print("| {} | {} | {} |".format(ga_number, release_date, release_requested_cut_off_date)) - count -= 1 - - release_date += datetime.timedelta(1) - release_requested_cut_off_date += datetime.timedelta(1) - ga_number += 1 diff --git a/scripts/auto_gen_utils/autogen_issue_advisor_shared.py b/scripts/auto_gen_utils/autogen_issue_advisor_shared.py deleted file mode 100644 index f190d37a97..0000000000 --- a/scripts/auto_gen_utils/autogen_issue_advisor_shared.py +++ /dev/null @@ -1,781 +0,0 @@ -import datetime -import pytz -import config -import os -import parse -import re -import sys -from dotmap import DotMap -import util -from recordclass import recordclass - -import shared.bitbucket_utils - -IS_VERBOSE = False -PROCESS_COMMENTS_BY_ANYONE = False - -QUIET_TIME_MINUTES = 5 - -value = os.environ.get('ALLOWED_PROCESSING_TIME_IN_HOURS') -if 
value: - ALLOWED_PROCESSING_TIME_IN_HOURS = int(value) -else: - ALLOWED_PROCESSING_TIME_IN_HOURS = 4 - -PACIFIC_TIME_ZONE = pytz.timezone("America/Los_Angeles") - -DEXREQ_AUTOMATION_NAME = "DEXREQ Automation" -TICKET_STATE_ADVISORY_TEXT = "Ticket state summary:" - -BRANCH_LINK_TYPE = "branch" -BUILD_LINK_TYPE = "build" -PR_LINK_TYPE = "diff" # (pull request) - -DEFAULT_JIRA_ISSUE_FIELDS = ['summary', 'description', 'labels', 'comment', 'status', 'reporter', 'issuetype', 'created'] -CUSTOM_JIRA_ISSUE_FIELDS = [ - config.CUSTOM_FIELD_ID_ARTIFACT_ID, - config.CUSTOM_FIELD_ID_GROUP_ID, - config.CUSTOM_FIELD_ID_ARTIFACT_VERSION, - config.CUSTOM_FIELD_ID_SPEC_LOCATION_IN_ARTIFACT, - config.CUSTOM_FIELD_ID_SPEC_FRIENDLY_NAME, - config.CUSTOM_FIELD_ID_SERVICE_SUBDOMAIN, - config.CUSTOM_FIELD_ID_FEATURE_IDS, - config.CUSTOM_FIELD_ID_JAVA_SDK_STATUS, - config.CUSTOM_FIELD_ID_PYTHON_SDK_STATUS, - config.CUSTOM_FIELD_ID_TYPESCRIPT_SDK_STATUS, - config.CUSTOM_FIELD_ID_DOTNET_SDK_STATUS, - config.CUSTOM_FIELD_ID_RUBY_SDK_STATUS, - config.CUSTOM_FIELD_ID_GO_SDK_STATUS, - config.CUSTOM_FIELD_ID_CLI_STATUS, - config.CUSTOM_FIELD_ID_POWERSHELL_STATUS, - config.CUSTOM_FIELD_ID_TEST_DATA_STATUS, - config.CUSTOM_FIELD_ID_LEGACY_JAVA_SDK_STATUS, - config.CUSTOM_FIELD_ID_SDK_CLI_GA_DATE, - config.CUSTOM_FIELD_ID_PREVIEW_ISSUE, - config.CUSTOM_FIELD_ID_CHANGELOG, - config.CUSTOM_FIELD_ID_ACKNOWLEDGE_RESPONSIBILITIES, - config.CUSTOM_FIELD_ID_UDX_TICKET, - config.CUSTOM_FIELD_ID_FEATURE_API_IS_PUBLICLY_AVAILABLE, - config.CUSTOM_FIELD_ID_CLI_CHANGELOG -] - - -ERROR_CHOOSE_PIPELINE_STATE = "ERROR_CHOOSE_PIPELINE_STATE" - -ERROR_STATES = [ - ERROR_CHOOSE_PIPELINE_STATE -] - -PIPELINE_NAMES = [config.PREVIEW_ISSUE_TYPE_NAME, config.PUBLIC_ISSUE_TYPE_NAME] - - -ServiceTeamMasterPrs = recordclass('ServiceTeamMasterPrs', 'merged approved approved_could_be_bypassed opened opened_could_be_bypassed missing bypassed') -BitbucketBuilds = recordclass('BitbucketBuilds', 'failed_master_pr_builds successful_master_pr_builds in_progress_master_pr_builds') -BitbucketBuildChecks = recordclass('BitbucketBuildChecks', 'all_required_prs_have_successful_builds all_required_prs_have_no_failed_builds') - - -# Sanity check -def variations_sanity_check(variations, variations_quick, name): - for v in variations: - found = False - for q in variations_quick: - if q in v: - found = True - break - if not found: - raise Exception("One of the {0} didn't contain any of the {0}_QUICK".format(name)) - - -# If none of the *_QUICK variations is present, won't ever try the full regex -BUILD_FAIL_JIRA_MESSAGE_TEMPLATE_VARIATIONS_QUICK = ["Building the change for failed for tool:", "Building the change failed for tool:"] -BUILD_FAIL_JIRA_MESSAGE_TEMPLATE_VARIATIONS = [config.BUILD_FAIL_JIRA_MESSAGE_TEMPLATE, -"""Building the change for failed for tool: {tool_name} (repos: {repos}) {build_description}. - -{links} - -If necessary, you can use {branch_text} as a starting point to fix the build (e.g. if you made a breaking change in preview and you now have to change tests or samples).""", -"""Building the change failed for tool: {tool_name} (repos: {repos}) {build_description}. - -{links} - -If necessary, you can use {branch_text} as a starting point to fix the build (e.g. if you made a breaking change in preview and you now have to change tests or samples). 
- -Information on the failure is in the following TeamCity city [build log|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId={build_id}&tab=buildLog]""", -"""Building the change failed for tool: {tool_name} (repos: {repos}) {build_description}. - -{links} - -If necessary, you can use {branch_text} as a starting point to fix the build (e.g. if you made a breaking change in preview and you now have to change tests or samples). - -Information about the failure can be found in the {build_log_link}.""" -] # noqa: E124 -variations_sanity_check(BUILD_FAIL_JIRA_MESSAGE_TEMPLATE_VARIATIONS, BUILD_FAIL_JIRA_MESSAGE_TEMPLATE_VARIATIONS_QUICK, "BUILD_FAIL_JIRA_MESSAGE_TEMPLATE_VARIATIONS") - -BUILD_PASS_JIRA_MESSAGE_TEMPLATE_VARIATIONS_QUICK = ["Completed generating tool:"] -BUILD_PASS_JIRA_MESSAGE_TEMPLATE_VARIATIONS = [config.BUILD_PASS_JIRA_MESSAGE_TEMPLATE, -"""Completed generating tool: {tool_name} (repos: {repos}) {build_description}. - -{links} - -Artifacts can be found [here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId={build_id}&tab=artifacts] (for the next 10 days). - -For TeamCity access see the [self-service FAQ|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=132774928#SDK/CLISelfServiceFrequentlyAskedQuestions-HowdoIgetaccesstoTeamCitylogsandartifacts?].""", -"""Completed generating tool: {tool_name} (repos: {repos}) {build_description}. - -{links} - -Artifacts can be found {build_artifacts_link} (for the next 10 days). - -For build log and artifact access see the [self-service FAQ|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=132774928#SDK/CLISelfServiceFrequentlyAskedQuestions-HowdoIgetaccesstologsandartifacts?].""", -] # noqa: E124 -variations_sanity_check(BUILD_PASS_JIRA_MESSAGE_TEMPLATE_VARIATIONS, BUILD_PASS_JIRA_MESSAGE_TEMPLATE_VARIATIONS_QUICK, "BUILD_PASS_JIRA_MESSAGE_TEMPLATE_VARIATIONS") - -STEP_FAILED_MESSAGE_TEMPLATE_VARIATIONS_QUICK = ["If it is unclear how to resolve the issue, you can set",] -STEP_FAILED_MESSAGE_TEMPLATE_VARIATIONS = [config.STEP_FAILED_MESSAGE_TEMPLATE, -"""{failure_step} for tool: {tool_name} (repos: {repos}) failed. - -Information on the failure is in the following TeamCity city [build log|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId={build_id}&tab=buildLog] - -If it is unclear how to resolve the issue, you can set {custom_status_field} to 'Manual Attention Required to request help from the SDK / CLI team.""", -"""{failure_step} for tool: {tool_name} (repos: {repos}) failed. - -Information on the failure is in the following TeamCity city [build log|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId={build_id}&tab=buildLog] - -If it is unclear how to resolve the issue, you can set {custom_status_field} to 'Manual Attention Required to request help from the SDK / CLI team.""", -"""{failure_step} for tool: {tool_name} (repos: {repos}) failed. - -Information on the failure is in the following TeamCity city [build log|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId={build_id}&tab=buildLog] - -If it is unclear how to resolve the issue, you can set the status to '{dex_support_required_status}' to request help from the SDK / CLI team.""", -"""{failure_step} for tool: {tool_name} (repos: {repos}) failed. 
- -Information on the failure is in the following TeamCity city [build log|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId={build_id}&tab=buildLog] - -If it is unclear how to resolve the issue, you can set the status to '{dex_support_required_status}' to request help from the SDK / CLI team.{additional_comment}""", -"""{failure_step} for tool: {tool_name} (repos: {repos}) failed. - -Information about the failure can be found in the {build_log_link}. - -If it is unclear how to resolve the issue, you can set the status to '{dex_support_required_status}' to request help from the SDK / CLI team.{additional_comment}""", -] # noqa: E124 -variations_sanity_check(STEP_FAILED_MESSAGE_TEMPLATE_VARIATIONS, STEP_FAILED_MESSAGE_TEMPLATE_VARIATIONS_QUICK, "STEP_FAILED_MESSAGE_TEMPLATE_VARIATIONS") - -TRANSITION_TO_DEX_SUPPORT_REQUESTED_TEMPLATE = """\ -Automatically transitioning status to '{status}' since it has been stuck in processing for too long. -""" - - -def printv(s, flush=False): - if IS_VERBOSE: - print(s) - if flush: - # Flush, so we make sure the output of the issue key is already visible - # NOTE: This is to help debug for DEX-6382 - sys.stdout.flush() - - -def check_should_update(summary): - should_update = True - - last_comment_was_advisory = False - # Do not update if the last change is the advisory - if summary.dates.last and summary.dates.last_issue_advisory: - if summary.dates.last.created == summary.dates.last_issue_advisory.created: - # advisory is the last comment - last_comment_was_advisory = True - printv("Previous advisory is the last comment.") - - # Do not update if nothing has changed since the last advisory - if summary.dates.last_issue_advisory and summary.dates.last_issue_advisory.created: - last_advisory = summary.dates.last_issue_advisory.created - printv("Previous advisory at {}".format(last_advisory)) - last_change = None - if summary.dates.last_changelog and summary.dates.last_changelog.created: - printv("Last JIRA change at {}".format(summary.dates.last_changelog.created)) - last_change = summary.dates.last_changelog.created - if summary.dates.last_pr_change: - printv("Last PR change at {}".format(summary.dates.last_pr_change)) - if not last_change or last_change < summary.dates.last_pr_change: - printv("PR change was after JIRA change") - last_change = summary.dates.last_pr_change - else: - printv("JIRA change was after PR change") - if last_comment_was_advisory: - printv("Last comment is advisory, should not update") - should_update = False - else: - printv("No PR change, JIRA change is the only change") - if last_comment_was_advisory: - printv("Last comment is advisory, should not update") - should_update = False - - if last_change: - # Let's see if something was changed after the last advisory - printv("Last change at {}".format(last_change)) - if last_advisory < last_change: - printv("Previous advisory is from before the last change.") - else: - should_update = False - printv("Nothing has changed since the last advisory.") - - # Check if any Design Review Tickets have been closed - if summary.dates.last_comment and summary.dates.last_comment.body: - if "CLI Design Review" in summary.dates.last_comment.body: - last_design_review_comment = summary.dates.last_comment.body.split("CLI Design Review", 1)[1] - if last_design_review_comment.count("DEX-") != len(summary.cli.pending_design_reviews): - should_update = True - - # Do not update if the last change to the fields was less than QUIET_TIME_MINUTES ago. - # A user may still be editing the ticket. 
We don't want to run on something that may - # be inconsistent - # TODO: we might want to have separate limits for update by automation and update by user - if summary.dates.last_changelog and summary.dates.last_changelog.created: - last_change = summary.dates.last_changelog.created - printv("Last change at {}".format(last_change)) - now = datetime.datetime.utcnow() - cut_off = now + datetime.timedelta(minutes=-QUIET_TIME_MINUTES) - cut_off_timestamp = cut_off.isoformat("T") - printv("Cut-off is at {}".format(cut_off_timestamp)) - if last_change < cut_off_timestamp: - printv("Quiet time has passed.") - else: - printv("Still in quiet time, not making advisory.") - should_update = False - - return should_update - - -SUCCESSFUL_PULL_REQUESTS_TEMPLATE = """\ -These are the most recent successful links to generated code. You can use them to examine the diff of your change: - -{links}""" - - -# Primary repo for tool when occurring in PR links -PRIMARY_REPO_FOR_LINKS = { - config.JAVA_SDK_NAME: "java-sdk", - config.PYTHON_SDK_NAME: "python-sdk", - config.CLI_NAME: "python-cli", - config.RUBY_SDK_NAME: "ruby-sdk-autogen-fork", - config.GO_SDK_NAME: "oci-go-sdk", - config.TYPESCRIPT_SDK_NAME: "oci-typescript-sdk-autogen-fork", - config.DOTNET_SDK_NAME: "oci-dotnet-sdk-autogen-fork", - config.POWERSHELL_NAME: "oci-powershell-modules-autogen-fork", - config.TEST_DATA_GEN_NAME: "sdk-client-test-data-autogen-fork", - config.LEGACY_JAVA_SDK_NAME: "legacy-java-sdk" -} - - -def get_pr_link_text(link, tool): - url = link.url - - target_repo = None - result = re.search("repos/([^/]*)/pull-requests?", url) - if result: - if not PRIMARY_REPO_FOR_LINKS[tool] == result.group(1): - target_repo = result.group(1) - - if target_repo: - return "- [{target_repo} {link_type} from the {tool} build|{link}]\n".format(tool=tool, link_type=link.link_type, link=url, target_repo=target_repo) - else: - return "- [{tool} {link_type}|{link}]\n".format(tool=tool, link_type=link.link_type, link=url) - - -def get_successful_pull_requests_text(summary): - successful_links = {} - for tool, build in summary.builds.last.items(): - if build.successful: - successful_links[tool] = build.links - - if successful_links: - text = "" - for tool, links in successful_links.items(): - for link in links: - if link.link_type == PR_LINK_TYPE: - text = text + get_pr_link_text(link, tool) - - return SUCCESSFUL_PULL_REQUESTS_TEMPLATE.format(links=text) - else: - return None - - -def process_last_builds_failure_step(issue, summary, comment, last_builds, all_builds): - # If none of the *_QUICK variations is present, won't ever try the full regex - found = False - for q in STEP_FAILED_MESSAGE_TEMPLATE_VARIATIONS_QUICK: - if q in comment.text: - found = True - break - if not found: - return False - - # Try all templates we've used for the "{failure_step} for tool ... 
failed" message - for template in STEP_FAILED_MESSAGE_TEMPLATE_VARIATIONS: - result = parse.search(template, comment.text) - # If this template worked and the "Generation" step failed, process it - # (if the "Build" step failed, we process a different message in process_last_builds_build_fail below) - if result and "Generation" in result["failure_step"]: - tool_name = result["tool_name"] - if "build_id" in result: - build_url = "https://teamcity.oci.oraclecorp.com/viewLog.html?buildId={build_id}&tab=buildLog".format(build_id=result["build_id"]) - elif "build_log_link" in result: - build_url = result["build_log_link"] - else: - build_url = None - - # comments are sorted by time, so later builds overwrite newer ones - build = DotMap() - build.successful = False - build.generation_successful = False - build.build_successful = False - build.tool_name = tool_name - build.created = comment.created - build.repos = result["repos"] - - # process links - build.links = [DotMap({ - "url": build_url, - "link_type": BUILD_LINK_TYPE - })] - - last_builds[tool_name] = build - if not all_builds[tool_name]: - all_builds[tool_name] = [] - all_builds[tool_name].append(build) - - return True # If we have found one, the other variations don't matter anymore - - return False - - -def process_last_builds_build_failed(issue, summary, comment, last_builds, all_builds): - # If none of the *_QUICK variations is present, won't ever try the full regex - found = False - for q in BUILD_FAIL_JIRA_MESSAGE_TEMPLATE_VARIATIONS_QUICK: - if q in comment.text: - found = True - break - if not found: - return False - - # Try all templates we've used for the "Building the change for failed for tool..." message - for template in BUILD_FAIL_JIRA_MESSAGE_TEMPLATE_VARIATIONS: - result = parse.search(template, comment.text) - # If this template worked, process it - if result: - tool_name = result["tool_name"] - if "build_id" in result: - build_url = "https://teamcity.oci.oraclecorp.com/viewLog.html?buildId={build_id}&tab=buildLog".format(build_id=result["build_id"]) - elif "build_log_link" in result: - build_url = result["build_log_link"] - else: - build_url = None - - # comments are sorted by time, so later builds overwrite newer ones - build = DotMap() - build.successful = False - build.generation_successful = True - build.build_successful = False - build.tool_name = tool_name - build.created = comment.created - build.repos = result["repos"] - - # process links - links = re.findall(r"\|([^]]*)\]", result["links"], re.MULTILINE) - build.links = [] - for url in links: - build.links.append(DotMap({"url": url, "link_type": BRANCH_LINK_TYPE})) - - if "build_id" in result: - build.links.append(DotMap({ - "url": build_url, - "link_type": BUILD_LINK_TYPE - })) - - last_builds[tool_name] = build - if not all_builds[tool_name]: - all_builds[tool_name] = [] - all_builds[tool_name].append(build) - - return True # If we have found one, the other variations don't matter anymore - - return False - - -def process_last_builds_build_passed(issue, summary, comment, last_builds, all_builds): - # If none of the *_QUICK variations is present, won't ever try the full regex - found = False - for q in BUILD_PASS_JIRA_MESSAGE_TEMPLATE_VARIATIONS_QUICK: - if q in comment.text: - found = True - break - if not found: - return False - - # Try all templates we've used for the "Completed generating tool..." 
message - for template in BUILD_PASS_JIRA_MESSAGE_TEMPLATE_VARIATIONS: - result = parse.search(template, comment.text) - # If this template worked, process it - if result: - tool_name = result["tool_name"] - - # comments are sorted by time, so later builds overwrite newer ones - build = DotMap() - build.successful = True - build.generation_successful = True - build.build_successful = True - build.tool_name = tool_name - build.created = comment.created - build.repos = result["repos"] - - # process links - links = re.findall(r"\|([^]]*)\]", result["links"], re.MULTILINE) - build.links = [] - for url in links: - build.links.append(DotMap({"url": url, "link_type": PR_LINK_TYPE})) - - last_builds[tool_name] = build - if not all_builds[tool_name]: - all_builds[tool_name] = [] - all_builds[tool_name].append(build) - - return True # If we have found one, the other variations don't matter anymore - - return False - - -def process_last_builds(issue, summary): - last_builds = DotMap() - all_builds = DotMap() - for comment in summary.jira.comments: - # Unify line endings - comment.text = comment.text.replace('\r\n', '\n').replace('\r', '\n') - if PROCESS_COMMENTS_BY_ANYONE or comment.author == DEXREQ_AUTOMATION_NAME: - found = process_last_builds_failure_step(issue, summary, comment, last_builds, all_builds) - if found: - # If it's a failure step, it can't be one of the other ones - continue - - found = process_last_builds_build_failed(issue, summary, comment, last_builds, all_builds) - if found: - # If it's a build failed message, it can't be one of the other ones - continue - - found = process_last_builds_build_passed(issue, summary, comment, last_builds, all_builds) - if found: - # If it's a build passed message, it can't be one of the other ones - continue - - summary.builds.last = last_builds - summary.builds.all = all_builds - - -FAILED_BUILDS_TEMPLATE = """\ - - -These are the most recent failed builds and branches: - -{builds}""" - - -def get_failed_links_text(summary): - failed_builds = {} - for tool, build in summary.builds.last.items(): - if not build.successful: - failed_builds[tool] = build.links - - if failed_builds: - text = "" - for tool, links in failed_builds.items(): - for link in links: - if link.link_type == PR_LINK_TYPE: - text = text + get_pr_link_text(link, tool) - if link.link_type == BRANCH_LINK_TYPE: - result = re.search("repos/([^/]*)/browse?", link.url) - tool_from_link = tool - if result: - for tn, rn in util.get_jira_reportable_repo_names_for_tool().items(): - # The last of the repos is actually the one that belongs to this tool - # (see PythonCLI: [python-sdk, python-cli]) - if rn[-1] == result.group(1): - tool_from_link = tn - break - - text = text + "- [{tool_from_link} {link_type}|{link}]\n".format(tool_from_link=tool_from_link, link_type=link.link_type, link=link.url) - else: - text = text + "- [{tool} {link_type}|{link}]\n".format(tool=tool, link_type=link.link_type, link=link.url) - - return FAILED_BUILDS_TEMPLATE.format(builds=text) - else: - return None - - -# Turn a list into a string, with " and " before the last element -def list_to_string(list, item_prefix=""): - if len(list) == 0: - return "" - elif len(list) == 1: - return "{}{}".format(item_prefix, list[0]) - elif len(list) == 2: - return "{}{} and {}{}".format(item_prefix, list[0], item_prefix, list[1]) - else: - return "{}, and {}".format(", ".join("{}{}".format(item_prefix, str(e)) for e in list[:-1]), "{}{}".format(item_prefix, list[-1])) - - -def handle_transition_for_processing(issue_key, issue, 
summary): - transitioned = False - text = None - - printv("Checking for transition from {} to {}...".format(summary.jira.status, config.STATUS_DEX_SUPPORT_REQUIRED)) - last_status_update = None - for cl in reversed(summary.jira.changelog): - is_status_change = False - for ci in cl.changed_items: - if ci.field == "status": - # This is the last status change, after this, we stop looking - is_status_change = True - if cl.author == DEXREQ_AUTOMATION_NAME: - last_status_update = cl.created - break - if is_status_change: - # We've reached the last status change, stop looking - break - - if not last_status_update: - printv("Ticket was not set to {} by DEXREQ_AUTOMATION. Skipping check for this ticket".format(summary.jira.status)) - return transitioned, text - - printv("Ticket status updated at {}".format(last_status_update)) - now = datetime.datetime.utcnow() - cut_off = now + datetime.timedelta(hours=-ALLOWED_PROCESSING_TIME_IN_HOURS) - cut_off_timestamp = cut_off.isoformat("T") - printv("Cut-off is at {}".format(cut_off_timestamp)) - - if last_status_update < cut_off_timestamp: - transitioned = True - printv("Ticket stuck in processing for too long. Setting the ticket status to DEX_SUPPORT_REQUIRED") - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_DEX_SUPPORT_REQUIRED) - text = TRANSITION_TO_DEX_SUPPORT_REQUESTED_TEMPLATE.format(status=config.STATUS_DEX_SUPPORT_REQUIRED) - else: - printv("Auto-generation of DEX surfaces in progress. Will check again later.") - - return transitioned, text - - -def get_spec_review_pr(issue_key): - issue = util.get_dexreq_issue(issue_key, fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS)) - - created_date = getattr(issue.fields, 'created') - printv("To get the spec diff PR, listing all PRs newer than {}".format(created_date)) - - # The spec diff PR can't be older than the DEXREQ ticket, so only search that far - return shared.bitbucket_utils.get_newest_pullrequest_with_string_after('SDK', config.DEXREQ_REPO_NAME, issue_key, created_date) - - -def handle_transition_for_spec_review_pr(issue_key, issue, summary, approved_target_state, approved_template, declined_template, check_approved_preconditions_fn): - transitioned = False - text = None - - dexreq_pr_url = None - dexreq_pr = get_spec_review_pr(issue_key) - - if dexreq_pr: - hrefs = util.deep_get(dexreq_pr, 'links.self') - if hrefs: - dexreq_pr_url = util.deep_get(hrefs[0], 'href') - - if dexreq_pr_url: - rejected_text = "set to 'Needs Work'" - accepted_text = "approved" - is_rejected = False - is_accepted = False - - for reviewer in dexreq_pr['reviewers']: - if reviewer['status'] == 'APPROVED': - is_accepted = True - if reviewer['status'] == 'NEEDS_WORK': - is_rejected = True - - if dexreq_pr['state'] == 'DECLINED': - is_rejected = True - rejected_text = "declined" - - if dexreq_pr['state'] == 'MERGED': - is_accepted = True - accepted_text = "merged" - - if is_rejected: - transitioned = True - summary.transition_from_state = summary.state - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_BACKLOG) - - # Refresh issue after transition - issue = util.get_dexreq_issue(issue_key, fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS), expand=["changelog"]) - - text = declined_template.format(status=config.STATUS_BACKLOG, - pr_url=dexreq_pr_url, - processing_requested_state=config.STATUS_PROCESSING_REQUESTED, - action=rejected_text) - - elif is_accepted: - are_preconditions_met, precondition_check_text = 
check_approved_preconditions_fn(issue_key, issue, summary) - if are_preconditions_met: - transitioned = True - summary.transition_from_state = summary.state - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, approved_target_state) - - # Refresh issue after transition - issue = util.get_dexreq_issue(issue_key, fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS), expand=["changelog"]) - - text = approved_template.format(status=approved_target_state, - pr_url=dexreq_pr_url, - action=accepted_text) - else: - text = precondition_check_text - - return transitioned, text - - -def execute_appropriate_handler(handlers, issue_key, issue, summary): - text = "" - if summary.state in handlers and handlers[summary.state]: - handler = handlers[summary.state] - if handler: - text = handler(issue_key, issue, summary) - - return text - - -def execute_appropriate_transition_handler(handlers, issue_key, issue, summary): - transitioned = False - text = "" - if summary.state in handlers and handlers[summary.state]: - handler = handlers[summary.state] - if handler: - transitioned, text = handler(issue_key, issue, summary) - - return transitioned, text - - -def process_all_spec_change_prs(issue): - warnings = None - created_date = getattr(issue.fields, 'created') - printv("To get all spec diff PRs, listing all PRs newer than {}".format(created_date)) - - # The spec diff PR can't be older than the DEXREQ ticket, so only search that far - - prs = shared.bitbucket_utils.get_all_pullrequest_with_string_after('SDK', config.DEXREQ_REPO_NAME, issue.key, created_date) - - open_spec_change_prs = [] - merged_spec_change_prs = [] - most_recent_approved_spec_change_pr = None - - for pr in prs: - print("Spec change pr {} is {}".format(pr['id'], pr['state'])) - - if pr['state'] == config.PULL_REQUEST_STATUS_OPEN: - open_spec_change_prs.append(pr) - - if not most_recent_approved_spec_change_pr or most_recent_approved_spec_change_pr['toRef']['id'] < pr['toRef']['id']: - is_accepted = False - is_rejected = False - if pr['reviewers']: - for reviewer in pr['reviewers']: - if reviewer['status'] == 'APPROVED': - is_accepted = True - if reviewer['status'] == 'NEEDS_WORK': - is_rejected = True - - if is_accepted and not is_rejected: - most_recent_approved_spec_change_pr = pr - elif pr['state'] == config.PULL_REQUEST_STATUS_MERGED: - merged_spec_change_prs.append(pr) - - printv("Found {} open PRs and {} merged PRs. Most recent approved PR is {}".format(len(open_spec_change_prs), len(merged_spec_change_prs), - most_recent_approved_spec_change_pr['id'] if most_recent_approved_spec_change_pr else "None")) - - if not merged_spec_change_prs and most_recent_approved_spec_change_pr: - # Nothing has been merged yet, merge the most recent approved spec change PR - printv("Merging spec change PR {}".format(most_recent_approved_spec_change_pr['id'])) - shared.bitbucket_utils.merge_pr("SDK", config.DEXREQ_REPO_NAME, most_recent_approved_spec_change_pr['id'], most_recent_approved_spec_change_pr['version']) - - if most_recent_approved_spec_change_pr: - for pr in open_spec_change_prs: - if pr == most_recent_approved_spec_change_pr: - continue - - # Decline other open PRs - printv("Declining spec change PR {}".format(pr['id'])) - shared.bitbucket_utils.decline_pr("SDK", config.DEXREQ_REPO_NAME, pr['id'], pr['version']) - elif open_spec_change_prs: - warnings = "Found open spec change PRs, but none of them were approved. Why not?" 
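# [Editor's note] The loop below builds one bullet per open-but-unapproved PR, reading the
# Bitbucket REST payload shape (links.self[0].href) via util.deep_get and falling back to a
# plain "Spec diff PR <id>" line when no href is present.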
- for pr in open_spec_change_prs: - hrefs = util.deep_get(pr, 'links.self') - if hrefs: - warnings = warnings + "\n - [Spec diff PR {}|{}]".format(pr['id'], util.deep_get(hrefs[0], 'href')) - else: - warnings = warnings + "\n - Spec diff PR {}".format(pr['id']) - print(warnings) - - return warnings - - -# returns base_date, base_date_ga_number, overrides -def process_date_override(override_string, default_base_date, default_base_date_ga_number, parameter_name): - base_date_string = None - base_date_ga_number = None - overrides = {} - if override_string: - parts = override_string.split(",") - for p in parts: - date = p[1:] - if p.startswith("="): - if base_date_string: - raise ValueError("Base date ('=YYYY-MM-DD') for {} set more than once: '{}' and '{}'".format(parameter_name, base_date_string, date)) - base_date_string = date - else: - overrides[date] = p.startswith("+") - - if not base_date_string: - base_date_string = default_base_date - - if "@" in base_date_string: - if not default_base_date_ga_number: - raise ValueError("Cannot set GA number ('=YYYY-MM-DD@GA') for {}: '{}'".format(parameter_name, base_date_string)) - parts = base_date_string.split("@") - if len(parts) != 2: - raise ValueError("Should contain at most one '@' ('=YYYY-MM-DD' or '=YYYY-MM-DD@GA') for {}: '{}'".format(parameter_name, base_date_string)) - base_date_string = parts[0] - base_date_ga_number = int(parts[1]) - - if not base_date_ga_number: - base_date_ga_number = default_base_date_ga_number - - base_date = datetime.datetime.strptime(base_date_string, "%Y-%m-%d").date() - - return base_date, base_date_ga_number, overrides - - -def find_next_matching_date(start_date, base_date, overrides, cadence_in_days, date_name): - date = start_date - while True: - printv("{}: {}".format(date_name, date)) - date_string = date.isoformat() - if (date - base_date).days % cadence_in_days == 0: - if date_string in overrides and not overrides[date_string]: - # NOT on this day - printv("Elected to not have regular {} scheduled on: {}".format(date_name, date_string)) - date += datetime.timedelta(1) - continue - else: - # either no override set for this date, or it's a positive override - printv("Having regular {} scheduled on: {}".format(date_name, date_string)) - break - if date_string in overrides and overrides[date_string]: - # cut-off on this day - printv("Elected to have special {} scheduled on: {}".format(date_name, date_string)) - break - - date += datetime.timedelta(1) - - return date diff --git a/scripts/auto_gen_utils/change_dexreq_to_release_approved.py b/scripts/auto_gen_utils/change_dexreq_to_release_approved.py deleted file mode 100644 index 9a03261d8a..0000000000 --- a/scripts/auto_gen_utils/change_dexreq_to_release_approved.py +++ /dev/null @@ -1,33 +0,0 @@ -import util -import argparse -import config -from jira import JIRAError - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description='change "go" DEXREQ tickets from Release Requested to Release Approved before the bulk public build.') - parser.add_argument('--list-of-go-tickets', - action='append', - help='After the Go/No-Go meeting, provide a comma-separated list (no spaces) of all go tickets while running bulk-public: --list-of-go-tickets DEXREQ-123,DEXREQ-124,DEXREQ-125') - parser.add_argument('--dry-run', default=False, action='store_true', help='Dry-run, do not actually transition issues') - args = parser.parse_args() - list_of_go_tickets = args.list_of_go_tickets - dry_run = args.dry_run - try: - if list_of_go_tickets: - for issue in 
list_of_go_tickets[0].split(','): - jira_issue = util.get_dexreq_issue(issue) - if jira_issue.fields.status.name != config.STATUS_RELEASE_REQUESTED: - print('{} is not in Release/Processing requested status'.format(jira_issue)) - if not dry_run: - print("Changing status to Release Approved of {}".format(jira_issue)) - # set all issues from release requested to release approved - util.transition_issue_overall_status(util.JIRA_CLIENT(), jira_issue, config.STATUS_RELEASE_APPROVED) - else: - print("DRY-RUN: Would have changed status to Release Approved of {}".format(jira_issue)) - - except JIRAError as e: - print('{} {}'.format(e.status_code, e.text)) - raise diff --git a/scripts/auto_gen_utils/change_dexreq_to_release_approved.sh b/scripts/auto_gen_utils/change_dexreq_to_release_approved.sh deleted file mode 100755 index 75ec182bc9..0000000000 --- a/scripts/auto_gen_utils/change_dexreq_to_release_approved.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -# ./change_dexreq_to_release_approved.sh " DEXREQ-1803, DEXREQ-1803 " -set -e -set -x -LIST_OF_GO_TICKETS=$1 - -LIST_OF_GO_TICKETS_ARG="" -if [ ! -z "$LIST_OF_GO_TICKETS" ]; then - # Remove spaces from LIST_OF_GO_TICKETS. DEXREQ tickets should be only comma separated - LIST_OF_GO_TICKETS="$(echo -e "${LIST_OF_GO_TICKETS}" | tr -d '[:space:]')" - LIST_OF_GO_TICKETS_ARG="--list-of-go-tickets "$LIST_OF_GO_TICKETS - echo "Running ./change_dexreq_to_release_approved $LIST_OF_GO_TICKETS_ARG" - python ./change_dexreq_to_release_approved.py $DRY_RUN_ARG $LIST_OF_GO_TICKETS_ARG -fi diff --git a/scripts/auto_gen_utils/clean_auto_branches.py b/scripts/auto_gen_utils/clean_auto_branches.py deleted file mode 100644 index c741c05b47..0000000000 --- a/scripts/auto_gen_utils/clean_auto_branches.py +++ /dev/null @@ -1,228 +0,0 @@ -from datetime import datetime -import re -import git - -import util -import config - -import argparse -from jira import JIRAError - - -DEXREQ_AUTOMATION_NAME_AND_EMAIL = 'DEXREQ Automation noreply+dexreq@oracle.com' - - -def init_branches(): - global DEBUG_DEXREQ_BRANCH_PREFIX - global GENERATED_AUTO_PREVIEW_BRANCH_PATTERN - global AUTO_PREVIEW_BRANCH_PATTERN - global GENERATED_AUTO_PUBLIC_BRANCH_PATTERN - global AUTO_PUBLIC_BRANCH_PATTERN - global AUTO_PREVIEW_BRANCH_PREFIX - global AUTO_PUBLIC_BRANCH_PREFIX - - DEBUG_DEXREQ_BRANCH_PREFIX = config.get_debug_dexreq_branch_prefix() - - GENERATED_AUTO_PREVIEW_BRANCH_PATTERN = 'refs/remotes/origin/generated-{debug_dexreq_branch_prefix}auto-v2-preview-*'.format( - debug_dexreq_branch_prefix=DEBUG_DEXREQ_BRANCH_PREFIX) - AUTO_PREVIEW_BRANCH_PATTERN = 'refs/remotes/origin/{debug_dexreq_branch_prefix}auto-v2-preview-*'.format( - debug_dexreq_branch_prefix=DEBUG_DEXREQ_BRANCH_PREFIX) - GENERATED_AUTO_PUBLIC_BRANCH_PATTERN = 'refs/remotes/origin/generated-{debug_dexreq_branch_prefix}auto-v2-public-*'.format( - debug_dexreq_branch_prefix=DEBUG_DEXREQ_BRANCH_PREFIX) - AUTO_PUBLIC_BRANCH_PATTERN = 'refs/remotes/origin/{debug_dexreq_branch_prefix}auto-v2-public-*'.format( - debug_dexreq_branch_prefix=DEBUG_DEXREQ_BRANCH_PREFIX) - - AUTO_PREVIEW_BRANCH_PREFIX = DEBUG_DEXREQ_BRANCH_PREFIX + 'auto-v2-preview-' - AUTO_PUBLIC_BRANCH_PREFIX = DEBUG_DEXREQ_BRANCH_PREFIX + 'auto-v2-public-' - - -init_branches() - -# no reason to keep 'merge to/from GitHub' branches longer than a week -MAX_AGE_IN_DAYS_GITHUB_MERGE_BRANCH = 7 - -# no reason to 
keep a bulk preview branch more than 2 weeks old -MAX_AGE_IN_DAYS_BULK_PREVIEW_BRANCH = 14 - -# these are only used internally in the auto generation process so we can delete them very promptly -# (the generated-auto-preview-* branches are the ones we give to partner teams) -MAX_AGE_IN_DAYS_AUTO_PREVIEW_BRANCH = 2 - -# these are only used internally in the auto generation process so we can delete them very promptly -# (the generated-auto-public-* branches are the ones we give to partner teams) -MAX_AGE_IN_DAYS_AUTO_PUBLIC_BRANCH = 2 - -DEXREQ_PREFIX = 'DEXREQ-' -DEXREQ_TOOL_NAME = "DEXREQ" - -CLEAN_TOOL_NAMES = config.TOOL_NAMES + [DEXREQ_TOOL_NAME] -CLEAN_TOOL_REPOS_FOR_TOOL = {} -CLEAN_TOOL_REPOS_FOR_TOOL.update(config.REPOS_FOR_TOOL) -CLEAN_TOOL_REPOS_FOR_TOOL[DEXREQ_TOOL_NAME] = [config.DEXREQ_REPO] - -TERMINAL_STATES = [config.STATUS_DONE, config.STATUS_WITHDRAWN, config.STATUS_CLOSED] - - -def clean_auto_preview_branches(tool_name, issues, branches): - # start by making sure we are on the base branch with no other changes present -- TODO: why is this important? - # checkout_sdk_and_cli_branches(base_branch, tool_name) - for repo in CLEAN_TOOL_REPOS_FOR_TOOL[tool_name]: - repo.git.reset('HEAD','--hard') - repo.git.clean('-f') - - # prune remote branches so we only try to delete branches that still exist - repo.git.remote('update', 'origin', '--prune') - - for repo in CLEAN_TOOL_REPOS_FOR_TOOL[tool_name]: - if branches: - remote_refs = branches - else: - prefix = 'origin/' - - remote_refs = [] - - safe_branch_prefixes = [] - safe_branch_prefixes.extend(config.BRANCH_PREFIXES_SAFE_FOR_DELETION) - if tool_name == DEXREQ_TOOL_NAME: - # Hack to clear up old DEXREQ branches - safe_branch_prefixes.extend(["public-DEXREQ", "preview-DEXREQ", "bulk_public-DEXREQ", "bulk_preview-DEXREQ"]) - - for branch_prefix in safe_branch_prefixes: - pattern = 'refs/remotes/origin/' + branch_prefix + '*' - remote_refs.extend(repo.git.for_each_ref(pattern, format='%(refname:short)').split('\n')) - - remote_refs = [remote_ref[len(prefix):] for remote_ref in remote_refs if remote_ref.startswith(prefix)] - - if len(remote_refs) == 0: - print('No remote branches found for repo: {}. Skipping.'.format(repo.working_dir)) - continue - - for ref in remote_refs: - if issues: - found = False - for i in issues: - if i in ref: - found = True - break - if not found: - continue - - can_be_deleted, message = branch_ok_for_deletion(ref) - if can_be_deleted: - print('Branch {} can be deleted - {}'.format(ref, message)) - else: - print('Branch {} not ready for deletion - {}'.format(ref, message)) - continue - - try: - util.safe_delete_branch(repo, ref) - except git.exc.GitCommandError as e: - print('Failed to delete ref: {}. Exception: {}', ref, str(e)) - - -# Returns True/False and a message -def check_dexreq_in_terminal_state(branch_name): - # find dexreq-<> from the branch name - if DEXREQ_PREFIX in branch_name: - start = branch_name.index(DEXREQ_PREFIX) - try: - end = branch_name.index('-', start + len(DEXREQ_PREFIX)) - issue = util.get_dexreq_issue(branch_name[start:end]) - - if issue.fields.status: - if issue.fields.status.name in TERMINAL_STATES: - return True, "terminal '{}'".format(issue.fields.status.name) - else: - return False, "non-terminal '{}'".format(issue.fields.status.name) - else: - return False, "unknown issue status" - except ValueError as e: - # e.g. 
for "auto-preview-JavaSDK-DEXREQ-143" without timestamp - print(e) - except JIRAError as e: - print(e) - if "404" in str(e): - return True, "does not exist in JIRA" - else: - raise e - - return False, "no DEXREQ issue" - - -# Returns True/False and a message -def branch_ok_for_deletion(branch_name): - # delete bulk branches older than some threshold: MAX_AGE_IN_DAYS_BULK_PREVIEW_BRANCH - days_old = 0 - match = re.search('.*(20[0-9]{2}-[0-1]{1}[0-9]{1}-[0-3]{1}[0-9]{1}-[0-2]{1}[0-9]{1}-[0-5]{1}[0-9]{1}-[0-5]{1}[0-9]{1})', branch_name) - if match: - groups = match.groups() - if len(groups) == 1: - branch_timestamp = datetime.strptime(groups[0], '%Y-%m-%d-%H-%M-%S') - days_old = (datetime.now() - branch_timestamp).days - if '-bulk-' in branch_name: - if days_old >= MAX_AGE_IN_DAYS_BULK_PREVIEW_BRANCH: - return True, ">= {} days".format(MAX_AGE_IN_DAYS_BULK_PREVIEW_BRANCH) - else: - return False, "< {} days".format(MAX_AGE_IN_DAYS_BULK_PREVIEW_BRANCH) - elif 'merge' in branch_name and 'github' in branch_name: - if days_old >= MAX_AGE_IN_DAYS_GITHUB_MERGE_BRANCH: - return True, ">= {} days".format(MAX_AGE_IN_DAYS_GITHUB_MERGE_BRANCH) - else: - return False, "< {} days".format(MAX_AGE_IN_DAYS_GITHUB_MERGE_BRANCH) - else: - print('Could not parse timestamp from branch name: {}'.format(branch_name)) - - # the auto-preview branches are only used temporarily to trigger the generation + build process - # after making the commit with the pom updates so we can delete them very promptly - if branch_name.startswith(AUTO_PREVIEW_BRANCH_PREFIX) and days_old >= MAX_AGE_IN_DAYS_AUTO_PREVIEW_BRANCH: - return True, ">= {} days".format(MAX_AGE_IN_DAYS_AUTO_PREVIEW_BRANCH) - - if branch_name.startswith(AUTO_PUBLIC_BRANCH_PREFIX) and days_old >= MAX_AGE_IN_DAYS_AUTO_PUBLIC_BRANCH: - return True, ">= {} days".format(MAX_AGE_IN_DAYS_AUTO_PUBLIC_BRANCH) - - # Delete generated branches if DEXREQ ticket is in terminal state i.e Done / Withdrawn - return check_dexreq_in_terminal_state(branch_name) - - -# def checkout_sdk_and_cli_branches(base_branch, tool_name): -# for repo in CLEAN_TOOL_REPOS_FOR_TOOL[tool_name]: -# repo.git.checkout(base_branch) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Clean up all auto generated branches for a given tool and base branch.') - parser.add_argument('--base-branch', - default='preview', - help='The base branch to start from') - parser.add_argument('--tool', - default='ALL', - help='The tool for which to generate the preview. Accepted values: ALL, {}'.format(', '.join(CLEAN_TOOL_NAMES))) - parser.add_argument('--dry-run', - default=False, - action='store_true', - help='Perform a dry-run') - parser.add_argument('--issue', - action='append', - help='By default, we query JIRA. This allows you to specify a DEXREQ issue to process instead: --issue DEXREQ-123') - parser.add_argument('--branch', - action='append', - help='By default, we query Bitbucket. 
This allows you to specify a branch to process instead: --branch generated-auto-preview-JavaSDK-DEXREQ-40-2018-06-28-22-32-06') - - args = parser.parse_args() - - if args.branch and args.issue: - raise ValueError("Cannot use --issue and --branch together.") - - base_branch = args.base_branch - tool_name = args.tool - config.IS_DRY_RUN = args.dry_run - - if tool_name != 'ALL' and tool_name not in CLEAN_TOOL_NAMES: - raise ValueError("Tool name must be one of: ALL, {}".format(', '.join(CLEAN_TOOL_NAMES))) - - if tool_name == 'ALL': - for tool in CLEAN_TOOL_NAMES: - print('Cleaning branches for tool: {}'.format(tool)) - clean_auto_preview_branches(tool, args.issue, args.branch) - else: - clean_auto_preview_branches(tool_name, args.issue, args.branch) diff --git a/scripts/auto_gen_utils/cli_manual_changes_required_check.py b/scripts/auto_gen_utils/cli_manual_changes_required_check.py deleted file mode 100644 index 2a03087d1c..0000000000 --- a/scripts/auto_gen_utils/cli_manual_changes_required_check.py +++ /dev/null @@ -1,209 +0,0 @@ -import util -import config -import argparse -import sys -import os -import re - -from autogen_issue_advisor_shared import printv -from create_cli_design_review_ticket import get_cli_design_ticket -from add_or_update_scripts.cli_add_or_update_spec import determine_pom_location # noqa: ignore=F402 -from shared.buildsvc_tc_compatibility import build_log_link - - -FIX_FAILED_TESTS_COMMENT = """Following tests were disabled/failed during the PythonCLI generation. {action}: -{{code:title=Disabled/Failed Tests}} -{failed_tests} -{{code}} -""" -ERROR_MESSAGE_TEMPLATE = """The job failed to determine failed tests during CLI generation. {exception}. - -The full build log can be found {build_log_link}. - -For build log and artifact access see the [self-service FAQ|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=132774928#SDK/CLISelfServiceFrequentlyAskedQuestions-HowdoIgetaccesstologsandartifacts?]. 
-""" - -PUBLIC_TESTS_FIX_MESSAGE = "Please use the generated PythonCLI branch to fix the disabled tests" - -PREVIEW_TESTS_FIX_MESSAGE = "Please open a PR against PythonCLI's preview branch to fix the disabled tests" - -FAILED_TEST_PATH = "../python-cli/failed_tests.txt" - -CHANGED_SERVICE = "env.CHANGED_SERVICE" -SERVICES_DIR = "services" -POM_LOCATION_PATTERN = "services/(.*)/pom.xml" - - -def add_cli_tests_failed_label(issue_key): - if config.IS_DRY_RUN: - print("DRY-RUN: Not adding label to {}".format(issue_key)) - else: - issue = util.get_dexreq_issue(issue_key) - printv("Adding CLI_FAILED_TESTS_LABEL label to: " + issue_key) - issue.add_field_value('labels', config.CLI_FAILED_TESTS_LABEL) - - -def remove_cli_tests_failed_label(issue_key): - if config.IS_DRY_RUN: - return - else: - issue = util.get_dexreq_issue(issue_key) - if config.CLI_FAILED_TESTS_LABEL in issue.fields.labels: - issue.fields.labels.remove(config.CLI_FAILED_TESTS_LABEL) - issue.update(fields={"labels": issue.fields.labels}) - - -def check_for_json_test_failures(build_type): - cli_repo = config.CLI_REPO - head_commit = cli_repo.head.commit - if build_type == config.BUILD_TYPE_INDIVIDUAL_PUBLIC: - head_commit_origin = cli_repo.commit('origin/master') - else: - head_commit_origin = cli_repo.commit('origin/preview') - diff = head_commit_origin.diff(head_commit, create_patch=True) - - for mdiff in diff.iter_change_type('M'): - if 'tests/resources/json_ignore_command_list.txt' in mdiff.a_path: - print("Json test failures found") - return True - - print("No Json test failures found") - return False - - -def revert_changes_to_json_ignore_command_list_file(build_type): - if not config.IS_DRY_RUN: - try: - cli_repo = config.CLI_REPO - if build_type == config.BUILD_TYPE_INDIVIDUAL_PUBLIC: - cli_repo.git.checkout(['origin/master', 'tests/resources/json_ignore_command_list.txt'], force=True) - else: - cli_repo.git.checkout(['origin/preview', 'tests/resources/json_ignore_command_list.txt'], force=True) - cli_repo.git.commit("--amend", "-m", cli_repo.head.commit.message, '--allow-empty') - cli_repo.git.push('-u', 'origin', 'HEAD', '-f') - except Exception as e: - print("Unable to remove local changes to tests/resources/json_ignore_command_list.txt file") - print(e) - - -def get_service_name_from_issue(dexreq_issue_ticket): - jira_obj = util.get_dexreq_issue(dexreq_issue_ticket) - spec_name = getattr(jira_obj.fields, config.CUSTOM_FIELD_ID_SPEC_FRIENDLY_NAME) - artifact_id = getattr(jira_obj.fields, config.CUSTOM_FIELD_ID_ARTIFACT_ID) - services_root_dir = os.path.join(config.CLI_REPO_RELATIVE_LOCATION, SERVICES_DIR) - - path = determine_pom_location(artifact_id, spec_name, services_root_dir) - return get_service_name_from_path(path) - - -def get_service_name_from_path(file_path): - result = re.search(POM_LOCATION_PATTERN, file_path) - return result.group(1) - - -# This script will be used in PythonCLI DEXREQ pipeline to verify if any JSON skeleton tests(tests/test_json_skeleton_command_coverage.py) were disabled -# as part of CLI generation. -# For public ticket: If any new tests were disabled, CLI-ManualChangesRequired label is added to the issue. -# For preview ticket: Comments on Design Review ticket with the failed tests. -# Reverts the changes to tests/resources/json_ignore_command_list.txt file. 
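# Illustrative sketch (not part of the original script): the diff check in
# check_for_json_test_failures() above boils down to diffing a base branch
# against HEAD and looking for a modified file. A minimal standalone version,
# assuming a GitPython checkout path and branch name purely for illustration:

import git


def file_modified_against_base(repo_path, base_ref, file_path):
    repo = git.Repo(repo_path)
    base = repo.commit(base_ref)
    # iter_change_type('M') restricts the diff to modified files,
    # ignoring additions and deletions
    for change in base.diff(repo.head.commit).iter_change_type('M'):
        if file_path in change.a_path:
            return True
    return False


# e.g. (hypothetical paths):
# file_modified_against_base('../python-cli', 'origin/preview',
#                            'tests/resources/json_ignore_command_list.txt')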
-if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Checks for disabled JSON unit tests during the CLI generation') - parser.add_argument('--build-id', - required=True, - help='The team city build id for the build that is running this script. This is used to update the relevant JIRA tickets with links to the team city build') - parser.add_argument('--tool', default=config.CLI_NAME) - parser.add_argument('--build-type', - default=config.BUILD_TYPE_INDIVIDUAL_PUBLIC, - help='The build type to use') - parser.add_argument('--dry-run', - default=False, - action='store_true', - help='Perform a dry-run') - - args = parser.parse_args() - tool_name = args.tool - build_id = args.build_id - build_type = args.build_type - config.IS_DRY_RUN = args.dry_run - - if build_type not in [config.BUILD_TYPE_INDIVIDUAL_PUBLIC, config.BUILD_TYPE_INDIVIDUAL_PREVIEW] or tool_name != config.CLI_NAME: - print('Cannot find failed tests for :{}, {}'.format(build_type, tool_name)) - sys.exit(0) - - generation_pass, build_pass = util.were_steps_successful(tool_name) - if not (generation_pass and build_pass): - print('Generation or Build did not pass, not proceeding.') - sys.exit(0) - - last_commit_message = util.get_last_commit_message(tool_name) - issue_keys = util.parse_issue_keys_from_commit_message(last_commit_message) - if len(issue_keys) != 1: - print('More than one DEXReq issues found {}, exiting!'.format(', '.join(issue_keys))) - sys.exit(0) - - dexreq_issue = issue_keys[0] - failed_json_tests = check_for_json_test_failures(build_type) - - failed_tests = [] - if failed_json_tests: - failed_tests = ['tests/test_json_skeleton_command_coverage.py'] - revert_changes_to_json_ignore_command_list_file(build_type) # this is not really required for individual_preview builds. 
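# The revert above works by checking out the pristine copy of
# json_ignore_command_list.txt from the base branch, folding that restore
# into the existing generation commit with `git commit --amend`, and
# force-pushing, so the generated branch never records the test-disabling
# change in its history.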
-
-    # Check for failed integration tests:
-    if os.path.isfile(FAILED_TEST_PATH):
-
-        with open(FAILED_TEST_PATH, "r") as file_handle:
-            failed_integ_tests = file_handle.readlines()
-
-        failed_integ_tests = [name.strip() for name in failed_integ_tests]
-
-        if len(failed_integ_tests) > 0:
-            print("Found failed integ tests:", failed_integ_tests)
-            failed_tests.extend(failed_integ_tests)
-
-    try:
-        if failed_tests:
-            if build_type == config.BUILD_TYPE_INDIVIDUAL_PUBLIC:
-                util.add_jira_comment(dexreq_issue, FIX_FAILED_TESTS_COMMENT.format(action=PUBLIC_TESTS_FIX_MESSAGE, failed_tests="\n".join(failed_tests)), config.COMMENT_TYPE_ERROR)
-                add_cli_tests_failed_label(dexreq_issue)
-            else:
-                print("Adding comment to Design Review ticket")
-                dexreq_issue_ticket = util.get_dexreq_issue(dexreq_issue)
-                design_review_issue = get_cli_design_ticket(dexreq_issue_ticket, False)
-                spec_name = getattr(dexreq_issue_ticket.fields, config.CUSTOM_FIELD_ID_SPEC_FRIENDLY_NAME)
-
-                if design_review_issue:
-                    if spec_name:
-                        modified_service = get_service_name_from_issue(dexreq_issue)
-                        # keep only failures that belong to the service being modified
-                        failed_tests = [test for test in failed_tests
-                                        if not (len(test.split('.')) >= 2
-                                                and test.split('.')[0] == 'services'
-                                                and test.split('.')[1] != modified_service)]
-                    if failed_tests:
-                        add_cli_tests_failed_label(design_review_issue.key)
-                        util.add_jira_comment(design_review_issue, FIX_FAILED_TESTS_COMMENT.format(action=PREVIEW_TESTS_FIX_MESSAGE, failed_tests="\n".join(failed_tests)), config.COMMENT_TYPE_ERROR)
-                        if design_review_issue.fields.status.name in [config.STATUS_DONE, config.STATUS_CLOSED]:
-                            util.transition_issue_overall_status(util.JIRA_CLIENT(), design_review_issue, config.STATUS_NEEDS_TRIAGE_STATUS)
-
-        else:
-            if build_type == config.BUILD_TYPE_INDIVIDUAL_PUBLIC:
-                remove_cli_tests_failed_label(dexreq_issue)
-            else:
-                print("Found non individual_public build_type: " + build_type)
-                design_review_issue = get_cli_design_ticket(util.get_dexreq_issue(dexreq_issue), False)
-                if design_review_issue:
-                    print("Removing CLI-FailedTests from design_review_issue")
-                    remove_cli_tests_failed_label(design_review_issue.key)
-                    print("Removed CLI-FailedTests from design_review_issue")
-            print("No test failures found.")
-    except Exception as e:
-        issue = util.get_dexreq_issue(dexreq_issue)
-        util.add_jira_comment(
-            issue.key,
-            ERROR_MESSAGE_TEMPLATE.format(
-                exception=str(e),
-                build_log_link=build_log_link(build_id)
-            ),
-            comment_type=config.COMMENT_TYPE_ERROR
-        )
diff --git a/scripts/auto_gen_utils/codecov_baseline.html b/scripts/auto_gen_utils/codecov_baseline.html
deleted file mode 100644
index f7ef37520a..0000000000
--- a/scripts/auto_gen_utils/codecov_baseline.html
+++ /dev/null
@@ -1,35 +0,0 @@
-(markup stripped during extraction; only the page title "code coverage summary page" is recoverable)
diff --git a/scripts/auto_gen_utils/comment_with_fields_to_be_deleted.py b/scripts/auto_gen_utils/comment_with_fields_to_be_deleted.py
deleted file mode 100644
index b956baf883..0000000000
--- a/scripts/auto_gen_utils/comment_with_fields_to_be_deleted.py
+++ /dev/null
@@ -1,116 +0,0 @@
-import argparse
-import textwrap
-import sys
-import traceback
-
-import config
-import util
-
-from autogen_issue_advisor_shared import DEFAULT_JIRA_ISSUE_FIELDS, CUSTOM_JIRA_ISSUE_FIELDS
-
-
-def query_all_issues(issues):
-    query = 'project = {JIRA_PROJECT} AND issuetype = "{ISSUE_TYPE}"'.format(
-        JIRA_PROJECT=config.JIRA_PROJECT,
-        ISSUE_TYPE=config.PUBLIC_ISSUE_TYPE_NAME)
-
-    if issues:
-        query = query + " AND key in (" + ", ".join(issues) + ")"
-
-    # We really do want the values from the public ticket, so don't use util.search_dexreq_issues
-    all_issues = util.jira_search_issues(query, fields=DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS)
-
-    for issue in all_issues:
-        print('{} - {}'.format(issue.key, issue.fields.summary))
-
-    return all_issues
-
-
-REMOVED_FIELDS_TEMPLATE = """\
-There is nothing you need to do in response to this message. It is for archival purposes only.
-
-We are changing the schema of the '{public_issuetype}' issue type and are removing some fields, which will now be loaded from the referenced preview ticket.
-
-The values for the fields will still be available via REST API, but to make it easier for users to understand what values were contained in this field, this comment will also display them in the JIRA web interface.
-{fields}
-"""
-
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description='Ticket advisor (preview and public).')
-    parser.add_argument('--issue',
-                        action='append',
-                        help='By default, we query JIRA.
This allows you to specify a DEXREQ issue to process instead: --issue DEXREQ-123') - parser.add_argument('--dry-run', - default=False, - action='store_true', - help='Perform a dry-run') - parser.add_argument('--force', - default=False, - action='store_true', - help='Force an update') - parser.add_argument('--verbose', - default=False, - action='store_true', - help='Verbose logging') - - failed = [] - - args = parser.parse_args() - verbose = args.verbose - config.IS_DRY_RUN = args.dry_run - force = args.force - - issues = query_all_issues(args.issue) - - commented_issues = [] - for issue in issues: - print(textwrap.dedent("""\ - ======================================== - Issue: {}""").format(issue.key)) - try: - # Flush, so we make sure the output of the issue key is already visible - # NOTE: This is to help debug for DEX-6382 - sys.stdout.flush() - - fields = "" - for removed_field_id in config.CUSTOM_FIELD_IDS_READ_FROM_PREVIEW_TICKET: - name = config.CUSTOM_FIELD_NAME_FOR_ID[removed_field_id] - if hasattr(issue.fields, removed_field_id): - v = getattr(issue.fields, removed_field_id) - if v is not None: - value = "'" + v + "'" - else: - value = "" - else: - value = "" - fields = fields + "\n'{}': {}".format(name, value) - - text = REMOVED_FIELDS_TEMPLATE.format( - public_issuetype=config.PUBLIC_ISSUE_TYPE_NAME, - fields=fields) - - if text: - # comment on issue - util.add_jira_comment( - issue.key, - text, - comment_type=config.COMMENT_TYPE_INFO - ) - commented_issues.append(issue.key) - except Exception as error: - exception_string = traceback.format_exc() - print("Unexpected error: {}\n{}".format(type(error), exception_string)) - failed.append(issue.key) - - if config.IS_DRY_RUN: - print("DRY-RUN: Would have left {} comment(s)".format(len(commented_issues))) - else: - print("Left {} comment(s)".format(len(commented_issues))) - - if commented_issues: - print("Commented on the following issues:\n{}".format("\n".join(commented_issues))) - - if failed: - print("The following issues failed:\n{}".format("\n".join(failed))) - sys.exit(1) diff --git a/scripts/auto_gen_utils/config.py b/scripts/auto_gen_utils/config.py deleted file mode 100644 index fab038cf2f..0000000000 --- a/scripts/auto_gen_utils/config.py +++ /dev/null @@ -1,1148 +0,0 @@ -from git import Repo - -import os -import six -import urllib3 - -if 'PYTHON_REQUESTS_DEBUGGING' in os.environ and os.environ.get('PYTHON_REQUESTS_DEBUGGING').lower() in ("yes", "true", "1"): - import logging - import httplib - - # Debug logging - httplib.HTTPConnection.debuglevel = 1 - - # These two lines enable debugging at httplib level (requests->urllib3->http.client) - # You will see the REQUEST, including HEADERS and DATA, and RESPONSE with HEADERS but without DATA. - # The only thing missing will be the response.body which is not logged. - try: - import http.client as http_client - except ImportError: - # Python 2 - import httplib as http_client - http_client.HTTPConnection.debuglevel = 1 - - # You must initialize logging, otherwise you'll not see debug output. 
- logging.basicConfig() - logging.getLogger().setLevel(logging.DEBUG) - logger = logging.getLogger("requests.packages.urllib3") - logger.setLevel(logging.DEBUG) - logger.propagate = True - logger = logging.getLogger("requests") - logger.setLevel(logging.DEBUG) - logger.propagate = True - logger = logging.getLogger("urllib3") - logger.setLevel(logging.DEBUG) - logger.propagate = True - - -# disable warnings for verify=False -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) - -# JIRA REST API v2 documentation: -# https://docs.atlassian.com/software/jira/docs/api/REST/7.8.0/ -JIRA_SERVER = 'https://jira.oci.oraclecorp.com' -JIRA_SERVER_REST_API_VERSION = 2 -JIRA_PROJECT = 'DEX-REQUEST' -JIRA_PROJECT_KEY = 'DEXREQ' - -JIRA_DEV_STATUS_REST_API_URL_FORMAT = "https://jira.oci.oraclecorp.com/rest/dev-status/1.0/issue/detail?issueId={}&applicationType=stash&dataType=pullrequest" - -USERNAME = os.environ.get('JIRA_USERNAME') -PASSWORD = os.environ.get('JIRA_PASSWORD') -JSESSIONID = os.environ.get('JSESSIONID') - -JIRA_OPTIONS = { - 'server': JIRA_SERVER, - 'rest_api_version': JIRA_SERVER_REST_API_VERSION, - 'verify': False -} - -IS_DRY_RUN = False - -DISABLE_COMMENT_INCORRECT_DATES = False - -# Also used for Bitbucket -JIRA_BASIC_AUTH = (USERNAME, PASSWORD) - -# SDKs/CLIs -JAVA_SDK_NAME = "JavaSDK" -PYTHON_SDK_NAME = "PythonSDK" -CLI_NAME = "PythonCLI" -RUBY_SDK_NAME = "RubySDK" -GO_SDK_NAME = "GoSDK" -TYPESCRIPT_SDK_NAME = "TypescriptSDK" -DOTNET_SDK_NAME = "DotNetSDK" -TEST_DATA_GEN_NAME = "TestDataGen" -POWERSHELL_NAME = "PowerShell" -LEGACY_JAVA_SDK_NAME = "LegacyJavaSDK" - -TOOL_NAMES = [ - JAVA_SDK_NAME, - PYTHON_SDK_NAME, - CLI_NAME, - RUBY_SDK_NAME, - GO_SDK_NAME, - TYPESCRIPT_SDK_NAME, - DOTNET_SDK_NAME, - POWERSHELL_NAME, - TEST_DATA_GEN_NAME, - LEGACY_JAVA_SDK_NAME -] - -SDKS_SUPPORTING_REGION_UPDATE = [ - DOTNET_SDK_NAME, - JAVA_SDK_NAME, - TYPESCRIPT_SDK_NAME, - PYTHON_SDK_NAME, - GO_SDK_NAME, - RUBY_SDK_NAME -] - -# Testing service does not have all of the same behavior as the other tool repos, so it -# is not included in the list of TOOL_NAMES -TESTING_SERVICE_NAME = "TestingService" -TOOL_NAMES_WITH_TESTING_SERVICE = TOOL_NAMES + [TESTING_SERVICE_NAME] - -PUBLIC_SPEC_GENERATION_TYPE = "PUBLIC" -PREVIEW_SPEC_GENERATION_TYPE = "PREVIEW" - -CLI_REQUIRED_MANUAL_CHANGES_LABEL = "CLI-ManualChangesRequired" -CLI_FAILED_TESTS_LABEL = "CLI-FailedTests" -CLI_MANUAL_CHANGES_LABELS = ["ManualCLIChange", "CLITeamCodingRequested", "ServiceTeamManual"] -BACKWARD_INCOMPATIBLE_CHANGES_LABEL = "BackwardIncompatibleChanges" -CHANGES_NOT_BEHIND_CONDITIONAL_GROUPS_LABEL = "ChangesNotBehindConditionalGroups" - -CLI_PR_REQUIRED_LABELS = [ - CLI_REQUIRED_MANUAL_CHANGES_LABEL, - CLI_FAILED_TESTS_LABEL -] - -# Custom Field IDs for status fields in DEXREQ project -CUSTOM_FIELD_NAME_JAVA_SDK_STATUS = 'Status - Java SDK' -CUSTOM_FIELD_NAME_PYTHON_SDK_STATUS = 'Status - Python SDK' -CUSTOM_FIELD_NAME_RUBY_SDK_STATUS = 'Status - Ruby SDK' -CUSTOM_FIELD_NAME_GO_SDK_STATUS = 'Status - Go SDK' -CUSTOM_FIELD_NAME_CLI_STATUS = 'Status - CLI' -CUSTOM_FIELD_NAME_TYPESCRIPT_SDK_STATUS = 'Status - Typescript SDK' -CUSTOM_FIELD_NAME_DOTNET_SDK_STATUS = 'Status - DotNet SDK' -CUSTOM_FIELD_NAME_POWERSHELL_STATUS = 'Status - PowerShell' -CUSTOM_FIELD_NAME_TEST_DATA_STATUS = 'Status - Test Data' -CUSTOM_FIELD_NAME_LEGACY_JAVA_SDK_STATUS = 'Status - Legacy Java SDK' - -CUSTOM_FIELD_NAME_FOR_TOOL = { - JAVA_SDK_NAME: CUSTOM_FIELD_NAME_JAVA_SDK_STATUS, - PYTHON_SDK_NAME: CUSTOM_FIELD_NAME_PYTHON_SDK_STATUS, - 
CLI_NAME: CUSTOM_FIELD_NAME_CLI_STATUS, - RUBY_SDK_NAME: CUSTOM_FIELD_NAME_RUBY_SDK_STATUS, - GO_SDK_NAME: CUSTOM_FIELD_NAME_GO_SDK_STATUS, - TYPESCRIPT_SDK_NAME: CUSTOM_FIELD_NAME_TYPESCRIPT_SDK_STATUS, - DOTNET_SDK_NAME: CUSTOM_FIELD_NAME_DOTNET_SDK_STATUS, - POWERSHELL_NAME: CUSTOM_FIELD_NAME_POWERSHELL_STATUS, - TEST_DATA_GEN_NAME: CUSTOM_FIELD_NAME_TEST_DATA_STATUS, - LEGACY_JAVA_SDK_NAME: CUSTOM_FIELD_NAME_LEGACY_JAVA_SDK_STATUS -} - -# To determine custom field ids, use a query such as: -# https://jira.oci.oraclecorp.com/rest/api/latest/issue/DEXREQ-200?expand=names,renderedFields - -# common for preview + public -CUSTOM_FIELD_ID_ARTIFACT_ID = 'customfield_12528' -CUSTOM_FIELD_ID_GROUP_ID = 'customfield_12529' -CUSTOM_FIELD_ID_ARTIFACT_VERSION = 'customfield_12530' -CUSTOM_FIELD_ID_SPEC_LOCATION_IN_ARTIFACT = 'customfield_12531' -CUSTOM_FIELD_ID_SPEC_FRIENDLY_NAME = 'customfield_12532' -CUSTOM_FIELD_ID_SERVICE_SUBDOMAIN = 'customfield_12533' -CUSTOM_FIELD_ID_FEATURE_IDS = 'customfield_12534' -CUSTOM_FIELD_ID_UDX_TICKET = 'customfield_13596' - -# public only -CUSTOM_FIELD_ID_PREVIEW_ISSUE = 'customfield_13600' -CUSTOM_FIELD_ID_SDK_CLI_GA_DATE = 'customfield_13448' -CUSTOM_FIELD_ID_CHANGELOG = 'customfield_13599' -CUSTOM_FIELD_ID_ACKNOWLEDGE_RESPONSIBILITIES = 'customfield_13601' -CUSTOM_FIELD_ID_FEATURE_API_IS_PUBLICLY_AVAILABLE = 'customfield_13780' -CUSTOM_FIELD_ID_CLI_CHANGELOG = 'customfield_16306' - -# updated for new Status - {TOOL NAME} fields -CUSTOM_FIELD_ID_JAVA_SDK_STATUS = 'customfield_13608' -CUSTOM_FIELD_ID_PYTHON_SDK_STATUS = 'customfield_13611' -CUSTOM_FIELD_ID_RUBY_SDK_STATUS = 'customfield_13609' -CUSTOM_FIELD_ID_GO_SDK_STATUS = 'customfield_13612' -CUSTOM_FIELD_ID_CLI_STATUS = 'customfield_13610' -CUSTOM_FIELD_ID_TYPESCRIPT_SDK_STATUS = 'customfield_17314' -CUSTOM_FIELD_ID_DOTNET_SDK_STATUS = 'customfield_12208' -CUSTOM_FIELD_ID_POWERSHELL_STATUS = 'customfield_18202' -CUSTOM_FIELD_ID_TEST_DATA_STATUS = 'customfield_19500' -CUSTOM_FIELD_ID_LEGACY_JAVA_SDK_STATUS = 'customfield_23500' - -CUSTOM_FIELD_ID_FOR_TOOL = { - JAVA_SDK_NAME: CUSTOM_FIELD_ID_JAVA_SDK_STATUS, - PYTHON_SDK_NAME: CUSTOM_FIELD_ID_PYTHON_SDK_STATUS, - CLI_NAME: CUSTOM_FIELD_ID_CLI_STATUS, - RUBY_SDK_NAME: CUSTOM_FIELD_ID_RUBY_SDK_STATUS, - GO_SDK_NAME: CUSTOM_FIELD_ID_GO_SDK_STATUS, - TEST_DATA_GEN_NAME: CUSTOM_FIELD_ID_TEST_DATA_STATUS, - TYPESCRIPT_SDK_NAME: CUSTOM_FIELD_ID_TYPESCRIPT_SDK_STATUS, - DOTNET_SDK_NAME: CUSTOM_FIELD_ID_DOTNET_SDK_STATUS, - POWERSHELL_NAME: CUSTOM_FIELD_ID_POWERSHELL_STATUS, - LEGACY_JAVA_SDK_NAME: CUSTOM_FIELD_ID_LEGACY_JAVA_SDK_STATUS -} - -CUSTOM_FIELD_ID_SDK_CLI_GA_DATE = 'customfield_13448' -CUSTOM_FIELD_ID_ISSUE_ROUTING_TAG = 'customfield_13597' -CUSTOM_FIELD_ID_SPRINT = 'customfield_10004' - -CUSTOM_FIELD_NAME_FOR_ID = { - CUSTOM_FIELD_ID_ARTIFACT_ID: 'Spec Artifact Id', - CUSTOM_FIELD_ID_GROUP_ID: 'Spec Group Id', - CUSTOM_FIELD_ID_ARTIFACT_VERSION: 'Spec Artifact Version', - CUSTOM_FIELD_ID_SPEC_LOCATION_IN_ARTIFACT: 'Spec Location in Artifact', - CUSTOM_FIELD_ID_SPEC_FRIENDLY_NAME: 'Service Friendly Name', - CUSTOM_FIELD_ID_SERVICE_SUBDOMAIN: 'Service Subdomain', - CUSTOM_FIELD_ID_FEATURE_IDS: 'Feature id(s)', - CUSTOM_FIELD_ID_JAVA_SDK_STATUS: 'Status - Java SDK', - CUSTOM_FIELD_ID_RUBY_SDK_STATUS: 'Status - Ruby SDK', - CUSTOM_FIELD_ID_GO_SDK_STATUS: 'Status - Go SDK', - CUSTOM_FIELD_ID_PYTHON_SDK_STATUS: 'Status - Python SDK', - CUSTOM_FIELD_ID_CLI_STATUS: 'Status - CLI', - CUSTOM_FIELD_ID_TYPESCRIPT_SDK_STATUS: "Status - Typescript SDK", - 
CUSTOM_FIELD_ID_DOTNET_SDK_STATUS: "Status - DotNet SDK", - CUSTOM_FIELD_ID_POWERSHELL_STATUS: "Status - PowerShell", - CUSTOM_FIELD_ID_TEST_DATA_STATUS: "Status - Test Data", - CUSTOM_FIELD_ID_LEGACY_JAVA_SDK_STATUS: "Status - Legacy Java SDK", - CUSTOM_FIELD_ID_SDK_CLI_GA_DATE: 'SDK/CLI GA Date', - CUSTOM_FIELD_ID_ISSUE_ROUTING_TAG: 'Issue Routing Tag', - CUSTOM_FIELD_ID_PREVIEW_ISSUE: 'Preview Issue', - CUSTOM_FIELD_ID_CHANGELOG: 'CHANGELOG Entry', - CUSTOM_FIELD_ID_ACKNOWLEDGE_RESPONSIBILITIES: 'Acknowledge Responsibilities', - CUSTOM_FIELD_ID_FEATURE_API_IS_PUBLICLY_AVAILABLE: 'Feature API is publicly available & un-whitelisted in Prod', - CUSTOM_FIELD_ID_UDX_TICKET: 'UDX Ticket', - CUSTOM_FIELD_ID_CLI_CHANGELOG: 'CLI ChangeLog Entry' -} - -CUSTOM_FIELD_IDS_READ_FROM_PREVIEW_TICKET = [ - CUSTOM_FIELD_ID_ARTIFACT_ID, - CUSTOM_FIELD_ID_GROUP_ID, - CUSTOM_FIELD_ID_ARTIFACT_VERSION, - CUSTOM_FIELD_ID_SPEC_LOCATION_IN_ARTIFACT, - CUSTOM_FIELD_ID_SPEC_FRIENDLY_NAME, - CUSTOM_FIELD_ID_SERVICE_SUBDOMAIN, - CUSTOM_FIELD_ID_FEATURE_IDS, - CUSTOM_FIELD_ID_ISSUE_ROUTING_TAG, - CUSTOM_FIELD_ID_UDX_TICKET, -] - -# RubySDK is in 'maintenance mode' and receives limited support owned by the DEX SDK team. -TOOLS_FOR_WHICH_GENERATION_MAY_FAIL = [RUBY_SDK_NAME, TEST_DATA_GEN_NAME, LEGACY_JAVA_SDK_NAME] - -# STATUS_READY_FOR_WORK = 'Ready for Work' -STATUS_BACKLOG = 'Backlog' -STATUS_PROCESSING_REQUESTED = 'Processing Requested' -STATUS_IN_PROGRESS = 'In Progress' -STATUS_IN_REVIEW = 'In Review' -STATUS_IN_DESIGN = 'In Design' -STATUS_NEEDS_TRIAGE = 'Needs Triage' -# STATUS_PENDING_MERGE = 'Pending Merge' -STATUS_PROCESSING = 'Processing' -STATUS_PROCESSING_BULK = 'Processing - Bulk' -STATUS_SERVICE_TEAM_FAILURE_INVESTIGATION = 'Service Team Failure Investigation' -STATUS_DEX_SUPPORT_REQUIRED = 'DEX Support Required' -STATUS_SERVICE_TEAM_REVIEW_REQUIRED = 'Service Team Review Required' -STATUS_SERVICE_TEAM_WORK_REQUIRED = 'Service Team Work Required' -STATUS_RELEASE_REQUESTED = 'Release Requested' -STATUS_RELEASE_APPROVED = 'Release Approved' -STATUS_READY_FOR_PREVIEW = 'Ready for Preview' -STATUS_DEX_BULK_REVIEW = 'DEX Bulk Review' -STATUS_TO_DEPLOY = 'To Deploy' -STATUS_DONE = 'Done' -STATUS_WITHDRAWN = 'Withdrawn' -STATUS_NEEDS_TRIAGE_STATUS = 'Needs triage status' -STATUS_CLOSED = 'Closed' -STATUS_MORE_INFORMATION_NEEDED = 'More Information Needed' - -CUSTOM_STATUS_TODO = 'To Do' -CUSTOM_STATUS_PROCESSING = 'Processing' -CUSTOM_STATUS_FAILURE = 'Failure' -CUSTOM_STATUS_SUCCESS = 'Success' -CUSTOM_STATUS_DONE = 'Done' - -# We expect the Git repos to be checked out relative to the working directory when the scripts auto-gen-utils pipeline scripts are run: -# dexreq repo: ../dexreq -# Java SDK repo: ../java-sdk -# Go SDK repo: ../src/github.com/oracle/oci-go-sdk -# Ruby SDK repo: ../ruby-sdk -# Python SDK repo: ../python-sdk -# Python SDK repo: ../python-cli -# Testing service: ../oci-testing-service -# Typescript SDK repo: ../oci-typescript-sdk -# DotNet SDK repo: ../oci-dotnet-sdk -# Legacy Java SDK repo : ../legacy-java-sdk - -DEXREQ_REPO_NAME = "dexreq" -DEXREQ_DIFF_REPO_RELATIVE_LOCATION = os.path.join('..', 'dexreq') -DEXREQ_REPO = Repo.init(DEXREQ_DIFF_REPO_RELATIVE_LOCATION) - -CLI_REPO_RELATIVE_LOCATION = os.path.join('..', 'python-cli') -PYTHON_SDK_REPO_RELATIVE_LOCATION = os.path.join('..', 'python-sdk') -GO_SDK_REPO_RELATIVE_LOCATION = os.path.join('..', 'src', 'github.com', 'oracle', 'oci-go-sdk') -JAVA_SDK_REPO_RELATIVE_LOCATION = os.path.join('..', 'java-sdk') 
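# Illustrative sketch (not in the original config; the relative paths are just
# the ones listed in the layout comment above): one way to sanity-check that
# the expected sibling checkouts exist before Repo.init() is called would be:
#
#   import os
#
#   def verify_sibling_checkouts(relative_locations):
#       missing = [rel for rel in relative_locations
#                  if not os.path.isdir(os.path.join(rel, '.git'))]
#       for rel in missing:
#           print('warning: expected a git checkout at {}'.format(rel))
#       return not missing
#
#   # e.g. verify_sibling_checkouts(['../python-sdk', '../python-cli'])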
-TYPESCRIPT_SDK_REPO_RELATIVE_LOCATION = os.path.join('..', 'oci-typescript-sdk') -RUBY_SDK_REPO_RELATIVE_LOCATION = os.path.join('..', 'ruby-sdk') -DOTNET_SDK_REPO_RELATIVE_LOCATION = os.path.join('..', 'oci-dotnet-sdk') -POWERSHELL_REPO_RELATIVE_LOCATION = os.path.join('..', 'oci-powershell-modules') -TESTING_SERVICE_REPO_RELATIVE_LOCATION = os.path.join('..', 'oci-testing-service') -TEST_DATA_GEN_REPO_RELATIVE_LOCATION = os.path.join('..', 'sdk-client-test-data') -LEGACY_JAVA_SDK_REPO_RELATIVE_LOCATION = os.path.join('..', 'legacy-java-sdk') - -REPO_RELATIVE_LOCATION_FOR_TOOL = { - JAVA_SDK_NAME: JAVA_SDK_REPO_RELATIVE_LOCATION, - PYTHON_SDK_NAME: PYTHON_SDK_REPO_RELATIVE_LOCATION, - CLI_NAME: CLI_REPO_RELATIVE_LOCATION, - RUBY_SDK_NAME: RUBY_SDK_REPO_RELATIVE_LOCATION, - GO_SDK_NAME: GO_SDK_REPO_RELATIVE_LOCATION, - TYPESCRIPT_SDK_NAME: TYPESCRIPT_SDK_REPO_RELATIVE_LOCATION, - DOTNET_SDK_NAME: DOTNET_SDK_REPO_RELATIVE_LOCATION, - POWERSHELL_NAME: POWERSHELL_REPO_RELATIVE_LOCATION, - TEST_DATA_GEN_NAME: TEST_DATA_GEN_REPO_RELATIVE_LOCATION, - LEGACY_JAVA_SDK_NAME: LEGACY_JAVA_SDK_REPO_RELATIVE_LOCATION -} - -CLI_REPO = Repo.init(CLI_REPO_RELATIVE_LOCATION) -SDK_REPO = Repo.init(PYTHON_SDK_REPO_RELATIVE_LOCATION) -JAVA_SDK_REPO = Repo.init(JAVA_SDK_REPO_RELATIVE_LOCATION) -RUBY_SDK_REPO = Repo.init(RUBY_SDK_REPO_RELATIVE_LOCATION) -GO_SDK_REPO = Repo.init(GO_SDK_REPO_RELATIVE_LOCATION) -TYPESCRIPT_SDK_REPO = Repo.init(TYPESCRIPT_SDK_REPO_RELATIVE_LOCATION) -DOTNET_SDK_REPO = Repo.init(DOTNET_SDK_REPO_RELATIVE_LOCATION) -POWERSHELL_REPO = Repo.init(POWERSHELL_REPO_RELATIVE_LOCATION) -TESTING_SERVICE_REPO = Repo.init(TESTING_SERVICE_REPO_RELATIVE_LOCATION) -TEST_DATA_GEN_REPO = Repo.init(TEST_DATA_GEN_REPO_RELATIVE_LOCATION) -LEGACY_JAVA_SDK_REPO = Repo.init(LEGACY_JAVA_SDK_REPO_RELATIVE_LOCATION) -REPO_NAMES_FOR_TOOL = { - JAVA_SDK_NAME: ["java-sdk"], - PYTHON_SDK_NAME: ["python-sdk"], - CLI_NAME: ["python-sdk", "python-cli"], - RUBY_SDK_NAME: ["ruby-sdk"], - GO_SDK_NAME: ["oci-go-sdk"], - TYPESCRIPT_SDK_NAME: ["oci-typescript-sdk"], - DOTNET_SDK_NAME: ["oci-dotnet-sdk"], - POWERSHELL_NAME: ["oci-dotnet-sdk", "oci-powershell-modules"], - TEST_DATA_GEN_NAME: ["sdk-client-test-data"], - TESTING_SERVICE_NAME: ["oci-testing-service"], - LEGACY_JAVA_SDK_NAME: ["legacy-java-sdk"] -} -REPOS_FOR_TOOL = { - JAVA_SDK_NAME: [JAVA_SDK_REPO], - PYTHON_SDK_NAME: [SDK_REPO], - CLI_NAME: [SDK_REPO, CLI_REPO], - RUBY_SDK_NAME: [RUBY_SDK_REPO], - GO_SDK_NAME: [GO_SDK_REPO], - TYPESCRIPT_SDK_NAME: [TYPESCRIPT_SDK_REPO], - DOTNET_SDK_NAME: [DOTNET_SDK_REPO], - POWERSHELL_NAME: [DOTNET_SDK_REPO, POWERSHELL_REPO], - TEST_DATA_GEN_NAME: [TEST_DATA_GEN_REPO], - TESTING_SERVICE_NAME: [TESTING_SERVICE_REPO], - LEGACY_JAVA_SDK_NAME: [LEGACY_JAVA_SDK_REPO] -} - -REPO_NAME_TO_PRIMARY_TOOL = {} -for tool, repo_names in six.iteritems(REPO_NAMES_FOR_TOOL): - REPO_NAME_TO_PRIMARY_TOOL[repo_names[-1]] = tool - -# This is for links to generate PRs for the correct repo for the TC jobs with a VCS -# configured to reference a forked repo. 
-REPO_FOR_LINKS = { - JAVA_SDK_NAME: ["java-sdk"], - PYTHON_SDK_NAME: ["python-sdk"], - CLI_NAME: ["python-sdk", "python-cli"], - RUBY_SDK_NAME: ["ruby-sdk-autogen-fork"], - GO_SDK_NAME: ["oci-go-sdk"], - TYPESCRIPT_SDK_NAME:["oci-typescript-sdk-autogen-fork"], - DOTNET_SDK_NAME: ["oci-dotnet-sdk-autogen-fork"], - POWERSHELL_NAME: ["oci-dotnet-sdk-autogen-fork", "oci-powershell-modules-autogen-fork"], - TEST_DATA_GEN_NAME: ["sdk-client-test-data-autogen-fork"], - LEGACY_JAVA_SDK_NAME: ["legacy-java-sdk"] -} - -# This is for specifying a target repo to merge to. It's only required for TC jobs -# with a VCS configured to reference a forked repo with the changes. -# If left as an empty string, the link defaults to use the same repo as the source for the PR. -# To find the repo ID for a given BitBucket Repo: -# https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/SDK/repos/{repo_name} -TARGET_REPO_IDS_FOR_LINKS = { - JAVA_SDK_NAME: [""], - PYTHON_SDK_NAME: [""], - CLI_NAME: ["", ""], - RUBY_SDK_NAME: ["464"], # BitBucket's repo ID for ruby-sdk - GO_SDK_NAME: [""], - TYPESCRIPT_SDK_NAME: ["14954"], # BitBucket's repo ID for oci-typescript-sdk - DOTNET_SDK_NAME: ["50528"], # BitBucket's repo ID for oci-dotnet-sdk, - POWERSHELL_NAME: ["50528", "52679"], - TEST_DATA_GEN_NAME: ["31489"], # BitBucket's repo ID for sdk-client-test-data - LEGACY_JAVA_SDK_NAME: [""] -} - -PREVIEW_TICKET_LABEL = "PreviewSDK" -PUBLIC_TICKET_LABEL = "PublicSDK" - -PREVIEW_ISSUE_TYPE_NAME = "Preview" -PUBLIC_ISSUE_TYPE_NAME = "Public" - -PREVIEW_ISSUE_TYPE_ID = '13122' -PUBLIC_ISSUE_TYPE_ID = '13121' - -PULL_REQUEST_STATUS_MERGED = 'MERGED' -PULL_REQUEST_STATUS_OPEN = 'OPEN' -PULL_REQUEST_STATUS_DECLINED = 'DECLINED' - -TEMPLATE_TODO_TICKETS_PREVIEW_JQL = 'project = {JIRA_PROJECT} AND resolution = Unresolved AND issuetype = "{ISSUE_TYPE}" AND ("{CUSTOM_FIELD_STATUS}" = "To Do")' - -TODO_PREVIEW_CLI_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PREVIEW_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_CLI_STATUS, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID -) -TODO_PREVIEW_PYTHON_SDK_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PREVIEW_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_PYTHON_SDK_STATUS, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID -) -TODO_PREVIEW_JAVA_SDK_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PREVIEW_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_JAVA_SDK_STATUS, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID -) -TODO_PREVIEW_RUBY_SDK_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PREVIEW_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_RUBY_SDK_STATUS, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID -) -TODO_PREVIEW_GO_SDK_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PREVIEW_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_GO_SDK_STATUS, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID -) -TODO_PREVIEW_TYPESCRIPT_SDK_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PREVIEW_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_TYPESCRIPT_SDK_STATUS, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID -) -TODO_PREVIEW_DOTNET_SDK_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PREVIEW_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_DOTNET_SDK_STATUS, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID -) - -TODO_PREVIEW_POWERSHELL_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PREVIEW_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_POWERSHELL_STATUS, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID -) - 
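# Worked example (illustrative): with the values defined above
# (JIRA_PROJECT = 'DEX-REQUEST', PREVIEW_ISSUE_TYPE_ID = '13122'), the
# PowerShell query just built expands to:
#
#   project = DEX-REQUEST AND resolution = Unresolved AND issuetype = "13122"
#   AND ("Status - PowerShell" = "To Do")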
-TODO_PREVIEW_TEST_DATA_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PREVIEW_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_TEST_DATA_STATUS, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID -) - -TODO_PREVIEW_LEGACY_JAVA_SDK_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PREVIEW_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_LEGACY_JAVA_SDK_STATUS, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID -) - -TEMPLATE_TODO_TICKETS_PUBLIC_JQL = 'project = {JIRA_PROJECT} AND status = "' + STATUS_PROCESSING_REQUESTED + '" AND resolution = Unresolved AND issuetype = "{ISSUE_TYPE}" AND ("{CUSTOM_FIELD_STATUS}" = "To Do")' - -TODO_PUBLIC_CLI_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PUBLIC_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_CLI_STATUS, - ISSUE_TYPE=PUBLIC_ISSUE_TYPE_ID -) -TODO_PUBLIC_PYTHON_SDK_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PUBLIC_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_PYTHON_SDK_STATUS, - ISSUE_TYPE=PUBLIC_ISSUE_TYPE_ID -) -TODO_PUBLIC_JAVA_SDK_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PUBLIC_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_JAVA_SDK_STATUS, - ISSUE_TYPE=PUBLIC_ISSUE_TYPE_ID -) -TODO_PUBLIC_RUBY_SDK_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PUBLIC_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_RUBY_SDK_STATUS, - ISSUE_TYPE=PUBLIC_ISSUE_TYPE_ID -) -TODO_PUBLIC_GO_SDK_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PUBLIC_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_GO_SDK_STATUS, - ISSUE_TYPE=PUBLIC_ISSUE_TYPE_ID -) -TODO_PUBLIC_TYPESCRIPT_SDK_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PUBLIC_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_TYPESCRIPT_SDK_STATUS, - ISSUE_TYPE=PUBLIC_ISSUE_TYPE_ID -) -TODO_PUBLIC_DOTNET_SDK_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PUBLIC_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_DOTNET_SDK_STATUS, - ISSUE_TYPE=PUBLIC_ISSUE_TYPE_ID -) - -TODO_PUBLIC_POWERSHELL_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PUBLIC_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_POWERSHELL_STATUS, - ISSUE_TYPE=PUBLIC_ISSUE_TYPE_ID -) - -TODO_PUBLIC_TEST_DATA_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PUBLIC_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_TEST_DATA_STATUS, - ISSUE_TYPE=PUBLIC_ISSUE_TYPE_ID -) -TODO_PUBLIC_LEGACY_JAVA_SDK_TICKETS_JQL = TEMPLATE_TODO_TICKETS_PUBLIC_JQL.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_STATUS=CUSTOM_FIELD_NAME_LEGACY_JAVA_SDK_STATUS, - ISSUE_TYPE=PUBLIC_ISSUE_TYPE_ID -) - -TODO_PREVIEW_ALL_TOOLS = ('project = {JIRA_PROJECT} AND resolution = Unresolved AND (status = "{STATUS_PROCESSING_REQUESTED}") AND issuetype = "{ISSUE_TYPE}"').format( - JIRA_PROJECT=JIRA_PROJECT, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID, - STATUS_PROCESSING_REQUESTED=STATUS_PROCESSING_REQUESTED -) -TODO_PUBLIC_ALL_TOOLS = ('project = {JIRA_PROJECT} AND resolution = Unresolved AND status = "{STATUS_PROCESSING_REQUESTED}" AND issuetype = "{ISSUE_TYPE}"').format( - JIRA_PROJECT=JIRA_PROJECT, - ISSUE_TYPE=PUBLIC_ISSUE_TYPE_ID, - STATUS_PROCESSING_REQUESTED=STATUS_PROCESSING_REQUESTED -) - -TODO_PREVIEW_TICKETS_JQL_FOR_TOOL = { - JAVA_SDK_NAME: TODO_PREVIEW_JAVA_SDK_TICKETS_JQL, - PYTHON_SDK_NAME: TODO_PREVIEW_PYTHON_SDK_TICKETS_JQL, - CLI_NAME: TODO_PREVIEW_CLI_TICKETS_JQL, - RUBY_SDK_NAME: TODO_PREVIEW_RUBY_SDK_TICKETS_JQL, - GO_SDK_NAME: TODO_PREVIEW_GO_SDK_TICKETS_JQL, - 
TYPESCRIPT_SDK_NAME: TODO_PREVIEW_TYPESCRIPT_SDK_TICKETS_JQL, - DOTNET_SDK_NAME: TODO_PREVIEW_DOTNET_SDK_TICKETS_JQL, - POWERSHELL_NAME: TODO_PREVIEW_POWERSHELL_TICKETS_JQL, - TEST_DATA_GEN_NAME: TODO_PREVIEW_TEST_DATA_TICKETS_JQL, - LEGACY_JAVA_SDK_NAME: TODO_PREVIEW_LEGACY_JAVA_SDK_TICKETS_JQL -} - -TODO_PUBLIC_TICKETS_JQL_FOR_TOOL = { - JAVA_SDK_NAME: TODO_PUBLIC_JAVA_SDK_TICKETS_JQL, - PYTHON_SDK_NAME: TODO_PUBLIC_PYTHON_SDK_TICKETS_JQL, - CLI_NAME: TODO_PUBLIC_CLI_TICKETS_JQL, - RUBY_SDK_NAME: TODO_PUBLIC_RUBY_SDK_TICKETS_JQL, - GO_SDK_NAME: TODO_PUBLIC_GO_SDK_TICKETS_JQL, - TYPESCRIPT_SDK_NAME: TODO_PUBLIC_TYPESCRIPT_SDK_TICKETS_JQL, - DOTNET_SDK_NAME: TODO_PUBLIC_DOTNET_SDK_TICKETS_JQL, - POWERSHELL_NAME: TODO_PUBLIC_POWERSHELL_TICKETS_JQL, - TEST_DATA_GEN_NAME: TODO_PUBLIC_TEST_DATA_TICKETS_JQL, - LEGACY_JAVA_SDK_NAME: TODO_PUBLIC_LEGACY_JAVA_SDK_TICKETS_JQL -} - -READY_FOR_PREVIEW_CLI_TICKETS_JQL = 'project = {JIRA_PROJECT} AND resolution = Unresolved AND issuetype = "{ISSUE_TYPE}" AND "{CUSTOM_FIELD_NAME_PER_TOOL_STATUS}" in ("{CUSTOM_STATUS_VALUE_1}", "{CUSTOM_STATUS_VALUE_2}") AND status = "{STATUS_READY_FOR_PREVIEW}"'.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_NAME_PER_TOOL_STATUS=CUSTOM_FIELD_NAME_CLI_STATUS, - STATUS=STATUS_READY_FOR_PREVIEW, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID, - CUSTOM_STATUS_VALUE_1=CUSTOM_STATUS_SUCCESS, - CUSTOM_STATUS_VALUE_2=CUSTOM_STATUS_DONE, - STATUS_READY_FOR_PREVIEW=STATUS_READY_FOR_PREVIEW -) -READY_FOR_PREVIEW_PYTHON_SDK_TICKETS_JQL = 'project = {JIRA_PROJECT} AND resolution = Unresolved AND issuetype = "{ISSUE_TYPE}" AND "{CUSTOM_FIELD_NAME_PER_TOOL_STATUS}" in ("{CUSTOM_STATUS_VALUE_1}", "{CUSTOM_STATUS_VALUE_2}") AND status = "{STATUS_READY_FOR_PREVIEW}"'.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_NAME_PER_TOOL_STATUS=CUSTOM_FIELD_NAME_PYTHON_SDK_STATUS, - STATUS=STATUS_READY_FOR_PREVIEW, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID, - CUSTOM_STATUS_VALUE_1=CUSTOM_STATUS_SUCCESS, - CUSTOM_STATUS_VALUE_2=CUSTOM_STATUS_DONE, - STATUS_READY_FOR_PREVIEW=STATUS_READY_FOR_PREVIEW -) -READY_FOR_PREVIEW_JAVA_SDK_TICKETS_JQL = 'project = {JIRA_PROJECT} AND resolution = Unresolved AND issuetype = "{ISSUE_TYPE}" AND "{CUSTOM_FIELD_NAME_PER_TOOL_STATUS}" in ("{CUSTOM_STATUS_VALUE_1}", "{CUSTOM_STATUS_VALUE_2}") AND status = "{STATUS_READY_FOR_PREVIEW}"'.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_NAME_PER_TOOL_STATUS=CUSTOM_FIELD_NAME_JAVA_SDK_STATUS, - STATUS=STATUS_READY_FOR_PREVIEW, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID, - CUSTOM_STATUS_VALUE_1=CUSTOM_STATUS_SUCCESS, - CUSTOM_STATUS_VALUE_2=CUSTOM_STATUS_DONE, - STATUS_READY_FOR_PREVIEW=STATUS_READY_FOR_PREVIEW -) -READY_FOR_PREVIEW_RUBY_SDK_TICKETS_JQL = 'project = {JIRA_PROJECT} AND resolution = Unresolved AND issuetype = "{ISSUE_TYPE}" AND "{CUSTOM_FIELD_NAME_PER_TOOL_STATUS}" in ("{CUSTOM_STATUS_VALUE_1}", "{CUSTOM_STATUS_VALUE_2}") AND status = "{STATUS_READY_FOR_PREVIEW}"'.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_NAME_PER_TOOL_STATUS=CUSTOM_FIELD_NAME_RUBY_SDK_STATUS, - STATUS=STATUS_READY_FOR_PREVIEW, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID, - CUSTOM_STATUS_VALUE_1=CUSTOM_STATUS_SUCCESS, - CUSTOM_STATUS_VALUE_2=CUSTOM_STATUS_DONE, - STATUS_READY_FOR_PREVIEW=STATUS_READY_FOR_PREVIEW -) -READY_FOR_PREVIEW_GO_SDK_TICKETS_JQL = 'project = {JIRA_PROJECT} AND resolution = Unresolved AND issuetype = "{ISSUE_TYPE}" AND "{CUSTOM_FIELD_NAME_PER_TOOL_STATUS}" in ("{CUSTOM_STATUS_VALUE_1}", "{CUSTOM_STATUS_VALUE_2}") AND status = "{STATUS_READY_FOR_PREVIEW}"'.format( - 
JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_NAME_PER_TOOL_STATUS=CUSTOM_FIELD_NAME_GO_SDK_STATUS, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID, - CUSTOM_STATUS_VALUE_1=CUSTOM_STATUS_SUCCESS, - CUSTOM_STATUS_VALUE_2=CUSTOM_STATUS_DONE, - STATUS_READY_FOR_PREVIEW=STATUS_READY_FOR_PREVIEW -) -READY_FOR_PREVIEW_TYPESCRIPT_SDK_TICKETS_JQL = 'project = {JIRA_PROJECT} AND resolution = Unresolved AND issuetype = "{ISSUE_TYPE}" AND "{CUSTOM_FIELD_NAME_PER_TOOL_STATUS}" in ("{CUSTOM_STATUS_VALUE_1}", "{CUSTOM_STATUS_VALUE_2}") AND status = "{STATUS_READY_FOR_PREVIEW}"'.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_NAME_PER_TOOL_STATUS=CUSTOM_FIELD_NAME_TYPESCRIPT_SDK_STATUS, - STATUS=STATUS_READY_FOR_PREVIEW, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID, - CUSTOM_STATUS_VALUE_1=CUSTOM_STATUS_SUCCESS, - CUSTOM_STATUS_VALUE_2=CUSTOM_STATUS_DONE, - STATUS_READY_FOR_PREVIEW=STATUS_READY_FOR_PREVIEW -) -READY_FOR_PREVIEW_DOTNET_SDK_TICKETS_JQL = 'project = {JIRA_PROJECT} AND resolution = Unresolved AND issuetype = "{ISSUE_TYPE}" AND "{CUSTOM_FIELD_NAME_PER_TOOL_STATUS}" in ("{CUSTOM_STATUS_VALUE_1}", "{CUSTOM_STATUS_VALUE_2}") AND status = "{STATUS_READY_FOR_PREVIEW}"'.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_NAME_PER_TOOL_STATUS=CUSTOM_FIELD_NAME_DOTNET_SDK_STATUS, - STATUS=STATUS_READY_FOR_PREVIEW, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID, - CUSTOM_STATUS_VALUE_1=CUSTOM_STATUS_SUCCESS, - CUSTOM_STATUS_VALUE_2=CUSTOM_STATUS_DONE, - STATUS_READY_FOR_PREVIEW=STATUS_READY_FOR_PREVIEW -) - -READY_FOR_PREVIEW_POWERSHELL_TICKETS_JQL = 'project = {JIRA_PROJECT} AND resolution = Unresolved AND issuetype = "{ISSUE_TYPE}" AND "{CUSTOM_FIELD_NAME_PER_TOOL_STATUS}" in ("{CUSTOM_STATUS_VALUE_1}", "{CUSTOM_STATUS_VALUE_2}") AND status = "{STATUS_READY_FOR_PREVIEW}"'.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_NAME_PER_TOOL_STATUS=CUSTOM_FIELD_NAME_POWERSHELL_STATUS, - STATUS=STATUS_READY_FOR_PREVIEW, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID, - CUSTOM_STATUS_VALUE_1=CUSTOM_STATUS_SUCCESS, - CUSTOM_STATUS_VALUE_2=CUSTOM_STATUS_DONE, - STATUS_READY_FOR_PREVIEW=STATUS_READY_FOR_PREVIEW -) - -READY_FOR_PREVIEW_TEST_DATA_TICKETS_JQL = 'project = {JIRA_PROJECT} AND resolution = Unresolved AND issuetype = "{ISSUE_TYPE}" AND "{CUSTOM_FIELD_NAME_PER_TOOL_STATUS}" in ("{CUSTOM_STATUS_VALUE_1}", "{CUSTOM_STATUS_VALUE_2}") AND status = "{STATUS_READY_FOR_PREVIEW}"'.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_NAME_PER_TOOL_STATUS=CUSTOM_FIELD_NAME_TEST_DATA_STATUS, - STATUS=STATUS_READY_FOR_PREVIEW, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID, - CUSTOM_STATUS_VALUE_1=CUSTOM_STATUS_SUCCESS, - CUSTOM_STATUS_VALUE_2=CUSTOM_STATUS_DONE, - STATUS_READY_FOR_PREVIEW=STATUS_READY_FOR_PREVIEW -) - -READY_FOR_PREVIEW_LEGACY_JAVA_SDK_TICKETS_JQL = 'project = {JIRA_PROJECT} AND resolution = Unresolved AND issuetype = "{ISSUE_TYPE}" AND "{CUSTOM_FIELD_NAME_PER_TOOL_STATUS}" in ("{CUSTOM_STATUS_VALUE_1}", "{CUSTOM_STATUS_VALUE_2}") AND status = "{STATUS_READY_FOR_PREVIEW}"'.format( - JIRA_PROJECT=JIRA_PROJECT, - CUSTOM_FIELD_NAME_PER_TOOL_STATUS=CUSTOM_FIELD_NAME_LEGACY_JAVA_SDK_STATUS, - STATUS=STATUS_READY_FOR_PREVIEW, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID, - CUSTOM_STATUS_VALUE_1=CUSTOM_STATUS_SUCCESS, - CUSTOM_STATUS_VALUE_2=CUSTOM_STATUS_DONE, - STATUS_READY_FOR_PREVIEW=STATUS_READY_FOR_PREVIEW -) - -ALL_TOOLS_SUCCESSFUL_PARTIAL_QUERY = " AND ".join(['("{CUSTOM_FIELD_STATUS}" in ("{CUSTOM_STATUS_VALUE_1}", "{CUSTOM_STATUS_VALUE_2}"))'.format(CUSTOM_FIELD_STATUS=custom_status_field, CUSTOM_STATUS_VALUE_1=CUSTOM_STATUS_SUCCESS, 
CUSTOM_STATUS_VALUE_2=CUSTOM_STATUS_DONE) for _,custom_status_field in six.iteritems(CUSTOM_FIELD_NAME_FOR_TOOL)]) -READY_FOR_PREVIEW_ALL_TOOLS = 'project = {JIRA_PROJECT} AND resolution = Unresolved AND issuetype = "{ISSUE_TYPE}" AND status = "{STATUS_READY_FOR_PREVIEW}" AND ({ALL_TOOLS_SUCCESSFUL_PARTIAL_QUERY})'.format( - JIRA_PROJECT=JIRA_PROJECT, - ALL_TOOLS_SUCCESSFUL_PARTIAL_QUERY=ALL_TOOLS_SUCCESSFUL_PARTIAL_QUERY, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID, - STATUS_READY_FOR_PREVIEW=STATUS_READY_FOR_PREVIEW -) -READY_FOR_PUBLIC_ALL_TOOLS = 'project = {JIRA_PROJECT} AND resolution = Unresolved AND issuetype = "{ISSUE_TYPE}" AND status = "{STATUS_RELEASE_APPROVED}" AND ({ALL_TOOLS_SUCCESSFUL_PARTIAL_QUERY})'.format( - JIRA_PROJECT=JIRA_PROJECT, - ALL_TOOLS_SUCCESSFUL_PARTIAL_QUERY=ALL_TOOLS_SUCCESSFUL_PARTIAL_QUERY, - ISSUE_TYPE=PUBLIC_ISSUE_TYPE_ID, - STATUS_RELEASE_APPROVED=STATUS_RELEASE_APPROVED -) - -ANY_TOOLS_TODO_PARTIAL_QUERY = " OR ".join(['("{CUSTOM_FIELD_STATUS}" = "{CUSTOM_STATUS_TODO}")'.format(CUSTOM_FIELD_STATUS=custom_status_field, CUSTOM_STATUS_TODO=CUSTOM_STATUS_TODO) for _,custom_status_field in six.iteritems(CUSTOM_FIELD_NAME_FOR_TOOL)]) -READY_FOR_PREVIEW_ANY_TOOLS_REQUESTED = 'project = {JIRA_PROJECT} AND resolution = Unresolved AND issuetype = "{ISSUE_TYPE}" AND (status = "{STATUS_READY_FOR_PREVIEW}") AND ({ANY_TOOLS_TODO_PARTIAL_QUERY})'.format( - JIRA_PROJECT=JIRA_PROJECT, - ANY_TOOLS_TODO_PARTIAL_QUERY=ANY_TOOLS_TODO_PARTIAL_QUERY, - ISSUE_TYPE=PREVIEW_ISSUE_TYPE_ID, - STATUS_READY_FOR_PREVIEW=STATUS_READY_FOR_PREVIEW -) -READY_FOR_PUBLIC_ANY_TOOLS_REQUESTED = 'project = {JIRA_PROJECT} AND resolution = Unresolved AND issuetype = "{ISSUE_TYPE}" AND (status = "{STATUS_RELEASE_APPROVED}") AND ({ANY_TOOLS_TODO_PARTIAL_QUERY})'.format( - JIRA_PROJECT=JIRA_PROJECT, - ANY_TOOLS_TODO_PARTIAL_QUERY=ANY_TOOLS_TODO_PARTIAL_QUERY, - ISSUE_TYPE=PUBLIC_ISSUE_TYPE_ID, - STATUS_RELEASE_APPROVED=STATUS_RELEASE_APPROVED -) - -READY_FOR_PREVIEW_TICKETS_JQL_FOR_TOOL = { - JAVA_SDK_NAME: READY_FOR_PREVIEW_JAVA_SDK_TICKETS_JQL, - PYTHON_SDK_NAME: READY_FOR_PREVIEW_PYTHON_SDK_TICKETS_JQL, - CLI_NAME: READY_FOR_PREVIEW_CLI_TICKETS_JQL, - RUBY_SDK_NAME: READY_FOR_PREVIEW_RUBY_SDK_TICKETS_JQL, - GO_SDK_NAME: READY_FOR_PREVIEW_GO_SDK_TICKETS_JQL, - TYPESCRIPT_SDK_NAME: READY_FOR_PREVIEW_TYPESCRIPT_SDK_TICKETS_JQL, - DOTNET_SDK_NAME: READY_FOR_PREVIEW_DOTNET_SDK_TICKETS_JQL, - POWERSHELL_NAME: READY_FOR_PREVIEW_POWERSHELL_TICKETS_JQL, - TEST_DATA_GEN_NAME: READY_FOR_PREVIEW_TEST_DATA_TICKETS_JQL, - LEGACY_JAVA_SDK_NAME: READY_FOR_PREVIEW_LEGACY_JAVA_SDK_TICKETS_JQL -} - -COMMENT_TYPE_INFO = "INFO" -COMMENT_TYPE_ERROR = "ERROR" -COMMENT_TYPE_SUCCESS = "SUCCESS" - -COMMENT_TYPE_TO_COLOR = { - COMMENT_TYPE_INFO: '#707070', # gray - COMMENT_TYPE_ERROR: '#FF0000', # red - COMMENT_TYPE_SUCCESS: '#14892c', # green -} - -GENERATION_COMMIT_MESSAGE_PREFIX = 'Running generation for:' -POM_UPDATE_COMMIT_MESSAGE_PREFIX = 'Updating pom.xml for' -SPEC_BASELINE_COMMIT_MESSAGE_PREFIX = 'Spec baseline for' - -GENERATION_BRANCH_PREFIX = 'generated' -FAILED_BRANCH_PREFIX = "failed" -SPEC_BRANCH_PREFIX = "spec" # in DEXREQ repo - - -def get_debug_dexreq_branch_prefix(): - return (os.environ.get('DEBUG_DEXREQ_BRANCH_PREFIX') or "").lstrip('"').rstrip('"') - - -def init_branches(): - global DEBUG_DEXREQ_BRANCH_PREFIX - global BULK_PREVIEW_BRANCH_PREFIX - global BULK_PUBLIC_BRANCH_PREFIX - global INDIVIDUAL_PREVIEW_BRANCH_PREFIX - global INDIVIDUAL_PUBLIC_BRANCH_PREFIX - global V1_BULK_PREVIEW_BRANCH_PREFIX - 
global V1_INDIVIDUAL_PREVIEW_BRANCH_PREFIX - global V1_INDIVIDUAL_PUBLIC_BRANCH_PREFIX - global BRANCH_PREFIXES_SAFE_FOR_DELETION - - DEBUG_DEXREQ_BRANCH_PREFIX = get_debug_dexreq_branch_prefix() - - BULK_PREVIEW_BRANCH_PREFIX = DEBUG_DEXREQ_BRANCH_PREFIX + 'auto-v2-preview-bulk' - BULK_PUBLIC_BRANCH_PREFIX = DEBUG_DEXREQ_BRANCH_PREFIX + 'auto-v2-public-bulk' - INDIVIDUAL_PREVIEW_BRANCH_PREFIX = DEBUG_DEXREQ_BRANCH_PREFIX + 'auto-v2-preview' - INDIVIDUAL_PUBLIC_BRANCH_PREFIX = DEBUG_DEXREQ_BRANCH_PREFIX + 'auto-v2-public' - - V1_BULK_PREVIEW_BRANCH_PREFIX = DEBUG_DEXREQ_BRANCH_PREFIX + 'auto-preview-bulk' - V1_INDIVIDUAL_PREVIEW_BRANCH_PREFIX = DEBUG_DEXREQ_BRANCH_PREFIX + 'auto-preview' - V1_INDIVIDUAL_PUBLIC_BRANCH_PREFIX = DEBUG_DEXREQ_BRANCH_PREFIX + 'auto-public' - - # only delete branches that match patterns of branches created by auto-gen utils - BRANCH_PREFIXES_SAFE_FOR_DELETION = [ - BULK_PREVIEW_BRANCH_PREFIX, - '{}-{}'.format(GENERATION_BRANCH_PREFIX, BULK_PREVIEW_BRANCH_PREFIX), - '{}-{}'.format(FAILED_BRANCH_PREFIX, BULK_PREVIEW_BRANCH_PREFIX), - '{}-{}'.format(SPEC_BRANCH_PREFIX, BULK_PREVIEW_BRANCH_PREFIX), - BULK_PUBLIC_BRANCH_PREFIX, - '{}-{}'.format(GENERATION_BRANCH_PREFIX, BULK_PUBLIC_BRANCH_PREFIX), - '{}-{}'.format(FAILED_BRANCH_PREFIX, BULK_PUBLIC_BRANCH_PREFIX), - '{}-{}'.format(SPEC_BRANCH_PREFIX, BULK_PUBLIC_BRANCH_PREFIX), - INDIVIDUAL_PREVIEW_BRANCH_PREFIX, - '{}-{}'.format(GENERATION_BRANCH_PREFIX, INDIVIDUAL_PREVIEW_BRANCH_PREFIX), - '{}-{}'.format(FAILED_BRANCH_PREFIX, INDIVIDUAL_PREVIEW_BRANCH_PREFIX), - '{}-{}'.format(SPEC_BRANCH_PREFIX, INDIVIDUAL_PREVIEW_BRANCH_PREFIX), - INDIVIDUAL_PUBLIC_BRANCH_PREFIX, - '{}-{}'.format(GENERATION_BRANCH_PREFIX, INDIVIDUAL_PUBLIC_BRANCH_PREFIX), - '{}-{}'.format(FAILED_BRANCH_PREFIX, INDIVIDUAL_PUBLIC_BRANCH_PREFIX), - '{}-{}'.format(SPEC_BRANCH_PREFIX, INDIVIDUAL_PUBLIC_BRANCH_PREFIX), - - # v1 branches - V1_BULK_PREVIEW_BRANCH_PREFIX, - '{}-{}'.format(GENERATION_BRANCH_PREFIX, V1_BULK_PREVIEW_BRANCH_PREFIX), - '{}-{}'.format(FAILED_BRANCH_PREFIX, V1_BULK_PREVIEW_BRANCH_PREFIX), - V1_INDIVIDUAL_PREVIEW_BRANCH_PREFIX, - '{}-{}'.format(GENERATION_BRANCH_PREFIX, V1_INDIVIDUAL_PREVIEW_BRANCH_PREFIX), - '{}-{}'.format(FAILED_BRANCH_PREFIX, V1_INDIVIDUAL_PREVIEW_BRANCH_PREFIX), - V1_INDIVIDUAL_PUBLIC_BRANCH_PREFIX, - '{}-{}'.format(GENERATION_BRANCH_PREFIX, V1_INDIVIDUAL_PUBLIC_BRANCH_PREFIX), - '{}-{}'.format(FAILED_BRANCH_PREFIX, V1_INDIVIDUAL_PUBLIC_BRANCH_PREFIX), - - # merge_to_github and merge_from_github branches - "merge_to_github", - "merge_from_github", - ] - - -init_branches() - -# generates individual preview builds for all tickets with CLI Status == 'To Do' or 'None' -BUILD_TYPE_INDIVIDUAL_PREVIEW = 'individual_preview' - -# generates one preview build including the changes from all tickets with CLI Status == 'Pending Merge' -# tickets in the 'Pending Merge' state have already built successfully so they can be included in the bulk release -BUILD_TYPE_BULK_PENDING_MERGE_PREVIEW = 'bulk_pending_merge_preview' - -# generates individual public builds for all tickets with label 'PublicSDK' -BUILD_TYPE_INDIVIDUAL_PUBLIC = 'individual_public' - -# generates one public build including the changes from all tickets wtih CLI Status == 'Pending Merge' -# tickets in the 'Pending Merge' state have already built successfully so they can be included in the bulk release -BUILD_TYPE_BULK_PENDING_MERGE_PUBLIC = 'bulk_pending_merge_public' - -PREVIEW_BUILD_TYPES = [BUILD_TYPE_INDIVIDUAL_PREVIEW, 
BUILD_TYPE_BULK_PENDING_MERGE_PREVIEW]
-PUBLIC_BUILD_TYPES = [BUILD_TYPE_INDIVIDUAL_PUBLIC, BUILD_TYPE_BULK_PENDING_MERGE_PUBLIC]
-BULK_BUILD_TYPES = [BUILD_TYPE_BULK_PENDING_MERGE_PREVIEW, BUILD_TYPE_BULK_PENDING_MERGE_PUBLIC]
-
-VALID_BUILD_TYPES = [BUILD_TYPE_INDIVIDUAL_PREVIEW,
-                     BUILD_TYPE_BULK_PENDING_MERGE_PREVIEW,
-                     BUILD_TYPE_INDIVIDUAL_PUBLIC,
-                     BUILD_TYPE_BULK_PENDING_MERGE_PUBLIC]
-
-BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_CLI = """
-h4. *CLI*
-
-[https://artifactory.oci.oraclecorp.com/opc-public-sdk-dev-pypi-local/oci-cli-{full_version}.zip]
-
-This can also be installed by doing:
-{{code}}
-pip install --trusted-host=artifactory.oci.oraclecorp.com -i https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -U oci-cli=={full_version}
-{{code}}
-
-Customers who don't have Python or other prerequisites installed (or who haven't done the CLI setup before to set up a virtual environment etc.) may find it more convenient to use: [https://artifactory.oci.oraclecorp.com/opc-public-sdk-dev-pypi-local/oci-cli-full-install-{full_version}.zip] as that ZIP contains an install script which installs the CLI and any prerequisites.
-"""
-
-BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_PYTHON_SDK = """
-h4. *Python SDK*
-[https://artifactory.oci.oraclecorp.com/opc-public-sdk-dev-pypi-local/oci-python-sdk-{full_version}.zip]
-
-This can also be installed by doing:
-{{code}}
-pip install --trusted-host=artifactory.oci.oraclecorp.com -i https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -U oci=={full_version}
-{{code}}
-"""
-
-BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_GOSDK = """
-h4. *Go SDK*
-
-To install the Go SDK, you have to clone the oci-go-sdk Bitbucket repo into your Oracle oci-go-sdk path. Example:
-{{code}}
-git clone -b preview ssh://git@bitbucket.oci.oraclecorp.com:7999/sdk/oci-go-sdk.git $GOPATH/src/github.com/oracle/oci-go-sdk
-cd $GOPATH/src/github.com/oracle/oci-go-sdk
-git reset --hard {full_version}
-{{code}}
-If you would like to preserve your current installation of the oci-go-sdk, consider vendoring in the above repo at commit {full_version}.
-
-"""
-
-BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_JAVA_SDK = """
-h4. *Java SDK*
-
-[https://artifactory.oci.oraclecorp.com/webapp/#/artifacts/browse/tree/General/{artifactory_repo}/com/oracle/oci/sdk/oci-java-sdk-dist/{short_version}/oci-java-sdk-dist-{full_version}.zip]
-
-If you want to use this in Maven, use:
-{{code}}
-<dependency>
-  <groupId>com.oracle.oci.sdk</groupId>
-  <artifactId>oci-java-sdk-shaded-full</artifactId>
-  <version>{full_version}</version>
-</dependency>
-{{code}}
-"""
-
-BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_RUBY_SDK = """
-h4. *Ruby SDK*
-
-[https://artifactory.oci.oraclecorp.com/opc-public-sdk-dev-rubygems-local/gems/r2/oci-ruby-sdk-{full_version}.zip]
-[https://artifactory.oci.oraclecorp.com/opc-public-sdk-dev-rubygems-local/gems/oci-{full_version}.gem]
-"""
-
-BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_TYPESCRIPT_SDK = """
-h4. *Typescript SDK*
-
-[https://artifactory.oci.oraclecorp.com/opc-public-sdk-typescript-dev-npm-local/main/oci-sdk-{full_version}.tgz]
-
-This can also be installed by doing:
-
-{{code}}
-npm config set registry https://artifactory.oci.oraclecorp.com/api/npm/global-dev-npm
-npm install oci-sdk@{full_version}
-{{code}}
-
-"""
-
-BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_DOTNET_SDK = """
-h4. *DotNet SDK*
-
-Packages can be found from artifactory with version {full_version}:
-[https://artifactory.oci.oraclecorp.com/api/nuget/oci-dex-dev-nuget-local]
-
-Packages can be installed to a .NET project by doing:
-
-{{code}}
-dotnet nuget add source https://artifactory.oci.oraclecorp.com/api/nuget/oci-dex-dev-nuget-local --name Artifactory
-dotnet add package <package-name> -s https://artifactory.oci.oraclecorp.com/api/nuget/oci-dex-dev-nuget-local -v {full_version}
-{{code}}
-
-"""
-
-BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_POWERSHELL = """
-h4. *PowerShell*
-
-PowerShell modules can be found from artifactory with version {full_version}:
-[https://artifactory.oci.oraclecorp.com/api/nuget/oci-psmodules-dev-nuget-local]
-
-PowerShell modules can be installed by doing:
-
-{{code}}
-Register-PSRepository -Name Artifactory -SourceLocation https://artifactory.oci.oraclecorp.com/api/nuget/oci-psmodules-dev-nuget-local -PublishLocation https://artifactory.oci.oraclecorp.com/api/nuget/oci-psmodules-dev-nuget-local -InstallationPolicy Trusted
-Install-Module -Name <module-name> -Repository Artifactory -RequiredVersion "{full_version}"
-{{code}}
-
-"""
-
-BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_TESTDATA = """
-h4. *TestData*
-
-Test data has been generated for internal use only.
-Example files can be found from artifactory with version {full_version}:
-[https://artifactory.oci.oraclecorp.com/sdk-dev-generic-local/examples]
-
-"""
-
-BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_LEGACY_JAVA_SDK = """
-h4. *Legacy Java SDK*
-
-[https://artifactory.oci.oraclecorp.com/webapp/#/artifacts/browse/tree/General/{artifactory_repo}/com/oracle/oci/sdk/oci-java-sdk-dist/{short_version}/oci-java-sdk-dist-{full_version}.zip]
-
-If you want to use this in Maven, use:
-{{code}}
-<dependency>
-  <groupId>com.oracle.oci.sdk</groupId>
-  <artifactId>oci-java-sdk-shaded-full</artifactId>
-  <version>{full_version}</version>
-</dependency>
-{{code}}
-
-Note: By default, we are not building weekly release versions of the Legacy Java SDK anymore. If you have to use the Legacy Java SDK and cannot use the above {full_version} timed snapshot version, you may request a new release version in [#oci_public_sdks](https://oracle.enterprise.slack.com/archives/C7HKLGU3E) on Slack.
-""" - - -BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_FOR_TOOL = { - CLI_NAME: BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_CLI, - PYTHON_SDK_NAME: BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_PYTHON_SDK, - GO_SDK_NAME: BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_GOSDK, - JAVA_SDK_NAME: BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_JAVA_SDK, - RUBY_SDK_NAME: BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_RUBY_SDK, - TYPESCRIPT_SDK_NAME: BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_TYPESCRIPT_SDK, - DOTNET_SDK_NAME: BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_DOTNET_SDK, - POWERSHELL_NAME: BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_POWERSHELL, - TEST_DATA_GEN_NAME: BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_TESTDATA, - LEGACY_JAVA_SDK_NAME: BULK_GENERATION_COMPLETE_COMMENT_TEMPLATE_LEGACY_JAVA_SDK -} - -PYTHON_SDK_SUCCESS_GENERATION_RELATIVE_LOCATION = '../python_sdk_generation_success.txt' -CLI_SUCCESS_GENERATION_RELATIVE_LOCATION = '../cli_generation_success.txt' -JAVA_SDK_SUCCESS_GENERATION_RELATIVE_LOCATION = '../java_sdk_generation_success.txt' -RUBY_SDK_SUCCESS_GENERATION_RELATIVE_LOCATION = '../ruby_sdk_generation_success.txt' -GO_SDK_SUCCESS_GENERATION_RELATIVE_LOCATION = '../go_sdk_generation_success.txt' -TYPESCRIPT_SDK_SUCCESS_GENERATION_RELATIVE_LOCATION = '../typescript_sdk_generation_success.txt' -DOTNET_SDK_SUCCESS_GENERATION_RELATIVE_LOCATION = '../dotnet_sdk_generation_success.txt' -POWERSHELL_SUCCESS_GENERATION_RELATIVE_LOCATION = '../powershell_generation_success.txt' -TEST_DATA_SUCCESS_GENERATION_RELATIVE_LOCATION = '../test_data_generation_success.txt' -LEGACY_JAVA_SDK_SUCCESS_GENERATION_RELATIVE_LOCATION = '../legacy_java_sdk_generation_success.txt' - -PYTHON_SDK_SUCCESS_BUILD_RELATIVE_LOCATION = '../python_sdk_build_success.txt' -CLI_SUCCESS_BUILD_RELATIVE_LOCATION = '../cli_build_success.txt' -JAVA_SDK_SUCCESS_BUILD_RELATIVE_LOCATION = '../java_sdk_build_success.txt' -RUBY_SDK_SUCCESS_BUILD_RELATIVE_LOCATION = '../ruby_sdk_build_success.txt' -GO_SDK_SUCCESS_BUILD_RELATIVE_LOCATION = '../go_sdk_build_success.txt' -TYPESCRIPT_SDK_SUCCESS_BUILD_RELATIVE_LOCATION = '../typescript_sdk_build_success.txt' -DOTNET_SDK_SUCCESS_BUILD_RELATIVE_LOCATION = '../dotnet_sdk_build_success.txt' -POWERSHELL_SUCCESS_BUILD_RELATIVE_LOCATION = '../powershell_build_success.txt' -TEST_DATA_SUCCESS_BUILD_RELATIVE_LOCATION = '../test_data_build_success.txt' -LEGACY_JAVA_SDK_SUCCESS_BUILD_RELATIVE_LOCATION = '../legacy_java_sdk_build_success.txt' - -SUCCESS_GENERATION_FILES_FOR_TOOL = { - JAVA_SDK_NAME: [JAVA_SDK_SUCCESS_GENERATION_RELATIVE_LOCATION], - PYTHON_SDK_NAME: [PYTHON_SDK_SUCCESS_GENERATION_RELATIVE_LOCATION], - CLI_NAME: [PYTHON_SDK_SUCCESS_GENERATION_RELATIVE_LOCATION, CLI_SUCCESS_GENERATION_RELATIVE_LOCATION], - RUBY_SDK_NAME: [RUBY_SDK_SUCCESS_GENERATION_RELATIVE_LOCATION], - GO_SDK_NAME: [GO_SDK_SUCCESS_GENERATION_RELATIVE_LOCATION], - TYPESCRIPT_SDK_NAME: [TYPESCRIPT_SDK_SUCCESS_GENERATION_RELATIVE_LOCATION], - DOTNET_SDK_NAME: [DOTNET_SDK_SUCCESS_GENERATION_RELATIVE_LOCATION], - POWERSHELL_NAME: [DOTNET_SDK_SUCCESS_GENERATION_RELATIVE_LOCATION, POWERSHELL_SUCCESS_GENERATION_RELATIVE_LOCATION], - TEST_DATA_GEN_NAME: [TEST_DATA_SUCCESS_GENERATION_RELATIVE_LOCATION], - LEGACY_JAVA_SDK_NAME: [LEGACY_JAVA_SDK_SUCCESS_GENERATION_RELATIVE_LOCATION] -} - -SUCCESS_BUILD_FILES_FOR_TOOL = { - JAVA_SDK_NAME: [JAVA_SDK_SUCCESS_BUILD_RELATIVE_LOCATION], - PYTHON_SDK_NAME: [PYTHON_SDK_SUCCESS_BUILD_RELATIVE_LOCATION], - CLI_NAME: [PYTHON_SDK_SUCCESS_BUILD_RELATIVE_LOCATION, CLI_SUCCESS_BUILD_RELATIVE_LOCATION], - 
RUBY_SDK_NAME: [RUBY_SDK_SUCCESS_BUILD_RELATIVE_LOCATION], - GO_SDK_NAME: [GO_SDK_SUCCESS_BUILD_RELATIVE_LOCATION], - TYPESCRIPT_SDK_NAME: [TYPESCRIPT_SDK_SUCCESS_BUILD_RELATIVE_LOCATION], - DOTNET_SDK_NAME: [DOTNET_SDK_SUCCESS_BUILD_RELATIVE_LOCATION], - POWERSHELL_NAME: [DOTNET_SDK_SUCCESS_BUILD_RELATIVE_LOCATION, POWERSHELL_SUCCESS_BUILD_RELATIVE_LOCATION], - TEST_DATA_GEN_NAME: [TEST_DATA_SUCCESS_BUILD_RELATIVE_LOCATION], - LEGACY_JAVA_SDK_NAME: [LEGACY_JAVA_SDK_SUCCESS_BUILD_RELATIVE_LOCATION] -} - -# Shared messages - -GENERATION_AND_BUILD_SUCCESSFUL_TEMPLATE = 'Generation and build for tool: {tool_name} (repos: {repos}) were successful. Proceeding to the next step.' - -# Note: if you change this template, add the old value to BUILD_PASS_JIRA_MESSAGE_TEMPLATE_VARIATIONS in autogen_issue_advisor_shared.py -BUILD_PASS_JIRA_MESSAGE_TEMPLATE = """Completed generating tool: {tool_name} (repos: {repos}) {build_description}. - -{links} - -Artifacts can be found {build_artifacts_link} (for the next 10 days). - -For build log and artifact access see the [self-service FAQ|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=132774928#SDK/CLISelfServiceFrequentlyAskedQuestions-HowdoIgetaccesstologsandartifacts?].""" - - -# Note: if you change this template, add the old value to BUILD_FAIL_JIRA_MESSAGE_TEMPLATE_VARIATIONS in autogen_issue_advisor_shared.py -BUILD_FAIL_JIRA_MESSAGE_TEMPLATE = """Building the change failed for tool: {tool_name} (repos: {repos}) {build_description}. - -{links} - -If necessary, you can use {branch_text} as a starting point to fix the build (e.g. if you made a breaking change in preview and you now have to change tests or samples). - -Information about the failure can be found in the {build_log_link}.""" - -# Note: if you change this template, add the old value to STEP_FAILED_MESSAGE_TEMPLATE_VARIATIONS in autogen_issue_advisor_shared.py -STEP_FAILED_MESSAGE_TEMPLATE = """{failure_step} for tool: {tool_name} (repos: {repos}) failed. - -Information about the failure can be found in the {build_log_link}. - -If it is unclear how to resolve the issue, you can set the status to '{dex_support_required_status}' to request help from the SDK / CLI team.{additional_comment}""" - -UNPROTECTED_CHANGES_MESSAGE_TEMPLATE = """Note: We detected changes in the spec that were not protected by fresh conditional groups (i.e. groups not already enabled in the {pipeline} SDK). This may cause problems for Terraform and is not recommended. - -To see the changes that were not behind fresh, not-yet-enabled conditional groups, click [here|{link}] and then select the "Diff" tab.""" - -BYPASSED_UNPROTECTED_CHANGES_MESSAGE_TEMPLATE = """Note: We detected changes in the spec that were not protected by fresh conditional groups (i.e. groups not already enabled in the {pipeline} SDK). While this may cause problems for Terraform and is not recommended, these changes were accepted using a bypass label. - -To see the changes that were not behind conditional groups, click here: {link}""" - -SELF_SERVICE_BUG_TEMPLATE = """The job failed while attempting to update pom.xml. {exception}. - -The full build log can be found {build_log_link}. - -For build log and artifact access see the [self-service FAQ|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=132774928#SDK/CLISelfServiceFrequentlyAskedQuestions-HowdoIgetaccesstologsandartifacts?]. 
- -This appears to be a problem with the self-service pipeline; therefore the issue status has been set to 'DEX Support Required', and the SDK / CLI team oncall will take a look.""" - -# -# For new bypass labels: -# Add description to https://confluence.oci.oraclecorp.com/display/DEX/SDK+Runbook+-+Exceptions+in+Self-Service+Pipeline -# - -BYPASS_CHECK_UDX_TICKET_LABEL = "BypassCheck-UDX-Ticket" -BYPASS_CHECK_SPEC_EXTENSION_LABEL = "BypassCheck-Specfile-Extension" -BYPASS_CHECK_FEATURE_ID_LABEL = "BypassCheck-Feature-ID" -BYPASS_CHECK_SERVICE_FRIENDLY_NAME_LABEL = "BypassCheck-Service-Friendly-Name" -BYPASS_CHECK_INTERNAL_TICKET_LABEL = "BypassCheck-Internal-Ticket" -# BypassCheck-PR-master-PythonCLI -# BypassCheck-PR-master-JavaSDK -# BypassCheck-PR-master-PythonSDK -# BypassCheck-PR-master-GoSDK -# BypassCheck-PR-master-TypescriptSDK -# BypassCheck-PR-master-DotNetSDK -# BypassCheck-PR-master-PowerShell -# BypassCheck-PR-master-TestData -# BypassCheck-PR-master-LegacyJavaSDK -BYPASS_CHECK_PR_MASTER_PREFIX = "BypassCheck-PR-master-" -# BypassCheck-PR-build-master-PythonCLI -# BypassCheck-PR-build-master-JavaSDK -# BypassCheck-PR-build-master-PythonSDK -# BypassCheck-PR-build-master-GoSDK -# BypassCheck-PR-build-master-TypescriptSDK -# BypassCheck-PR-build-master-DotNetSDK -# BypassCheck-PR-build-master-PowerShell -# BypassCheck-PR-build-master-TestData -# BypassCheck-PR-build-master-LegacyJavaSDK -BYPASS_CHECK_PR_BUILD_MASTER_PREFIX = "BypassCheck-PR-build-master-" - -BYPASS_CLI_DESIGN_REVIEW_CHECK = "BypassCLI-Design-Review" - -# For master, we need Java, Python, Go and CLI PRs and successful builds -BYPASS_CHECK_PR_MASTER_LABELS = ["{}{}".format(BYPASS_CHECK_PR_MASTER_PREFIX, tool) for tool in TOOL_NAMES] -BYPASS_CHECK_PR_BUILD_MASTER_LABELS = ["{}{}".format(BYPASS_CHECK_PR_BUILD_MASTER_PREFIX, tool) for tool in TOOL_NAMES] - -BYPASS_CHECK_PREVENT_MANUAL_STATUS_CHANGES = "BypassCheck-PreventManualStatusChanges" - -BYPASS_CHECK_API_PUBLICLY_AVAILABLE = "BypassCheck-APIPubliclyAvailable" -BYPASS_CHECK_CHANGES_NOT_BEHIND_CONDITIONAL_GROUPS = "BypassCheck-ChangesNotBehindConditionalGroups" - -# BypassCheck-Generation-RubySDK <-- Primary use-case to ignore Ruby with exception -BYPASS_CHECK_GENERATION_PREFIX = "BypassCheck-Generation-" -BYPASS_CHECK_GENERATION_LABELS = ["{}{}".format(BYPASS_CHECK_GENERATION_PREFIX, tool) for tool in TOOLS_FOR_WHICH_GENERATION_MAY_FAIL] - -BYPASS_LABELS = [ - BYPASS_CHECK_UDX_TICKET_LABEL, - BYPASS_CHECK_SPEC_EXTENSION_LABEL, - BYPASS_CHECK_FEATURE_ID_LABEL, - BYPASS_CHECK_SERVICE_FRIENDLY_NAME_LABEL, - BYPASS_CHECK_PREVENT_MANUAL_STATUS_CHANGES, - BYPASS_CHECK_API_PUBLICLY_AVAILABLE, - BYPASS_CLI_DESIGN_REVIEW_CHECK, - BYPASS_CHECK_CHANGES_NOT_BEHIND_CONDITIONAL_GROUPS, - BYPASS_CHECK_INTERNAL_TICKET_LABEL -] + BYPASS_CHECK_PR_MASTER_LABELS \ - + BYPASS_CHECK_PR_BUILD_MASTER_LABELS \ - + BYPASS_CHECK_GENERATION_LABELS - -IS_ACTION_ITEM_OF_LINK = 'is action item of' - -UDX_TICKET_LINK_RELATIONSHIP = "Required" -UDX_TICKET_DISALLOWED_STATES = ['Done', 'Closed'] - -CLI_DESIGN_REVIEW_TERMINAL_STATES = [STATUS_DONE, STATUS_WITHDRAWN, STATUS_CLOSED] - -BITBUCKET_PR_URL_PREFIX = 'https://bitbucket.oci.oraclecorp.com/projects' - - -DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME = "DEXREQ_IGNORED_ISSUES" - - -def init_dexreq_ignored_issues(): - global DEXREQ_IGNORED_ISSUES - DEXREQ_IGNORED_ISSUES_ENV_VAR = (os.environ.get(DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME) or "").lstrip('"').rstrip('"') - DEXREQ_IGNORED_ISSUES = [e.strip() for e in DEXREQ_IGNORED_ISSUES_ENV_VAR.split(",")] if 
DEXREQ_IGNORED_ISSUES_ENV_VAR else [] - - -init_dexreq_ignored_issues() - - -def should_ignore_issue(issue_key): - return issue_key in DEXREQ_IGNORED_ISSUES - - -# TODO: replace this with a group lookup -APPROVED_DEX_TEAM_MEMBERS = [ - # SDK Team - 'Joe Levy', - 'Jyoti Saini', - 'Mathias Ricken', - 'Mingchi Stephen Mak', - 'Omkar Patil', - 'Peng Liu', - 'Yan Sun', - 'Vyas Bhagwat', - 'Walt Tran', - 'Ziyao Qiao', - 'Yash Chandra', - 'Vidhi Bhansali', - 'Anurag Garg', - 'Josh Hunter', - 'Swetha Krishnan', - 'Rakesh Kumar Parappa', - 'Swarnava Sarkar', - 'Ram Kishan Vooka', - 'Nivedita Parihar', - 'Kalpana Ramasamy', - 'Joshua Ramirez', - 'Eric Pendergrass', - - # CLI team - 'Hamada Ibrahim', - 'Kern Lee', - 'Manoj Meda', - 'Mike Ross', - 'Srikanth Reddy Kumbham', - 'Viral Modi', - 'Vishwas Bhat', - 'Harsh Kumar', - 'Arun Swarnam', - 'Varun Mankal', - 'Zhongwan Wang', - 'Alex Le', - 'Mandy Tsai' -] diff --git a/scripts/auto_gen_utils/consolidate_feature_id_enabled_groups_files.py b/scripts/auto_gen_utils/consolidate_feature_id_enabled_groups_files.py deleted file mode 100644 index ec6482bdaf..0000000000 --- a/scripts/auto_gen_utils/consolidate_feature_id_enabled_groups_files.py +++ /dev/null @@ -1,114 +0,0 @@ -################################################################################ -# Script to clean up featureId and enabledGroups files by consolidating them -# -# Consolidates all feature IDs referenced in files under codegenConfig/featureIds -# into a single file: codegenConfig/featureIds/{CONSOLIDATED_FEATURE_IDS_FILENAME}.yaml -# -# Consolidates all enabled groups referenced in files under codegenConfig/enabledGroups -# into a single file: codegenConfig/enabledGroups/{CONSOLIDATED_ENABLED_GROUPS_FILENAME}.txt -# -# Example invocation: -# python consolidate_feature_id_enabled_groups_files.py --codegen-config-dir ~/dev/SDK/python-cli/codegenConfig/ -# -################################################################################ - -import argparse -import os -import sys -import yaml -import util - -ENABLED_GROUPS_SUBDIRECTORY = "enabledGroups" -FEATURE_IDS_SUBDIRECTORY = "featureIds" - -CONSOLIDATED_ENABLED_GROUPS_FILENAME = "enabledGroups-combined" -CONSOLIDATED_FEATURE_IDS_FILENAME = "featureIds-combined" - -COMMENT_CHARACTER = "#" -FEATURE_ID_FILE_LIST_HEADER = "whitelisted" - -FILES_TO_SKIP = [ - '.empty', - CONSOLIDATED_ENABLED_GROUPS_FILENAME, - CONSOLIDATED_FEATURE_IDS_FILENAME -] - - -def consolidate_directory(dir, consolidated_filename, get_all_ids_from_file_func, add_ids_to_file_func): - ids = [] - for filename in os.listdir(dir): - filename_without_extension = filename - try: - filename_without_extension = os.path.splitext(os.path.basename(filename))[0] - except Exception: - pass - - if filename in FILES_TO_SKIP or filename_without_extension in FILES_TO_SKIP: - continue - - file_path = os.path.join(dir, filename) - ids.extend(get_all_ids_from_file_func(file_path)) - - os.remove(file_path) - - consolidated_file_path = os.path.join(dir, consolidated_filename) - add_ids_to_file_func(dir, ids, consolidated_filename) - - print('Created file: {}. 
With IDs: {}'.format(consolidated_file_path, ', '.join(ids))) - - -def get_all_ids_from_feature_id_file(file_path): - feature_ids = [] - with open(file_path, 'r') as f: - doc = yaml.safe_load(f) - if FEATURE_ID_FILE_LIST_HEADER in doc: - feature_ids.extend(doc[FEATURE_ID_FILE_LIST_HEADER]) - - return feature_ids - - -def get_all_ids_from_enabled_groups_file(file_path): - enabled_groups = [] - - with open(file_path, 'r') as f: - lines = f.readlines() - - for line in lines: - # skip blank lines as well as comments - if line.strip() and not line.strip().startswith(COMMENT_CHARACTER): - enabled_groups.append(line.strip()) - - return enabled_groups - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Consolidate codegenConfig directory into a single file for feature Ids and a single file for enabled groups.') - parser.add_argument('--codegen-config-dir', - required=True, - help='The codegenConfig directory to consolidate. Two directories "enabledGroups" and "featureIds" are expected as subdirectories') - - args = parser.parse_args() - - codegen_config_dir = args.codegen_config_dir - codegen_config_dir = os.path.expandvars(os.path.expanduser(codegen_config_dir)) - if not os.path.exists(codegen_config_dir): - sys.exit('Path did not exist: {}'.format(codegen_config_dir)) - - if not os.path.isdir(codegen_config_dir): - sys.exit('Path {} is not a directory'.format(codegen_config_dir)) - - enabled_groups_directory = os.path.join(codegen_config_dir, ENABLED_GROUPS_SUBDIRECTORY) - feature_ids_directory = os.path.join(codegen_config_dir, FEATURE_IDS_SUBDIRECTORY) - - consolidate_directory( - enabled_groups_directory, - CONSOLIDATED_ENABLED_GROUPS_FILENAME, - get_all_ids_from_enabled_groups_file, - util.update_pre_processor_file - ) - - consolidate_directory( - feature_ids_directory, - CONSOLIDATED_FEATURE_IDS_FILENAME, - get_all_ids_from_feature_id_file, - util.update_feature_id_file - ) diff --git a/scripts/auto_gen_utils/create_cli_design_review_ticket.py b/scripts/auto_gen_utils/create_cli_design_review_ticket.py deleted file mode 100644 index 9fa7c605a8..0000000000 --- a/scripts/auto_gen_utils/create_cli_design_review_ticket.py +++ /dev/null @@ -1,571 +0,0 @@ -import util -import config -import argparse -import sys -import re - -from jira import JIRAError -from shared.buildsvc_tc_compatibility import build_log_link - -MAX_SUB_COMMAND_LEN = 25 -MAX_PARAM_LEN = 25 -IS_ACTION_ITEM_OF_LINK = 'is action item of' -DEPENDS_ON_LINK = 'depends on' -REQUIRED_PARAM_TO_OPTIONAL_WARNING_TEMPLATE = 'Required param {} has been made optional.' -OPTIONAL_PARAM_TO_REQUIRED_WARNING_TEMPLATE = 'Optional param {} has been made required.' -PARAM_TOO_LONG_WARNING_TEMPLATE = 'Param {} has more than 25 characters. Please consider shortening it if possible.' -SUB_COMMAND_TOO_LONG_WARNING_TEMPLATE = 'Sub-Command {} has more than 25 characters. Please consider shortening it if possible.' -REQUIRED_PARAM_DELETED_OR_RENAME_WARNING = 'Required param {} is deleted/renamed.' -OPTIONAL_PARAM_DELETED_OR_RENAME_WARNING = 'Optional param {} is deleted/renamed.' -DESIGN_REVIEW_ISSUE_SUCCESS_MESSAGE_TEMPLATE = 'The following CLI Design Review ticket has been opened and needs your attention -- https://jira.oci.oraclecorp.com/browse/{}' -CLI_DESIGN_FAILURE_TEMPLATE = """The job failed to create/update CLI Design review ticket. {exception}. - -The full build log can be found {build_log_link}. 
- -For build log and artifact access see the [self-service FAQ|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=132774928#SDK/CLISelfServiceFrequentlyAskedQuestions-HowdoIgetaccesstologsandartifacts?]. -""" - -MODIFIED_COMMANDS_SUMMARY_TEMPLATE = """ - -{{code:bash|title=Modified Existing Commands|borderStyle=solid}}{modified_commands}{{code}} -""" - -REMOVED_COMMANDS_SUMMARY_TEMPLATE = """ - -{{code:bash|title=Removed Commands|borderStyle=solid}}{removed_commands}{{code}} -""" - -ADDED_COMMANDS_SUMMARY_TEMPLATE = """ - -{{code:bash|title=Newly Added Commands|borderStyle=solid}}{added_commands}{{code}} -""" - -GENERATED_PR_TEMPLATE = """ -Generated code changes: {pr_link} -""" - -CONFLICTING_PARAMETER_TEMPLATE = """ -*Parameter conflict detected:* Your generated commands conflict with internal CLI parameters. Please make manual changes to the parameters that end with *-parameterconflict*. - -""" - -CLI_DESIGN_SUMMARY_TEMPLATE = """ -Please review the generated commands posted in the comments. Work with product management to suggest or accept changes to the generated command structure. After reviewing the generated commands: -* Before you review this ticket, make sure the preview DEXREQ ticket is marked Done. The weekly bulk-preview job that runs on Wednesdays marks preview DEXREQ tickets Done; don't set the preview DEXREQ ticket to Done manually. -* If the generated command changes are good to go, please leave a comment saying *No changes required* on this ticket and assign the ticket to the SDK/CLI on-call engineer. -https://ocean.ocs.oraclecloud.com/org/8723F92A522EB107E0530424000A6404/team/AB9CC5B3A525512FE0530202FD0ACA13/calendars -* If you want to modify the generated commands, follow these steps: - - Follow https://confluence.oci.oraclecorp.com/display/DEX/CLI+Self+Service+Guide and leave a comment using the appropriate template - - Our CLI bot reads "[~gear-dexreq-automation] Manual Changes Requested" in a comment, processes the comment, and creates a branch automatically - - Only the last comment will be used to create a branch - - Follow the instructions commented on the ticket after the branch is created - -h4. Command Review Checklist: -# Are commands too long/verbose? Can commands be shortened and yet convey the correct meaning? -# Do any commands need to be renamed/hidden due to polymorphic input types? -# Do you need to rename a root command (load-balancer -> lb)? -# Are any parameters better specified as a file instead of text input? -# Are any parameters complex and can be expanded into parameters for individual elements? -# Do you need to restructure generated commands to match the common structure (oci + service + resource + operation)? - -More info on the CLI design review process: https://confluence.oci.oraclecorp.com/pages/viewpage.action?spaceKey=DEX&title=CLI+Command+Structure+Design+Review -SDK/CLI Support engineer information can be found here: https://ocean.ocs.oraclecloud.com/org/8723F92A522EB107E0530424000A6404/team/AB9CC5B3A525512FE0530202FD0ACA13/calendars or reach out to the *#oci_public_cli* Slack group. 
- -Making CLI Manual changes using our CLI Self Service BOT: https://confluence.oci.oraclecorp.com/display/DEX/CLI+Self+Service+Guide -Making CLI Manual changes: https://confluence.oci.oraclecorp.com/display/DEX/CLI+Manual+Code+Changes -Common manual change recipes: https://confluence.oci.oraclecorp.com/display/DEX/CLI+recipes+for+overriding+generated+code -Recommended CLI installation for Mac: https://confluence.oci.oraclecorp.com/display/DEX/Installing+OCI-CLI+using+Python3 - -Once the manual change process is complete, please ensure end-to-end testing of all generated or modified CLI commands. -After your changes are merged, install the generated [preview build|https://confluence.oci.oraclecorp.com/pages/viewpage.action?spaceKey=DEX&title=CLI+FAQs#CLIFAQs-WherecanIfindthelatestversionofthepreviewCLI?]. -For setting up the development environment, [refer|https://confluence.oci.oraclecorp.com/display/DEX/OCI+CLI+Development+Setup#OCICLIDevelopmentSetup-1.Settingupthedevelopmentenvironment]. -For running tests in your tenancy, please follow the [steps mentioned|https://confluence.oci.oraclecorp.com/display/DEX/OCI+CLI+Development+Setup#OCICLIDevelopmentSetup-Runningtestsagainstanothertenancy(RecommendedforServiceTeam)] -Test all the commands and params that are added, modified, or deleted after the design review process. - -*NEW FEATURE FOR AUTOMATING CLI MANUAL CHANGE REQUESTS* -*======================================================* -Service teams can now request CLI manual changes by updating their Spec. Check this link for more details: https://confluence.oci.oraclecorp.com/display/DEX/CLI+Spec+Customisation%3A+User+Guide. The bot will auto-detect and suggest if your changes are applicable for this automation. -""" - -CLI_NO_CHANGES_TO_THE_COMMAND_STRUCTURE = """ -{issue} is not introducing new commands or modifying existing ones. This can only happen in the following cases: - -* Updating documentation for the existing operations. -* Changes to request models which are complex type inputs for a CLI Command. -* Changes to response models. - -If you were expecting your DEXREQ ticket to generate new commands or modify existing ones, please reach out to the *#oci_public_cli* Slack group. -""" - -TRY_CLI_SPEC_CUSTOMIZATION_SUGGESTION_TEMPLATE = """ - -*NEW FEATURE FOR AUTOMATING CLI MANUAL CHANGE REQUESTS* -*======================================================* -The generated APIs below have been detected as applicable for CLI change requests through spec customisation. Service teams can now request CLI changes by updating their Spec and re-processing the DEXREQ ticket. Check this link for more details: https://confluence.oci.oraclecorp.com/display/DEX/CLI+Spec+Customisation%3A+User+Guide. - -""" - -NEW_DETECTED_API_SUMMARY_TEMPLATE = """ - -{{code:bash|borderStyle=solid}}{added_commands}{{code}} -""" - - -# This method parses the command_summary full_json_input.txt file to construct a map with the full -# command string as key and, as value, another map where the complex param is the key and its structure is the value. 
-# i.e. content_map[oci iam tag-namespace create][freeform-tags] = {"tagKey1": "tagValue1","tagKey2": "tagValue2"} -def generate_cli_dictionary_for_json_input(file_contents): - content_map = {} - for match in re.finditer(r'oci([^\n]+)\s*([\w-]+)\s*:\s*(.*?)(?=\s*oci|$)', file_contents, re.S): - if match.group(1) in content_map: - content_map[match.group(1)][match.group(2)] = match.group(3) - else: - content_map[match.group(1)] = {match.group(2): match.group(3)} - - return content_map - - -# This method parses a command_summary file to construct a map -# with the full command string as key and a CliCommandSummary object as value. -def generate_cli_dictionary(file_contents): - contents = file_contents.split("\n") - content_map = {} - i = 0 - contents.remove("") - while i < len(contents): - if i % 4 == 0 and i + 3 < len(contents) and contents[i].startswith('oci'): - content_map[contents[i]] = CliCommandSummary( - contents[i], - contents[i + 1], - contents[i + 2], - contents[i + 3]) - i += 4 - return content_map - - -# Detects if a required param has been made optional or vice versa, and removes the -# params that are common to both commands -def detect_change_in_required_or_optional_params(params_one, params_two, warning_str, modified_params): - warnings = [] - for param in params_one[:]: - if param in params_two: - warnings.append(warning_str.format(param)) - params_one.remove(param) - params_two.remove(param) - modified_params.append(param) - - return params_one, params_two, warnings - - -def subtract_lists(list1, list2): - return [element for element in list1 if element not in list2] - - -def get_modified_command(modified_cmd, original_cmd): - modified_params = [] - # Get required and optional params - m_required_params = modified_cmd.get_required_params() - m_optional_params = modified_cmd.get_optional_params() - o_required_params = original_cmd.get_required_params() - o_optional_params = original_cmd.get_optional_params() - - # Filter unchanged params - m_unique_required_params = subtract_lists(m_required_params, o_required_params) - o_unique_required_params = subtract_lists(o_required_params, m_required_params) - m_unique_optional_params = subtract_lists(m_optional_params, o_optional_params) - o_unique_optional_params = subtract_lists(o_optional_params, m_optional_params) - - # Detect if any of the required params are dropped to optional params - o_unique_required_params, m_unique_optional_params, required_change_warnings = detect_change_in_required_or_optional_params( - o_unique_required_params, m_unique_optional_params, REQUIRED_PARAM_TO_OPTIONAL_WARNING_TEMPLATE, modified_params) - - # Detect if any of optional params are moved to required params - m_unique_required_params, o_unique_optional_params, optional_change_warnings = detect_change_in_required_or_optional_params( - m_unique_required_params, o_unique_optional_params, OPTIONAL_PARAM_TO_REQUIRED_WARNING_TEMPLATE, modified_params) - - command_summary_warnings = required_change_warnings + optional_change_warnings - # Add newly added required params - modified_params.extend(m_unique_required_params) - # Add newly added optional params - modified_params.extend(m_unique_optional_params) - - # Renamed/Deleted required params. - for param in o_unique_required_params: - command_summary_warnings.append(REQUIRED_PARAM_DELETED_OR_RENAME_WARNING.format(param)) - # Renamed/Deleted optional params. 
- for param in o_unique_optional_params: - command_summary_warnings.append(OPTIONAL_PARAM_DELETED_OR_RENAME_WARNING.format(param)) - modified_cli_command_summary = ModifiedCliCommandSummary(modified_cmd) - modified_cli_command_summary.modified_params = modified_params - modified_cli_command_summary.warnings = command_summary_warnings - - return modified_cli_command_summary - - -# Compares the local command summary file with the remote file to find -# differences in the command structure and to check if any new commands have -# been added to the local command summary file. -def get_generated_commands(diff): - modified_cmds = [] - added_cmds = [] - complex_modified_cmds = [] - # filter non command-summary changes - - if (diff.a_path and 'docs/command-summary' in diff.a_path and 'full_json_input.txt' not in diff.a_path) and \ - (diff.b_path and 'docs/command-summary' in diff.b_path and 'full_json_input.txt' not in diff.b_path): - original_file = diff.a_blob.data_stream.read().decode('utf-8') - original_dictionary = generate_cli_dictionary(original_file) - modified_file = diff.b_blob.data_stream.read().decode('utf-8') - modified_dictionary = generate_cli_dictionary(modified_file) - - for key in modified_dictionary: - if key in original_dictionary: - if modified_dictionary[key] != original_dictionary[key]: - # change in command structure detected - modified_cmds.append(get_modified_command(modified_dictionary[key], original_dictionary[key])) - else: - added_cmds.append(modified_dictionary[key]) - if (diff.a_path and 'docs/command-summary' in diff.a_path and 'full_json_input.txt' in diff.a_path) and ( - diff.b_path and 'docs/command-summary' in diff.b_path and 'full_json_input.txt' in diff.b_path): - original_file = diff.a_blob.data_stream.read().decode('utf-8') - original_dictionary = generate_cli_dictionary_for_json_input(original_file) - modified_file = diff.b_blob.data_stream.read().decode('utf-8') - modified_dictionary = generate_cli_dictionary_for_json_input(modified_file) - for command in modified_dictionary: - if command in original_dictionary: - for complex_param in modified_dictionary[command]: - if complex_param in original_dictionary[command]: - if modified_dictionary[command][complex_param] != original_dictionary[command][complex_param]: - complex_modified_cmds.append((command, complex_param)) - return modified_cmds, added_cmds, complex_modified_cmds - - -# When a new path is defined in the oci-cli, this returns all the commands defined in the new path. 
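-# For example (hypothetical service name): if the diff adds a new docs/command-summary/usage-api.txt -# file, every 'oci usage-api ...' command summarized in that file is reported as newly added.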
-def get_newly_added_commands(diff): - original_dictionary = {} - # filter non command-summary changes - if diff.b_path and 'docs/command-summary' in diff.b_path and 'full_json_input.txt' not in diff.b_path: - added_file = diff.b_blob.data_stream.read().decode('utf-8') - original_dictionary = generate_cli_dictionary(added_file) - return original_dictionary - - -def get_removed_commands(diff): - removed_cmds = [] - # filter non command-summary changes - if (diff.a_path and 'docs/command-summary' in diff.a_path and 'full_json_input.txt' not in diff.a_path) and \ - (diff.b_path and 'docs/command-summary' in diff.b_path and 'full_json_input.txt' not in diff.b_path): - original_file = diff.a_blob.data_stream.read().decode('utf-8') - original_dictionary = generate_cli_dictionary(original_file) - modified_file = diff.b_blob.data_stream.read().decode('utf-8') - modified_dictionary = generate_cli_dictionary(modified_file) - for key in original_dictionary: - if key not in modified_dictionary: - removed_cmds.append(original_dictionary[key]) - return removed_cmds - - -def is_issue_summary_matches_cli_design(issue_summary): - return issue_summary and 'CLI Design Review'.lower() in issue_summary.lower() - - -def create_design_ticket(dexreq_issue): - print('Creating new CLI Design Review Issue') - sprint_id = util.find_dex_tools_active_sprint_id() - udx_issue_field = getattr(dexreq_issue.fields, config.CUSTOM_FIELD_ID_UDX_TICKET) - udx_issue_keys = util.get_udx_issue_keys(udx_issue_field) - related_issues = ', '.join([dexreq_issue.key] + udx_issue_keys) if udx_issue_keys else dexreq_issue.key - fields = { - 'project': 'DEX', - 'summary': 'CLI Design Review for {}'.format(related_issues), - 'description': CLI_DESIGN_SUMMARY_TEMPLATE, - 'issuetype': {'name': 'Design Reviews'}, - 'components': [{'name': 'CLI'}], - 'labels': ['CLIDesignReview'], - 'assignee': {'name': dexreq_issue.fields.creator.name}, - config.CUSTOM_FIELD_ID_SPRINT: sprint_id - } - - design_ticket = util.JIRA_CLIENT().create_issue(fields) - - util.JIRA_CLIENT().create_issue_link(IS_ACTION_ITEM_OF_LINK, design_ticket, dexreq_issue) - if udx_issue_keys: - for u in udx_issue_keys: - udx_issue = util.JIRA_CLIENT().issue(u) - util.JIRA_CLIENT().create_issue_link(DEPENDS_ON_LINK, udx_issue, design_ticket) - - print('Issue {} has been created'.format(design_ticket.key)) - return design_ticket - - -# Returns the CliDesignReview ticket associated with a DexReq ticket. -# Creates a new CliDesignReview ticket if the DexReq ticket doesn't have a review ticket associated with it. -def get_cli_design_ticket(dexreq_issue, create_issue_if_absent=True): - if hasattr(dexreq_issue.fields, 'issuelinks'): - for link in dexreq_issue.fields.issuelinks: - if hasattr(link, 'outwardIssue'): - issue = util.JIRA_CLIENT().issue(link.outwardIssue.key, - fields='description, summary, status, issuetype, labels') - if is_issue_summary_matches_cli_design(issue.fields.summary): - print('Found CLI Design review issue: {}'.format(issue.key)) - return issue - - if create_issue_if_absent: - return create_design_ticket(dexreq_issue) - - return None - - -def is_design_ticket_in_non_terminal_state(issue): - return issue and issue.fields.status and issue.fields.status.name not in config.CLI_DESIGN_REVIEW_TERMINAL_STATES - - -# Returns pending design review tickets associated with an UDX ticket. 
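-# It walks each UDX issue's 'issuelinks' and keeps only the linked issues whose summary matches -# 'CLI Design Review'; JIRA errors are logged and skipped rather than raised.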
-def get_cli_design_review_issues_for_udx(udx_issue_keys): - design_tickets = [] - for udx_issue_key in udx_issue_keys: - try: - udx_issue = util.JIRA_CLIENT().issue(udx_issue_key) - if hasattr(udx_issue.fields, 'issuelinks'): - for link in udx_issue.fields.issuelinks: - if hasattr(link, 'outwardIssue'): - issue = util.JIRA_CLIENT().issue(link.outwardIssue.key, fields='description, summary, status, labels') - if is_issue_summary_matches_cli_design(issue.fields.summary): - print('Found CLI Design review issue: {}'.format(issue.key)) - design_tickets.append(issue) - except JIRAError as e: - print("JIRAError while getting CLI design review issues for {}: {}".format(udx_issue_keys, str(e))) - - return design_tickets - - -def get_cli_command_str_with_warnings(cli_cmd, is_modified_command=False, modified_complex_params=None): - # use None as default to avoid the shared-mutable-default-argument pitfall - if modified_complex_params is None: - modified_complex_params = [] - params = cli_cmd.get_all_params_str() if not is_modified_command else cli_cmd.get_modified_params_str() - complete_command = cli_cmd.cmd_str + ' ' + params - return_string = complete_command - for complex_cmd, complex_param_name in modified_complex_params: - if complex_cmd in complete_command: - return_string += "\n## Complex param --" + complex_param_name + " has been updated." - if cli_cmd.get_warnings_str(): - return_string += '\n' + cli_cmd.get_warnings_str() - - return return_string - - -def check_for_long_params(cli_cmds, is_modified_cmds=False): - for cli_cmd in cli_cmds: - params = cli_cmd.get_all_params() if not is_modified_cmds else cli_cmd.modified_params - for param in params: - if len(param) > MAX_PARAM_LEN: - cli_cmd.add_warning(PARAM_TOO_LONG_WARNING_TEMPLATE.format(param)) - - -def check_for_long_sub_commands(cli_cmds): - for cli_cmd in cli_cmds: - sub_commands = cli_cmd.cmd_str.split(' ') - for sub_command in sub_commands: - if len(sub_command) > MAX_SUB_COMMAND_LEN: - cli_cmd.add_warning(SUB_COMMAND_TOO_LONG_WARNING_TEMPLATE.format(sub_command)) - - -# This function returns commands eligible for spec customisation, formatted as a Jira comment. -# Commands that are new additions to the service are eligible for spec customisation. -def check_new_added_commands(newly_added_commands_string): - if newly_added_commands_string.strip(): - NEW_DETECTED_API_COMMAND_FORMATTED_TEXT = NEW_DETECTED_API_SUMMARY_TEMPLATE.format(added_commands=newly_added_commands_string) - return TRY_CLI_SPEC_CUSTOMIZATION_SUGGESTION_TEMPLATE + NEW_DETECTED_API_COMMAND_FORMATTED_TEXT - return "" - - -# This uses tests/output/command-summary/* files to determine change in command structure. -# We compare local command-summary/* files with the remote preview branch. 
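-# The diff is taken from origin/preview to the local HEAD commit, so the modified ('M') and -# added ('A') change types below cover the command changes produced by the current generation run.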
-def get_command_summary_changes(): - cli_repo = config.CLI_REPO - head_commit = cli_repo.head.commit - head_commit_origin = cli_repo.commit('origin/preview') - diff = head_commit_origin.diff(head_commit, create_patch=True) - - modified_commands = [] - newly_added_commands = [] - modified_complex_params = [] - removed_commands = [] - - for mdiff in diff.iter_change_type('M'): - mc, nc, cmc = get_generated_commands(mdiff) - modified_commands.extend(mc) - newly_added_commands.extend(nc) - modified_complex_params.extend(cmc) - - for adiff in diff.iter_change_type('A'): - newly_added_commands.extend(list(get_newly_added_commands(adiff).values())) - - for ddiff in diff.iter_change_type('M'): - removed_commands.extend(get_removed_commands(ddiff)) - - check_for_long_params(modified_commands, True) - check_for_long_params(newly_added_commands) - check_for_long_sub_commands(newly_added_commands) - - modified_commands_string = '\n\n'.join( - map(lambda cli_cmd: get_cli_command_str_with_warnings(cli_cmd, True, modified_complex_params), modified_commands) - ) - - newly_added_commands_string = '\n\n'.join( - map(lambda cli_cmd: get_cli_command_str_with_warnings(cli_cmd), newly_added_commands) - ) - - removed_commands_string = '\n\n'.join( - map(lambda cli_cmd: get_cli_command_str_with_warnings(cli_cmd), removed_commands) - ) - - modified_commands_comment = MODIFIED_COMMANDS_SUMMARY_TEMPLATE.format(modified_commands=modified_commands_string) if modified_commands_string else "" - added_commands_comment = ADDED_COMMANDS_SUMMARY_TEMPLATE.format(added_commands=newly_added_commands_string) if newly_added_commands_string else "" - removed_commands_comment = REMOVED_COMMANDS_SUMMARY_TEMPLATE.format(removed_commands=removed_commands_string) if removed_commands_string else "" - conflicting_parameters_comment = check_for_conflicting_params(modified_commands_comment + added_commands_comment) - - print('Modified Commands: {}'.format(modified_commands_string)) - print('Newly Added Commands: {}'.format(newly_added_commands_string)) - print('Removed Commands: {}'.format(removed_commands_string)) - try_cli_spec_customization_comment = check_new_added_commands(newly_added_commands_string) - return modified_commands_comment + added_commands_comment + removed_commands_comment + conflicting_parameters_comment + try_cli_spec_customization_comment - - -def check_for_conflicting_params(command_summary_changes): - if "-parameterconflict" in command_summary_changes: - return CONFLICTING_PARAMETER_TEMPLATE - return "" - - -class CliCommandSummary: - default_cli_params = ['--from-json', '--help', '--limit', '--page', '--page-size', '--sort-by', - '--sort-order', '--if-match', '--wait-for-state', '--wait-interval-seconds', - '--max-wait-seconds', '--lifecycle-state'] - - def __init__(self, cmd_str, count, required_params, optional_params, warnings=None): - self.cmd_str = cmd_str - self.count = count - self.required_params = required_params - self.optional_params = optional_params - if warnings is None: - warnings = [] - self.warnings = warnings - - def get_required_params(self): - return self.required_params.replace('Required Parameters:', '').strip().split(', ') - - def get_optional_params(self): - optional_params = self.optional_params.replace('Optional Parameters:', '').strip().split(', ') - # exclude default cli params in the optional params - we don't want to display them! 
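-        # (i.e. the common options listed in default_cli_params above, such as --help, --limit and --wait-for-state)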
- optional_params = [param for param in optional_params if param not in CliCommandSummary.default_cli_params] - return optional_params - - def get_all_params(self): - required_params = self.get_required_params() - optional_params = self.get_optional_params() - return required_params + optional_params - - def get_all_params_str(self): - return ', '.join(filter(None, self.get_all_params())) - - def add_warning(self, warning): - self.warnings.append(warning) - - def get_warnings_str(self): - return '\n'.join('## ' + warning for warning in self.warnings) - - def __eq__(self, other): - if not isinstance(other, CliCommandSummary): - return NotImplemented - - return self.cmd_str == other.cmd_str and \ - self.count == other.count and \ - self.required_params == other.required_params and \ - self.optional_params == other.optional_params - - -class ModifiedCliCommandSummary(CliCommandSummary): - def __init__(self, cli_command_summary, modified_params=None): - # use None as default to avoid the shared-mutable-default-argument pitfall - self.modified_params = modified_params if modified_params is not None else [] - CliCommandSummary.__init__(self, cli_command_summary.cmd_str, cli_command_summary.count, cli_command_summary.required_params, cli_command_summary.optional_params, cli_command_summary.warnings) - - def get_modified_params_str(self): - return ', '.join(filter(None, self.modified_params)) - - -def get_pull_request_link_for_generated_branch(): - try: - with open('preview-pr.txt', 'r') as filehandle: - pr_link = filehandle.read() - print('PR link found: {}'.format(pr_link)) - except Exception as e: - print(str(e)) - return "" - return pr_link - - -if __name__ == "__main__": - # Last step of the CLI Preview build. This script will create a CLI Design review issue and - # associate it with the DEXREQ and UDX issues. The CLI Design review issue will be automatically assigned to - # the DEXREQ reporter. - parser = argparse.ArgumentParser(description='CLI Design review issue creation!') - parser.add_argument('--build-id', - required=True, - help='The team city build id for the build that is running this script. This is used to update the relevant JIRA tickets with links to the team city build') - parser.add_argument('--tool', - default=config.CLI_NAME, - help='The tool for which to generate the preview. Accepted values: {}'.format(', '.join(config.TOOL_NAMES))) - parser.add_argument('--build-type', - default=config.BUILD_TYPE_INDIVIDUAL_PREVIEW, - help='The build type to use, can be one of the following: {}'.format(', '.join(config.VALID_BUILD_TYPES))) - parser.add_argument('--dry-run', - default=False, - action='store_true', - help='Perform a dry-run') - - args = parser.parse_args() - tool_name = args.tool - build_id = args.build_id - build_type = args.build_type - config.IS_DRY_RUN = args.dry_run - - if build_type != config.BUILD_TYPE_INDIVIDUAL_PREVIEW or tool_name != config.CLI_NAME: - print('CLI Design review issue generation not required for build type: {}'.format(build_type)) - sys.exit(0) - - generation_pass, build_pass = util.were_steps_successful(tool_name) - if not (generation_pass and build_pass): - print('Generation or Build did not pass, not proceeding.') - sys.exit(0) - - last_commit_message = util.get_last_commit_message(tool_name) - issue_keys = util.parse_issue_keys_from_commit_message(last_commit_message) - if len(issue_keys) != 1: - print('Expected exactly one DEXREQ issue, found: {}. 
Not generating CLI Design review ticket.'.format(issue_keys)) - sys.exit(0) - dexreq_issue = issue_keys[0] - try: - command_summary_changes_str = get_command_summary_changes() - dex_issue = get_cli_design_ticket(util.get_dexreq_issue(dexreq_issue)) - pr_link = GENERATED_PR_TEMPLATE.format(pr_link=get_pull_request_link_for_generated_branch()) - if command_summary_changes_str: - comment_str = command_summary_changes_str + pr_link - # if ticket is in 'Done'/'Closed' state, change state to 'Needs Triage Status' - if dex_issue.fields.status.name in [config.STATUS_DONE, config.STATUS_CLOSED]: - util.transition_issue_overall_status(util.JIRA_CLIENT(), dex_issue, config.STATUS_NEEDS_TRIAGE_STATUS) - else: - comment_str = CLI_NO_CHANGES_TO_THE_COMMAND_STRUCTURE.format(issue=dexreq_issue) + pr_link - - print('Updating CLI Design Review issue {} with {} '.format(dex_issue.key, comment_str)) - util.add_jira_comment(dex_issue, comment_str) - util.add_jira_comment(dexreq_issue, DESIGN_REVIEW_ISSUE_SUCCESS_MESSAGE_TEMPLATE.format(dex_issue.key)) - - except Exception as e: - issue = util.get_dexreq_issue(dexreq_issue) - util.add_jira_comment( - issue.key, - CLI_DESIGN_FAILURE_TEMPLATE.format( - exception=str(e), - build_log_link=build_log_link(build_id) - ), - comment_type=config.COMMENT_TYPE_ERROR - ) diff --git a/scripts/auto_gen_utils/create_recordings_ticket.py b/scripts/auto_gen_utils/create_recordings_ticket.py deleted file mode 100644 index 4dfa541f35..0000000000 --- a/scripts/auto_gen_utils/create_recordings_ticket.py +++ /dev/null @@ -1,153 +0,0 @@ -import util -import config -import os -import sys -import argparse -from shared.buildsvc_tc_compatibility import build_log_link - -FAILED_TEST_PATH = "../python-cli/failed_tests.txt" - -CLI_RECORDINGS_DESCRIPTION_TEMPLATE = """ -The tests posted in the comments were disabled while generating the CLI due to compatibility issues with the newly generated code. -Please re-test and re-create the recordings for the skipped tests, and include them as part of a PR for the CLI preview branch. -Include a link to this ticket as part of your pull request comments. The Jira ticket may be closed once the corresponding PR has been merged. -""" - -ADDED_RECORDINGS_TEMPLATE = """ -{{code:title=Disabled Tests}} -{tests} -{{code}} -""" -CLI_RECORDINGS_FAILURE_TEMPLATE = """The job failed to create/update CLI Recordings ticket. {exception}. - -The full build log can be found {build_log_link}. - -For build log and artifact access see the [self-service FAQ|https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=132774928#SDK/CLISelfServiceFrequentlyAskedQuestions-HowdoIgetaccesstologsandartifacts?]. -""" - -TESTS_FAILED_COMMENT = """The following tests failed while generating the CLI. Please use the generated branch to fix the failed tests. 
-{{code:title=Failed Tests}} -{tests} -{{code}} -""" - -CLI_RECORDINGS_SUCCESS_MESSAGE_TEMPLATE = 'The following CLI Create Test Recordings ticket has been opened and needs your attention -- https://jira.oci.oraclecorp.com/browse/{}' - - -def is_issue_summary_matches_cli_recordings_issue(issue_summary): - return issue_summary and 'CLI Create Test Recordings'.lower() in issue_summary.lower() - - -def create_recordings_ticket(dexreq_issue): - print('Creating new CLI Recordings Issue') - sprint_id = util.find_dex_tools_active_sprint_id() - - fields = { - 'project': 'DEX', - 'summary': 'CLI Create Test Recordings for {}'.format(dexreq_issue.key), - 'description': CLI_RECORDINGS_DESCRIPTION_TEMPLATE, - 'issuetype': {'name': 'Task'}, - 'components': [{'name': 'CLI'}], - 'labels': ['CLITestRecordings'], - 'assignee': {'name': dexreq_issue.fields.creator.name}, - config.CUSTOM_FIELD_ID_SPRINT: sprint_id - } - - recordings_ticket = util.JIRA_CLIENT().create_issue(fields) - util.JIRA_CLIENT().create_issue_link(config.IS_ACTION_ITEM_OF_LINK, recordings_ticket, dexreq_issue) - - return recordings_ticket - - -def get_cli_test_recordings_ticket(dexreq_issue): - if hasattr(dexreq_issue.fields, 'issuelinks'): - for link in dexreq_issue.fields.issuelinks: - if hasattr(link, 'outwardIssue'): - issue = util.JIRA_CLIENT().issue(link.outwardIssue.key, fields='description, summary') - if is_issue_summary_matches_cli_recordings_issue(issue.fields.summary): - print('Found CLI Recordings ticket: {}'.format(issue.key)) - return issue - - return create_recordings_ticket(dexreq_issue) - - -# Comments on the DEXREQ ticket and halts the DEXREQ pipeline. -def handle_non_generated_tests_failures(tests, dexreq_issue): - tests_str = "".join(tests) - util.add_jira_comment(dexreq_issue, TESTS_FAILED_COMMENT.format(tests=tests_str), config.COMMENT_TYPE_ERROR) - - -# Creates/Updates a CLI Test recordings ticket and continues the DEXREQ pipeline. -def handle_generated_tests_failures(tests, dexreq_issue): - tests_str = "".join(tests) - if tests_str: - recordings_ticket = get_cli_test_recordings_ticket(dexreq_issue) - util.add_jira_comment(recordings_ticket, ADDED_RECORDINGS_TEMPLATE.format(tests=tests_str)) - util.add_jira_comment(dexreq_issue, CLI_RECORDINGS_SUCCESS_MESSAGE_TEMPLATE.format(recordings_ticket.key)) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='CLI Test Recordings issue creation!') - parser.add_argument('--build-id', - required=True, - help='The team city build id for the build that is running this script. This is used to update the relevant JIRA tickets with links to the team city build') - parser.add_argument('--tool', - default=config.CLI_NAME, - help='The tool for which to generate the preview. 
Accepted values: {}'.format(', '.join(config.TOOL_NAMES))) - parser.add_argument('--build-type', - default=config.BUILD_TYPE_INDIVIDUAL_PREVIEW, - help='The build type to use, can be one of the following: {}'.format(', '.join(config.VALID_BUILD_TYPES))) - parser.add_argument('--dry-run', - default=False, - action='store_true', - help='Perform a dry-run') - parser.add_argument('--create-jira', - default=False, - action='store_true', - help='Opens a JIRA ticket with the failed tests and links it to the DEXREQ ticket') - - args = parser.parse_args() - tool_name = args.tool - build_id = args.build_id - build_type = args.build_type - config.IS_DRY_RUN = args.dry_run - - if build_type != config.BUILD_TYPE_INDIVIDUAL_PREVIEW or tool_name != config.CLI_NAME: - print('CLI Create Test Recordings issue generation not required for build type: {}'.format(build_type)) - sys.exit(0) - - last_commit_message = util.get_last_commit_message(tool_name) - issue_keys = util.parse_issue_keys_from_commit_message(last_commit_message) - if len(issue_keys) != 1: - print('Expected exactly one DEXREQ issue, found: {}. Not generating CLI Test Recordings ticket.'.format(issue_keys)) - sys.exit(0) - dexreq_issue_key = issue_keys[0] - - try: - if not os.path.isfile(FAILED_TEST_PATH): - print("No test failures found, exiting!") - sys.exit(0) - - with open(FAILED_TEST_PATH, "r") as file_handle: - failed_tests = list(file_handle) - dexreq_issue = util.get_dexreq_issue(dexreq_issue_key) - # filter unit/integ/extended test failures. - not_generated_tests = list(filter((lambda test: '/tests/generated/' not in test), failed_tests)) - if not_generated_tests: - handle_non_generated_tests_failures(not_generated_tests, dexreq_issue) - - if args.create_jira: - generated_tests = list(filter(lambda test: '/tests/generated/' in test, failed_tests)) - if generated_tests: - handle_generated_tests_failures(generated_tests, dexreq_issue) - - except Exception as e: - issue = util.get_dexreq_issue(dexreq_issue_key) - util.add_jira_comment( - issue.key, - CLI_RECORDINGS_FAILURE_TEMPLATE.format( - exception=str(e), - build_log_link=build_log_link(build_id) - ), - comment_type=config.COMMENT_TYPE_ERROR - ) diff --git a/scripts/auto_gen_utils/dexreq_migration.py b/scripts/auto_gen_utils/dexreq_migration.py deleted file mode 100644 index 90fb732f2e..0000000000 --- a/scripts/auto_gen_utils/dexreq_migration.py +++ /dev/null @@ -1,281 +0,0 @@ -import argparse - -import util -import config - -import re -import json -import sys - -process_preview_jira_queue = __import__("1_process_preview_jira_queue") - -DEXREQ_PREVIEW_ISSUES_QUERY = 'project = {JIRA_PROJECT} and labels = {PREVIEW_LABEL} and resolution = Unresolved and labels != NewPipelineTesting'.format(JIRA_PROJECT=config.JIRA_PROJECT, PREVIEW_LABEL=config.PREVIEW_TICKET_LABEL) -DEXREQ_PUBLIC_ISSUES_QUERY = 'project = {JIRA_PROJECT} and labels = {PUBLIC_LABEL} and resolution = Unresolved and labels != NewPipelineTesting'.format(JIRA_PROJECT=config.JIRA_PROJECT, PUBLIC_LABEL=config.PUBLIC_TICKET_LABEL) - -OLD_CUSTOM_STATUS_MORE_INFORMATION_NEEDED = 'More Information Needed' # -> changed to SDK Status = Failure + Overall Status = Service Team Failure Investigation -OLD_CUSTOM_STATUS_MANUAL_ATTENTION_REQUIRED = 'Manual Attention Required' # -> changed to SDK Status = Failure + Overall Status = DEX Support Required -OLD_CUSTOM_STATUS_PENDING_MERGE = 'Pending Merge' # -> changed to SDK Status = Success + (Overall Status = 'Service Team Review Required' in preview 
and Overall Status = 'Service Team Work Required' in public) - -OLD_CUSTOM_FIELD_ID_JAVA_SDK_STATUS = 'customfield_12535' -OLD_CUSTOM_FIELD_ID_PYTHON_SDK_STATUS = 'customfield_12536' -OLD_CUSTOM_FIELD_ID_RUBY_SDK_STATUS = 'customfield_12537' -OLD_CUSTOM_FIELD_ID_GO_SDK_STATUS = 'customfield_13104' -OLD_CUSTOM_FIELD_ID_CLI_STATUS = 'customfield_12538' - -OLD_SDK_CLI_STATUS_FIELDS = [ - OLD_CUSTOM_FIELD_ID_JAVA_SDK_STATUS, - OLD_CUSTOM_FIELD_ID_PYTHON_SDK_STATUS, - OLD_CUSTOM_FIELD_ID_RUBY_SDK_STATUS, - OLD_CUSTOM_FIELD_ID_GO_SDK_STATUS, - OLD_CUSTOM_FIELD_ID_CLI_STATUS, - 'labels', - 'status' -] - -OLD_PREVIEW_SIGN_OFF_LABEL = 'PreviewReviewComplete' - -OLD_STATUS_READY_FOR_WORK = 'Ready for Work' -OLD_STATUS_IN_PROGRESS = 'In Progress' - -STATUS_BACKLOG = 'Backlog' - -OLD_SDK_CLI_STATUS_FIELD_ID_TO_NEW_MAPPINGS = [ - (OLD_CUSTOM_FIELD_ID_JAVA_SDK_STATUS, config.CUSTOM_FIELD_ID_JAVA_SDK_STATUS, config.CUSTOM_FIELD_NAME_JAVA_SDK_STATUS), - (OLD_CUSTOM_FIELD_ID_PYTHON_SDK_STATUS, config.CUSTOM_FIELD_ID_PYTHON_SDK_STATUS, config.CUSTOM_FIELD_NAME_PYTHON_SDK_STATUS), - (OLD_CUSTOM_FIELD_ID_RUBY_SDK_STATUS, config.CUSTOM_FIELD_ID_RUBY_SDK_STATUS, config.CUSTOM_FIELD_NAME_RUBY_SDK_STATUS), - (OLD_CUSTOM_FIELD_ID_GO_SDK_STATUS, config.CUSTOM_FIELD_ID_GO_SDK_STATUS, config.CUSTOM_FIELD_NAME_GO_SDK_STATUS), - (OLD_CUSTOM_FIELD_ID_CLI_STATUS, config.CUSTOM_FIELD_ID_CLI_STATUS, config.CUSTOM_FIELD_NAME_CLI_STATUS) -] - -OLD_SDK_CLI_STATUS_VALUES_TO_NEW = { - config.CUSTOM_STATUS_TODO: config.CUSTOM_STATUS_TODO, - OLD_CUSTOM_STATUS_MORE_INFORMATION_NEEDED: config.CUSTOM_STATUS_FAILURE, - OLD_CUSTOM_STATUS_MANUAL_ATTENTION_REQUIRED: config.CUSTOM_STATUS_FAILURE, - OLD_CUSTOM_STATUS_PENDING_MERGE: config.CUSTOM_STATUS_SUCCESS, - config.CUSTOM_STATUS_DONE: config.CUSTOM_STATUS_DONE -} - -ISSUE_TYPE_ENUM_PUBLIC = "Public" -ISSUE_TYPE_ENUM_PREVIEW = "Preview" - -SCRIPT_MODE_PRE_PROCESS = 'PreProcess' -SCRIPT_MODE_UPDATE_ISSUES = 'UpdateIssues' - - -def process_old_sdk_status_fields_and_overall_status(issue, issue_type_enum, new_issue_state): - all_fields_in_todo = True - all_fields_in_pending_merge = True - all_fields_in_pending_merge_or_done = True - for old_field_id, new_field_id, new_field_name in OLD_SDK_CLI_STATUS_FIELD_ID_TO_NEW_MAPPINGS: - old_value = getattr(issue.fields, old_field_id).value - new_value = OLD_SDK_CLI_STATUS_VALUES_TO_NEW[old_value] - - if old_value != config.CUSTOM_STATUS_TODO: - all_fields_in_todo = False - - if old_value != OLD_CUSTOM_STATUS_PENDING_MERGE: - all_fields_in_pending_merge = False - - # note: as written this flag only stays True when every field is Done; it is - # combined with all_fields_in_pending_merge below to approximate "pending merge or done" - if old_value != config.CUSTOM_STATUS_DONE: - all_fields_in_pending_merge_or_done = False - - if old_value == OLD_CUSTOM_STATUS_MORE_INFORMATION_NEEDED: - # if the issue has already been set to DEX Support Required, that takes precedence, we don't want to override to - # 'Service Team Failure Investigation' - if issue.fields.status.name != config.STATUS_DEX_SUPPORT_REQUIRED: - new_issue_state['status'] = config.STATUS_SERVICE_TEAM_FAILURE_INVESTIGATION - elif old_value == OLD_CUSTOM_STATUS_MANUAL_ATTENTION_REQUIRED: - new_issue_state['status'] = config.STATUS_DEX_SUPPORT_REQUIRED - - new_issue_state['fields'][new_field_id] = {'value': new_value} - - if issue_type_enum == ISSUE_TYPE_ENUM_PREVIEW: - if all_fields_in_todo: - new_issue_state['status'] = config.STATUS_PROCESSING_REQUESTED - elif all_fields_in_pending_merge: - if issue.fields and issue.fields.labels and OLD_PREVIEW_SIGN_OFF_LABEL in issue.fields.labels: - new_issue_state['status'] = config.STATUS_READY_FOR_PREVIEW - 
else: - # service team still needs to give explicit sign off - new_issue_state['status'] = config.STATUS_SERVICE_TEAM_REVIEW_REQUIRED - elif issue_type_enum == ISSUE_TYPE_ENUM_PUBLIC: - if all_fields_in_todo and issue.fields and issue.fields.status and issue.fields.status.name == OLD_STATUS_READY_FOR_WORK: - new_issue_state['status'] = config.STATUS_PROCESSING_REQUESTED - elif (all_fields_in_pending_merge or all_fields_in_pending_merge_or_done) and (not new_issue_state.get('status') == config.STATUS_DEX_SUPPORT_REQUIRED): - # DEX support Required takes precedence over any of the states that would be set based on PR progress - process_pull_request_progress(issue, new_issue_state, 'master') - - -def process_udx_ticket_from_summary(issue, new_issue_state): - summary = issue.fields.summary - udx_ticket = re.findall(r'UDX-\d+', summary, re.IGNORECASE) - - if len(udx_ticket) != 1: - if issue.fields and issue.fields.labels and config.BYPASS_CHECK_UDX_TICKET_LABEL in issue.fields.labels: - print('INFO: Allowing issue {} without exactly one UDX ticket because it has the bypass label'.format(issue.key)) - else: - raise ValueError("Issue: {} did not contain exactly 1 UDX ticket in its summary. Summary: {}".format(issue.key, issue.fields.summary)) - else: - new_issue_state['fields'][config.CUSTOM_FIELD_ID_UDX_TICKET] = udx_ticket[0] - - -def process_pull_request_progress(issue, new_issue_state, target_branch_filter=None): - pr_status = util.get_pr_status_for_tools(util.JIRA_CLIENT(), issue, config.TOOLS_HANDLED_BY_SERVICE_TEAMS + [config.TESTING_SERVICE_NAME], target_branch_filter=target_branch_filter) - print('pr status: ' + json.dumps(pr_status, indent=2)) - if pr_status['all_prs_merged']: - new_issue_state['status'] = config.STATUS_TO_DEPLOY - elif pr_status['all_prs_initiated']: - new_issue_state['status'] = config.STATUS_RELEASE_REQUESTED - - -def calculate_new_issue_states(issues, output_file): - # grab all open DEXREQ tickets - preview_issues = util.jira_search_issues(process_preview_jira_queue.limit_query_to_issue_keys(DEXREQ_PREVIEW_ISSUES_QUERY, issues, False), fields=', '.join( - process_preview_jira_queue.DEFAULT_JIRA_ISSUE_FIELDS + process_preview_jira_queue.CUSTOM_JIRA_ISSUE_FIELDS_PER_BUILD_TYPE[config.BUILD_TYPE_INDIVIDUAL_PREVIEW] + OLD_SDK_CLI_STATUS_FIELDS) - ) - public_issues = util.jira_search_issues(process_preview_jira_queue.limit_query_to_issue_keys(DEXREQ_PUBLIC_ISSUES_QUERY, issues, False), fields=', '.join( - process_preview_jira_queue.DEFAULT_JIRA_ISSUE_FIELDS + process_preview_jira_queue.CUSTOM_JIRA_ISSUE_FIELDS_PER_BUILD_TYPE[config.BUILD_TYPE_INDIVIDUAL_PUBLIC] + OLD_SDK_CLI_STATUS_FIELDS) - ) - - print('Found a total of {} open DEXREQ tickets. Preview: {}, Public: {}.'.format( - len(preview_issues) + len(public_issues), - len(preview_issues), - len(public_issues) - )) - - new_issue_states = [] - - errors = [] - - print('Processing Preview Issue(s)...') - print('=====================') - for issue in preview_issues: - print('{} - {}'.format(issue.key, issue.fields.summary)) - - new_issue_state = { - 'status': STATUS_BACKLOG, - 'key': issue.key, - 'issuetype': config.PREVIEW_ISSUE_TYPE_NAME, - 'fields': {} - } - - try: - process_udx_ticket_from_summary(issue, new_issue_state) - process_old_sdk_status_fields_and_overall_status(issue, ISSUE_TYPE_ENUM_PREVIEW, new_issue_state) - - new_issue_states.append(new_issue_state) - except Exception as e: - errors.append('Could not process ticket {}. 
Error: {}'.format(issue.key, str(e))) - - print('\n') - - print('Public Issue(s):') - print('=====================') - for issue in public_issues: - print('{} - {}'.format(issue.key, issue.fields.summary)) - - new_issue_state = { - 'status': STATUS_BACKLOG, - 'key': issue.key, - 'issuetype': config.PUBLIC_ISSUE_TYPE_NAME, - 'fields': {} - } - - try: - process_udx_ticket_from_summary(issue, new_issue_state) - process_old_sdk_status_fields_and_overall_status(issue, ISSUE_TYPE_ENUM_PUBLIC, new_issue_state) - - new_issue_states.append(new_issue_state) - except Exception as e: - errors.append('Could not process ticket {}. Error: {}'.format(issue.key, str(e))) - - print('\n') - - with open(output_file, 'w+') as f: - f.write(json.dumps(new_issue_states, indent=2)) - - if errors: - print('ERROR: The following errors occurred:') - for error in errors: - print('ERROR: {}'.format(error)) - - print("""Manually update all tickets referenced above to their appropriate new ticket states (Preview or Public). -Then re-run this script with mode == {} and --input-file {}""".format( - SCRIPT_MODE_UPDATE_ISSUES, - output_file) - ) - - print('Query for bulk migrating PREVIEW issues:') - print('\t' + ' OR '.join(['key = {}'.format(issue.key) for issue in preview_issues])) - - print('Query for bulk migrating PUBLIC issues:') - print('\t' + ' OR '.join(['key = {}'.format(issue.key) for issue in public_issues])) - - -def update_issues(issues_filter, input_file): - new_issue_states = [] - with open(input_file, 'r') as f: - content = f.read() - new_issue_states = json.loads(content) - - for issue_state in new_issue_states: - if issues_filter and (not issue_state['key'] in issues_filter): - print('Skipping issue: {} because it is not in issues filter'.format(issue_state['key'])) - continue - - issue = util.JIRA_CLIENT().issue(issue_state['key']) - - if issue_state['issuetype'] != issue.fields.issuetype.name: - print('Skipping issue {} because it has not yet been converted to issuetype: {}'.format(issue.key, issue_state['issuetype'])) - continue - - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, issue_state['status']) - - if config.IS_DRY_RUN: - print('DRY-RUN: Not updating the following fields in issue {}: {}'.format(issue.key, json.dumps(issue_state['fields']))) - else: - print('Updating the following fields in issue {}: {}'.format(issue.key, json.dumps(issue_state['fields']))) - issue.update(fields=issue_state['fields']) - - -# Known limitations: -# - The script will not detect if bulk preview PRs are out for preview, so don't do the migraiton while the weekly bulk preview is in progress -# - The script does not attempt to handle converting into the new 'Processing' and 'Processing - Bulk' states, so don't do the migration while the current 'Process queue' job is running -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Process the DEXREQ JIRA queue (preview and public).') - parser.add_argument('--issue', - action='append', - help='By default, we query JIRA. This allows you to specify a DEXREQ issue to process instead: --issue DEXREQ-123') - parser.add_argument('--dry-run', - default=False, - action='store_true', - help='Perform a dry-run') - parser.add_argument('--mode', - default=SCRIPT_MODE_PRE_PROCESS, - help=""""The mode to run the script in. 
{} will determine and output - the new desired states for each issue, {} will take in the output of a PreProcess job and update issues - accordingly""".format(SCRIPT_MODE_PRE_PROCESS, SCRIPT_MODE_UPDATE_ISSUES)) - parser.add_argument('--input-file', - help=""""The input file to read the desired issue states from. This option is only applicable for the mode: {}""".format(SCRIPT_MODE_UPDATE_ISSUES)) - parser.add_argument('--output-file', - help=""""The output file to write the desired issue states to. This option is only applicable for the mode: {}""".format(SCRIPT_MODE_PRE_PROCESS)) - - args = parser.parse_args() - issues = args.issue - output_file = args.output_file - input_file = args.input_file - mode = args.mode - config.IS_DRY_RUN = args.dry_run - - if mode == SCRIPT_MODE_PRE_PROCESS: - if not output_file: - sys.exit('--output-file is required for mode == {}'.format(SCRIPT_MODE_PRE_PROCESS)) - - calculate_new_issue_states(issues, output_file) - elif mode == SCRIPT_MODE_UPDATE_ISSUES: - if not input_file: - sys.exit('--input-file is required for mode == {}'.format(SCRIPT_MODE_UPDATE_ISSUES)) - - update_issues(issues, input_file) - else: - print('Invalid script mode') diff --git a/scripts/auto_gen_utils/generate_button.js b/scripts/auto_gen_utils/generate_button.js deleted file mode 100644 index da2d7e4105..0000000000 --- a/scripts/auto_gen_utils/generate_button.js +++ /dev/null @@ -1,114 +0,0 @@ -'use strict'; - -const e = React.createElement; - -class LikeButton extends React.Component { - constructor(props) { - super(props); - this.state = { - liked: false , - masterItems: "", - localItems:"" - }; - } - - componentDidMount(){ - fetch(`https://objectstorage.us-phoenix-1.oraclecloud.com/p/5mzix52OhxjnITDKe5bdefXAwxOLpEUEta1czeh_aK4/n/dex-us-phoenix-1/b/codecov_baseline/o/codecov_master.json`) - .then(result=>result.json()) - .then(items=>this.setState({masterItems: items})) - fetch(`https://objectstorage.us-phoenix-1.oraclecloud.com/p/y4R2h_AwvDBBG0avDuy8ZilmHWQU8MrGP2GYadWP91Y/n/dex-us-phoenix-1/b/codecov_baseline/o/codecov_preview.json`) - .then(result=>result.json()) - .then(items=>this.setState({previewItems: items})) - fetch('./master_local_codecov.json') - .then(result=>result.json()) - .then(items=>this.setState({localItems: items})) - // var localData = require('./local_codecov.xml') - - } - - render() { - if (this.state.liked) { - var itemsArray = this.getItemArray(this.state.masterItems); - var previewArray = this.getItemArray(this.state.previewItems); - var localArray = this.getItemArray(this.state.localItems); - - itemsArray.sort((a, b) => (a.name > b.name) ? 1 : -1); - previewArray.sort((a, b) => (a.name < b.name) ? 1 : -1); - localArray.sort((a, b) => (a.name < b.name) ? 1 : -1); - - return ( -
-        <div>
-          <h3>Master branch</h3>
-          {this.getTable(itemsArray)}
-          <h3>Preview branch</h3>
-          {this.getTable(previewArray)}
-          <h3>Local</h3>
-          {this.getTable(localArray)}
-        </div>
-      );
-    }
-    return e(
-      'button',
-      { onClick: () => this.setState({ liked: true }) },
-      'Generate'
-    );
-  }
-
-  // given an array of items stored in the state, return an array that will be used to display the table
-  getItemArray(items) {
-    var itemsArray = [];
-    for (var key in items) {
-      for (var i = 0; i < items[key].length; i++) {
-        var innerItem = items[key][i];
-        var item = {};
-        item.name = key;
-        item.tag = innerItem.tag;
-        item.className = innerItem.class;
-        var entry = innerItem.data;
-        item.instructionCoverage = Math.round(entry.coveredInstructions * 100 / (entry.coveredInstructions + entry.missedInstructions));
-        item.branchCoverage = Math.round(entry.coveredBranches * 100 / (entry.missedBranches + entry.coveredBranches));
-        item.lineCoverage = Math.round(entry.coveredLines * 100 / (entry.coveredLines + entry.missedLines));
-        var d = new Date(Date.parse(entry.timestamp));
-        item.timestamp = d.toLocaleString();
-        item.testsPassedPercentage = entry.testsPassedPercentage;
-        itemsArray.push(item);
-      }
-    }
-
-    return itemsArray;
-  }
-
-  // given an item array, return table
-  getTable(itemsArray) {
-    const header = ["service", "tag", "class", "Instruction coverage", "Branch coverage", "Line coverage", "Timestamp", "Tests Passed (%)"];
-
-    return (
-      <table>
-        <tr>
-          {header.map((h, i) => <th>{h}</th>)}
-        </tr>
-        {itemsArray.map(function(item, key) {
-          return (
-            <tr>
-              <td>{item.name}</td>
-              <td>{item.tag}</td>
-              <td>{item.className}</td>
-              <td>{item.instructionCoverage}</td>
-              <td>{item.branchCoverage}</td>
-              <td>{item.lineCoverage}</td>
-              <td>{item.timestamp}</td>
-              <td>{item.testsPassedPercentage}</td>
-            </tr>
-          )
-        })}
-      </table>
- ) - } -} - -const domContainer = document.querySelector('#generate_button_container'); -ReactDOM.render(e(LikeButton), domContainer); diff --git a/scripts/auto_gen_utils/get_jira_access_token.py b/scripts/auto_gen_utils/get_jira_access_token.py deleted file mode 100644 index e679274992..0000000000 --- a/scripts/auto_gen_utils/get_jira_access_token.py +++ /dev/null @@ -1,34 +0,0 @@ -from jira import JIRA - -import config -import six -import sys -import getpass -import traceback - - -username = six.moves.input('JIRA username: ') -password = getpass.getpass('JIRA password: ') - -client = JIRA(config.JIRA_OPTIONS, auth=(username, password)) - -session_id = client._session.cookies.get('JSESSIONID') - -cookie_options = dict(config.JIRA_OPTIONS) -cookie_options['cookies'] = { - 'JSESSIONID': session_id -} - -cookie_client = JIRA(cookie_options) - -try: - # make a test request to ensure this works - cookie_client.server_info() - cookie_client.search_issues(config.TODO_PREVIEW_CLI_TICKETS_JQL) - print('JIRA session was valid') -except Exception as e: # noqa:F841 - print('Error failed to authenticate with JIRA using session id') - traceback.print_exc() - sys.exit(1) - -print('JIRA session ID: {}'.format(session_id)) diff --git a/scripts/auto_gen_utils/hooks/pre-commit b/scripts/auto_gen_utils/hooks/pre-commit deleted file mode 100755 index e7ca4fbcb0..0000000000 --- a/scripts/auto_gen_utils/hooks/pre-commit +++ /dev/null @@ -1,2 +0,0 @@ -echo "Running linting and unit tests as pre-commit hook" -./verify.sh \ No newline at end of file diff --git a/scripts/auto_gen_utils/ocibuild.conf b/scripts/auto_gen_utils/ocibuild.conf deleted file mode 100644 index 42f7d96435..0000000000 --- a/scripts/auto_gen_utils/ocibuild.conf +++ /dev/null @@ -1,59 +0,0 @@ -runnerTag: latest -name: auto-gen-utils -team: SDK -phoneBookId: software-development-kit -description: "This builds the auto-gen-utils." 
-
-# Example: If you want to update the version to 3.1.0, set major 3, minor 1, and "master": 0
-majorversion: 0
-minorversion: 0
-initialBuildNumber: {
-  "master": 0
-}
-shortversion: ${majorversion}.${minorversion}
-snapshotVersion: ${shortversion}-SNAPSHOT
-version: ${shortversion}${?BLD_BRANCH_SUFFIX}.${BLD_NUMBER}
-
-triggerOnCommitBranches: ["master", "pull-requests"]
-
-# releaseBranches: ["master"]
-# Don't release anything, this is just running unit tests
-releaseBranches: []
-
-# 'dex-build' compartment in 'dex-us-phoenix-1'
-authCompartmentOcid: ocid1.compartment.oc1..aaaaaaaa2ttm66djhqpzww6vbx4cwd346wdhwc6h5od3rq5idzy7b3e4yyaq
-
-# TODO: fix flake8 problems in sdk_regions_updater, python_cli, and team_city_scripts
-# flake8Excludes: "./venv,./temp,./input_ocibuild,./output_ocibuild*"
-flake8Excludes: "./venv,./temp,./input_ocibuild,./output_ocibuild*,./sdk_regions_updater,./python_cli,./team_city_scripts"
-
-# TODO: fix these problems so we don't have to ignore the errors
-flake8IgnoredErrors: "N806,N802,N803,N817,E501,E128,E241,E231,W291,W293"
-
-steps:
-  [
-    {
-      name: python3-tests
-      type: python
-      pyVersion: 3.11
-      runnerImage: build-runner-python-ol8
-      pythonCommands: [
-        { args: "-m pip install -r requirements-ocibuild.txt"}
-        { args: "-m pytest" }
-        { args: "-m flake8 --exclude="${flake8Excludes}" --ignore="${flake8IgnoredErrors} }
-      ]
-      printVerboseLogs: false
-    },
-    {
-      name: python2-tests
-      type: python
-      pyVersion: 2.7
-      #runnerImage: build-runner-python-ol8
-      pythonCommands: [
-        { args: "-m pip install -r requirements-ocibuild.txt"}
-        { args: "-m pytest" }
-        { args: "-m flake8 --exclude="${flake8Excludes}" --ignore="${flake8IgnoredErrors} }
-      ]
-      printVerboseLogs: false
    }
  ]
\ No newline at end of file
diff --git a/scripts/auto_gen_utils/pull_request_tracker/.gitignore b/scripts/auto_gen_utils/pull_request_tracker/.gitignore
deleted file mode 100644
index eabe27bbca..0000000000
--- a/scripts/auto_gen_utils/pull_request_tracker/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-cassettes/
-reports/
diff --git a/scripts/auto_gen_utils/pull_request_tracker/README.md b/scripts/auto_gen_utils/pull_request_tracker/README.md
deleted file mode 100644
index 37ab6459cb..0000000000
--- a/scripts/auto_gen_utils/pull_request_tracker/README.md
+++ /dev/null
@@ -1,74 +0,0 @@
-DEXREQ PR Tracker
-========================
-
-Overview
------------------------
-The purpose of this tool is to generate a report that helps visualize the status of the various DEXREQ tickets for a given UDX feature.
-
-The script takes in a comma separated list of UDX ticket keys (e.g. UDX-1234,UDX-234) and generates 3 reports: one preview report showing only PRs to preview related to that UDX ticket, one public report showing only PRs to master related to that UDX ticket, and one full report showing both combined.
-
-Note that by default the script only looks back ~45 PRs in bitbucket history to limit the load on the Bitbucket APIs. If you want to include older PRs, you can override this with the --limit parameter.
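-For example, to look back over roughly the last 200 PRs per repository instead (the value here is purely illustrative):
-
-`python dexreq_pr_tracker.py --issues UDX-1234 --limit 200`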
-
-Installing dependencies
-------------------------
-To install dependencies for this project, execute the following command from the root of the auto-gen-utils project:
-
-`pip install -r requirements.txt`
-
-Generating a report
--------------------------
-Below is an example invocation of the script:
-
-`python dexreq_pr_tracker.py --issues UDX-494,UDX-497`
-
-Below is an example invocation of the script to generate a report for all tickets with a given GA date:
-
-`python dexreq_pr_tracker.py --sdk-cli-ga-date 2019-03-12`
-
-Below is an example invocation of the script with more of the optional parameters:
-
-`python dexreq_pr_tracker.py --issues UDX-494,UDX-497 --output-dir ~/reports/ --report-name release_10_18 --upload-to-object-storage`
-
-
-Common Workflow
--------------------------
-The goal for this tool is to allow generating reports for a specific release based on all of the UDX tickets that expect to be released. This report would be continually updated to allow the on-call and other members of the team to track the status of all PRs for a given release.
-
-Here is a suggested workflow for a release:
-* At the beginning of the release, figure out which UDX tickets are aiming for the RELEASE_DATE
-* Generate a report for all of those tickets using a standard report name and upload to object storage (e.g. --report-name release_{RELEASE_DATE})
-* In our internal tenancy (right now dex-us-phx-cli-1) create a PAR for the public, private, and full reports in object storage so you have a stable link that you can share for a given release
-* Continue reviewing PRs and adding / removing UDX tickets from the report as the release progresses. Each report generation with the same --report-name will overwrite the previous report in object storage
-
-
-Running in Team City
--------------------------
-This script can also be run in Team City as the following job:
-https://teamcity.oci.oraclecorp.com/viewType.html?buildTypeId=Sdk_PythonCli_Misc_GenerateDexreqPrStatusReport
-
-
-Persistent comments in reports
------------------------
-You can leave notes in the report under the 'comments' column, and when you lose focus on the text box the comments will be saved. Comments are persisted across reports; they are tied to the corresponding UDX ticket, so they will be shown in any report that includes that UDX ticket.
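-As a rough sketch of that save cycle (the real logic is JavaScript inside templates/report_table.html; the flat `{udx_key: comment}` datastore layout and the `save_comment` helper are illustrative assumptions, and the PAR URL is the one listed below):
-
-```python
-import json
-
-import requests
-
-# Read/write PAR for the shared comment datastore (see the PAR section below)
-DATASTORE_PAR = 'https://objectstorage.us-phoenix-1.oraclecloud.com/p/Sy7XXIKMDfXao-zvGfqXbMn6E1_xtz1GTCifWx6J8Cc/n/dex-us-phx-cli-1/b/dexreq_reports/o/datastore.json'
-
-
-def save_comment(udx_key, comment):
-    # read the shared datastore, upsert this ticket's note, write it back
-    datastore = requests.get(DATASTORE_PAR).json()
-    datastore[udx_key] = comment
-    requests.put(DATASTORE_PAR, data=json.dumps(datastore), headers={'Content-Type': 'application/json'})
-```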
- -Persistent comment storage is done through the following object storage PAR (valid through 2020): -https://objectstorage.us-phoenix-1.oraclecloud.com/p/Sy7XXIKMDfXao-zvGfqXbMn6E1_xtz1GTCifWx6J8Cc/n/dex-us-phx-cli-1/b/dexreq_reports/o/datastore.json - -This is also included in templates/report_table.html - - -Reports stored in object storage -------------------------------- - -Read reports homepage PAR: -This is used as a permalink for all team members to find all of the reports that have been created: -https://objectstorage.us-phoenix-1.oraclecloud.com/p/_VNrbExh9IlndgYeH2fsS5NByaEL9rBTKy2NlceNJjc/n/dex-us-phx-cli-1/b/dexreq_reports/o/reports_homepage.html - -Read / write reports homepage PAR: -This is used by publish_homepage.py to upload new versions of the homepage (mostly while developing) -https://objectstorage.us-phoenix-1.oraclecloud.com/p/3AARHQXKrCbO-S7PfKc2nJG0Ad5ZCeg9KEU4K4Y8RGs/n/dex-us-phx-cli-1/b/dexreq_reports/o/reports_homepage.html - -Read / Write Reports Index PAR: -This is used by the reports homepage to read the index of reports and display them -It is also used by the dexreq_pr_tracker to update the index of reports when a new one is created -https://objectstorage.us-phoenix-1.oraclecloud.com/p/L8mVstLyLPSGmVO5-vFP5VKukTQGhj1W5d0y1NCVbE4/n/dex-us-phx-cli-1/b/dexreq_reports/o/reports_index.json diff --git a/scripts/auto_gen_utils/pull_request_tracker/__init__.py b/scripts/auto_gen_utils/pull_request_tracker/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/scripts/auto_gen_utils/pull_request_tracker/dexreq_pr_tracker.py b/scripts/auto_gen_utils/pull_request_tracker/dexreq_pr_tracker.py deleted file mode 100644 index fef89e1119..0000000000 --- a/scripts/auto_gen_utils/pull_request_tracker/dexreq_pr_tracker.py +++ /dev/null @@ -1,918 +0,0 @@ -import six -import sys -import getpass - -import stashy -import os -import oci -import argparse -import vcr -import requests - -from datetime import datetime -from datetime import timedelta -from datetime import date -from dateutil import tz -import pytz -import re -import json -import time - -import jinja2 -from jira import JIRA - -# Add the root of the package, one directory up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '..')) - -import util # noqa: ignore=F402 - -BITBUCKET_SERVER_URL = "https://bitbucket.oci.oraclecorp.com" -PROJECT_NAME = "SDK" - -BITBUCKET_BUILD_STATUS_API_URL_PATH_FORMAT = "/rest/build-status/1.0/commits/{}" -BITBUCKET_PULL_REQUEST_CHANGES_API_URL_PATH_FORMAT = "/rest/api/1.0/projects/SDK/repos/{}/pull-requests/{}/changes" - -TICKET_TYPE_PUBLIC = "PUBLIC" -TICKET_TYPE_PREVIEW = "PREVIEW" -TICKET_TYPE_ALL = "ALL" - -MASTER_BRANCH_REF = 'refs/heads/master' -PREVIEW_BRANCH_REF = 'refs/heads/preview' - -STATE_OPEN = "OPEN" -STATE_MERGED = "MERGED" - -# how far back to look in PRs on bitbucket -PULL_REQUEST_PAGINATION_LIMIT = 45 - -REPO_SLUG_JAVA_SDK = 'java-sdk' -REPO_SLUG_CLI = 'python-cli' -REPO_SLUG_TESTING_SERVICE = 'oci-testing-service' -REPO_SLUG_RUBY_SDK = 'ruby-sdk' -REPO_SLUG_PYTHON_SDK = 'python-sdk' -REPO_SLUG_GO_SDK = 'oci-go-sdk' - -REPOS_TO_REPORT = [ - REPO_SLUG_JAVA_SDK, - REPO_SLUG_CLI, - REPO_SLUG_TESTING_SERVICE, - REPO_SLUG_RUBY_SDK, - REPO_SLUG_PYTHON_SDK, - REPO_SLUG_GO_SDK -] - -OUTPUT_FORMAT_TEXT = "text" -OUTPUT_FORMAT_HTML = "html" -OUTPUT_FORMAT_JSON = "json" - -JIRA_SERVER = 'https://jira.oci.oraclecorp.com' -JIRA_SERVER_REST_API_VERSION = 2 - -JIRA_OPTIONS = { - 'server': JIRA_SERVER, - 
'rest_api_version': JIRA_SERVER_REST_API_VERSION, - 'verify': True -} - -DEFAULT_JIRA_ISSUE_FIELDS = ['summary', 'description'] - -PUBLIC_IN_PROGRESS_QUERY = 'project = DEXREQ and issuetype = "Public" and status = "In Progress"' -PUBLIC_READY_FOR_RELEASE_QUERY = 'project = DEXREQ and issuetype = "Public" and status = "Ready for Release"' - -PUBLIC_DEXREQ_ISSUE_FROM_UDX_TICKET_QUERY_FORMAT = 'project = DEXREQ and (summary ~ "{udx_ticket}" or "UDX Ticket" ~ "{udx_ticket}") and issuetype = "Public"' -PREVIEW_DEXREQ_ISSUE_FROM_UDX_TICKET_QUERY_FORMAT = 'project = DEXREQ and (summary ~ "{udx_ticket}" or "UDX Ticket" ~ "{udx_ticket}") and issuetype = "Preview"' - -PUBLIC_WITH_GA_DATE_QUERY_FORMAT = 'project = DEXREQ and issuetype = "Public" and "SDK/CLI GA Date" = "{}"' - -CUSTOM_FIELD_UDX_CLI_GA_DATE = "customfield_11120" -CUSTOM_FIELD_UDX_SDK_GA_DATE = "customfield_11126" -CUSTOM_FIELD_UDX_CONSOLE_GA_DATE = "customfield_12197" -CUSTOM_FIELD_ID_UDX_TICKET = 'customfield_13596' - -CUSTOM_FIELD_FEATURE_AVAILABLE_IN_PROD = 'customfield_13780' - -OUTPUT_DIRECTORY = 'reports' -UPLOAD_TO_OBJECT_STORAGE = False -REPORT_NAME = 'report' - -# PAR with write access on bucket to add reports -OBJECT_STORAGE_DEXREQ_REPORTS_PAR = "https://objectstorage.us-phoenix-1.oraclecloud.com/p/ZZoZkNKUfyEyPQ_VVF3eA8HF28YobPfGBAuVSeJA2C8/n/dex-us-phx-cli-1/b/dexreq_reports_collection/o/" -OBJECT_STORAGE_DEXREQ_REPORTS_INDEX_RW_PAR = "https://objectstorage.us-phoenix-1.oraclecloud.com/p/L8mVstLyLPSGmVO5-vFP5VKukTQGhj1W5d0y1NCVbE4/n/dex-us-phx-cli-1/b/dexreq_reports/o/reports_index.json" - -TIME_ZONE_FOR_DATE_FORMATS = 'America/Los_Angeles' -DATE_FORMAT = '%h %d %I:%M %p %Z' - -VCR_INSTANCE = None - -SLEEP_BETWEEN_REQUESTS = 0.2 - -PYTHON_CLI_REPO_SLUG = 'python-cli' -JAVA_SDK_REPO_SLUG = 'java-sdk' -RUBY_SDK_REPO_SLUG = 'ruby-sdk' -GO_SDK_REPO_SLUG = 'oci-go-sdk' -PYTHON_SDK_REPO_SLUG = 'python-sdk' -OCI_TESTING_SERVICE_REPO_SLUG = 'oci-testing-service' - -# this was for a hack day so I'm allowed to do ugly things like this :) -DEX_TEAM_APPROVER_EMAILS = [ - # sdk team - 'joe.levy@oracle.com', - 'jyoti.s.saini@oracle.com', - 'mathias.ricken@oracle.com', - 'mingchi.stephen.mak@oracle.com', - 'omkar.p.patil@oracle.com', - 'peng.p.liu@oracle.com', - 'sun.yan@oracle.com', - 'vyas.bhagwat@oracle.com', - 'walt.tran@oracle.com', - 'ziyao.qiao@oracle.com', - 'yash.chandra@oracle.com', - 'vidhi.bhansali@oracle.com', - 'anurag.g.garg@oracle.com', - 'josh.hunter@oracle.com', - 'swetha.krishnan@oracle.com', - 'rakesh.kumar.parappa@oracle.com', - 'swarnava.s.sarkar@oracle.com', - 'ram.kishan.v.vooka@oracle.com', - 'nivedita.parihar@oracle.com', - 'kalpana.ramasamy@oracle.com', - 'joshua.r.ramirez@oracle.com', - 'eric.pendergrass@oracle.com', - - # CLI team - 'hamada.ibrahim@oracle.com', - 'kern.lee@oracle.com', - 'manoj.meda@oracle.com', - 'mike.c.ross@oracle.com', - 'srikanth.reddy.kumbham@oracle.com', - 'viral.modi@oracle.com', - 'vishwas.bhat@oracle.com', - 'h.harsh.kumar@oracle.com', - 'arun.swarnam@oracle.com', - 'varun.mankal@oracle.com', - 'zhongwan.wang@oracle.com', - 'alex.t.le@oracle.com', - 'mandy.tsai@oracle.com' -] - -REPORTS_NAMESPACE = 'dex-us-phx-cli-1' -REPORTS_BUCKET_NAME = 'dexreq_reports_collection' - -CONFIG_FILE_LOCATION = os.path.join('resources', 'config') - -AUTO_GEN_API_KEY_PASS_PHRASE_ENV_VAR = 'AUTO_GEN_PASS_PHRASE' - - -def unix_date_format(timestamp): - utc_datetime = datetime.utcfromtimestamp(timestamp / 1000) - return 
utc_datetime.replace(tzinfo=tz.tzutc()).astimezone(pytz.timezone(TIME_ZONE_FOR_DATE_FORMATS)).strftime(DATE_FORMAT) - - -def render_html(data): - environment = jinja2.Environment( - loader=jinja2.FileSystemLoader('./templates') - ) - - environment.filters['unix_date_format'] = unix_date_format - - template = environment.get_template('report_table.html') - return template.render(data) - - -def get_all_pull_requests(bb_client, repo_slug, at, state): - prs = [] - count = 0 - - for pull_request in bb_client.projects[PROJECT_NAME].repos[repo_slug].pull_requests.all(at=at, state=state): - # LOG.debug('Checking PR: ' + pull_request.get('title')) - if (pull_request.get('state', '') != 'DECLINED'): - prs.append(pull_request) - - count = count + 1 - if PULL_REQUEST_PAGINATION_LIMIT is not None and count > PULL_REQUEST_PAGINATION_LIMIT: - # print('Stopping paginating early for PRs, slug: {}, target={}, issue={}'.format(repo_slug, at, dexreq_issue_key)) - break - - _throttle() - - return prs - - -def build_clients(): - username = os.environ.get('JIRA_USERNAME') - if not username: - sys.stderr.write('Bitbucket / JIRA username: ') - username = six.moves.input() - - password = os.environ.get('JIRA_PASSWORD') - if not password: - password = getpass.getpass('Bitbucket / JIRA password: ', sys.stderr) - - bb_client = stashy.connect(BITBUCKET_SERVER_URL, username, password) - - # set environment variables so that JIRA client works - os.environ['JIRA_USERNAME'] = username - os.environ['JIRA_PASSWORD'] = password - - jira_client = JIRA(JIRA_OPTIONS, basic_auth=(username, password)) - return bb_client, jira_client - - -def get_dexreq_tickets_for_udx_tickets(jira_client, udx_ticket_keys): - udx_tickets = [] - for udx_ticket_key in udx_ticket_keys: - udx_issue = jira_client.issue(udx_ticket_key) - - udx_cli_ga_date = getattr(udx_issue.fields, CUSTOM_FIELD_UDX_CLI_GA_DATE) - udx_sdk_ga_date = getattr(udx_issue.fields, CUSTOM_FIELD_UDX_SDK_GA_DATE) - udx_console_ga_date = getattr(udx_issue.fields, CUSTOM_FIELD_UDX_CONSOLE_GA_DATE) - udx_status = udx_issue.fields.status.name - - public_issues = util.search_dexreq_issues(PUBLIC_DEXREQ_ISSUE_FROM_UDX_TICKET_QUERY_FORMAT.format(udx_ticket=udx_ticket_key), fields=(DEFAULT_JIRA_ISSUE_FIELDS + [CUSTOM_FIELD_FEATURE_AVAILABLE_IN_PROD])) - preview_issues = util.search_dexreq_issues(PREVIEW_DEXREQ_ISSUE_FROM_UDX_TICKET_QUERY_FORMAT.format(udx_ticket=udx_ticket_key)) - - udx_ticket = { - 'key': udx_issue.key, - 'summary': udx_issue.fields.summary, - 'status': udx_status, - 'console_ga_date': udx_console_ga_date, - 'sdk_ga_date': udx_sdk_ga_date, - 'cli_ga_date': udx_cli_ga_date - } - - if len(public_issues) > 0: - udx_ticket['public_issues'] = [] - for issue in public_issues: - field_value = getattr(issue.fields, CUSTOM_FIELD_FEATURE_AVAILABLE_IN_PROD) - available_in_prod = field_value and field_value.value == 'Yes' - udx_ticket['public_issues'].append({ - 'key': issue.key, - 'summary': issue.fields.summary, - 'available_in_prod': available_in_prod - }) - - if len(preview_issues) > 0: - udx_ticket['preview_issues'] = [] - for issue in preview_issues: - udx_ticket['preview_issues'].append({ - 'key': issue.key, - 'summary': issue.fields.summary - }) - - udx_tickets.append(udx_ticket) - - return udx_tickets - - -def does_pr_match_any_dexreqs(pull_request, dexreq_public_issue_keys, dexreq_preview_issue_keys, udx_issue_key): - # do some loose matching, sometimes people will improperly reference the ticket (e.g. 
UDX123 instead of UDX-123) - dexreq_public_issue_matches = False - dexreq_preview_issue_matches = False - - for public_key in dexreq_public_issue_keys: - public_issue_key_without_dash = public_key if public_key is None else public_key.replace('-', '') - dexreq_public_issue_matches = dexreq_public_issue_matches or is_string_referenced_by_pr(public_key, pull_request) or is_string_referenced_by_pr(public_issue_key_without_dash, pull_request) - - for preview_key in dexreq_preview_issue_keys: - preview_issue_key_without_dash = preview_key if preview_key is None else preview_key.replace('-', '') - dexreq_preview_issue_matches = dexreq_preview_issue_matches or is_string_referenced_by_pr(preview_key, pull_request) or is_string_referenced_by_pr(preview_issue_key_without_dash, pull_request) - - udx_issue_key_without_dash = udx_issue_key.replace('-', '') - udx_issue_matches = is_string_referenced_by_pr(udx_issue_key, pull_request) or is_string_referenced_by_pr(udx_issue_key_without_dash, pull_request) - - return dexreq_public_issue_matches or dexreq_preview_issue_matches or udx_issue_matches - - -def is_string_referenced_by_pr(text, pull_request): - return text is not None and (text in pull_request.get('title', '').lower() or text in pull_request.get('description', '').lower() or text in pull_request['fromRef']['id'].lower()) - - -def add_build_info_for_pr(pull_request): - latest_commit = pull_request['fromRef']['latestCommit'] - - headers = { - "X-Atlassian-Token": "no-check", - "Content-Type": "application/json" - } - - response = requests.get( - url=BITBUCKET_SERVER_URL + BITBUCKET_BUILD_STATUS_API_URL_PATH_FORMAT.format(latest_commit), - auth=(os.environ['JIRA_USERNAME'], os.environ['JIRA_PASSWORD']), - headers=headers, - verify=False - ) - - r = json.loads(response.content.decode('UTF-8')) - pull_request['builds'] = r['values'] - - _throttle() - - -def add_approver_info(pull_request): - for reviewer in pull_request['reviewers']: - if reviewer['approved']: - if 'emailAddress' in reviewer['user'] and reviewer['user']['emailAddress'] in DEX_TEAM_APPROVER_EMAILS: - pull_request['has_dex_approver'] = True - else: - pull_request['has_non_dex_approver'] = True - - if reviewer['status'] == 'NEEDS_WORK': - pull_request['needs_work'] = True - - -# this is slightly confusing since if both are missing we only say 'Non DEX approver' is missing -# if there is a DEX approver then we don't really care that we are missing a non-dex approver -# these checks are all used solely to determine if the PR is ready to be reviewed by DEX, and then -# separately we check if it has a DEX approver to move it into the final READY state -def check_has_any_approver(pull_request): - check_pass = pull_request.get('has_non_dex_approver') or pull_request.get('has_dex_approver') - if not check_pass: - pull_request['missing'].append('No non-DEX approver') - - return check_pass - - -def check_no_reviewers_marked_needs_work(pull_request): - check_pass = not pull_request.get('needs_work') - if not check_pass: - pull_request['missing'].append('PR marked Needs Work') - - return check_pass - - -def check_no_merge_conflict(pull_request): - check_pass = not (pull_request['properties'].get('mergeResult') and pull_request['properties']['mergeResult']['outcome'] == "CONFLICTED") - if not check_pass: - pull_request['missing'].append('Merge conflict') - - return check_pass - - -def check_all_builds_passing(pull_request): - check_pass = True - for build in pull_request['builds']: - if build['state'] == 'FAILED': - check_pass = False - break - - 
if not check_pass: - pull_request['missing'].append('Not all builds passing') - - return check_pass - - -def check_any_build_passing(pull_request): - check_pass = False - for build in pull_request['builds']: - if build['state'] == 'SUCCESSFUL': - check_pass = True - break - - if not check_pass: - pull_request['missing'].append('No build passing') - - return check_pass - - -def check_has_samples(pull_request): - check_pass = pull_request.get('has_samples') - if not check_pass: - pull_request['missing'].append('No samples') - - return check_pass - - -def check_has_recordings(pull_request): - check_pass = pull_request.get('has_recordings') - if not check_pass: - pull_request['missing'].append('No recordings') - - return check_pass - - -def check_has_changelog(pull_request): - # changelog is not required for preview - if pull_request['toRef']['id'] == 'refs/heads/preview': - return True - - check_pass = pull_request.get('has_changelog') - if not check_pass: - pull_request['missing'].append('No changelog') - - return check_pass - - -# this function determines which state the PR is in so that we can color it appropriately -# states: -# - MERGED - GRAY - PR is merged -# - READY FOR MERGE - GREEN - all pre-reqs are met on call could press 'MERGE' -# - READY FOR DEX PR - BLUE - all pre-reqs are met EXCEPT DEX approved -# - MISSING ANY PRE-REQS - YELLOW -def add_pr_overall_status(pull_request): - ready_for_dex_review_check_per_repo = { - PYTHON_CLI_REPO_SLUG: [ - check_has_any_approver, - check_no_merge_conflict, - check_has_samples, - check_has_recordings, - check_has_changelog, - check_any_build_passing, - check_no_reviewers_marked_needs_work - ], - JAVA_SDK_REPO_SLUG: [ - check_has_any_approver, - check_no_merge_conflict, - check_has_samples, - check_all_builds_passing, - check_no_reviewers_marked_needs_work - ], - OCI_TESTING_SERVICE_REPO_SLUG: [ - check_has_any_approver, - check_no_merge_conflict, - check_all_builds_passing, - check_no_reviewers_marked_needs_work - ], - PYTHON_SDK_REPO_SLUG: [ - check_has_any_approver, - check_no_merge_conflict, - check_all_builds_passing, - check_no_reviewers_marked_needs_work - ], - GO_SDK_REPO_SLUG: [ - check_has_any_approver, - check_no_merge_conflict, - check_all_builds_passing, - check_no_reviewers_marked_needs_work - ], - RUBY_SDK_REPO_SLUG: [ - check_has_any_approver, - check_no_merge_conflict, - check_all_builds_passing, - check_no_reviewers_marked_needs_work - ] - } - - pull_request['missing'] = [] - repo_slug = pull_request['toRef']['repository']['slug'] - - ready_for_dex_review = True - for check in ready_for_dex_review_check_per_repo[repo_slug]: - ready_for_dex_review = check(pull_request) and ready_for_dex_review - - ready_for_merge = ready_for_dex_review and pull_request.get('has_dex_approver') - - if pull_request['state'] == 'MERGED': - # PR is merged, that status takes precedence over everyone else - pull_request['dexreq_status_class'] = 'list-group-item-secondary' - elif ready_for_merge: - # EVERY pre-req is met for this, all on-call has to do is click merge - # only exception is for CLI, we allow the build against master PySDK to be failing - pull_request['dexreq_status_class'] = 'list-group-item-success' - elif ready_for_dex_review: - # every pre-req is met EXCEPT review by DEX team member - pull_request['dexreq_status_class'] = 'list-group-item-primary' - else: - # missing some pre-reqs (will be displayed in 'Missing') - pull_request['dexreq_status_class'] = 'list-group-item-warning' - - -def has_cli_samples(change): - components = 
change['path']['components'] - return len(components) > 2 and components[0] == 'scripts' and components[1] == 'examples' - - -def has_java_samples(change): - components = change['path']['components'] - return len(components) > 0 and components[0] == 'bmc-examples' - - -def has_cli_recordings(change): - components = change['path']['components'] - yml_extension = 'extension' in change['path'] and change['path']['extension'] == 'yml' - return yml_extension and \ - len(components) > 5 and \ - 'cassettes' in components and \ - 'tests' in components - - -def has_cli_changelog(change): - components = change['path']['components'] - return len(components) > 0 and components[0] == 'changelog_entries' - - -def add_pr_pre_req_checks_to_pr(bb_client, pull_request): - pr_id = pull_request['id'] - repo_slug = pull_request['toRef']['repository']['slug'] - - detect_samples_func_for_repo = { - PYTHON_CLI_REPO_SLUG: has_cli_samples, - JAVA_SDK_REPO_SLUG: has_java_samples - } - - detect_recordings_func_for_repo = { - PYTHON_CLI_REPO_SLUG: has_cli_recordings - } - - detect_changelog_func_for_repo = { - PYTHON_CLI_REPO_SLUG: has_cli_changelog - } - - detect_samples_func = detect_samples_func_for_repo.get(repo_slug) - detect_recordings_func = detect_recordings_func_for_repo.get(repo_slug) - detect_changelog_func = detect_changelog_func_for_repo.get(repo_slug) - - headers = { - "X-Atlassian-Token": "no-check", - "Content-Type": "application/json" - } - - is_last_page = False - start = 'null' - - has_samples = False - has_recordings = False - has_changelog = False - - # we can quit paginating early if we find what we're looking for - while not is_last_page and (not has_samples or not has_recordings or not has_changelog): - response = requests.get( - url=BITBUCKET_SERVER_URL + BITBUCKET_PULL_REQUEST_CHANGES_API_URL_PATH_FORMAT.format(repo_slug, pr_id) + '?start={}'.format(start), - auth=(os.environ['JIRA_USERNAME'], os.environ['JIRA_PASSWORD']), - headers=headers, - verify=False - ) - - r = json.loads(response.content.decode('UTF-8')) - for change in r['values']: - if detect_samples_func: - if detect_samples_func(change): - has_samples = True - - if detect_recordings_func: - if detect_recordings_func(change): - has_recordings = True - - if detect_changelog_func: - if detect_changelog_func(change): - has_changelog = True - - is_last_page = r['isLastPage'] - start = r['nextPageStart'] - - pull_request['has_samples'] = has_samples - pull_request['has_recordings'] = has_recordings - pull_request['has_changelog'] = has_changelog - - _throttle() - - -def generate_report(bb_client, jira_client, issues): - print('Fetching PRs for all tickets...') - all_testing_service_prs_preview = get_all_pull_requests(bb_client, REPO_SLUG_TESTING_SERVICE, PREVIEW_BRANCH_REF, "ALL") - all_java_prs_preview = get_all_pull_requests(bb_client, REPO_SLUG_JAVA_SDK, PREVIEW_BRANCH_REF, "ALL") - all_cli_prs_preview = get_all_pull_requests(bb_client, REPO_SLUG_CLI, PREVIEW_BRANCH_REF, "ALL") - all_ruby_prs_preview = get_all_pull_requests(bb_client, REPO_SLUG_RUBY_SDK, PREVIEW_BRANCH_REF, "ALL") - all_python_prs_preview = get_all_pull_requests(bb_client, REPO_SLUG_PYTHON_SDK, PREVIEW_BRANCH_REF, "ALL") - all_go_prs_preview = get_all_pull_requests(bb_client, REPO_SLUG_GO_SDK, PREVIEW_BRANCH_REF, "ALL") - - all_testing_service_prs_master = get_all_pull_requests(bb_client, REPO_SLUG_TESTING_SERVICE, MASTER_BRANCH_REF, "ALL") - all_java_prs_master = get_all_pull_requests(bb_client, REPO_SLUG_JAVA_SDK, MASTER_BRANCH_REF, "ALL") - all_cli_prs_master = 
get_all_pull_requests(bb_client, REPO_SLUG_CLI, MASTER_BRANCH_REF, "ALL") - all_ruby_prs_master = get_all_pull_requests(bb_client, REPO_SLUG_RUBY_SDK, MASTER_BRANCH_REF, "ALL") - all_python_prs_master = get_all_pull_requests(bb_client, REPO_SLUG_PYTHON_SDK, MASTER_BRANCH_REF, "ALL") - all_go_prs_master = get_all_pull_requests(bb_client, REPO_SLUG_GO_SDK, MASTER_BRANCH_REF, "ALL") - - udx_tickets = get_dexreq_tickets_for_udx_tickets(jira_client, issues) - for udx_ticket in udx_tickets: - udx_ticket_key = udx_ticket['key'].lower() - - preview_prs = {} - dexreq_preview_issue_keys = [] - if 'preview_issues' in udx_ticket: - dexreq_preview_issue_keys = [x['key'].lower() for x in udx_ticket['preview_issues']] - - public_prs = {} - dexreq_public_issue_keys = [] - if 'public_issues' in udx_ticket: - dexreq_public_issue_keys = [x['key'].lower() for x in udx_ticket['public_issues']] - - preview_prs['testing'] = [] - preview_prs['java'] = [] - preview_prs['cli'] = [] - preview_prs['ruby'] = [] - preview_prs['python'] = [] - preview_prs['go'] = [] - - public_prs['testing'] = [] - public_prs['java'] = [] - public_prs['cli'] = [] - public_prs['ruby'] = [] - public_prs['python'] = [] - public_prs['go'] = [] - - # find all preview PRs that match either the public dexreq, preview dexreq, or UDX - preview_prs['testing'].extend([pr for pr in all_testing_service_prs_preview if does_pr_match_any_dexreqs(pr, dexreq_public_issue_keys, dexreq_preview_issue_keys, udx_ticket_key)]) - preview_prs['java'].extend([pr for pr in all_java_prs_preview if does_pr_match_any_dexreqs(pr, dexreq_public_issue_keys, dexreq_preview_issue_keys, udx_ticket_key)]) - preview_prs['cli'].extend([pr for pr in all_cli_prs_preview if does_pr_match_any_dexreqs(pr, dexreq_public_issue_keys, dexreq_preview_issue_keys, udx_ticket_key)]) - preview_prs['ruby'].extend([pr for pr in all_ruby_prs_preview if does_pr_match_any_dexreqs(pr, dexreq_public_issue_keys, dexreq_preview_issue_keys, udx_ticket_key)]) - preview_prs['python'].extend([pr for pr in all_python_prs_preview if does_pr_match_any_dexreqs(pr, dexreq_public_issue_keys, dexreq_preview_issue_keys, udx_ticket_key)]) - preview_prs['go'].extend([pr for pr in all_go_prs_preview if does_pr_match_any_dexreqs(pr, dexreq_public_issue_keys, dexreq_preview_issue_keys, udx_ticket_key)]) - - # find all master PRs that match either the public dexreq, preview dexreq, or UDX - public_prs['testing'].extend([pr for pr in all_testing_service_prs_master if does_pr_match_any_dexreqs(pr, dexreq_public_issue_keys, dexreq_preview_issue_keys, udx_ticket_key)]) - public_prs['java'].extend([pr for pr in all_java_prs_master if does_pr_match_any_dexreqs(pr, dexreq_public_issue_keys, dexreq_preview_issue_keys, udx_ticket_key)]) - public_prs['cli'].extend([pr for pr in all_cli_prs_master if does_pr_match_any_dexreqs(pr, dexreq_public_issue_keys, dexreq_preview_issue_keys, udx_ticket_key)]) - public_prs['ruby'].extend([pr for pr in all_ruby_prs_master if does_pr_match_any_dexreqs(pr, dexreq_public_issue_keys, dexreq_preview_issue_keys, udx_ticket_key)]) - public_prs['python'].extend([pr for pr in all_python_prs_master if does_pr_match_any_dexreqs(pr, dexreq_public_issue_keys, dexreq_preview_issue_keys, udx_ticket_key)]) - public_prs['go'].extend([pr for pr in all_go_prs_master if does_pr_match_any_dexreqs(pr, dexreq_public_issue_keys, dexreq_preview_issue_keys, udx_ticket_key)]) - - udx_ticket['pull_requests'] = { - "preview": preview_prs, - "master": public_prs - } - - print('Fetching build info for all PRs for 
{}...'.format(udx_ticket['key'])) - for lang,prs in six.iteritems(preview_prs): - for pr in prs: - add_build_info_for_pr(pr) - add_pr_pre_req_checks_to_pr(bb_client, pr) - add_approver_info(pr) - add_pr_overall_status(pr) - - for lang,prs in six.iteritems(public_prs): - for pr in prs: - add_build_info_for_pr(pr) - add_pr_pre_req_checks_to_pr(bb_client, pr) - add_approver_info(pr) - add_pr_overall_status(pr) - - seattle_now = datetime.now(pytz.timezone(TIME_ZONE_FOR_DATE_FORMATS)) - response = { - "report_name": REPORT_NAME, - "generated_time": seattle_now.strftime(DATE_FORMAT), - "tickets": udx_tickets - } - - udx_ticket_stamp = '_'.join(issues) # noqa:F841 - timestamp = datetime.utcnow().strftime('%m_%d_%H_%M') # noqa:F841 - - if not os.path.exists(OUTPUT_DIRECTORY): - os.makedirs(OUTPUT_DIRECTORY) - - preview_report_location = os.path.abspath(os.path.join(OUTPUT_DIRECTORY, '{report_name}_preview.html'.format(report_name=REPORT_NAME))) - public_report_location = os.path.abspath(os.path.join(OUTPUT_DIRECTORY, '{report_name}_public.html'.format(report_name=REPORT_NAME))) - full_report_location = os.path.abspath(os.path.join(OUTPUT_DIRECTORY, '{report_name}_full.html'.format(report_name=REPORT_NAME))) - - # create 3 reports, one for public only, one for preview, and one for combined - with open(preview_report_location, 'w') as preview_report_file: - response['show_preview'] = True - response['show_public'] = False - - preview_report_file.write(render_html(response)) - - with open(public_report_location, 'w') as public_report_file: - response['show_preview'] = False - response['show_public'] = True - - public_report_file.write(render_html(response)) - - with open(full_report_location, 'w') as public_report_file: - response['show_preview'] = True - response['show_public'] = True - - public_report_file.write(render_html(response)) - - if UPLOAD_TO_OBJECT_STORAGE: - upload_reports_to_object_storage([ - { - "location": preview_report_location, - "report_type": "preview" - }, - { - "location": public_report_location, - "report_type": "public" - }, - { - "location": full_report_location, - "report_type": "full" - } - ]) - - print('Reports uploaded to object storage. Tenancy: {}. 
Bucket: {}'.format(REPORTS_NAMESPACE, REPORTS_BUCKET_NAME))
-
-    print('Preview report written to: {}'.format(preview_report_location))
-    print('Public report written to: {}'.format(public_report_location))
-    print('Full report written to: {}'.format(full_report_location))
-
-
-# - Uploads the report files to object storage in the dexreq_reports bucket
-# - If OCI Python SDK is configured, will also attempt to update the reports_index
-#   which contains PARs for all existing reports
-# - This is used to render the reports homepage
-def upload_reports_to_object_storage(reports):
-    result = requests.get(url=OBJECT_STORAGE_DEXREQ_REPORTS_INDEX_RW_PAR)
-    reports_index = json.loads(result.content.decode('UTF-8'))
-    reports_from_server = reports_index['reports']
-
-    new_reports = {}
-    for report in reports:
-        file_path = report['location']
-        file_name = os.path.basename(report['location'])
-        report_type = report['report_type']
-
-        # upload report to object storage
-        with open(file_path, 'r') as f:
-            requests.put(
-                url=OBJECT_STORAGE_DEXREQ_REPORTS_PAR + file_name,
-                data=f.read(),
-                headers={
-                    "Content-Type": "text/html"
-                }
-            )
-
-        # check if PAR for this report already exists in reports index, if not, create it
-        if isinstance(reports_from_server, dict):
-            if REPORT_NAME in reports_from_server and report_type in reports_from_server[REPORT_NAME]:
-                print('Report {}: {} already exists in index, skipping creating new PAR...'.format(REPORT_NAME, report_type))
-                continue
-        else:
-            print('Failed to retrieve reports index from object storage, not creating PAR or adding to index')
-            continue
-
-        # no existing PAR for this report so create one
-        # catch all exceptions because this part is optional if user hasn't configured OCI Python SDK
-        try:
-            config = oci.config.from_file(file_location=CONFIG_FILE_LOCATION)
-            if AUTO_GEN_API_KEY_PASS_PHRASE_ENV_VAR not in os.environ:
-                raise ValueError('No passphrase specified for API key. Please populate environment variable: {}'.format(AUTO_GEN_API_KEY_PASS_PHRASE_ENV_VAR))
-
-            config['pass_phrase'] = os.environ[AUTO_GEN_API_KEY_PASS_PHRASE_ENV_VAR]
-            object_storage_client = oci.object_storage.ObjectStorageClient(config)
-            create_par_details = oci.object_storage.models.CreatePreauthenticatedRequestDetails(
-                name='Read Access for DEXREQ Report: {} - {}'.format(REPORT_NAME, report_type),
-                object_name=file_name,
-                access_type=oci.object_storage.models.CreatePreauthenticatedRequestDetails.ACCESS_TYPE_OBJECT_READ,
-                time_expires=datetime.today() + timedelta(2 * 365 / 12)  # two months from now
-            )
-
-            response = object_storage_client.create_preauthenticated_request(REPORTS_NAMESPACE, REPORTS_BUCKET_NAME, create_par_details)
-            if response.status != 200:
-                print('Failed creating par: ' + str(response.data))
-                continue
-
-            par = response.data
-            new_reports[report_type] = 'https://objectstorage.us-phoenix-1.oraclecloud.com' + par.access_uri
-
-        except Exception as e:
-            print('Failed to create new PAR for report')
-            print(str(e))
-            continue
-
-    # if there are any new reports, update the report index with them
-    if new_reports and isinstance(reports_from_server, dict):
-        reports_from_server[REPORT_NAME] = new_reports
-
-        print('Updated reports index: ' + str(reports_from_server))
-
-        result = requests.put(
-            url=OBJECT_STORAGE_DEXREQ_REPORTS_INDEX_RW_PAR,
-            headers={'Content-Type': 'application/json'},
-            data=json.dumps(reports_index)
-        )
-
-
-def get_udx_tickets_with_sdk_cli_ga_date(bb_client, jira_client, ga_date):
-    all_udx_tickets = set()
-    public_issues_with_ga_date = util.search_dexreq_issues(PUBLIC_WITH_GA_DATE_QUERY_FORMAT.format(ga_date))
-    for public_issue in public_issues_with_ga_date:
-        udx_keys = re.findall("UDX-[0-9]+", public_issue.fields.summary)
-        if not udx_keys:
-            udx_ticket_field_value = getattr(public_issue.fields, CUSTOM_FIELD_ID_UDX_TICKET)
-            if udx_ticket_field_value:
-                udx_keys = [x.strip() for x in udx_ticket_field_value.split(',')]
-
-        if not udx_keys:
-            print('WARNING: Did not find any corresponding UDX tickets for: {}'.format(public_issue.key))
-
-        all_udx_tickets.update(udx_keys)
-
-    return all_udx_tickets
-
-
-def validate_date_format(date_text):
-    try:
-        datetime.strptime(date_text, '%Y-%m-%d')
-    except ValueError:
-        return False
-
-    return True
-
-
-def _throttle():
-    # no need to sleep if we are running against recordings
-    if VCR_INSTANCE.record_mode != "none":
-        time.sleep(SLEEP_BETWEEN_REQUESTS)
-
-
-# 0 = Monday, 1=Tuesday, 2=Wednesday...
-def next_weekday(d, weekday):
-    days_ahead = weekday - d.weekday()
-    if days_ahead <= 0:  # Target day already happened this week
-        days_ahead += 7
-    return d + timedelta(days_ahead)
-
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description='Print information about in progress and complete pull requests for a given DEXREQ issue.')
-    parser.add_argument('--issues',
-                        help='A comma separated list of UDX tickets to generate a report for')
-    parser.add_argument('--limit',
-                        type=int,
-                        help='The maximum number of PRs to look back over. This is to avoid querying the entire PR history unnecessarily')
-    parser.add_argument('--output',
-                        default='html',
-                        help='Output format. Options are: HTML')
-    parser.add_argument('--output-dir',
-                        default=OUTPUT_DIRECTORY,
-                        help='Output directory. Default is {}'.format(OUTPUT_DIRECTORY))
-    parser.add_argument('--report-name',
-                        default=REPORT_NAME,
-                        help="""The name of the report to generate. This will be generated in the output directory.
-                        Reports will have the name {{report-name}}_full.html, {{report-name}}_preview.html, and {{report-name}}_public.html.""")
-    parser.add_argument('--upload-to-object-storage',
-                        default=False,
-                        action='store_true',
-                        help="""Whether or not to upload reports to object storage in dexreq_reports bucket.""")
-    parser.add_argument('--vcr-record-mode',
-                        default='all',
-                        help="""VCR record mode to use: https://vcrpy.readthedocs.io/en/latest/usage.html#record-modes""")
-    parser.add_argument('--sdk-cli-ga-date',
-                        help="""Create a report for all DEXREQ tickets with the given GA Date (YYYY-MM-DD).
-                        This uses the SDK / CLI GA Date field on the public DEXREQ ticket and then looks up the corresponding UDX tickets.
-                        This parameter may not be used with --issues""")
-    parser.add_argument('--next-release-n',
-                        type=int,
-                        default=-1,
-                        help="""Create a report for all DEXREQ tickets for the nth release date from now.
-                        For example '0' indicates the next release, '1' indicates 2 releases from now.""")
-
-    args = parser.parse_args()
-    issues = args.issues
-    limit = args.limit
-    output = args.output
-    output_dir = args.output_dir
-    report_name = args.report_name
-    upload_to_object_storage = args.upload_to_object_storage
-    vcr_record_mode = args.vcr_record_mode
-    sdk_cli_ga_date = args.sdk_cli_ga_date
-    next_release_n = args.next_release_n
-
-    print('Starting...')
-
-    if issues:
-        issues = [issue.strip() for issue in issues.split(',')]
-    else:
-        issues = []
-
-    if not issues and not sdk_cli_ga_date and next_release_n == -1:
-        sys.exit('Must specify either --issues or --sdk-cli-ga-date or --next-release-n parameter')
-
-    if sum([bool(issues), bool(sdk_cli_ga_date), next_release_n != -1]) > 1:
-        sys.exit('Can only use one of --issues, --sdk-cli-ga-date, and --next-release-n parameters')
-
-    if sdk_cli_ga_date and not validate_date_format(sdk_cli_ga_date):
-        sys.exit('--sdk-cli-ga-date was not in valid date format (YYYY-MM-DD)')
-
-    if limit:
-        PULL_REQUEST_PAGINATION_LIMIT = limit
-
-    if output_dir:
-        OUTPUT_DIRECTORY = output_dir
-
-    if upload_to_object_storage:
-        UPLOAD_TO_OBJECT_STORAGE = True
-
-    if report_name:
-        REPORT_NAME = report_name
-
-    if next_release_n != -1:
-        today = date.today() + timedelta(7 * next_release_n)
-        next_tuesday_date_str = next_weekday(today, 1).isoformat()
-        REPORT_NAME = 'release_{}'.format(next_tuesday_date_str.replace('-', '_'))
-        sdk_cli_ga_date = next_tuesday_date_str
-        print('Fetching tickets for release date: {}'.format(next_tuesday_date_str))
-
-    VCR_INSTANCE = vcr.VCR(
-        serializer='yaml',
-        record_mode=vcr_record_mode
-    )
-
-    with VCR_INSTANCE.use_cassette('cassettes/dexreq_pr_tracker.yaml'):
-        bb_client, jira_client = build_clients()
-        udx_tickets = issues
-        if sdk_cli_ga_date:
-            udx_tickets = get_udx_tickets_with_sdk_cli_ga_date(bb_client, jira_client, sdk_cli_ga_date)
-
-        print("Generating report for tickets: {}".format(', '.join(udx_tickets)))
-        generate_report(bb_client, jira_client, udx_tickets)
diff --git a/scripts/auto_gen_utils/pull_request_tracker/homepage/publish_homepage.py b/scripts/auto_gen_utils/pull_request_tracker/homepage/publish_homepage.py
deleted file mode 100644
index 81cc1e33fd..0000000000
--- a/scripts/auto_gen_utils/pull_request_tracker/homepage/publish_homepage.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import requests
-import os
-
-# PAR that allows overwriting reports_homepage so we can publish new versions of it
-reports_homepage_par =
'https://objectstorage.us-phoenix-1.oraclecloud.com/p/3AARHQXKrCbO-S7PfKc2nJG0Ad5ZCeg9KEU4K4Y8RGs/n/dex-us-phx-cli-1/b/dexreq_reports/o/reports_homepage.html' - -homepage_html_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'reports_homepage.html') - -with open(homepage_html_file_path, 'r') as f: - content = f.read() - result = requests.put( - reports_homepage_par, - data=content, - headers={'Content-type': 'text/html'} - ) - - if result.status_code == 200: - print('Successful upload') - else: - print('Failed uploading') - print(str(result.content)) diff --git a/scripts/auto_gen_utils/pull_request_tracker/homepage/reports_homepage.html b/scripts/auto_gen_utils/pull_request_tracker/homepage/reports_homepage.html deleted file mode 100644 index 81471460c4..0000000000 --- a/scripts/auto_gen_utils/pull_request_tracker/homepage/reports_homepage.html +++ /dev/null @@ -1,88 +0,0 @@ - - - - - - - -
-    All reports
- - - - - - - \ No newline at end of file diff --git a/scripts/auto_gen_utils/pull_request_tracker/homepage/reports_index.json b/scripts/auto_gen_utils/pull_request_tracker/homepage/reports_index.json deleted file mode 100644 index b9baa1bde2..0000000000 --- a/scripts/auto_gen_utils/pull_request_tracker/homepage/reports_index.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "reports" : {} -} \ No newline at end of file diff --git a/scripts/auto_gen_utils/pull_request_tracker/resources/config b/scripts/auto_gen_utils/pull_request_tracker/resources/config deleted file mode 100644 index c99ee33ccd..0000000000 --- a/scripts/auto_gen_utils/pull_request_tracker/resources/config +++ /dev/null @@ -1,6 +0,0 @@ -[DEFAULT] -user = ocid1.user.oc1..aaaaaaaa7wzg3q5rbxxvp2qfdkeof7cy3cgymnxxud5up6qgyztiugjerxha -fingerprint = 5a:3c:61:f0:1c:23:4f:8e:aa:cf:e9:fb:c2:b1:5a:19 -key_file = resources/dexreq_pr_tracker.pem -tenancy = ocid1.tenancy.oc1..aaaaaaaa3vi3ft3yi3sq4nhiql4nvbzjz6gipbn72h7werl6njs6xsq4wgdq -region = us-phoenix-1 \ No newline at end of file diff --git a/scripts/auto_gen_utils/pull_request_tracker/resources/dexreq_pr_tracker.pem b/scripts/auto_gen_utils/pull_request_tracker/resources/dexreq_pr_tracker.pem deleted file mode 100644 index 37b3634f35..0000000000 --- a/scripts/auto_gen_utils/pull_request_tracker/resources/dexreq_pr_tracker.pem +++ /dev/null @@ -1,30 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -Proc-Type: 4,ENCRYPTED -DEK-Info: AES-256-CBC,D47053C2F807DBA4EC1C3517C89C84FF - -obQstVROMeR9Oo5jNlVvpR9VNNaKwn/OXre0c5phgNabW3YYaRlOUXpmitLhNkkE -6ms6q1oUsrzHbD6TqFLnUzloGy7xbPVAaf7b4pmcTX8Wa50x/v3COKGW6hCXRQi+ -qZOnRWEYvp6fOybLTBKU2qWXn9LWGRvNVYNKIlRyQIz85CU+fyv2egh9vmdtCYIU -h0kMk2uINrf+jtPxjzx1b9MBjOq8btkKhFgahkDJleMGgGyxjp/W/aK7f6YfNcnk -iDBF+WJq/VdaWDjhQcTQg4Y4Cyq+XQAU1rYdOfYVVr1s8tlzVwJ6WpHitGuyXbcM -keGkRYndbII4AVRP02eafc4alvGVOaAM3LIsnjrAJdrWKW4x1GOtjAhTPNnCBPOl -bi/85vfQILBKALn79XTY5Nhv64Wyo1HJCv+hsrc++JuDq3izb19al+UyvaqhvPOT -YgqqN+oLVSZtLnupx22JuPx9XE/FYJJJCdUNfrDCQhagICn6TrrwgRcqYtWtOWkp -4tRLOKuNCtqWRpsfEXMrDFWiYjhpKHRwgVbLKO270FhbgD63KSDWxWMqNpcJIIxb -wQQXheltdUxS9GbAwcWA7lF6S/GDKc3gkrv8EEIePeJzuPL4pDNhFRHCxbut+wbd -E1gyAVcRQZir8bw7kmxVqGi0L2oGA6dHU6Y1nwlyxwJ7AoMNuLgmeeOjHXUT936m -rmoJ4pIZQT8SdPhepXinfQTS8+1rzDb9LFDcOcJws1qVegRCz+Jaa9+KXeBEjiP5 -Art6tC3O5cd9wI3LmcAsYzvkgDK6G2L68Wkby87K5HpNjcrZmxILsgHAqjtrGnFQ -H3bB5lu28gS900AtguCJZGamrsqhsZytQFTAq+DfDSf9ufLVJQUt1qYUjbPfv+bL -NTWmYrxbW4hwecPcvK7d+n/5pcR58nSQwxXN9DiIl2QH58I4nbvIPNCDVPgf9RXT -1TjTsLufjzjfbR3DxeEoF+TeDcmZdqVnLN3/CSnfwqL0Ia0g9fG3Ntv/Q5vGimFE -2N/5AUvacshk+MeF3RERvNxMqzglEkTDS4jtGulaaoaWZZd319lBJRCJIgoMQqJN -YDp0h5IIx+1q1pt+QT8yNPczY10Oj/OHkzzoUlWpbsIM1e7x25d69qQh1zuBvNVp -501DQI8UapFq5Q6KJp9KpfNHpzcEyIxDh6+0WQCiWQSRlEMw64rol1gETC/C2qnH -YZ1kARN6oePZWHankL/gAPdhjO/K2G0TE8L4zK8PjpJFpWMsOwzfsJFW39UkfeV1 -KRghaThoUNio4MJuhd2iBdEh0LePC8fnWVpaGc49a2d2TCbrS+JLiw++f0tJrrYN -di10V2HO82YBnsiYf3pnRZ7J+LPUMFJ22cXI3UoN3Iu2tguVWul6pF+Dhg/f7xBA -CRQohlUUtDTZBkKo2Yrp4wDacvWf2xwaip4UFJu6+J8/hJGuTxD49S+otYXgojn7 -PLaEywpsItUR4A1ezgAWU7GUyHJt94U/15knNEfL1HdUdEMrDDX4/zP8gXTlhtaM -23xT0gdFRwhhy2NE5TFpXkbL/iYyZZDBGF0YkloXHqA9tmExrFeR5HVf3LeEe9cY ------END RSA PRIVATE KEY----- diff --git a/scripts/auto_gen_utils/pull_request_tracker/resources/dexreq_pr_tracker_public.pem b/scripts/auto_gen_utils/pull_request_tracker/resources/dexreq_pr_tracker_public.pem deleted file mode 100644 index 3b4d773f59..0000000000 --- a/scripts/auto_gen_utils/pull_request_tracker/resources/dexreq_pr_tracker_public.pem +++ /dev/null @@ -1,9 +0,0 @@ ------BEGIN PUBLIC 
KEY----- -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAruzfhDQ+PNVeF8BRBXxM -Wagd/Fqyg50ZT9AQI9qJhp9fOHeCeS3u8BeY7MnZDCngfRZ/rZ6a+s3dzkku72hr -QxwkH1/8qqZHkI/CbUeABPi74aad+Ja757qcQ/+kjW+ccDSpY3/4gtvLmvttmo8N -SJHF6W2F1LAkuKs+f/fKysnRPSDphtgAB/pL49k1Zh5dCa/0/B7FZ+Tm/MqvA88Q -3kCW8lj0BDZU0J1TyvqHNk5byPEWoOK3KyQbZstuqjBbZDpPCH1iDu711myy5kVP -hagfS7KV5ut00+ZXpCtPZnZFg960JV02BzcCHENoDd+ZC5JXVdoXXa35fnu/y4HX -2wIDAQAB ------END PUBLIC KEY----- diff --git a/scripts/auto_gen_utils/pull_request_tracker/tc_run.sh b/scripts/auto_gen_utils/pull_request_tracker/tc_run.sh deleted file mode 100755 index bef4459528..0000000000 --- a/scripts/auto_gen_utils/pull_request_tracker/tc_run.sh +++ /dev/null @@ -1,49 +0,0 @@ -set -e -set -x - -ISSUE_FILTER_ARG="" -if [ ! -z "$ISSUES" ]; then - ISSUE_FILTER_ARG="--issues "$ISSUES -fi - -LIMIT_ARG="" -if [ ! -z "$LIMIT" ]; then - LIMIT_ARG="--limit "$LIMIT -fi - -UDX_STATUS_ARG="" -if [ ! -z "$UDX_STATUS" ]; then - UDX_STATUS_ARG="--udx-status "$UDX_STATUS -fi - -REPORT_NAME_ARG="" -if [ ! -z "$REPORT_NAME" ]; then - REPORT_NAME_ARG="--report-name "$REPORT_NAME -fi - -UPLOAD_TO_OBJECT_STORAGE_ARG="" -if [ ! -z "$UPLOAD_TO_OBJECT_STORAGE" ]; then - UPLOAD_TO_OBJECT_STORAGE_ARG="--upload-to-object-storage" -fi - -SDK_CLI_GA_DATE_ARG="" -if [ ! -z "$SDK_CLI_GA_DATE" ]; then - SDK_CLI_GA_DATE_ARG="--sdk-cli-ga-date "$SDK_CLI_GA_DATE -fi - -NEXT_RELEASE_N_ARG="" -if [ ! -z "$NEXT_RELEASE_N" ]; then - NEXT_RELEASE_N_ARG="--next-release-n "$NEXT_RELEASE_N -fi - -eval "$(pyenv init -)" -eval "$(pyenv init --path)" -pyenv shell cli-3 - -MOUNT_DIR=auto-gen-utils - -cd $MOUNT_DIR/pull_request_tracker - -pip ${PIP_TIMEOUT_PARAMETER} install -r ../requirements.txt - -python dexreq_pr_tracker.py $ISSUE_FILTER_ARG $LIMIT_ARG $UDX_STATUS_ARG $REPORT_NAME_ARG $UPLOAD_TO_OBJECT_STORAGE_ARG $SDK_CLI_GA_DATE_ARG $NEXT_RELEASE_N_ARG diff --git a/scripts/auto_gen_utils/pull_request_tracker/templates/issue_table.html b/scripts/auto_gen_utils/pull_request_tracker/templates/issue_table.html deleted file mode 100644 index 840fcf16d9..0000000000 --- a/scripts/auto_gen_utils/pull_request_tracker/templates/issue_table.html +++ /dev/null @@ -1,142 +0,0 @@ -{% if show_public %} - - -
-{# [HTML markup lost in extraction] Inside show_public, one table row per UDX ticket:
-   {{ udx_ticket.key }} (linked), {{ udx_ticket.status }}, {{ udx_ticket.summary }},
-   and the GA dates "Console {{ udx_ticket.console_ga_date }}",
-   "CLI {{ udx_ticket.cli_ga_date }}", "SDK {{ udx_ticket.sdk_ga_date }}"; then, when
-   udx_ticket.public_issues is set, a PUBLIC-{{ public_issue.key }} link per issue with
-   an "Available in Prod" indicator keyed off public_issue.available_in_prod; then one
-   cell per pipeline (testing, java, cli, python, ruby, go) looping
-   udx_ticket.pull_requests.master.<pipeline> through pull_request_table.html, and a
-   trailing comments cell. #}
-{% endif %}
-{% if show_preview %}
-{# [HTML markup lost in extraction] The same key/status/summary/GA-date row is emitted
-   here when not show_public; then, when udx_ticket.preview_issues is set, a
-   PREVIEW-{{ preview_issue.key }} link per issue and one cell per pipeline that sets
-   more_prs_div_id = "more-prs-preview-<pipeline>" and
-   pull_requests = udx_ticket.pull_requests.preview.<pipeline> before including
-   pull_requests_table.html, plus a trailing comments cell. #}
-{% endif %}
diff --git a/scripts/auto_gen_utils/pull_request_tracker/templates/pull_request_table.html b/scripts/auto_gen_utils/pull_request_tracker/templates/pull_request_table.html
deleted file mode 100644
index 08912e35db..0000000000
--- a/scripts/auto_gen_utils/pull_request_tracker/templates/pull_request_table.html
+++ /dev/null
@@ -1,45 +0,0 @@
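{# Editor's sketch, not part of the original template: unix_date_format (applied to
   pull_request.updatedDate below) is a custom Jinja2 filter, presumably registered by
   the tracker script along these lines, assuming Bitbucket's updatedDate is epoch
   milliseconds:
     import datetime
     def unix_date_format(ms):
         return datetime.datetime.utcfromtimestamp(ms / 1000).strftime('%Y-%m-%d %H:%M')
     env.filters['unix_date_format'] = unix_date_format
#}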
-{# [commented-out older layout: a bullet with pull_request.title, its Author, and the
-   APPROVED reviewers as Approvers] #}
-{% set needs_work = (pull_request.reviewers | selectattr('status', 'equalto', 'NEEDS_WORK') | list | length) > 0 %}
-{% set approved = (pull_request.reviewers | selectattr('status', 'equalto', 'APPROVED') | list | length) > 0 %}
-{% set build_failure = (pull_request.builds | selectattr('state', 'equalto', 'FAILED') | list | length) > 0 %}
-{# [HTML markup lost in extraction] One status-styled cell per pull request:
-   {{ pull_request.title | truncate(50) }} linking to the PR;
-   "Author: {{ pull_request.author.user.displayName }} | Approvers:" followed by the
-   displayNames of reviewers whose status is APPROVED, comma-joined;
-   "Updated: {{ pull_request.updatedDate | unix_date_format }}"; and, when
-   pull_request.missing is set, "Blockers: {{ pull_request.missing | join(', ') }}". #}
diff --git a/scripts/auto_gen_utils/pull_request_tracker/templates/pull_requests_table.html b/scripts/auto_gen_utils/pull_request_tracker/templates/pull_requests_table.html
deleted file mode 100644
index 543841733f..0000000000
--- a/scripts/auto_gen_utils/pull_request_tracker/templates/pull_requests_table.html
+++ /dev/null
@@ -1,15 +0,0 @@
-{% if (pull_requests| length) > 1 %}
-    {% set pull_request = pull_requests[0] %}
-    {% include 'pull_request_table.html' %}
-    {# [markup lost] a collapsed div (id = {{ more_prs_div_id }}) holds the rest: #}
-    {% for pull_request in pull_requests[1:] %}
-    {% include 'pull_request_table.html' %}
-    {% endfor %}
-    {# [markup lost] a "view more / less" link toggles that div #}
-{% else %}
-    {% for pull_request in pull_requests %}
-    {% include 'pull_request_table.html' %}
-    {% endfor %}
-{% endif %}
\ No newline at end of file
diff --git a/scripts/auto_gen_utils/pull_request_tracker/templates/report_table.html b/scripts/auto_gen_utils/pull_request_tracker/templates/report_table.html
deleted file mode 100644
index ba1e05a112..0000000000
--- a/scripts/auto_gen_utils/pull_request_tracker/templates/report_table.html
+++ /dev/null
@@ -1,250 +0,0 @@
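{# Editor's sketch, not part of the original template: a minimal render call for this
   page, with hypothetical argument values (the real driver was dexreq_pr_tracker.py):
     from jinja2 import Environment, FileSystemLoader
     env = Environment(loader=FileSystemLoader('templates'))
     html = env.get_template('report_table.html').render(
         report_name='Preview', generated_time='2024-06-04 20:13',
         show_public=False, show_preview=True, tickets=udx_tickets)
#}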
-{# [HTML markup lost in extraction] The remaining ~250 lines were a full HTML page
-   (head, styles, scripts). Recoverable structure: a heading reading
-   "{{report_name}}: UDX tickets as of {{ generated_time }}"; a status legend with the
-   entries Merged, Missing prerequisites, Ready for DEX Review, and Ready for Merge; a
-   table with header columns UDX | DEXREQ | Testing Service | Java | CLI | Python |
-   Ruby | Go | Comments; and, when tickets is set, one issue_table.html include per
-   udx_ticket in tickets. #}
    - - - - - - - - \ No newline at end of file diff --git a/scripts/auto_gen_utils/python_cli/__init__.py b/scripts/auto_gen_utils/python_cli/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/auto_close_ticket.py b/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/auto_close_ticket.py deleted file mode 100644 index 8d24a00971..0000000000 --- a/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/auto_close_ticket.py +++ /dev/null @@ -1,33 +0,0 @@ -import jira_client_util -import constants -import os -from datetime import datetime, timedelta - - -if __name__ == "__main__": - - username = os.environ[constants.JIRA_SD_USERNAME] - password = os.environ[constants.JIRA_SD_PASSWORD] - if constants.PROJECT_ID_ENV_VAR in os.environ: - constants.PROJECT_ID = os.environ[constants.PROJECT_ID_ENV_VAR] - - jira = jira_client_util.JiraClient(constants.JIRA_SERVER, - constants.JIRA_SERVER_REST_API_VERSION, - username=username,password=password) - - print("PROJECT ID = {}".format(constants.PROJECT_ID)) - # Calculate the date 3 days ago - three_days_ago = (datetime.now() - timedelta(days=3)).strftime('%Y-%m-%d') - pending_customer_issue_list = jira.get_issue_list(constants.PROJECT_ID,constants.CLOSE_TICKET_STATE, three_days_ago) - - # close long pending customer ticket - for issue in pending_customer_issue_list: - print("Processing {}".format(issue)) - # select resolution - resolution = {'name': 'Closed'} - resolution_field = {'resolution': resolution} - # closing comment - jira.post_comment(issue, constants.CLOSE_COMMENT) - # mark as resolved - jira.close_pending_customer_issue(issue, "^^", resolution_field) - diff --git a/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/auto_suggest.py b/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/auto_suggest.py deleted file mode 100644 index 38c2ad8a9e..0000000000 --- a/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/auto_suggest.py +++ /dev/null @@ -1,338 +0,0 @@ -import jira_client_util -import re -import json -import constants -import os -import csv -from itertools import takewhile - - -def snake_to_camel(snake_case): - """ - - :param snake_case: - :return: camel case converted string - """ - if '_' not in snake_case: - return snake_case - temp = snake_case.split('_') - ret = ''.join(ele.title() for ele in temp) - camel_case = str(ret) - return camel_case - - -def get_queue_name_from_oci_cli_cmd_name(service_name): - """ - fetches the jira-sd name for given oci cli command - in the service-jira-phonebook-mapping.csv file. - :param cmd: key in auto-route-jira-sd/auto_suggest.json - :return: correct jira-sd queue name - """ - with open('{}/{}'.format(constants.PATH_TO_CSV, constants.OCI_CLI_CMD_JIRA_SD)) as json_file: - data = json.load(json_file) - print(service_name) - if service_name in data: - return data[service_name] - else: - return None - - -def get_queue_name_from_api_tool_name(api_tool_name): - """ - fetches the jira-sd name for given api_tool_name - in the service-jira-phonebook-mapping.csv file. 
- :param api_tool_name: as mentioned in service-jira-phonebook-mapping.csv - :return: correct jira-sd queue name - """ - jira_csv_mapping_file = constants.PATH_TO_CSV + constants.JIRA_MAP_CSV - reader = csv.DictReader(open(jira_csv_mapping_file)) - for row in reader: - if row['API/Tool'] == api_tool_name: - print("Corresponding jira-sd: " + row["Jira-sd"]) - return row["Jira-sd"] - return None - - -def get_suggestion_string_from_oci_cli_command(msg): - try: - for line in msg.split("\n"): - if "oci " in line: - line.lstrip().rstrip() - cmd_list = line.split() - # As command can be in format -'command = oci os ns get' ,so finding position of oci keyword - oci_position = cmd_list.index('oci') - # if 'oci' is present - if oci_position != -1: - actual_cmd_list = cmd_list[oci_position:] - # remove all authentication params from command - for param in constants.AUTHENTICATION_PARAMS: - if param in actual_cmd_list: - param_idx = actual_cmd_list.index(param) - # ex- --auth abc - del actual_cmd_list[param_idx: param_idx + 2] - if actual_cmd_list[0].lstrip().rstrip() == 'oci' and len(actual_cmd_list) > 2: - service_name = actual_cmd_list[1].lstrip().rstrip() - jira_sd = get_queue_name_from_oci_cli_cmd_name(service_name) - if jira_sd is not None: - suggestion = constants.JIRA_SD_MISROUTE_MESSAGE + "Queue: " + jira_sd.upper() - else: - suggestion = constants.JIRA_SD_MISROUTE_MESSAGE + "Service Team: " + service_name - print("suggestion string: " + suggestion) - return suggestion - return None - except Exception: - print("Unable to parse jira-sd comment") - return None - - -def get_suggestion_string_from_service_error_code(target_service='', operation_name='', error_code='',message=''): - """ - If error_code is 401 then, ask customer to follow troubleshooting guide, still - if the issue persists, then move it to service team. - For other cases, (non 401 code), fetch appropriate jira-sd queue & suggest moving - the issue in it. - :param target_service: as per the service error - :param operation_name: as per the service error - :param error_code: as per the service error - :return: The suggestion for the customer to be posted on jira-sd. - """ - suggestion = "" - # if 401 add suggestion string - if error_code == "401" or 'status: 401' in message: - suggestion = constants.SUGGESTION_MESSAGE - - # if operation_name and target service is present - if operation_name != '' and target_service != '': - operation_name = snake_to_camel(operation_name) - api_tool_name = target_service.lower() + '_' + operation_name - print(api_tool_name) - jira_sd = get_queue_name_from_api_tool_name(api_tool_name) - if jira_sd is not None: - suggestion = suggestion + constants.JIRA_SD_MISROUTE_MESSAGE + "Queue: " + \ - jira_sd.upper() - else: - suggestion = suggestion + constants.JIRA_SD_MISROUTE_MESSAGE + "Service: " + \ - target_service - - else: - res = get_suggestion_string_from_oci_cli_command(message) - # if command is provided suggest to move to right queue. - if res is not None: - suggestion = suggestion + res - # if command is not provided in description,ask for it. 
- else: - suggestion = suggestion + constants.AUTOMATION_SEARCH_STRING + constants.JIRA_SD_ALTERNATE_MESSAGE - print("suggestion string: " + suggestion) - return suggestion - - -def get_client_version(comment): - version_line_pattern = r'.*client[-_\s]{0,1}version.*?:' - quote_or_space = r'\\\'\" ' - quote_or_space_or_newline_or_comma = r',\'\"$ ' - for line in comment.splitlines(): - match_object = re.search(version_line_pattern, line.lower()) - if match_object: - line = line[match_object.end():].lstrip(quote_or_space) - until_word = list( - takewhile(lambda x: x not in quote_or_space_or_newline_or_comma, line)) - return ''.join(until_word) - return None - - -def parse_python_error(error_string): - operation_pattern = "operation_name: (.*?)," - service_pattern = "target_service: (.*?)," - status_pattern = "status: (.*?)," - operation = re.search(operation_pattern, error_string) - service = re.search(service_pattern, error_string) - status = re.search(status_pattern, error_string) - if operation is not None: - operation = operation.group(1) - if service is not None: - service = service.group(1) - if status is not None: - status = status.group(1) - else: - status = "" - return {constants.ERROR_FIELD_OPERATION_NAME: operation, - constants.ERROR_FIELD_TARGET_SERVICE: service, - constants.ERROR_FIELD_STATUS: status} - - -def parse_java_error(error_string): - operation_pattern = "Error returned by (.*) operation" - service_pattern = "operation in (.*) service" - operation = re.search(operation_pattern, error_string) - service = re.search(service_pattern, error_string) - if operation is not None: - operation = operation.group(1).strip() - if service is not None: - service = service.group(1).strip() - return { - constants.ERROR_FIELD_OPERATION_NAME: operation, - constants.ERROR_FIELD_TARGET_SERVICE: service} - - -def parse_go_error(error_string): - operation_pattern = "Operation Name: (.*)" - service_pattern = "Error returned by (.*) Service\." 
- operation = re.search(operation_pattern, error_string) - service = re.search(service_pattern, error_string) - if operation is not None: - operation = operation.group(1).strip() - if service is not None: - service = service.group(1).strip() - return {constants.ERROR_FIELD_OPERATION_NAME: operation, - constants.ERROR_FIELD_TARGET_SERVICE: service} - - -def parse_ts_error(error_string): - operation_pattern = "operationName: (.*?)," - service_pattern = "targetService: (.*?)," - operation_name = re.search(operation_pattern, error_string).group(1) - if operation_name is not None: - operation_name_char_arr = list(operation_name) - operation_name_char_arr[0] = operation_name_char_arr[0].upper() - operation_name = ''.join(operation_name_char_arr) - operation_name = operation_name.strip() - service = re.search(service_pattern, error_string) - if service is not None: - service = service.group(1).strip() - return {constants.ERROR_FIELD_OPERATION_NAME: operation_name, - constants.ERROR_FIELD_TARGET_SERVICE: service} - - -def find_error_and_parse(error_string): - error_dict = {} - if all_keywords_in_error(constants.KEYWORDS, error_string.lower()): - client_version = get_client_version(error_string) - print(client_version) - if client_version is not None: - if "Oracle-JavaSDK" in client_version: - error_dict = parse_java_error(error_string) - elif "Oracle-GoSDK" in client_version or "Oracle-DotNetSDK" in client_version: - error_dict = parse_go_error(error_string) - elif "Oracle-TypeScriptSDK" in client_version: - error_dict = parse_ts_error(error_string) - elif "Oracle-PythonSDK" in client_version or "": - error_dict = parse_python_error(error_string) - if error_dict == {}: - print("Service error is not provided by the reporter. No actions taken") - return None - key_list_err_dict = list(error_dict.keys()) - if error_dict[constants.ERROR_FIELD_OPERATION_NAME] in constants.EMPTY or \ - error_dict[constants.ERROR_FIELD_TARGET_SERVICE] in constants.EMPTY: - print("Could not find the operation_name or target_service") - return None - if constants.ERROR_FIELD_STATUS not in key_list_err_dict: - error_dict[constants.ERROR_FIELD_STATUS] = "" - return get_suggestion_string_from_service_error_code( - error_dict[constants.ERROR_FIELD_TARGET_SERVICE], - error_dict[constants.ERROR_FIELD_OPERATION_NAME], - error_dict[constants.ERROR_FIELD_STATUS]) - else: - print("Client version not provided ") - return None - - -def search_in_comments(jira, jira_id): - """ - Iterate through all comments in the ticket, and search the service error - text in them. 
- """ - # first, search in description - description = jira.get_description(str(jira_id)) - if description is not None: - description = description.replace(" ", ' ').replace('{', '').replace('}', '').\ - replace('\'', '').replace('"', '') - res = find_error_and_parse(description) - if res is not None: - return res - elif all_keywords_in_error(constants.SERVICE_ERROR_KEYWORDS, description): - return get_suggestion_string_from_service_error_code(description) - - # Second, search in comments - comment_list = jira.get_comment(str(jira_id)) - for comment in reversed(comment_list): - error_string = comment.body.replace(" ", ' ').replace('{', '').replace('}', '').replace( - '\'', '').replace('"', '') - res = find_error_and_parse(error_string) - if res is not None: - return res - # If customer didn't follow SR template or customer is using old oci-cli version, try to find CLI command and - # find correct jira-sd queue - elif all_keywords_in_error(constants.SERVICE_ERROR_KEYWORDS, comment.body) and jira.get_comment_author( - comment) != constants.AUTHOR: - print( - "Customer may be using old version of OCI-CLI. Service Error found in jira-sd comment") - return get_suggestion_string_from_service_error_code(error_string) - return None - - -def all_keywords_in_error(keywords, error_string): - for keyword in keywords: - if keyword not in error_string: - return False - return True - -def get_prefix_label(project_id): - if project_id == constants.CLI_PROJECT_ID: - return constants.CLI_LABEL_PREFIX - else: - return constants.SDK_LABEL_PREFIX - -def add_jira_sd_labels(issue, labels, prefix = ''): - current_labels = issue.fields.labels if issue.fields.labels else [] - for label_to_add in labels: - if label_to_add not in current_labels: - current_labels.append(prefix + label_to_add) - issue.update(fields={'labels': current_labels}) - -def is_already_processed(jira, jira_id): - comment_list = jira.get_comment(str(jira_id)) - # Iterate through all comments in the ticket, search for Automation comment or author - for comment in comment_list: - if constants.AUTOMATION_SEARCH_STRING in comment.body: - print("This ticket {} is already processed by automation".format(jira_id)) - return True - # This condition should work when we have a jira-sd bot as author - if jira.get_comment_author(comment) == constants.THIS_BOT_AUTHOR: - print("This ticket {} is already processed by automation".format(jira_id)) - return True - return False - - -if __name__ == "__main__": - - username = os.environ[constants.JIRA_SD_USERNAME] - password = os.environ[constants.JIRA_SD_PASSWORD] - if constants.PROJECT_ID_ENV_VAR in os.environ: - constants.PROJECT_ID = os.environ[constants.PROJECT_ID_ENV_VAR] - - jira = jira_client_util.JiraClient(constants.JIRA_SERVER, - constants.JIRA_SERVER_REST_API_VERSION, - username=username,password=password) - - print("PROJECT ID = {}".format(constants.PROJECT_ID)) - jira_issue_list = jira.get_issue_list(constants.PROJECT_ID, constants.JIRA_BOT_PROCESSING_STATES) - for issue in jira_issue_list: - print("Processing {}".format(issue)) - if is_already_processed(jira, issue): - continue - reply_comment = search_in_comments(jira, issue) - if reply_comment is None: - print("Could not find Jira-sd queue name for jira: {} ".format(issue)) - continue - else: - # Its redundant, to be removed after state transition workflow is stable. 
- if constants.JIRA_SD_ALTERNATE_MESSAGE not in reply_comment: - prefix = get_prefix_label(constants.PROJECT_ID) - add_jira_sd_labels(issue, constants.LABELS, prefix) - # add common label - add_jira_sd_labels(issue, constants.COMMON_LABEL) - jira.post_comment(issue, reply_comment) - # move the state to pending customer. - jira.change_state(issue, constants.PENDING_CUSTOMER_STATE, "^^", - constants.PENDING_CUSTOMER_STATE_CHECK) - diff --git a/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/confluence_api_utils/utils.py b/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/confluence_api_utils/utils.py deleted file mode 100644 index 61b88842bc..0000000000 --- a/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/confluence_api_utils/utils.py +++ /dev/null @@ -1,51 +0,0 @@ -import subprocess -import sys -subprocess.check_call([sys.executable, "-m", "pip", "install", "atlassian-python-api"]) -from atlassian import Confluence - -def auth_using_username_password(host,username,password): - conf = Confluence(url=host, username=username, password=password) - return conf - -def auth_using_personal_access_token(host,personal_access_token): - conf = Confluence(url=host, token=personal_access_token) - return conf - -def get_content_of_confluence_page_by_page_id(conf,page_id): - content = conf.get_page_by_id(page_id=page_id, expand='body.storage') - print(content['title']) - print(content['body']['storage']['value']) - -def get_content_of_confluence_page_by_page_space_and_title(conf,space,title): - content = conf.get_page_by_title(space=space, title=title,expand='body.storage') - print(content['title']) - print(content['body']['storage']['value']) - -def get_all_attachment_name(page_id,conf): - t = conf.get_attachments_from_content(page_id=page_id ) - return [t['results'][i]['title'] for i in range(len(t['results']))] - -def upload_with_no_replacement_a_csv_file_to_confluence_page(conf,page_id,file_to_be_uploaded): - # get all files at that page - temp = get_all_attachment_name(page_id,conf) - # check if same name file already exist - if file_to_be_uploaded in temp: - print('file already exist') - return - - conf.attach_file(filename=file_to_be_uploaded, name=file_to_be_uploaded, page_id=page_id, comment='new file uploaded') - print("file attached to confluence page") - -def remove_a_csv_file_from_confluence_page(conf,page_id,file_to_be_removed): - #get all files at that page - temp=get_all_attachment_name(page_id,conf) - #check if file exist - if file_to_be_removed not in temp: - print('file do not exist') - return - conf.delete_attachment(page_id=page_id,filename=file_to_be_removed) - print("file removed from confluence page") - -def upload_with_replacement_a_csv_file_to_confluence_page(conf,page_id,file_to_be_uploaded): - conf.attach_file(filename=file_to_be_uploaded, name=file_to_be_uploaded, page_id=page_id, comment=' ') - print("file attached to confluence page") \ No newline at end of file diff --git a/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/confluence_automation.py b/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/confluence_automation.py deleted file mode 100644 index 8c3fd9cd7c..0000000000 --- a/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/confluence_automation.py +++ /dev/null @@ -1,38 +0,0 @@ -import os -from confluence_api_utils import utils - -username = os.environ.get("CONFLUENCE_USERNAME") -password = os.environ.get("CONFLUENCE_PASSWORD") -personal_access_token = os.environ.get("CONFLUENCE_TOKEN") -page_id = "2461340038" #used page id for jira phonebook 
lookup tool page -file_name='sample.csv' -host = 'https://confluence.oci.oraclecorp.com' - -# below 2 are required if you wanted to access confluence page using space and title -page_space_name = '' -page_title = '' - -# auth 0 means authentication using username-password and 1 means using PAT -auth=1 - -if auth == 0: - conf = utils.auth_using_username_password(host,username,password) - -# If you want to login through Personal acess token -if auth == 1: - conf = utils.auth_using_personal_access_token(host,personal_access_token) - -# If you want page content by page id -#content1 = utils.get_content_of_confluence_page_by_page_id(conf,page_id) - - -#If you want page content by space and title --- uncomment to use -#content1 = utils.get_content_of_confluence_page_by_page_space_and_title(conf,page_space_name, page_title) - -# upload a csv file -#utils.upload_with_no_replacement_a_csv_file_to_confluence_page(conf,page_id,file_name) - -# remove a csv file -#utils.remove_a_csv_file_from_confluence_page(conf,page_id,file_name) - -utils.upload_with_replacement_a_csv_file_to_confluence_page(conf,page_id,file_name) diff --git a/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/constants.py b/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/constants.py deleted file mode 100644 index fbed9eedc3..0000000000 --- a/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/constants.py +++ /dev/null @@ -1,41 +0,0 @@ -JIRA_SD_TOKEN_ENV_VAR = 'JIRA_SD_TOKEN' -KEYWORDS = ["operation", "service", "client", "version"] -AUTOMATION_SEARCH_STRING = "This is an auto-generated message from JIRA BOT." -JIRA_SD_MISROUTE_MESSAGE = "{} Error message indicates that this issue is happening at the " \ - "service end. Please move this ticket to ".format(AUTOMATION_SEARCH_STRING) -JIRA_SD_ALTERNATE_MESSAGE = "Please provide more details (example-command used,version you are using). \n\n" -EMPTY = [None, ''] -JIRA_SD_FOUND = 0 -PENDING_CUSTOMER_STATE = "PENDING CUSTOMER" -PENDING_CUSTOMER_STATE_CHECK = "Pending Customer" -PATH_TO_CSV = "../python-cli/scripts/doc_gen/issue_routing/" -JIRA_MAP_CSV = "service_jira_phonebook_mapping.csv" -OCI_CLI_CMD_JIRA_SD = "oci_cli_cmd_jira_sd.json" -JIRA_SERVER = "https://jira-sd.mc1.oracleiaas.com" -JIRA_SERVER_REST_API_VERSION = 2 -TOKEN = "" -PROJECT_ID = "27401" -AUTHOR = "Jira Automation Bot" -ERROR_FIELD_TARGET_SERVICE = "target_service" -ERROR_FIELD_OPERATION_NAME = "operation_name" -ERROR_FIELD_STATUS = "status" -SERVICE_ERROR_KEYWORDS = ["ServiceError:","opc-request-id"] -AUTHENTICATION_PARAMS = ["--profile", "--config-file", "--auth", "--region", "--endpoint", "--cert-bundle", "--auth-purpose", "--cli-rc-file"] -PROJECT_ID_ENV_VAR = "PROJECT_ID" -THIS_BOT_AUTHOR = "auto-suggest-jira-sd-sdk" -SUGGESTION_MESSAGE = AUTOMATION_SEARCH_STRING + " Please first troubleshoot using this doc " \ - "https://docs.oracle.com/en-us/iaas/Content/API/References" \ - "/apierrors.htm#apierrors_401 If issue still exists, please try the next " \ - "suggestion. \n\n" -JIRA_SD_USERNAME="JIRA_SD_USERNAME" -JIRA_SD_PASSWORD="JIRA_SD_PASSWORD" -SDK_PROJECT_ID= "11001" -CLI_PROJECT_ID= "27401" -SDK_LABEL_PREFIX = "CD_SDK_" -CLI_LABEL_PREFIX = "CD_CLI_" -LABELS = ["Misrouted", "SERVICE_ERR_BOT"] -COMMON_LABEL=["DS_MISROUTED"] -CLOSE_COMMENT = AUTOMATION_SEARCH_STRING + " Closing the ticket as there is no reponse from the customer. " \ - "Please reopen the ticket if any further help is needed." 
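# Editor's sketch, not part of the original file: the close-ticket constants here
# (CLOSE_COMMENT above, CLOSE_TICKET_STATE just below) feed JiraClient.get_issue_list
# (defined in jira_client_util.py below), which builds JQL such as
#   project=27401 and status in ("Pending Customer") and updated<"2024-06-01"
# so the stale-ticket sweep in auto_close_ticket.py reduces to (date hypothetical):
#   cutoff = (datetime.now() - timedelta(days=3)).strftime('%Y-%m-%d')
#   for issue in jira.get_issue_list(PROJECT_ID, CLOSE_TICKET_STATE, cutoff):
#       jira.post_comment(issue, CLOSE_COMMENT)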
-CLOSE_TICKET_STATE = ["Pending Customer"] -JIRA_BOT_PROCESSING_STATES = ["Pending Engineering", "Open", "In Progress"] \ No newline at end of file diff --git a/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/jira_client_util.py b/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/jira_client_util.py deleted file mode 100644 index f41e37e7f5..0000000000 --- a/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/jira_client_util.py +++ /dev/null @@ -1,60 +0,0 @@ -from jira import JIRA - - -class JiraClient: - def __init__(self, server, rest_api_version, username, password, content_type='application/json', verify_certi=True): - jira_option = { - 'server': server, - 'rest_api_version': rest_api_version, - 'verify': verify_certi, - 'headers': { - 'Content-Type': content_type - } - } - self.jira_instance = JIRA(jira_option,basic_auth=(username,password)) - - def get_jira_issue(self, jira_id): - return self.jira_instance.issue(jira_id) - - def get_description(self, jira_id): - issue = self.get_jira_issue(jira_id) - return issue.fields.description - - def post_comment(self, jira_id, comment): - self.jira_instance.add_comment(self.get_jira_issue((jira_id)), comment) - - def get_comment(self, jira_id): - issue = self.get_jira_issue(jira_id) - return issue.fields.comment.comments - - def close_pending_customer_issue(self, jira_id, closing_comment, resolution_field): - issue = self.get_jira_issue(jira_id) - current_state = str(issue.fields.status) - if current_state == 'Pending Customer': - resolved_state = "Resolve Issue " - self.jira_instance.transition_issue(issue, transition="Start Progress") - self.jira_instance.transition_issue(issue, transition=resolved_state, comment=closing_comment, - fields=resolution_field) - - def change_state(self, jira_id, new_state, closing_comment, state_condition): - issue = self.get_jira_issue(jira_id) - current_state = str(issue.fields.status) - if current_state == "Pending Engineering" or current_state == "Open": - self.jira_instance.transition_issue(issue, transition="Start Progress") - if current_state != state_condition: - self.jira_instance.transition_issue(issue, transition=new_state, comment=closing_comment) - else: - print("State condition check failed. 
Probably issue is already in the new state.") - - def get_comment_author(self, comment): - return str(comment.author.displayName) - - def get_issue_list(self, project_id, state_list=None, update_date=None): - query = 'project={}'.format(project_id) - if state_list: - state_list_str = ", ".join(['"{}"'.format(status) for status in state_list]) - query += f' and status in ({state_list_str})' - if update_date: - query += f' and updated<"{update_date}"' - issues = self.jira_instance.search_issues(query) - return issues diff --git a/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/mos_mapping.py b/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/mos_mapping.py deleted file mode 100644 index d3732ed834..0000000000 --- a/scripts/auto_gen_utils/python_cli/auto-route-jira-sd/mos_mapping.py +++ /dev/null @@ -1,24 +0,0 @@ -import pandas as pd -import constants - -PATH_TO_MOS_MAP_CSV = constants.PATH_TO_CSV + "mos_mapping_file.csv" - - -def load_mos_mapping_csv(): - df = pd.read_csv(constants.PATH_TO_MOS_MAP_CSV, encoding='latin-1') - return df - - -def fetch_comp_sub_comp(jira_q): - """ - :param jira_q: jira sd queue code - :return: list of mos component code and subcomponent code - """ - df = load_mos_mapping_csv() - df = df.query('`JIRA_PROJECT` == "' + jira_q + '"') - if not df.empty: - component_code = df["COMPONENT_CODE"].values.tolist()[0] - sub_comp_code = df["SUBCOMPONENT_CODE"].values.tolist()[0] - else: - print("Entry for " + jira_q + " not found") - return [component_code, sub_comp_code] diff --git a/scripts/auto_gen_utils/python_cli/check_design_review_tickets.py b/scripts/auto_gen_utils/python_cli/check_design_review_tickets.py deleted file mode 100644 index fba88656db..0000000000 --- a/scripts/auto_gen_utils/python_cli/check_design_review_tickets.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved. -# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. 
- -import util -import python_cli.jira_util as jira_util -import python_cli.constants as constants - -design_reviews = jira_util.get_open_design_review_tickets() -for design_review in design_reviews: - issue = jira_util.get_jira_issue(design_review.key) - if not hasattr(issue.fields, 'assignee') or not hasattr(issue.fields.assignee, 'key'): - continue - - if jira_util.is_new_design_review(issue): - jira_util.add_jira_comment(issue, jira_util.NEW_DESIGN_REVIEW_TEMPLATE.format(user=issue.fields.assignee.key)) - - if jira_util.no_changes_required(issue, constants.CLI_TEAM_MEMBERS): - jira = jira_util.JIRA_CLIENT() - util.transition_issue_overall_status(jira, issue, 'Done') - - issue = jira_util.get_jira_issue(design_review.key) - if issue.fields.status.name == 'Done': - jira_util.add_jira_comment(issue, jira_util.NO_MANUAL_CHANGES_TICKET_CLOSED.format(user=issue.fields.reporter.key)) - - if jira_util.not_updated_in_seven_days(issue): - udx_ticket = jira_util.get_udx_ticket_from_design_review(issue) - if udx_ticket: - public_ticket = jira_util.get_public_ticket_from_udx(udx_ticket) - if public_ticket: - days = jira_util.is_ga_date_within_month(issue) - if days: - jira_util.add_jira_comment(issue, jira_util.UPCOMING_GA_DATE_TEMPLATE.format(user=issue.fields.assignee.key, days=days)) diff --git a/scripts/auto_gen_utils/python_cli/constants.py b/scripts/auto_gen_utils/python_cli/constants.py deleted file mode 100644 index da59fce669..0000000000 --- a/scripts/auto_gen_utils/python_cli/constants.py +++ /dev/null @@ -1,21 +0,0 @@ - -# CLI change scripts -RENAME_ROOT_GROUP = "python_cli/rename_root_group.py" -EXECUTE_MANUAL_CHANGES = "python_cli/execute_manual_changes.py" -MANUAL_CHANGE_SCRIPT = "python3 {operation} {issue} {branch}" - -# CLI Installation and cleanup -USE_LOCAL_PACKAGE = "pip install -e ." 
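# Editor's sketch, not part of the original file: MANUAL_CHANGE_SCRIPT is rendered with
# str.format and shelled out via os.system (see generate_manual_changes.py below), e.g.
#   cmd = MANUAL_CHANGE_SCRIPT.format(operation=EXECUTE_MANUAL_CHANGES,
#                                     issue="DEX-1234",           # hypothetical key
#                                     branch="preview-DEX-1234")  # hypothetical branch
#   # -> "python3 python_cli/execute_manual_changes.py DEX-1234 preview-DEX-1234"
#   rtv = os.system(cmd)  # a non-zero rtv marks the manual change as failed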
-CLI_REINSTALL_SCRIPT = "../auto-gen-utils/python_cli/install_python_cli_local.sh" - -CLI_TEAM_MEMBERS = [ - "haibrah", - "hjkumar", - "kpasrich", - "karkamat", - "alexle", - "matsai", - "nugugupt", - "aanups", - "pankajos", -] diff --git a/scripts/auto_gen_utils/python_cli/exceptions.py b/scripts/auto_gen_utils/python_cli/exceptions.py deleted file mode 100644 index fd824da7dd..0000000000 --- a/scripts/auto_gen_utils/python_cli/exceptions.py +++ /dev/null @@ -1,9 +0,0 @@ -class BotException(Exception): - """Base exception for self-service jira bit""" - - -class CliException(BotException): - def __init__(self, message, line): - super().__init__() - self.message = message - self.line = line diff --git a/scripts/auto_gen_utils/python_cli/execute_manual_changes.py b/scripts/auto_gen_utils/python_cli/execute_manual_changes.py deleted file mode 100644 index 2e60d7789b..0000000000 --- a/scripts/auto_gen_utils/python_cli/execute_manual_changes.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python3 - -import python_cli.jira_util as jira_util -import sys - -issue_key = sys.argv[1] -self_service_branch = sys.argv[2] -issue = jira_util.get_jira_issue(issue_key) -manual_change = jira_util.check_comments_for_manual_change(issue) -try: - jira_util.execute_manual_change(manual_change) - -except Exception as e: - print(e) - # handle all raised exceptions - jira_util.handle_exceptions(issue_key) - # aggregate list - jira_util.add_jira_comment(issue_key, jira_util.MANUAL_CHANGE_FAILED_TEMPLATE.format(e, self_service_branch, - self_service_branch.replace('-', '\-'))) # noqa: W605 - sys.exit(1) diff --git a/scripts/auto_gen_utils/python_cli/generate_local_changes.py b/scripts/auto_gen_utils/python_cli/generate_local_changes.py deleted file mode 100644 index d0c4e3f0ad..0000000000 --- a/scripts/auto_gen_utils/python_cli/generate_local_changes.py +++ /dev/null @@ -1,51 +0,0 @@ -import os -import sys - -import constants -import git_util -import jira_util - - -def execute_local_changes(): - issue_key = sys.argv[1] # Design review ticket - try: - manual_change = sys.argv[2] # Manual Change Command - except Exception: - print("manual change not supplied fetch from jira ticket") - issue = jira_util.get_jira_issue(issue_key) - manual_change = jira_util.check_comments_for_manual_change(issue) - print("retrieved manual change is ", manual_change) - - self_service_branch = git_util.checkout_self_service_branch(git_util.PYTHON_CLI_WORKING_DIRECTORY, 'preview', - issue_key) - rtv = 0 - if jira_util.RENAME_ROOT_GROUP in manual_change.upper(): - print("rename root command") - rtv = os.system(constants.MANUAL_CHANGE_SCRIPT.format(operation=constants.RENAME_ROOT_GROUP, - issue=issue_key, - branch=self_service_branch)) - if rtv != 0: - print("root command changes have completed please check") - exit() - - for supported_change in jira_util.SUPPORTED_MANUAL_CHANGES: - if supported_change in manual_change.upper(): - try: - rtv = jira_util.execute_manual_change(manual_change) - except Exception as e: - print(e) - # handle all raised exceptions - jira_util.handle_exceptions(issue_key) - # aggregate list - jira_util.add_jira_comment(issue_key, - jira_util.MANUAL_CHANGE_FAILED_TEMPLATE.format(e, self_service_branch, - self_service_branch.replace( - '-', '\-'))) # noqa: W605 - break - if rtv == 0: - print("supported command changes have completed please check") - exit() - - -if __name__ == '__main__': - execute_local_changes() diff --git a/scripts/auto_gen_utils/python_cli/generate_manual_changes.py 
b/scripts/auto_gen_utils/python_cli/generate_manual_changes.py deleted file mode 100644 index c3b7935413..0000000000 --- a/scripts/auto_gen_utils/python_cli/generate_manual_changes.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved. -# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. - -import os - -import python_cli.git_util as git -import python_cli.jira_config as jira_config -import python_cli.jira_util as jira_util -import python_cli.self_service_manual_change as self_service -from python_cli.constants import RENAME_ROOT_GROUP, EXECUTE_MANUAL_CHANGES, MANUAL_CHANGE_SCRIPT, CLI_REINSTALL_SCRIPT - -design_reviews = jira_util.get_open_design_review_tickets() - -for design_review in design_reviews: - issue = jira_util.get_jira_issue(design_review.key) - manual_change = jira_util.check_comments_for_manual_change(issue) - if manual_change is not None: - branch = jira_util.get_branch_with_changes(issue) - if branch is None: - continue - self_service_branch = git.checkout_self_service_branch(git.PYTHON_CLI_WORKING_DIRECTORY, branch, - design_review.key) - self_service.run_command(git.PYTHON_CLI_WORKING_DIRECTORY, CLI_REINSTALL_SCRIPT) - rtv = 0 - if jira_util.RENAME_ROOT_GROUP in manual_change.upper(): - rtv = os.system(MANUAL_CHANGE_SCRIPT.format(operation=RENAME_ROOT_GROUP, - issue=design_review.key, - branch=self_service_branch)) - if rtv != 0: - git.push_all_generated_changes_to_remote(git.PYTHON_CLI_WORKING_DIRECTORY, design_review.key) - continue - for supported_change in jira_util.SUPPORTED_MANUAL_CHANGES: - - if supported_change in manual_change.upper(): - rtv = os.system(MANUAL_CHANGE_SCRIPT.format(operation=EXECUTE_MANUAL_CHANGES, - issue=design_review.key, - branch=self_service_branch)) - break - - if rtv == 0: - jira_util.add_jira_comment(design_review.key, - jira_util.MANUAL_CHANGE_COMPLETED_TEMPLATE.format(self_service_branch, - self_service_branch.replace( - '-', '\-'))) # noqa: W605 - git.push_all_generated_changes_to_remote(git.PYTHON_CLI_WORKING_DIRECTORY, design_review.key) - issue.add_field_value('labels', jira_config.CLI_SELF_SERVICE_LABEL) - issue.add_field_value('labels', jira_config.CLI_MANUAL_CHANGES_LABEL) - issue.update() diff --git a/scripts/auto_gen_utils/python_cli/git_util.py b/scripts/auto_gen_utils/python_cli/git_util.py deleted file mode 100644 index ace1f61602..0000000000 --- a/scripts/auto_gen_utils/python_cli/git_util.py +++ /dev/null @@ -1,50 +0,0 @@ -import git -import os - - -PYTHON_CLI_WORKING_DIRECTORY = os.environ.get('PYTHON_CLI_DIR') -PYTHON_SDK_WORKING_DIRECTORY = os.environ.get('PYTHON_SDK_DIR') -COMMIT_MESSAGE = "{} Self Service Manual Changes" - - -def checkout_branch(directory, branch): - repo = git.Repo(directory) - repo.remote().fetch() - repo.remote().pull() - try: - repo.git.checkout(branch) - print("Successfully checked out branch: {}".format(branch)) - except Exception as e: - print(e) - - -def checkout_self_service_branch(directory, branch, ticket): - repo = git.Repo(directory) - repo.remote().fetch() - repo.remote().pull() - self_service_branch = branch + '-' + ticket - try: - repo.git.reset('--hard') - repo.git.checkout(branch) - try: - repo.git.branch('-D', self_service_branch) - repo.git.checkout('-b', self_service_branch) - except: # noqa: E722 - 
repo.git.checkout('-b', self_service_branch) - except Exception as e: - print(e) - print("Successfully checked out branch for manual changes: {}".format(self_service_branch)) - return self_service_branch - - -def push_all_generated_changes_to_remote(directory, ticket): - try: - repo = git.Repo(directory) - repo.git.add(A=True) - repo.git.reset('--', 'setup.py') - repo.git.reset('--', 'requirements.txt') - repo.index.commit(COMMIT_MESSAGE.format(ticket)) - repo.git.push('-u', 'origin', repo.active_branch, force=True) - print("Pushed all changes to remote branch: {}".format(repo.active_branch)) - except Exception as e: - print(e) diff --git a/scripts/auto_gen_utils/python_cli/install_python_cli_local.sh b/scripts/auto_gen_utils/python_cli/install_python_cli_local.sh deleted file mode 100755 index 5a5863d0b9..0000000000 --- a/scripts/auto_gen_utils/python_cli/install_python_cli_local.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env bash - -find . -name '*.pyc' -delete -pip install --pre --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt -pip install --trusted-host=artifactory.oci.oraclecorp.com -e . diff --git a/scripts/auto_gen_utils/python_cli/jira_config.py b/scripts/auto_gen_utils/python_cli/jira_config.py deleted file mode 100644 index c524b6fa1f..0000000000 --- a/scripts/auto_gen_utils/python_cli/jira_config.py +++ /dev/null @@ -1,36 +0,0 @@ -import os -import urllib3 - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) - -# JIRA REST API v2 documentation: -# https://docs.atlassian.com/software/jira/docs/api/REST/7.8.0/ -JIRA_SERVER = 'https://jira.oci.oraclecorp.com' -JIRA_SERVER_REST_API_VERSION = 2 -JIRA_PROJECT = 'Developer Experience' -JIRA_PROJECT_KEY = 'DEX' -JIRA_USERNAME = "kerlee" - -USERNAME = os.environ.get('JIRA_USERNAME') -PASSWORD = os.environ.get('JIRA_PASSWORD') -JSESSIONID = os.environ.get('JSESSIONID') - -JIRA_OPTIONS = { - 'server': JIRA_SERVER, - 'rest_api_version': JIRA_SERVER_REST_API_VERSION, - 'verify': False -} - -# Also used for Bitbucket -JIRA_BASIC_AUTH = (USERNAME, PASSWORD) - -STATUS_DONE = "Done" -STATUS_CLOSED = "Closed" -STATUS_WITHDRAWN = "Withdrawn" -CLI_SELF_SERVICE_LABEL = "SelfServeManual" -CLI_MANUAL_CHANGES_LABEL = "ManualCLIChange" -CUSTOM_FIELD_ID_SDK_CLI_GA_DATE = 'customfield_13448' - -DEXREQ_TERMINAL_STATES = [STATUS_DONE, STATUS_WITHDRAWN, STATUS_CLOSED] - -CLI_DESIGN_REVIEW_TERMINAL_STATES = [STATUS_DONE, STATUS_CLOSED] diff --git a/scripts/auto_gen_utils/python_cli/jira_util.py b/scripts/auto_gen_utils/python_cli/jira_util.py deleted file mode 100644 index 695f107d22..0000000000 --- a/scripts/auto_gen_utils/python_cli/jira_util.py +++ /dev/null @@ -1,555 +0,0 @@ -from jira import JIRA -from config import PREVIEW_ISSUE_TYPE_ID -import datetime -import python_cli.jira_config as config -import python_cli.self_service_manual_change as manual_change -import sys -import re -from python_cli.exceptions import CliException - -_JIRA_CLIENT = None - -BOT_USERNAME = "[~gear-dexreq-automation]" -BOT_FULL_NAME = "DEXREQ Automation" -BOT_ID = "gear-dexreq-automation" -GENERATED_BRANCH_REGEX = re.compile(r"(?<=sourceBranch=refs%2Fheads%2F)[\s\S]+?(?=&title=)") -VALID_CHARS = "[^\d\w\s\[\]@\-\>]" # noqa: W605 -MANUAL_CHANGE_MATCH = "(\[{}(\w+)?]\s)([^\[]+)" # noqa: W605 -MANUAL_CHANGE_DELIMTER = "->" - -OPEN_DESIGN_REVIEWS = 'project="Developer Experience" AND issuetype="Design Reviews" AND status not in (Done, Closed, Blocked)' 
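# Editor's sketch, not part of the original file: GENERATED_BRANCH_REGEX extracts the
# generated branch name from a Bitbucket create-pull-request URL posted in a ticket
# comment, e.g. (URL hypothetical):
#   url = ("https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/python-cli/"
#          "pull-requests?create&sourceBranch=refs%2Fheads%2Fpreview-DEX-1234"
#          "&title=Generated")
#   GENERATED_BRANCH_REGEX.search(url).group(0)  # -> "preview-DEX-1234"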
-NO_MANUAL_CHANGES_REQUIRED = '.*(no).*(change).*(require).*' # noqa: W605 - -NO_MANUAL_CHANGES_TICKET_CLOSED = """ -[~{user}] -This Design Review ticket has been marked *Done* by DEXREQ Automation because "No changes required" was detected. - -If this is a mistake or if you need further assistance, please assign the ticket to the CLI Support engineer. -""" - -MANUAL_CHANGE_COMPLETED_TEMPLATE = """Manual Change Request Done - -You can find your changes here: -https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/python-cli/pull-requests?create&targetBranch=refs%2Fheads%2Fpreview&sourceBranch=refs%2Fheads%2F{}&targetRepoId=930 - -You can choose to create a PR, or use the branch, {}, as a base for any additional changes. - -If you would like to add more changes on created branch by CLI bot, Follow below steps: -1. Please refer to "Fork Model for Pull Requests" on how to utilize Bitbucket forks for development: https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=132774928#SDK/CLISelfServiceFrequentlyAskedQuestions-HowdoIdoapullrequestforSDK/CLIrepos? -2. Install Development env of OCI-CLI: https://confluence.oci.oraclecorp.com/display/DEX/Installing+OCI-CLI+using+Python3 -3. Checkout to preview-DEX- branch after forking the repo -4. make changes -5. cd services/ -6. flake8 --ignore=F841,E501,W503 service/ -7. run make docs inside service/ to generate all docs -8. git add, commit and push changes to branch -9. create PR against preview branch - -If you create a PR, post the PR link on this ticket after both builds have passed successfully -PR should be created against preview branch and approved by at least one of your teammates -Finally assign the ticket to the CLI Support engineer -CLI support engineer will mark the ticket Done -Next step would be to create a Public DEXREQ ticket https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=43683000 - -If making additional manual changes, see the docs below: -Making CLI Manual changes: https://confluence.oci.oraclecorp.com/display/DEX/CLI+Manual+Code+Changes -Common manual change recipes: https://confluence.oci.oraclecorp.com/display/DEX/CLI+recipes+for+overriding+generated+code -Recommended CLI installation for Mac: https://confluence.oci.oraclecorp.com/display/DEX/Installing+OCI-CLI+using+Python3 -""" - -MANUAL_CHANGE_FAILED_TEMPLATE = """Manual Change Request Failed. {} - -The manual changes will have to be done manually. -The latest status of the changes can be found here: https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/python-cli/pull-requests?create&targetBranch=refs%2Fheads%2Fpreview&sourceBranch=refs%2Fheads%2F{}&targetRepoId=930 -You can checkout this changes from this branch: {} - -To make additional changes on top of this branch, or to make manual changes in general, see the docs below: -Making CLI Manual changes: https://confluence.oci.oraclecorp.com/display/DEX/CLI+Manual+Code+Changes -Common manual change recipes: https://confluence.oci.oraclecorp.com/display/DEX/CLI+recipes+for+overriding+generated+code -Recommended CLI installation for Mac: https://confluence.oci.oraclecorp.com/display/DEX/Installing+OCI-CLI+using+Python3 -""" - -PREVIEW_NOT_DONE_TEMPLATE = """Manual Change Request Rejected. -Your manual changes cannot be generated until your preview ticket is *Done*. -Please request another manual change after your ticket is *Done*. -""" - -NEW_DESIGN_REVIEW_TEMPLATE = """[~{user}] -Please review the generated commands above. 
-If no changes are required, please comment "No changes required" and assign the ticket to the CLI Support engineer. -If no changes were generated, please confirm that it is intended. - -Otherwise, please follow these links if you would like to manually change generated CLI commands. - -To make manual changes using our CLI Self Service: - - Please make sure your Preview DEXREQ ticket is in Done status before making a manual change. Do NOT change it yourself. - - Follow https://confluence.oci.oraclecorp.com/display/DEX/CLI+Self+Service+Guide and leave a comment using the appropriate template - - Our CLI bot will read "[~gear-dexreq-automation] Manual Changes Requested" in comments and process the comment and create a branch automatically - - Only last comment will be used to create a branch - - Follow the comment on the ticket after branch is created - -To make additional changes on top of this branch, or to make general manual changes, see the docs below: -Making CLI Manual changes: https://confluence.oci.oraclecorp.com/display/DEX/CLI+Manual+Code+Changes -Common manual change recipes: https://confluence.oci.oraclecorp.com/display/DEX/CLI+recipes+for+overriding+generated+code -Recommended CLI installation for Mac: https://confluence.oci.oraclecorp.com/display/DEX/Installing+OCI-CLI+using+Python3 -""" - -UPCOMING_GA_DATE_TEMPLATE = """[~{user}] -Your GA date is coming up in {days} days. -This Design Review ticket must be done a week before your GA date. -To ensure your feature is ready for release, please ensure all work for this ticket is done and this ticket is closed. -""" - -RENAME_ROOT_GROUP = "RENAME ROOT GROUP" -RENAME_COMMAND = "RENAME COMMAND" -REMOVE_COMMAND = "REMOVE COMMAND" -MOVE_COMMAND = "MOVE COMMAND" -MOVE_GROUP = "MOVE GROUP" -RENAME_PARAMETER = "RENAME PARAMETER" -REMOVE_PARAMETER = "REMOVE PARAMETER" -FLATTEN_PARAMETER = "FLATTEN PARAMETER" - -PREVIEW_DONE_STATE = "Done" - -SUPPORTED_MANUAL_CHANGES = [RENAME_COMMAND, REMOVE_COMMAND, MOVE_COMMAND, MOVE_GROUP, RENAME_PARAMETER, REMOVE_PARAMETER] - -RENAME_ROOT_GROUP_EXCEPTION = """Failed in renaming the root group {line} , and error is {e}.Check if the {line} is present in the latest preview oci-cli branch""" - -RENAME_COMMAND_EXCEPTION = """"Failed in renaming the commands {line} , and error is {e}. Check if the {line} is present in the latest preview oci-cli branch""" - -MOVE_GROUP_OR_COMMANDS_EXCEPTION = """Failed in moving the commands {line} , and error is {e}. Check if both old and new groups are present in the latest preview oci-cli branch for this line {line}. CLI bot cannot create a new group, it can only move commands to existing groups""" - -RENAME_PARAMETER_EXCEPTION = """Failed in renaming the parameter the commands and error is {e}""" - -raised_exceptions = {} - - -def JIRA_CLIENT(): - global _JIRA_CLIENT - - if _JIRA_CLIENT: - return _JIRA_CLIENT - - # attempt to log in using user name and password if present, if not use config.JSESSIONID - if config.USERNAME and config.PASSWORD: - print('Building JIRA client with username / password auth') - _JIRA_CLIENT = JIRA(config.JIRA_OPTIONS, basic_auth=config.JIRA_BASIC_AUTH) - elif config.JSESSIONID: - print('Building JIRA client with cookie based auth') - cookie_options = dict(config.JIRA_OPTIONS) - cookie_options['cookies'] = { - 'JSESSIONID': config.JSESSIONID - } - - _JIRA_CLIENT = JIRA(cookie_options) - else: - sys.exit('Could not authenticate with JIRA server. 
Must specify environment variables for either config.JSESSIONID or JIRA_USERNAME and JIRA_PASSWORD.') - - return _JIRA_CLIENT - - -def is_design_ticket_in_non_terminal_state(issue): - return issue and issue.fields.status and issue.fields.status.name not in config.CLI_DESIGN_REVIEW_TERMINAL_STATES - - -def get_open_design_review_tickets(): - # Results default to 50, need to set to a higher arbitrary number to return all results - design_reviews = JIRA_CLIENT().search_issues(OPEN_DESIGN_REVIEWS, maxResults=1000) - return design_reviews - - -def add_jira_comment(issue_key, comment): - JIRA_CLIENT().add_comment(issue_key, comment) - - -def get_jira_issue(issue_key): - return JIRA_CLIENT().issue(issue_key) - - -# Returns "preview" if the Preview SDK/CLI ticket is closed otherwise None -def get_branch_with_changes(design_review): - if check_if_preview_is_done(design_review): - return "preview" - else: - add_jira_comment(design_review, PREVIEW_NOT_DONE_TEMPLATE) - return None - - -# Checks if the preview ticket associated with the design review ticket is closed -def check_if_preview_is_done(design_review): - if hasattr(design_review.fields, 'issuelinks'): - for link in design_review.fields.issuelinks: - if hasattr(link, 'inwardIssue'): - issue = JIRA_CLIENT().issue(link.inwardIssue.key, fields='description, summary, status, issuetype') - ticket_type_id = issue.fields.issuetype.id - if ticket_type_id and ticket_type_id == PREVIEW_ISSUE_TYPE_ID: - print('Found Preview SDK issue: {}'.format(issue.key)) - if PREVIEW_DONE_STATE in issue.fields.status.name: - return True - return False - - -# Checks if the ticket is a Preview SDK ticket -def is_issue_summary_matches_preview_sdk(issue_summary): - return issue_summary and 'Preview '.lower() in issue_summary.lower() - - -# Returns None if manual change is not requested, else returns with requested manual change comment -def check_comments_for_manual_change(issue): - for comment in reversed(issue.fields.comment.comments): - # This check is true if a manual change has already been executed - if "MANUAL CHANGE REQUEST" in str(comment.body).upper() and BOT_ID in comment.author.name: - return None - # Checks for a NEW manual change request - elif BOT_USERNAME in comment.body and "MANUAL CHANGES REQUESTED" in str(comment.body).upper()\ - and BOT_ID not in comment.author.name: - print("Manual change request for {} found!".format(issue.key)) - return comment.body - return None - - -def is_new_design_review(issue): - if len(issue.fields.comment.comments) <= 1: - print("Found new Design Review ticket: {}".format(issue.key)) - return True - else: - new_ticket = True - for comment in issue.fields.comment.comments: - if "Generated code changes:" not in comment.body: - new_ticket = False - return new_ticket - - -def no_changes_required(issue, not_by=[]): - for comment in reversed(issue.fields.comment.comments): - if 'fail' in comment.body or comment.author.name in not_by: - break - if comment.author.name not in not_by + [BOT_ID] and re.match(NO_MANUAL_CHANGES_REQUIRED, comment.body.lower()): - return True - return False - - -# Checks if issue has been updated/modified in the last 7 days -def not_updated_in_seven_days(issue): - comments = issue.fields.comment.comments - if len(comments) > 0: - last_comment = comments[-1] - comment_datetime = convert_date_to_datetime(last_comment.created[:10]) - today = datetime.datetime.now() - delta = today - comment_datetime - if delta.days > 7: - print("{} not updated in more than 7 days.".format(issue.key)) - return True - return 
False - - -# Returns the associated UDX ticket from the Design Review ticket -def get_udx_ticket_from_design_review(design_review): - if hasattr(design_review.fields, 'issuelinks'): - for link in design_review.fields.issuelinks: - if hasattr(link, 'inwardIssue'): - issue = JIRA_CLIENT().issue(link.inwardIssue.key, fields='description, summary, status, issuelinks') - if "UDX" in issue.key: - print('Found UDX ticket: {}'.format(issue.key)) - return issue - return None - - -# Returns the associated Public SDK/CLI ticket from the UDX ticket -def get_public_ticket_from_udx(udx_ticket): - if hasattr(udx_ticket.fields, 'issuelinks'): - for link in udx_ticket.fields.issuelinks: - if hasattr(link, 'outwardIssue'): - issue = JIRA_CLIENT().issue(link.outwardIssue.key) - if is_issue_summary_matches_public_sdk(issue.fields.summary): - print('Found Public SDK issue: {}'.format(issue.key)) - if issue.fields.status.name not in config.DEXREQ_TERMINAL_STATES: - return issue - return None - - -# Checks if the Public SDK/CLI ticket is GA within a month -def is_ga_date_within_month(public_ticket): - if hasattr(public_ticket.fields, config.CUSTOM_FIELD_ID_SDK_CLI_GA_DATE): - ga_date = getattr(public_ticket.fields, config.CUSTOM_FIELD_ID_SDK_CLI_GA_DATE) - if ga_date is None: - return None - ga_datetime = convert_date_to_datetime(ga_date) - today = datetime.datetime.now() - delta = ga_datetime - today - if delta.days <= 28: - return delta.days - return None - - -def convert_date_to_datetime(ga_date): - return datetime.datetime.strptime(ga_date, '%Y-%m-%d') - - -def is_issue_summary_matches_public_sdk(issue_summary): - return issue_summary and 'Public '.lower() in issue_summary.lower() - - -# Returns the generated CLI branch from a preview SDK/CLI ticket -def get_generated_branch(issue): - for comment in reversed(issue.fields.comment.comments): - if "Generated code changes:" in comment.body: - generated_branch = GENERATED_BRANCH_REGEX.search(comment.body) - if generated_branch: - generated_branch = generated_branch.group(0) - print("Found generated branch: {}".format(generated_branch)) - return generated_branch - return None - - -def execute_manual_change(request): - request_upper = request.upper() - # Cleans the request for formatting - request_upper = re.sub(VALID_CHARS, '', request_upper) - - for operation_type in SUPPORTED_MANUAL_CHANGES: - if operation_type in request_upper: - try: - body = re.compile(MANUAL_CHANGE_MATCH.format(operation_type)).search(request_upper) - if body: - body = body.group(3) - body = body.lower() - MANUAL_CHANGE_FUNCTIONS[operation_type](body) - except CliException as exception: - print("enqueuing raised exceptions for {} and the exception is {}".format(operation_type, exception.message)) - raised_exceptions[exception.line] = exception - - if raised_exceptions: - print("found exceptions while executing manual changes") - raise Exception("found exceptions while executing manual changes") - - print("Successfully executed manual change!") - manual_change.run_make_docs(find_service(request)) - manual_change.run_make_docs(find_service(request)) - print("Successfully ran make docs") - - -def rename_root_group(request): - request_upper = request.upper() - # Cleans the request for formatting - request_upper = re.sub(VALID_CHARS, '', request_upper) - print(request_upper) - - if RENAME_ROOT_GROUP in request_upper: - body = re.compile(MANUAL_CHANGE_MATCH.format(RENAME_ROOT_GROUP)).search(request_upper) - if body: - body = body.group(3) - body = body.lower() - 
-def rename_root_group(request): - request_upper = request.upper() - # Cleans the request for formatting - request_upper = re.sub(VALID_CHARS, '', request_upper) - print(request_upper) - - if RENAME_ROOT_GROUP in request_upper: - body = re.compile(MANUAL_CHANGE_MATCH.format(RENAME_ROOT_GROUP)).search(request_upper) - if body: - body = body.group(3) - body = body.lower() - MANUAL_CHANGE_FUNCTIONS[RENAME_ROOT_GROUP](body) - print("Successfully executed renaming root group!") - else: - return - - -def find_service(request): - for line in reversed(request.splitlines()): - if 'oci' in line: - values = line.split() - for i, val in enumerate(values): - if 'oci' in val: - return values[i + 1].strip() - - -def execute_rename_root_group(body): - for line in body.splitlines(): - if not line.strip(): - continue - request = line.split(MANUAL_CHANGE_DELIMTER) - old_root_group = sanitize_command(request[0]) - new_root_group = sanitize_command(request[1]) - try: - manual_change.rename_root_group(old_root_group[-1], new_root_group[-1]) - except Exception as e: - raise CliException(RENAME_ROOT_GROUP_EXCEPTION.format(line, e), line) - - -def execute_rename_command(body): - commands = [] - new_names = [] - for line in body.splitlines(): - if not line.strip(): - continue - request = line.split(MANUAL_CHANGE_DELIMTER) - old_command = sanitize_command(request[0]) - commands.append(old_command) - new_command = sanitize_command(request[1]) - new_names.append(new_command[-1]) - try: - manual_change.rename_commands(commands, new_names) - except Exception as e: - raise CliException(RENAME_COMMAND_EXCEPTION.format(line=line, e=e), line) - commands = [] - new_names = [] - - -def execute_remove_command(body): - commands = [] - for line in body.splitlines(): - if not line.strip(): - continue - request = line.split(MANUAL_CHANGE_DELIMTER) - old_command = sanitize_command(request[0]) - commands.append(old_command) - manual_change.remove_commands(commands) - commands = [] - - -def execute_move_group(body): - for line in body.splitlines(): - if not line.strip(): - continue - request = line.split(MANUAL_CHANGE_DELIMTER) - old_group = sanitize_command(request[0]) - new_group = sanitize_command(request[1]) - try: - manual_change.move_group(old_group, new_group) - except Exception as e: - raise CliException(MOVE_GROUP_OR_COMMANDS_EXCEPTION.format(line=line, e=e), line) - - -def execute_move_command(body): - for line in body.splitlines(): - if not line.strip(): - continue - request = line.split(MANUAL_CHANGE_DELIMTER) - command = sanitize_command(request[0]) - new_group = sanitize_command(request[1]) - try: - manual_change.move_command(command, new_group) - except Exception as e: - raise CliException(MOVE_GROUP_OR_COMMANDS_EXCEPTION.format(line=line, e=e), line) - - -def execute_rename_parameter(body): - for line in body.splitlines(): - if not line.strip(): - continue - if 'oci' in line and MANUAL_CHANGE_DELIMTER in line: - rename_parameter([line]) - - -def rename_parameter(body): - command = [] - old_params = [] - new_params = [] - options = None - for line in body: - if not line.strip(): - continue - if ":" in line: - if options is None: - options = {} - option = line.split(':') - options.update({option[0].strip(): option[1].strip('][').split(', ')}) - # options.update({option[0].strip(): ast.literal_eval(option[1].strip())}) - else: - request = line.split(MANUAL_CHANGE_DELIMTER) - old_command = sanitize_command(request[0]) - command = old_command[:find_parameter_index(old_command)] - old_params = old_command[find_parameter_index(old_command):] - old_params = [x.replace('--', '') for x in old_params] - new_command = sanitize_command(request[1]) - new_params = new_command[find_parameter_index(new_command):] - new_params = [x.replace('--', '') for x in new_params] - print("Renaming Command") - print(command) - print(old_params) - print(new_params) - try: - manual_change.rename_parameters(command, 
old_params, new_params, options) - except Exception as e: - raise CliException(RENAME_COMMAND_EXCEPTION.format(line=line, e=e), line) - - -def execute_remove_parameter(body): - for line in body.splitlines(): - if not line.strip(): - continue - if 'oci' in line: - remove_parameter([line]) - - -def remove_parameter(body): - command = [] - params = [] - for line in body: - if not line.strip(): - continue - request = line.split(MANUAL_CHANGE_DELIMTER) - old_command = sanitize_command(request[0]) - command = old_command[:find_parameter_index(old_command)] - params = old_command[find_parameter_index(old_command):] - params = [x.replace('--', '') for x in params] - manual_change.remove_parameters(command, params) - - -# TODO -def execute_flatten_parameter(body): - flatten_query = [] - query = False - for line in body.splitlines(): - if not line.strip(): - continue - if 'oci' in line and query: - query = False - flatten_parameter(flatten_query) - flatten_query = [line] - else: - query = True - flatten_query.append(line) - flatten_parameter(flatten_query) - - -# TODO -def flatten_parameter(body): - command = [] - params = [] - params_flattened = [] - params_options = {} - for line in body: - if not line.strip(): - continue - if 'oci' in line: - command = sanitize_command(line) - else: - option = line.split(':') # noqa: F841 - manual_change.flatten_parameters(command, params, params_flattened, params_options) - - -# Removes leading/trailing whitespaces from command and converts into comma separated list -# oci audit list -> ['audit', 'list'] -def sanitize_command(request): - request = request.split() - if "oci" == request[0]: - request = request[1:] - request = [x.strip() for x in request] - return request - - -def find_parameter_index(command): - for i, group in enumerate(command): - if '--' in group: - return i - - -MANUAL_CHANGE_FUNCTIONS = {RENAME_ROOT_GROUP: execute_rename_root_group, - RENAME_COMMAND: execute_rename_command, - MOVE_COMMAND: execute_move_command, - REMOVE_COMMAND: execute_remove_command, - MOVE_GROUP: execute_move_group, - RENAME_PARAMETER: execute_rename_parameter, - REMOVE_PARAMETER: execute_remove_parameter} - - -def handle_exceptions(issue_key): - comment = "CLI bot failed to create branch based on your last comment. Please fix these errors \n" - for key, exception in raised_exceptions.items(): - comment += key + "\n " + exception.message - comment += "\n**********\n" - print(comment) - add_jira_comment(issue_key, comment) diff --git a/scripts/auto_gen_utils/python_cli/manual_change_examples.py b/scripts/auto_gen_utils/python_cli/manual_change_examples.py deleted file mode 100644 index 14ff066fda..0000000000 --- a/scripts/auto_gen_utils/python_cli/manual_change_examples.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved. -# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. -import python_cli.self_service_manual_change_util as self_service_util # noqa: F401 -import python_cli.self_service_manual_change as self_service # noqa: F401 - -# Use this python script as an example to generate manual changes - -# Rename Command -# @param commands: A list of list of commands. Takes in multiple commands. 
[[audit, event, list], [audit, event, create]] -# @param new_names: A list of names to replace the command with. [new_list, new_create] -# self_service.rename_commands([['usage', 'subscription-info', 'get']], ['getter']) - - -# Rename Parameter -# @param command: A command in list form, [audit, event, list]. Takes in only 1 command -# @param old_params: A list of parameters to rename, [param1, param2] -# @param new_params: A list of parameters to replace old_params. Index mapped to old_params. [new_param1, new_param2] -# @param options: An optional field, defaulted to None, to replace the options for the parameter. A dictionary for parameter options. {'param1': [required=True], 'param2': [required=True]} -# self_service.rename_parameters(['usage', 'usage-record', 'list'], ['granularity', 'tenancy-id'], ['grangran', 'tenten']) - - -# Remove command -# @param commands: A list of lists of commands. Takes in multiple commands. -# self_service.remove_commands([['usage', 'usage-record', 'list'], ['usage', 'subscription-info', 'get']]) - - -# Move group -# @param old_group: A group in list form, [os, bucket]. Takes in only 1 group -# @param new_group: The group to move all commands under. A group in list form, [os, new_bucket]. Takes in only 1 group -# self_service.move_group(['waas', 'address-list'], ['waas', 'access-rule']) - - -# Move command -# @param old_command: A command in list form, [audit, event, list]. Takes in only 1 command -# @param new_command: The group to move the command under. A group in list form, [audit, config]. Takes in only 1 group -# self_service.move_command(['waas', 'address-list', 'create'], ['waas', 'access-rule']) - - -# Flatten command -# @param command: A command in list form, [audit, event, list]. Takes in only 1 command -# @param params: A list of complex parameters to flatten, [param1, param2] -# @param params_flattened: A list of lists of flattened parameters to replace old_params. Index mapped to old_params. [[param1_flat1, param1_flat2], [param2_flat1, param2_flat2]] -# @param params_options: A dictionary with key being the flattened params and value being the parameter options {'param1_flat': [required=True]} -# self_service.flatten_parameters(['usage', 'usage-record', 'list'], ['tenancy-id'], [['tenancy-renamed', 'gran-renamed']], {'tenancy-renamed': ['required=True', 'help="Text"'], 'gran-renamed': ['help="Text"']}) - - -# Rename root group -# @param service: The root group to rename, takes in only 1 -# @param new_name: The new name for the root group. -# self_service.rename_root_group("budgets", "budget")
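Putting the helpers above together, a self-service session might look like the following sketch. The commands and replacement names are invented for illustration, and the import assumes a local CLI checkout with python_cli on the import path:

    import python_cli.self_service_manual_change as self_service

    # Rename a generated command...
    self_service.rename_commands([['usage', 'subscription-info', 'get']], ['fetch'])
    # ...and rename a parameter on another command.
    self_service.rename_parameters(['usage', 'usage-record', 'list'],
                                   ['granularity'], ['gran'])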
diff --git a/scripts/auto_gen_utils/python_cli/rename_root_group.py b/scripts/auto_gen_utils/python_cli/rename_root_group.py deleted file mode 100644 index ba2aa8a817..0000000000 --- a/scripts/auto_gen_utils/python_cli/rename_root_group.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python3 - -import python_cli.jira_util as jira_util -import sys - -issue_key = sys.argv[1] -self_service_branch = sys.argv[2] -issue = jira_util.get_jira_issue(issue_key) -manual_change = jira_util.check_comments_for_manual_change(issue) -try: - jira_util.rename_root_group(manual_change) -except Exception as e: - print(e) - jira_util.add_jira_comment(issue_key, jira_util.MANUAL_CHANGE_FAILED_TEMPLATE.format(e, self_service_branch, - self_service_branch.replace('-', '\-'))) # noqa: W605 diff --git a/scripts/auto_gen_utils/python_cli/self_service_manual_change.py b/scripts/auto_gen_utils/python_cli/self_service_manual_change.py deleted file mode 100644 index 853e9b46da..0000000000 --- a/scripts/auto_gen_utils/python_cli/self_service_manual_change.py +++ /dev/null @@ -1,426 +0,0 @@ -# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved. -# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. - -# For all existing and new manual changes through self service. -# Deals with changing and adding onto existing manual changes and new manual changes. - -from oci_cli import cli, dynamic_loader -import ast -import jinja2 -import re -import inspect -import python_cli.self_service_manual_change_util as self_service_util -import subprocess -import os - -dynamic_loader.load_all_services() - -TEMPLATE_LOCATION = 'python_cli/templates' -file_loader = jinja2.FileSystemLoader(TEMPLATE_LOCATION) -env = jinja2.Environment(loader=file_loader) - -LIST_REGEX = re.compile(r"\[.*?\]") - -SERVICE = None - - -# command is a command invocation as a list [audit, event, list] -# old_params and new_params need to be a mapping to each other by index -# [param-old-1, param-old-2], [param_new_1, param_new_2] -# options are optional custom parameter options if you want to replace default values such as required=True -def rename_parameters(command, old_params, new_params, options=None): - print("Renaming: " + ' '.join(command)) - parent_and_child = self_service_util.get_parent_child_command(command) - parent_group = parent_and_child[0] - - extended_file = self_service_util.get_extended_file(parent_group) - rename_param_dict = self_service_util.rename_parameter_dict(command, old_params, new_params, options) - print("Generated param dictionary") - - existing_change = self_service_util.check_existing_manual_change(extended_file, rename_param_dict['command']) - print("Checked for Existing Change") - - if existing_change: - out_file = [] - command_found = False - option_updated = False - kwargs_updated = False - try: - with open(extended_file, 'r') as file: - contents = file.readlines() - for i, line in enumerate(contents): - if 'cli_util.copy_params_from_generated_command' in line and rename_param_dict['command'] in line: - command_found = True - excluded_params = LIST_REGEX.search(line) - if excluded_params: - excluded_params = excluded_params.group(0) - excluded_params = ast.literal_eval(excluded_params) - old_params_underscore = [param.replace("-", "_") for param in old_params] - for 
param in old_params_underscore: - excluded_params.append(param) - line = self_service_util.render_output({'service_cli': rename_param_dict['service_cli'], - 'command': rename_param_dict['command'], - 'params_to_exclude': excluded_params - }, - "copy_params_from_generated.py.js2") - if command_found and "cli_util.option" in line and not option_updated: - option_updated = True - output = self_service_util.render_output(rename_param_dict, "rename_parameter_option.py.js2") - out_file.append(output) - - out_file.append(line) - - if option_updated and "ctx" in line and not kwargs_updated: - kwargs_updated = True - output = self_service_util.render_output(rename_param_dict, "rename_parameter_kwargs.py.js2") - out_file.append(output) - - except Exception as e: - print(e) - - try: - with open(extended_file, 'w') as file: - file.writelines(out_file) - except Exception as e: - print(e) - - else: - output = self_service_util.render_output(rename_param_dict, "rename_parameter.py.js2") - output = output.replace(",help=", ", help=") - # check for imports - add_import_if_not_present(extended_file, "from oci_cli import json_skeleton_utils") - append_to_file(extended_file, output) - - # json_skeleton_generation_handler = self_service_util.get_json_skeleton_generation_handler(command[0], child_group).rstrip() - print("Successfully renamed parameters!") - return - - -def remove_parameters(command, params): - print("Removing: " + ' '.join(command)) - parent_and_child = self_service_util.get_parent_child_command(command) - parent_group = parent_and_child[0] - - extended_file = self_service_util.get_extended_file(parent_group) - remove_param_dict = self_service_util.remove_parameter_dict(command, params) - print("Generated param dictionary") - - existing_change = self_service_util.check_existing_manual_change(extended_file, remove_param_dict['command']) - print("Checked for Existing Change") - - if existing_change: - out_file = [] - try: - with open(extended_file, 'r') as file: - contents = file.readlines() - for i, line in enumerate(contents): - if 'cli_util.copy_params_from_generated_command' in line and remove_param_dict['command'] in line: - excluded_params = LIST_REGEX.search(line) - if excluded_params: - excluded_params = excluded_params.group(0) - excluded_params = ast.literal_eval(excluded_params) - old_params_underscore = [param.replace("-", "_") for param in params] - for param in old_params_underscore: - excluded_params.append(param) - line = self_service_util.render_output({'service_cli': remove_param_dict['service_cli'], - 'command': remove_param_dict['command'], - 'params_to_exclude': excluded_params - }, - "copy_params_from_generated.py.js2") - - out_file.append(line) - - except Exception as e: - print(e) - - try: - with open(extended_file, 'w') as file: - file.writelines(out_file) - except Exception as e: - print(e) - - else: - output = self_service_util.render_output(remove_param_dict, "remove_parameter.py.js2") - append_to_file(extended_file, output) - - # json_skeleton_generation_handler = self_service_util.get_json_skeleton_generation_handler(command[0], child_group).rstrip() - print("Successfully removed parameters!") - return - -
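When no extended command exists yet, rename_parameters appends a wrapper rendered from rename_parameter.py.js2 (the template itself appears later in this patch). The rendered output has roughly the following shape; the service, command, and parameter names are invented for illustration:

    @cli_util.copy_params_from_generated_command(usage_cli.list_usage, params_to_exclude=['granularity'])
    @usage_cli.usage_record_group.command(name=usage_cli.list_usage.name, help=usage_cli.list_usage.help)
    @cli_util.option('--gran', required=True, help=u"""The aggregation granularity.""")
    @click.pass_context
    @json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={})
    @cli_util.wrap_exceptions
    def list_usage_extended(ctx, **kwargs):
        # Map the renamed option back onto the generated parameter before delegating.
        if 'gran' in kwargs:
            kwargs['granularity'] = kwargs['gran']
            kwargs.pop('gran')
        ctx.invoke(usage_cli.list_usage, **kwargs)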
-# command is a command invocation as a list [audit, event, list] -# params is a list of complex parameters to flatten [param1, param2] -# params_flattened is a list of lists of the flattened params [[param1_flat1, param1_flat2], [param2_flat1, param2_flat2]] -# params_options is a dictionary with key being the flattened params and value being the parameter options {'param1_flat': [required=True]} -def flatten_parameters(command, params, params_flattened, params_options): - print("Flattening: " + ' '.join(command)) - parent_and_child = self_service_util.get_parent_child_command(command) - parent_group = parent_and_child[0] - - extended_file = self_service_util.get_extended_file(parent_group) - rename_param_dict = self_service_util.flatten_parameter_dict(command, params, params_flattened, params_options) - - existing_change = self_service_util.check_existing_manual_change(extended_file, rename_param_dict['command']) - - if existing_change: - out_file = [] - command_found = False - option_updated = False - kwargs_updated = False - try: - with open(extended_file, 'r') as file: - contents = file.readlines() - for i, line in enumerate(contents): - if 'cli_util.copy_params_from_generated_command' in line and rename_param_dict['command'] in line: - command_found = True - excluded_params = LIST_REGEX.search(line) - if excluded_params: - excluded_params = excluded_params.group(0) - excluded_params = ast.literal_eval(excluded_params) - params_underscore = [param.replace("-", "_") for param in params] - for param in params_underscore: - excluded_params.append(param) - line = self_service_util.render_output({'service_cli': rename_param_dict['service_cli'], - 'command': rename_param_dict['command'], - 'params_to_exclude': excluded_params - }, - "copy_params_from_generated.py.js2") - if command_found and "cli_util.option" in line and not option_updated: - option_updated = True - output = self_service_util.render_output(rename_param_dict, "flatten_parameter_option.py.js2") - out_file.append(output) - - out_file.append(line) - - if option_updated and "ctx" in line and not kwargs_updated: - kwargs_updated = True - output = self_service_util.render_output(rename_param_dict, "flatten_parameter_kwargs.py.js2") - out_file.append(output) - - except Exception as e: - print(e) - - try: - with open(extended_file, 'w') as file: - file.writelines(out_file) - except Exception as e: - print(e) - - else: - output = self_service_util.render_output(rename_param_dict, "flatten_parameter.py.js2") - append_to_file(extended_file, output) - - # json_skeleton_generation_handler = self_service_util.get_json_skeleton_generation_handler(command[0], child_group).rstrip() - print("Successfully flattened parameters!") - return - - -# Renames all commands to the new name. -# Commands and new_names are a 1-to-1 mapping by index -# Commands are a list of lists and new_names is a list -# e.g. 
[[audit, list-long], [audit, create-long]] -def remove_commands(commands): - parent_and_child = self_service_util.get_parent_child_command(commands[0]) - child_group = parent_and_child[1] - - extended_file = self_service_util.get_extended_file(child_group) - remove_dict = self_service_util.remove_command_dict(commands) - output = self_service_util.render_output(remove_dict, "remove_command.py.js2") - - append_to_file(extended_file, output) - print("Successfully removed commands!") - return - - -# Move command between groups -# e.g. [audit, event, list], [audit, config, list] -# oci audit event list -> oci audit config list -def move_command(command, new_group): - print("Moving: " + ' '.join(command)) - old_parent_and_child = self_service_util.get_parent_child_command(command) - old_child_group = old_parent_and_child[1] - - extended_file = self_service_util.get_extended_file(old_child_group) - move_dict = self_service_util.move_command_dict(command, new_group) - output = self_service_util.render_output(move_dict, "move_command.py.js2") - - append_to_file(extended_file, output) - print("Successfully moved commands!") - return - - -# Move commands under one group to another -# e.g. [audit, event], [audit, config] -# oci audit event list -> oci audit config list, oci audit event get -> oci audit config get -# [audit, event], [audit] -# oci audit event list -> oci audit list -def move_group(old_group, new_group): - print("Moving: " + ' '.join(old_group)) - old_parent_and_child = self_service_util.get_parent_child_command(old_group) - old_child_group = old_parent_and_child[1] - - extended_file = self_service_util.get_extended_file(old_child_group) - move_dict = self_service_util.move_group_dict(old_group, new_group) - output = self_service_util.render_output(move_dict, "move_group.py.js2") - - append_to_file(extended_file, output) - print("Successfully moved group commands!") - return - - -# Renames the root group in the service pom.xml file with new_name -def rename_root_group(service, new_name): - print("Renaming service: " + service) - service_cli_file = str(inspect.getfile(cli.commands[service].callback)) - pom_file = service_cli_file.rsplit('/', 4)[0] + "/pom.xml" - with open(pom_file, 'r') as infile: - file_data = infile.read() - file_data = file_data.replace(_pom_root_group(service), _pom_root_group(new_name)) - with open(pom_file, 'w') as outfile: - outfile.write(file_data) - - service_dir = service_cli_file.rsplit('/', 4)[0] - run_make_gen(service_dir) - run_make_docs_dir(service_dir) - print("Successfully renamed root group!") - - -def _pom_root_group(service): - return "{}".format(service) - - -def run_make_gen(service_dir): - # Switches to service directory for 'make gen' then switches back to original working directory - owd = os.getcwd() - - os.chdir(service_dir) - os.environ["OCI_CLI_SKIP_DOC_LINK_VALIDATION"] = "1" - make_gen = subprocess.run(['make', 'gen'], - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - universal_newlines=True) - if make_gen.returncode != 0: - raise Exception("Error while running 'make gen'!") - - os.chdir(owd) - - -def run_make_docs(service): - service_cli_file = str(inspect.getfile(cli.commands[service].callback)) - run_make_docs_dir(service_cli_file.rsplit('/', 4)[0]) - - -def run_make_docs_dir(service_dir): - print(service_dir) - # Switches to service directory for 'make docs' then switches back to original working directory - owd = os.getcwd() - - os.chdir(service_dir) - os.environ["OCI_CLI_SKIP_DOC_LINK_VALIDATION"] = "1" - make_docs = 
subprocess.run(['make', 'docs'], - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - universal_newlines=True) - if make_docs.returncode != 0: - raise Exception("Error while running 'make docs'!") - - os.chdir(owd) - - -def run_command(dir, command): - # Switches to directory for command then switches back to original working directory - owd = os.getcwd() - - os.chdir(dir) - process = subprocess.run(command.split(), - stdout=subprocess.PIPE, - universal_newlines=True) - if process.returncode != 0: - raise Exception("Error while running " + command) - - os.chdir(owd) - - -def use_local_python_sdk(dir): - setup_file = os.path.join(dir, 'setup.py') - modify_python_sdk_version(setup_file, "preview.1',\n") - requirements_file = os.path.join(dir, 'requirements.txt') - modify_python_sdk_version(requirements_file, "preview.1\n") - - -def modify_python_sdk_version(file, replacement): - outfile = [] - try: - with open(file, 'r') as f: - content = f.readlines() - for line in content: - if 'oci==' in line: - local_sdk = line.split('preview.1')[0] - outfile.append(local_sdk + replacement) - else: - outfile.append(line) - except Exception as e: - print(e) - - try: - with open(file, 'w') as f: - f.writelines(outfile) - except Exception as e: - print(e) - - -def append_to_file(file, data): - try: - with open(file, "a") as file: - file.write(data) - except Exception as e: - print(e) - - -def add_import_if_not_present(file, input_line): - # check if file contains input line - with open(file, 'r') as f: - lines_list = f.readlines() - data = "".join(lines_list) - - regex = get_regex(input_line) - matches = re.search(regex, data, re.MULTILINE) - if not matches: - i = 0 - line = lines_list[0] - # skip comments - while line.startswith("#"): - i = i + 1 - line = lines_list[i] - try: - insert_line = input_line + " # noqa: F401\n" - lines_list.insert(i, insert_line) - with open(file, "w") as file: - lines_list = "".join(lines_list) - file.write(lines_list) - except Exception as e: - print(e) - - -def get_regex(input_line): - noqa_statement = '\\s*# noqa:\\s*F401\\s*' - return '^(\\s*' + input_line + '\\s*|\\s*' + input_line + noqa_statement + ')$' diff --git a/scripts/auto_gen_utils/python_cli/self_service_manual_change_util.py b/scripts/auto_gen_utils/python_cli/self_service_manual_change_util.py deleted file mode 100644 index 80f0200ba9..0000000000 --- a/scripts/auto_gen_utils/python_cli/self_service_manual_change_util.py +++ /dev/null @@ -1,437 +0,0 @@ -# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved. -# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. -from oci_cli import cli, dynamic_loader -import sys -import inspect -import glob -import jinja2 - -dynamic_loader.load_all_services() - -file_loader = jinja2.FileSystemLoader('python_cli/templates') -env = jinja2.Environment(loader=file_loader) - -
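The dict builders below all feed render_output, which is a thin wrapper over this module-level jinja2 environment. A minimal, self-contained sketch of that flow, using an inline template as a stand-in for the .py.js2 files under python_cli/templates:

    import jinja2

    # DictLoader stands in for the FileSystemLoader used above.
    env = jinja2.Environment(loader=jinja2.DictLoader({
        'rename.py.js2': '# oci {{ old_command }} -> oci {{ new_command }}\n'
    }))

    def render_output(context, template):
        # Mirrors render_output defined later in this file.
        return env.get_template(template).render(context)

    print(render_output({'old_command': 'audit event list-long',
                         'new_command': 'audit event list'}, 'rename.py.js2'))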
-# Generates dict object required for renaming parameter template -# old_params and new_params need to be a mapping to each other by index -# [param-old-1, param-old-2], [param_new_1, param_new_2] -# options are optional custom parameter options if you want to replace default values such as required=True. It is a dictionary keyed by the old parameter name -def rename_parameter_dict(command, old_params, new_params, options=None): - """ - @param command: e.g. [audit, list, event] - @param old_params: [param-old, param-old2] - @param new_params: [param-new, param-new2] - @param options: {'param-old': ['required=True'], 'param-old2': ['required=False']} - """ - - parent_and_child = get_parent_child_command(command) - parent_group = parent_and_child[0] - child_group = parent_and_child[1] - - service_cli = get_service_cli_from_command(parent_group) - - params_old_underscore = [param.replace("-", "_") for param in old_params] - params_new_underscore = [param.replace("-", "_") for param in new_params] - # Zips old and new parameters for a 1-to-1 mapping - rename_parameters = [list(x) for x in zip(params_old_underscore, params_new_underscore)] - - param_dict = parameter_list(child_group, params_old_underscore) - param_options = get_parameter_options(command[0], parent_group, child_group, old_params, options) - param_dict = _combine_parameter_dicts(param_dict, param_options) - param_dict = zip(param_dict, new_params) - - command_name = get_command_from_extended(get_extended_file(parent_group), child_group) - - json_skeleton_generation_handler = get_json_skeleton_generation_handler(command[0], parent_group, child_group).rstrip() - rename_dict = {'service_cli': service_cli, - 'parent_command': parent_group.callback.__name__, - 'command': command_name, - 'command_name': child_group.name, - 'params_to_exclude': params_old_underscore, - 'rename_parameters': rename_parameters, - 'param_dict': param_dict, - 'json_skeleton_generation_handler': json_skeleton_generation_handler} - return rename_dict - - -# Generates dict object required for removing parameter template -def remove_parameter_dict(command, params): - """ - @param command: e.g. [audit, list, event] - @param params: [param1, param2] - """ - - parent_and_child = get_parent_child_command(command) - parent_group = parent_and_child[0] - child_group = parent_and_child[1] - - service_cli = get_service_cli_from_command(parent_group) - - params_underscore = [param.replace("-", "_") for param in params] - - command_name = get_command_from_extended(get_extended_file(parent_group), child_group) - - json_skeleton_generation_handler = get_json_skeleton_generation_handler(command[0], parent_group, child_group).rstrip() - rename_dict = {'service_cli': service_cli, - 'parent_command': parent_group.callback.__name__, - 'command': command_name, - 'params_to_exclude': params_underscore, - 'json_skeleton_generation_handler': json_skeleton_generation_handler} - return rename_dict - - -# Generates dict object required for flattening parameter template. -# params and params_flattened need to be a mapping to each other by index. -# params is a list of params to be flattened. -# params_flattened is a list of lists with each index corresponding to the original param. -# params_options is a dictionary with key being the name of the flattened param and value being the param options -def flatten_parameter_dict(command, params, params_flattened, params_options): - """ - @param command: [audit, list, event] - @param params: e.g. 
[param-1, param-2] - @param params_flattened: [[param1-flattened1, param1-flattened2]] - @param params_options: {param-flattened1: [required=True, help="Text"], param-flattened2: [help="Text"]} - """ - - parent_and_child = get_parent_child_command(command) - parent_group = parent_and_child[0] - child_group = parent_and_child[1] - - service_cli = get_service_cli_from_command(parent_group) - - params_underscore = [param.replace("-", "_") for param in params] - params_flattened_underscore = [[param.replace("-", "_") for param in params] for params in params_flattened] - flatten_parameters = [list(x) for x in zip(params_underscore, params_flattened_underscore)] - - json_skeleton_generation_handler = get_json_skeleton_generation_handler(command[0], child_group).rstrip() - - command_name = get_command_from_extended(get_extended_file(parent_group), child_group) - - flatten_dict = {'service_cli': service_cli, - 'parent_command': parent_group.callback.__name__, - 'command': command_name, - 'command_name': child_group.name, - 'params_to_exclude': params_underscore, - 'json_skeleton_generation_handler': json_skeleton_generation_handler, - 'flatten_parameters': flatten_parameters, - 'params_options': params_options} - return flatten_dict - - -# Generates dict object required for renaming command template -def rename_command_dict(commands, new_names): - renamed_commands = [] - for i, command in enumerate(commands): - print("Renaming command: {}".format(' '.join(command))) - parent_and_child = get_parent_child_command(command) - parent_group = parent_and_child[0] - child_group = parent_and_child[1] - - service_cli = get_service_cli_from_command(parent_group) - child_service_cli = get_service_cli_from_command(child_group) - - new_command = command.copy() - new_command[-1] = new_names[i] - rename_dict = {'service_cli': service_cli, - 'child_service_cli': child_service_cli, - 'command_parent_group': parent_group.callback.__name__, - 'command_group': child_group.callback.__name__, - 'new_name': new_names[i], - 'old_command': " ".join(command), - 'new_command': " ".join(new_command)} - renamed_commands.append(rename_dict) - return {'renamed_commands': renamed_commands} - - -# Generates dict object required for removing command template -def remove_command_dict(commands): - removed_commands = [] - for command in commands: - print("Removing command: {}".format(' '.join(command))) - - parent_and_child = get_parent_child_command(command) - parent_group = parent_and_child[0] - child_group = parent_and_child[1] - - service_cli = get_service_cli_from_command(child_group) - - removed_command = {'service_cli': service_cli, - 'parent_group': parent_group.callback.__name__, - 'command_group': child_group.callback.__name__, - 'full_command': " ".join(command[:-1]), - 'command': command[-1]} - removed_commands.append(removed_command) - return {'removed_commands': removed_commands} - - -# Generates dict object required for moving command template -def move_command_dict(command, new_group): - - old_parent_and_child = get_parent_child_command(command) - old_parent_group = old_parent_and_child[0] - old_child_group = old_parent_and_child[1] - - service_cli = get_service_cli_from_command(old_child_group) - - new_parent_group = get_command_group(new_group) - - move_dict = {'old_command': " ".join(command), - 'new_command': " ".join(new_group), - 'service_cli': service_cli, - 'old_parent_group': old_parent_group.callback.__name__, - 'command_group': old_child_group.callback.__name__, - 'new_parent_group': 
new_parent_group.callback.__name__} - - return move_dict - - -def move_group_dict(old_group, new_group): - - old_parent_and_child = get_parent_child_command(old_group) - old_parent_group = old_parent_and_child[0] - old_child_group = old_parent_and_child[1] - - service_cli = get_service_cli_from_command(old_child_group) - - new_parent_group = get_command_group(new_group) - new_service_cli = get_service_cli_from_command(new_parent_group) - - moved_commands = [] - for command in old_child_group.commands.values(): - moved_commands.append(command.callback.__name__) - - group_dict = {'service_cli': service_cli, - 'new_service_cli':new_service_cli, - 'old_group': " ".join(old_group), - 'new_group': " ".join(new_group), - 'old_parent_group': old_parent_group.callback.__name__, - 'old_command_group': old_child_group.callback.__name__, - 'new_parent_group': new_parent_group.callback.__name__, - 'moved_commands': moved_commands} - return group_dict - - -# Returns the "@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={})" -# that is used for a command. -def get_json_skeleton_generation_handler(service, command, child_group): - service_cli = get_generated_file_from_command(command) - command_name = child_group.callback.__name__ - command_to_find = command_name + ".command_name" - found_command = False - json_skeleton_generation_handler = "" - try: - with open(service_cli, "r") as service_file: - contents = service_file.readlines() - for line in contents: - if not found_command: - if line.__contains__(command_to_find): - found_command = True - else: - if line.__contains__("@json_skeleton_utils.json_skeleton_generation_handler"): - json_skeleton_generation_handler = line - break - except IOError: - raise Exception("Encountered error while parsing service file.") - finally: - service_file.close() - return json_skeleton_generation_handler - - -# Returns the optional flags of a command mapped to their option -# Searches through the generated file for the options in params -# @cli_util.option('--tenancy-id', required=True, help=u"""The OCID of the tenancy for which usage is being fetched.""") -# Will return only '{"tenancy_id": "required=True, "}' from the above string -def get_parameter_options(service, command, child_group, params, options=None): - service_cli = get_generated_file_from_command(command) - command_name = child_group.callback.__name__ - command_name = command_name.replace("_extended", "") - command_to_find = command_name + ".command_name" - param_dict = {} - - found_command = False - - try: - with open(service_cli, "r") as service_file: - contents = service_file.readlines() - for line in contents: - if not found_command: - if line.__contains__(command_to_find): - found_command = True - else: - if len(param_dict) >= len(params): - break - for param in params: - if line.__contains__(param + "',"): - line_split = line.split("help=") - line_split = line_split[0].split(",") - param_options = "" - for i, option in enumerate(line_split): - if "=" in option: - param_options = ",".join(line_split[i:]) - break - param_dict.update({param.replace("-", "_"): param_options}) - except Exception as e: - print(e) - - if options is not None: - for param, option in options.items(): - param_dict.update({param.replace("-", "_"): option}) - return param_dict - - -def _combine_parameter_dicts(param_dict, param_options): - for dict in param_dict: - dict.update({dict['name']: param_options[dict['name']]}) - return param_dict - - -# Command is a list of a valid CLI 
command and returns the group of the parent and child -# e.g. [os, object, get] returns the groups for object and get -def get_parent_child_command(command): - commands = cli.commands - parent_group = commands - for group in command[:-1]: - if group in commands: - parent_group = commands[group] - commands = parent_group.commands - else: - raise Exception("Invalid command entered. {}".format(str(command.name))) - return [parent_group, parent_group.commands[command[-1]]] - - -def get_command_group(command): - commands = cli.commands - command_group = commands - for group in command: - if group in commands: - command_group = commands[group] - commands = command_group.commands - else: - raise Exception("Invalid command entered. {}".format(str(command.name))) - return command_group - - -# Gets the extended file for the service. -# If it does not exist, create one. -def get_extended_file(command, child_group=None): - service_cli_file = get_generated_file_from_command(command) - service_dir = service_cli_file.rsplit('/', 2)[0] - - extended_file_path = check_extended_file_exists(service_dir) - - service_cli = get_service_cli_from_command(command) - module_path = ".".join(service_cli_file.rsplit('/', 6)[1:-1]) - - # This step is if the parent and child commands have different source generated files. - # oci usage list, list can be part of usage_list_cli, but usage can be in usage_cli - child_service_cli = None - child_module_path = None - if child_group: - child_service_cli = get_generated_file_from_command(child_group) - if service_cli_file == child_service_cli: - child_group = None - else: - child_module_path = ".".join(child_service_cli.rsplit('/', 6)[1:-1]) - child_service_cli = child_service_cli.rsplit('/', 1)[-1][:-3] - - if not extended_file_path: - output = render_output({'service_cli': service_cli, - 'module_path': module_path, - 'child_group': child_group, - 'child_service_cli': child_service_cli, - 'child_module_path': child_module_path}, - "new_extended_file.py.js2") - extended_file_path = service_dir + "/" + service_cli + "_extended.py" - try: - with open(extended_file_path, "w") as extended_file: - extended_file.write(output) - except Exception as e: - print(e) - return extended_file_path - - -def check_extended_file_exists(service_dir): - extended_file = glob.glob(service_dir + "/" + "*extended*") - if len(extended_file) > 0: - return extended_file[0] - else: - return False - - -# Returns the service cli for the particular service -# e.g audit returns audit_cli -def get_service_cli(service): - service_cli = str(inspect.getfile(cli.commands[service].callback)) - service_cli = service_cli.rsplit('/', 1)[-1][:-3] - return service_cli - - -# Returns the generated service cli for a command -# oci audit -> audit_cli -def get_service_cli_from_command(command): - service_cli = get_generated_file_from_command(command) - service_cli = service_cli.rsplit('/', 1)[-1][:-3] - return service_cli - - -# Returns the parameter list as required to rename a parameter -def parameter_list(command, params): - param_dict = [] - for p in params: - param = get_param(command, p) - param = vars(param) - param_dict.append(param) - return param_dict - - -# Gets the parameter object from the command -def get_param(command, param): - for p in command.params: - if p.name == param: - return p - - raise Exception("Invalid param, does not exist. 
{} {}".format(str(command.name), param)) - - -def render_output(dict, template): - template = env.get_template(template) - return template.render(dict) - - -def check_existing_manual_change(extended_file, command): - existing_change = False - try: - with open(extended_file, 'r') as file: - contents = file.readlines() - for i, line in enumerate(contents): - if 'cli_util.copy_params_from_generated_command' in line and command in line: - existing_change = True - break - except Exception as e: - print(e) - - return existing_change - - -def get_command_from_extended(extended_file, command): - command_name = command.callback.__name__ - try: - with open(extended_file, 'r') as file: - name = "" - contents = file.readlines() - for line in contents: - if "copy_params_from_generated_command" in line: - name = line - if command_name in line and "def" in line: - command_name = name - command_name = command_name.split('.')[2] - command_name = command_name.split(',')[0] - break - except Exception as e: - print(e) - - return command_name - - -def get_generated_file_from_command(command): - return str(sys.modules[command.callback.__module__].__file__) diff --git a/scripts/auto_gen_utils/python_cli/templates/copy_params_from_generated.py.js2 b/scripts/auto_gen_utils/python_cli/templates/copy_params_from_generated.py.js2 deleted file mode 100644 index 5fdacc7953..0000000000 --- a/scripts/auto_gen_utils/python_cli/templates/copy_params_from_generated.py.js2 +++ /dev/null @@ -1,2 +0,0 @@ -@cli_util.copy_params_from_generated_command({{service_cli}}.{{command}}, params_to_exclude={{params_to_exclude}}) - diff --git a/scripts/auto_gen_utils/python_cli/templates/flatten_parameter.py.js2 b/scripts/auto_gen_utils/python_cli/templates/flatten_parameter.py.js2 deleted file mode 100644 index c91ada355b..0000000000 --- a/scripts/auto_gen_utils/python_cli/templates/flatten_parameter.py.js2 +++ /dev/null @@ -1,24 +0,0 @@ - - -@cli_util.copy_params_from_generated_command({{service_cli}}.{{command}}, params_to_exclude={{params_to_exclude}}) -@{{service_cli}}.{{parent_command}}.command(name='{{command_name}}', help={{service_cli}}.{{command}}.help) -{% for key, value in params_options.items() -%} -@cli_util.option('--{{key}}', {{", ".join(value)}}) -{% endfor -%} -@click.pass_context -{{json_skeleton_generation_handler}} -@cli_util.wrap_exceptions -def {{command}}_extended(ctx, **kwargs): - {% for param in flatten_parameters -%} - {{param[0]}} = {} - {% for flattened in param[1] -%} - if '{{flattened}}' in kwargs: - {{param[0]}}['{{flattened}}'] = kwargs['{{flattened}}'] - kwargs.pop('{{flattened}}') - {% endfor %} - if len({{param[0]}}) > 0: - kwargs['{{param[0]}}'] = json.dumps({{param[0]}}) - - {% endfor -%} - ctx.invoke({{service_cli}}.{{command}}, **kwargs) - diff --git a/scripts/auto_gen_utils/python_cli/templates/flatten_parameter_kwargs.py.js2 b/scripts/auto_gen_utils/python_cli/templates/flatten_parameter_kwargs.py.js2 deleted file mode 100644 index 3bb86601fc..0000000000 --- a/scripts/auto_gen_utils/python_cli/templates/flatten_parameter_kwargs.py.js2 +++ /dev/null @@ -1,13 +0,0 @@ - - {% for param in flatten_parameters -%} - {{param[0]}} = {} - {% for flattened in param[1] -%} - if '{{flattened}}' in kwargs: - {{param[0]}}['{{flattened}}'] = kwargs['{{flattened}}'] - kwargs.pop('{{flattened}}') - {% endfor %} - if len({{param[0]}}) > 0: - kwargs['{{param[0]}}'] = json.dumps({{param[0]}}) - -{% endfor -%} - diff --git a/scripts/auto_gen_utils/python_cli/templates/flatten_parameter_option.py.js2 
b/scripts/auto_gen_utils/python_cli/templates/flatten_parameter_option.py.js2 deleted file mode 100644 index 6ff45c5378..0000000000 --- a/scripts/auto_gen_utils/python_cli/templates/flatten_parameter_option.py.js2 +++ /dev/null @@ -1,3 +0,0 @@ -{% for key, value in params_options.items() -%} -@cli_util.option('--{{key}}', {{", ".join(value)}}) -{% endfor -%} \ No newline at end of file diff --git a/scripts/auto_gen_utils/python_cli/templates/jira_exception_message.py.js2 b/scripts/auto_gen_utils/python_cli/templates/jira_exception_message.py.js2 deleted file mode 100644 index 82f639fb9c..0000000000 --- a/scripts/auto_gen_utils/python_cli/templates/jira_exception_message.py.js2 +++ /dev/null @@ -1,4 +0,0 @@ -{% for key, value in raised_exceptions.items() %} -Input line : {{key}} -Error is : {{value.message}} -{% endfor %} diff --git a/scripts/auto_gen_utils/python_cli/templates/move_command.py.js2 b/scripts/auto_gen_utils/python_cli/templates/move_command.py.js2 deleted file mode 100644 index 12d7400588..0000000000 --- a/scripts/auto_gen_utils/python_cli/templates/move_command.py.js2 +++ /dev/null @@ -1,6 +0,0 @@ - - -# oci {{old_command}} -> oci {{new_command}} -{{service_cli}}.{{old_parent_group}}.commands.pop({{service_cli}}.{{command_group}}.name) -{{service_cli}}.{{new_parent_group}}.add_command({{service_cli}}.{{command_group}}) - diff --git a/scripts/auto_gen_utils/python_cli/templates/move_group.py.js2 b/scripts/auto_gen_utils/python_cli/templates/move_group.py.js2 deleted file mode 100644 index 4b08ca8e97..0000000000 --- a/scripts/auto_gen_utils/python_cli/templates/move_group.py.js2 +++ /dev/null @@ -1,8 +0,0 @@ - - -# Move commands under 'oci {{old_group}}' -> 'oci {{new_group}}' -{{new_service_cli}}.{{old_parent_group}}.commands.pop({{service_cli}}.{{old_command_group}}.name) -{% for command in moved_commands -%} -{{new_service_cli}}.{{new_parent_group}}.add_command({{service_cli}}.{{command}}) -{% endfor -%} - diff --git a/scripts/auto_gen_utils/python_cli/templates/new_extended_file.py.js2 b/scripts/auto_gen_utils/python_cli/templates/new_extended_file.py.js2 deleted file mode 100644 index 73b7ba1d2b..0000000000 --- a/scripts/auto_gen_utils/python_cli/templates/new_extended_file.py.js2 +++ /dev/null @@ -1,14 +0,0 @@ -# coding: utf-8 -# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved. -# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. 
- -import click # noqa: F401 -import json # noqa: F401 -from {{module_path}} import {{service_cli}} -{% if child_group is not none -%} -from {{child_module_path}} import {{child_service_cli}} -{% endif -%} -from oci_cli import cli_util # noqa: F401 -from oci_cli import custom_types # noqa: F401 -from oci_cli import json_skeleton_utils # noqa: F401 - diff --git a/scripts/auto_gen_utils/python_cli/templates/remove_command.py.js2 b/scripts/auto_gen_utils/python_cli/templates/remove_command.py.js2 deleted file mode 100644 index 4dbf6ed8eb..0000000000 --- a/scripts/auto_gen_utils/python_cli/templates/remove_command.py.js2 +++ /dev/null @@ -1,9 +0,0 @@ - - -{% for command in removed_commands -%} -# Remove {{command['command']}} from oci {{command['full_command']}} -{% endfor -%} -{% for command in removed_commands -%} -{{command['service_cli']}}.{{command['parent_group']}}.commands.pop({{command['service_cli']}}.{{command['command_group']}}.name) -{% endfor -%} - diff --git a/scripts/auto_gen_utils/python_cli/templates/remove_parameter.py.js2 b/scripts/auto_gen_utils/python_cli/templates/remove_parameter.py.js2 deleted file mode 100644 index 8f286f0b38..0000000000 --- a/scripts/auto_gen_utils/python_cli/templates/remove_parameter.py.js2 +++ /dev/null @@ -1,11 +0,0 @@ - - -@cli_util.copy_params_from_generated_command({{service_cli}}.{{command}}, params_to_exclude={{params_to_exclude}}) -@{{service_cli}}.{{parent_command}}.command(name={{service_cli}}.{{command}}.name, help={{service_cli}}.{{command}}.help) -@click.pass_context -{{json_skeleton_generation_handler}} -@cli_util.wrap_exceptions -def {{command}}_extended(ctx, **kwargs): - - ctx.invoke({{service_cli}}.{{command}}, **kwargs) - diff --git a/scripts/auto_gen_utils/python_cli/templates/rename_command.py.js2 b/scripts/auto_gen_utils/python_cli/templates/rename_command.py.js2 deleted file mode 100644 index 9dbb290132..0000000000 --- a/scripts/auto_gen_utils/python_cli/templates/rename_command.py.js2 +++ /dev/null @@ -1,9 +0,0 @@ - - -{% for command in renamed_commands -%} -# oci {{command['old_command']}} -> oci {{command['new_command']}} -{% endfor -%} -{% for command in renamed_commands -%} -cli_util.rename_command({{command['service_cli']}}, {{command['service_cli']}}.{{command['command_parent_group']}}, {{command['child_service_cli']}}.{{command['command_group']}}, "{{command['new_name']}}") -{% endfor -%} - diff --git a/scripts/auto_gen_utils/python_cli/templates/rename_parameter.py.js2 b/scripts/auto_gen_utils/python_cli/templates/rename_parameter.py.js2 deleted file mode 100644 index 0faf5a1a23..0000000000 --- a/scripts/auto_gen_utils/python_cli/templates/rename_parameter.py.js2 +++ /dev/null @@ -1,20 +0,0 @@ - - -@cli_util.copy_params_from_generated_command({{service_cli}}.{{command}}, params_to_exclude={{params_to_exclude}}) -@{{service_cli}}.{{parent_command}}.command(name={{service_cli}}.{{command}}.name, help={{service_cli}}.{{command}}.help) -{% for param in param_dict -%} -@cli_util.option('--{{param[1]}}',{{param[0][param[0]['name']]}}help=u"""{{param[0]['help']}}""") -{% endfor -%} -@click.pass_context -{{json_skeleton_generation_handler}} -@cli_util.wrap_exceptions -def {{command}}_extended(ctx, **kwargs): - {%- for rename in rename_parameters %} - - if '{{ rename[1] }}' in kwargs: - kwargs['{{ rename[0] }}'] = kwargs['{{ rename[1] }}'] - kwargs.pop('{{ rename[1] }}') - - {% endfor -%} - ctx.invoke({{service_cli}}.{{command}}, **kwargs) - diff --git a/scripts/auto_gen_utils/python_cli/templates/rename_parameter_kwargs.py.js2 
b/scripts/auto_gen_utils/python_cli/templates/rename_parameter_kwargs.py.js2 deleted file mode 100644 index 7a44588b33..0000000000 --- a/scripts/auto_gen_utils/python_cli/templates/rename_parameter_kwargs.py.js2 +++ /dev/null @@ -1,6 +0,0 @@ - - {% for rename in rename_parameters -%} - if '{{ rename[1] }}' in kwargs: - kwargs['{{ rename[0] }}'] = kwargs['{{ rename[1] }}'] - kwargs.pop('{{ rename[1] }}') -{% endfor -%} \ No newline at end of file diff --git a/scripts/auto_gen_utils/python_cli/templates/rename_parameter_option.py.js2 b/scripts/auto_gen_utils/python_cli/templates/rename_parameter_option.py.js2 deleted file mode 100644 index a4843698f4..0000000000 --- a/scripts/auto_gen_utils/python_cli/templates/rename_parameter_option.py.js2 +++ /dev/null @@ -1,3 +0,0 @@ -{% for param in param_dict -%} -@cli_util.option('--{{param[1]}}',{{param[0][param[0]['name']] or ' '}}help="""{{param[0]['help']}}""") -{% endfor -%} \ No newline at end of file diff --git a/scripts/auto_gen_utils/requirements-ocibuild.txt b/scripts/auto_gen_utils/requirements-ocibuild.txt deleted file mode 100644 index 127fc53786..0000000000 --- a/scripts/auto_gen_utils/requirements-ocibuild.txt +++ /dev/null @@ -1,83 +0,0 @@ -# This requirements file is used for running unit tests in -# OCI Build. -# It needs to work with both Python 2 and Python 3. -# TODO: Use this requirements-ocibuild.txt instead of the requirements.txt everywhere. - -asn1crypto==1.5.1 -atomicwrites==1.4.1; python_version < '3.7.0' -atomicwrites==1.1.5; python_version >= '3.7.0' -attrs==18.1.0 -certifi==2018.4.16 - -cffi==1.11.5; python_version < '3' -cffi==1.16.0; python_version >= '3' - -chardet==3.0.4 -click==7.1.2 -configparser~=4.0.2 - -cryptography==3.2.1; python_version < '3' -cryptography==39.0.2; python_version >= '3' - -defusedxml==0.6.0 -dotmap==1.2.20 -enum34==1.1.6 -funcsigs==1.0.2 -future==0.16.0 -gitdb2==2.0.3 -GitPython==2.1.9 -idna==2.7 -ipaddress==1.0.22 - -jira==1.0.15; python_version < '3.7.0' -jira==3.2.0; python_version >= '3.7.0' - -more-itertools==4.3.0 -oauthlib==2.1.0 -ordereddict==1.1 -parse==1.8.4 -pathlib2==2.3.3 -pbr==4.1.0 - -pluggy==1.5.0; python_version >= '3' -pluggy==0.13.1; python_version < '3' - -pycparser==2.18 -PyGithub<1.57 -pyOpenSSL==18.0.0 - -pytest==8.1.1; python_version > '3.4' -pytest==4.6; python_version <= '3.4' - -pytz==2018.5 -requests==2.25.1 - -requests-oauthlib==0.8.0; python_version < '3.7.0' -requests-oauthlib>=1.1.0; python_version >= '3.7.0' - -requests-toolbelt==0.8.0 -scandir==1.9.0 -simplejson==3.16.0 -six==1.11.0 -smmap2==2.0.3 -uritemplate==0.6 -urllib3==1.26.2 -verlib==0.1 -voluptuous==0.11.5 -flake8==3.7.9 -stashy==0.5 -Jinja2==2.11.2 -python-dateutil==2.7.3 -vcrpy==1.13.0 -setuptools==44.1.1 -packaging==19.0 - -recordclass==0.21.1; python_version >= '3.6' -recordclass==0.12.0.1; python_version < '3.6' - -slack-sdk==3.17.0; python_version >= '3.7.0' - -# Pretty old, but a lot of our scripts in TeamCity still run with Python 2, -# and all we need is Object Storage put_object -oci==2.104.3; python_version >= '3.7.0' -oci==2.53.0; python_version < '3' \ No newline at end of file diff --git a/scripts/auto_gen_utils/requirements.txt b/scripts/auto_gen_utils/requirements.txt deleted file mode 100644 index ec288ed721..0000000000 --- a/scripts/auto_gen_utils/requirements.txt +++ /dev/null @@ -1,59 +0,0 @@ -asn1crypto==1.5.1 -atomicwrites==1.4.1; python_version < '3.7.0' -atomicwrites==1.1.5; python_version >= '3.7.0' -attrs==18.1.0 -certifi==2018.4.16 -cffi==1.11.5 -chardet==3.0.4 -click==7.1.2 
-configparser~=4.0.2 -cryptography==3.2.1 -defusedxml==0.6.0 -dotmap==1.2.20 -enum34==1.1.6 -funcsigs==1.0.2 -future==0.16.0 -gitdb2==2.0.3 -GitPython==2.1.9 -idna==2.7 -ipaddress==1.0.22 -jira==1.0.15; python_version < '3.7.0' -jira==3.2.0; python_version >= '3.7.0' -more-itertools==4.3.0 -oauthlib==2.1.0 -ordereddict==1.1 -parse==1.8.4 -pathlib2==2.3.3 -pbr==4.1.0 -pluggy==0.7.1 -pycparser==2.18 -PyGithub<1.57 -pyOpenSSL==18.0.0 -pytest==3.7.2 -pytz==2018.5 -requests==2.25.1 -requests-oauthlib==0.8.0; python_version < '3.7.0' -requests-oauthlib>=1.1.0; python_version >= '3.7.0' -requests-toolbelt==0.8.0 -scandir==1.9.0 -simplejson==3.16.0 -six==1.11.0 -smmap2==2.0.3 -uritemplate==0.6 -urllib3==1.26.2 -verlib==0.1 -voluptuous==0.11.5 -flake8==3.7.9 -stashy==0.5 -Jinja2==2.11.2 -python-dateutil==2.7.3 -vcrpy==1.13.0 -setuptools==44.1.1 -packaging==19.0 -recordclass==0.12.0.1 -slack-sdk==3.17.0; python_version >= '3.7.0' - -# Pretty old, but a lot of our scripts in TeamCity still run with Python 2, -# and all we need is Object Storage put_object -oci==2.104.3; python_version >= '3.7.0' -oci==2.53.0; python_version < '3' \ No newline at end of file diff --git a/scripts/auto_gen_utils/sdk_regions_updater/README.md b/scripts/auto_gen_utils/sdk_regions_updater/README.md deleted file mode 100644 index eef897d6c9..0000000000 --- a/scripts/auto_gen_utils/sdk_regions_updater/README.md +++ /dev/null @@ -1,47 +0,0 @@ -SDK Regions Updater -=================== - -Description ----------- -This package contains scripts to add new realms/regions to SDKs. The tools support the following OCI SDKs: - -* .NET SDK -* Go SDK -* Python SDK -* Ruby SDK -* Java SDK (not implemented) -* Typescript SDK (not implemented) - -Installing dependencies ------------------------ -To install dependencies for this project, from the root of the repo execute the following commands: - - `pip install --trusted-host=artifactory.oci.oraclecorp.com -i https://artifactory.oci.oraclecorp.com/api/pypi/global-release-pypi/simple -U skclient` - `pip install -r requirements.txt` - -Running scripts locally ----------------------- -Before running the scripts, an environment variable must be set for each of the SDKs: - -* DotNetSDK_path=/path/to/oci-dotnet-sdk -* GoSDK_path=/path/to/go/src/github.com/oracle/oci-go-sdk -* PythonSDK_path=/path/to/python-sdk -* RubySDK_path=/path/to/ruby-sdk -* JavaSDK_path=/path/to/java-sdk -* TypescriptSDK_path=/path/to/oci-typescript-sdk - -To update regions for one specific SDK, execute the following command from the root of the repo: - - `python -m sdk_regions_updater.sdk_regions_updater --SDK <SDK>` - -For example, to update regions for the .NET SDK, use the command: - - `python -m sdk_regions_updater.sdk_regions_updater --SDK DotNetSDK` - -In order to update regions for all supported SDKs, specify "All": - - `python -m sdk_regions_updater.sdk_regions_updater --SDK All` - -Running the script without the --SDK argument will show the usage information, including a list of allowed values for the --SDK argument: - - `python -m sdk_regions_updater.sdk_regions_updater` \ No newline at end of file diff --git a/scripts/auto_gen_utils/sdk_regions_updater/__init__.py b/scripts/auto_gen_utils/sdk_regions_updater/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/scripts/auto_gen_utils/sdk_regions_updater/config b/scripts/auto_gen_utils/sdk_regions_updater/config deleted file mode 100644 index c6deb38293..0000000000 --- a/scripts/auto_gen_utils/sdk_regions_updater/config +++ /dev/null @@ -1,6 
+0,0 @@ -DotNetSDK_path=/path/to/oci-dotnet-sdk -JavaSDK_path=/path/to/java-sdk -GoSDK_path=/path/to/go/src/github.com/oracle/oci-go-sdk -PythonSDK_path=/path/to/python-sdk -RubySDK_path=/path/to/ruby-sdk -TypescriptSDK_path=/path/to/oci-typescript-sdk \ No newline at end of file diff --git a/scripts/auto_gen_utils/sdk_regions_updater/dotnet_sdk_region_updater.py b/scripts/auto_gen_utils/sdk_regions_updater/dotnet_sdk_region_updater.py deleted file mode 100644 index ee018db4b5..0000000000 --- a/scripts/auto_gen_utils/sdk_regions_updater/dotnet_sdk_region_updater.py +++ /dev/null @@ -1,71 +0,0 @@ -import logging -import os -from .region_updater_utils import get_regions_by_realms, update_regions_json_file, get_regions_from_json_file - - -class DotNetSDKRegionsUpdater: - script_dir = os.path.dirname(os.path.realpath(__file__)) - regions_json_file_path = os.path.join("Commontests", "Regions.json") - realms_file_path = os.path.join("Common", "Src", "RealmDefinitions.cs") - regions_file_path = os.path.join("Common", "Src", "RegionDefinitions.cs") - realms_template_path = os.path.join(script_dir, "templates", "dotnet-sdk-realms.tpl") - regions_template_path = os.path.join(script_dir, "templates", "dotnet-sdk-regions.tpl") - realm_line_template = ' public static readonly Realm {} = new Realm("{}", "{}");\n' - region_line_template = ' public static readonly Region {} = Register("{}", Realm.{}, "{}");\n' - region_no_short_code_line_template = ' public static readonly Region {} = Register("{}", Realm.{});\n' - - def __init__(self, sdk_path): - self.logger = logging.getLogger("OCI-DotNetSDK-Regions-Updater") - logging.basicConfig(level=logging.INFO) - self.dotnet_sdk_path = sdk_path - - def _update_realms_file(self, regions): - realms_source_file_full_path = os.path.join(self.dotnet_sdk_path, self.realms_file_path) - processed_realms = [] - new_realms_file_content = '' - for region in regions: - if region['realmKey'].lower() not in processed_realms: - processed_realms.append(region['realmKey'].lower()) - new_line = self.realm_line_template.format(region['realmKey'].upper(), region['realmKey'].lower(), region['realmDomainComponent']) - new_realms_file_content += new_line - new_realms_file_content = new_realms_file_content.rstrip() - with open(self.realms_template_path, 'r') as ft, open(realms_source_file_full_path, 'w') as fw: - realms_template = ft.read() - realms_code = realms_template.replace('%REALMS_DEFINITIONS%', new_realms_file_content) - self.logger.log(logging.INFO, 'Writing realms source code to {}'.format(realms_source_file_full_path)) - fw.write(realms_code) - - def _update_regions_file(self, regions): - regions_source_file_full_path = os.path.join(self.dotnet_sdk_path, self.regions_file_path) - processed_regions = [] - new_regions_file_content = '' - regions_by_realm_number = get_regions_by_realms(regions) - # Sort realms so that the regions can be added by realm - realm_numbers = list(regions_by_realm_number.keys()) - realm_numbers.sort() - for number in realm_numbers: - comment_line = ' // OC{}\n'.format(number) - new_regions_file_content += comment_line - for region in regions_by_realm_number[number]: - if region['regionIdentifier'] not in processed_regions: - processed_regions.append(region['regionIdentifier']) - # Region key (short code) is optional. 
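For illustration, a hedged sketch of how the line templates defined on this class expand, including the optional-short-code branch the comment above introduces (the two region records below are invented):

    # Invented sample records; only the first carries the optional 'regionKey'.
    with_code = {'regionIdentifier': 'us-ashburn-1', 'realmKey': 'oc1', 'regionKey': 'iad'}
    without_code = {'regionIdentifier': 'xx-example-1', 'realmKey': 'oc1'}

    for region in (with_code, without_code):
        name = region['regionIdentifier'].upper().replace('-', '_')
        if 'regionKey' in region:
            line = DotNetSDKRegionsUpdater.region_line_template.format(
                name, region['regionIdentifier'].lower(),
                region['realmKey'].upper(), region['regionKey'].lower())
        else:
            line = DotNetSDKRegionsUpdater.region_no_short_code_line_template.format(
                name, region['regionIdentifier'].lower(), region['realmKey'].upper())
        print(line, end='')
    # public static readonly Region US_ASHBURN_1 = Register("us-ashburn-1", Realm.OC1, "iad");
    # public static readonly Region XX_EXAMPLE_1 = Register("xx-example-1", Realm.OC1);
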
- if 'regionKey' in region: - new_line = self.region_line_template.format(region['regionIdentifier'].upper().replace('-', '_'), region['regionIdentifier'].lower(), region['realmKey'].upper(), region['regionKey'].lower()) - else: - new_line = self.region_no_short_code_line_template.format(region['regionIdentifier'].upper().replace('-', '_'), region['regionIdentifier'].lower(), region['realmKey'].upper()) - new_regions_file_content += new_line - new_regions_file_content += '\n' - new_regions_file_content = new_regions_file_content.rstrip() - with open(self.regions_template_path, 'r') as ft, open(regions_source_file_full_path, 'w') as fw: - regions_template = ft.read() - regions_code = regions_template.replace('%REGIONS_DEFINITIONS%', new_regions_file_content) - self.logger.log(logging.INFO, 'Writing regions source code to {}'.format(regions_source_file_full_path)) - fw.write(regions_code) - - def update_regions(self, new_regions): - regions_json_file_path = os.path.join(self.dotnet_sdk_path, self.regions_json_file_path) - update_regions_json_file(new_regions, regions_json_file_path) - regions = get_regions_from_json_file(regions_json_file_path) - self._update_realms_file(regions) - self._update_regions_file(regions) diff --git a/scripts/auto_gen_utils/sdk_regions_updater/go_sdk_region_updater.py b/scripts/auto_gen_utils/sdk_regions_updater/go_sdk_region_updater.py deleted file mode 100644 index d9a5550d3e..0000000000 --- a/scripts/auto_gen_utils/sdk_regions_updater/go_sdk_region_updater.py +++ /dev/null @@ -1,95 +0,0 @@ -import logging -import os -from .region_updater_utils import get_regions_by_realms, update_regions_json_file, get_regions_from_json_file - - -class GoSDKRegionsUpdater: - script_dir = os.path.dirname(os.path.realpath(__file__)) - regions_json_file_path = os.path.join("common", "regions.json") - regions_source_file_path = os.path.join("common", "regions.go") - regions_template_path = os.path.join(script_dir, "templates", "go-sdk-regions.tpl") - region_realm_mapping_entry_template = ' Region{}: "{}",\n' - short_name_region_mapping_entry_template = ' "{}": Region{},\n' - region_definition_comment_template = ' //Region{} region {}\n' - gov_region_definition_comment_template = ' //Region{} gov region {}\n' - region_definition_template = ' Region{} Region = "{}"\n' - realm_definition_template = ' "{}": "{}",\n' - # Some earlier regions in GoSDK were named in a way that is different from the current naming conventions. - # These legacy names need to be handled. 
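A hedged sketch of the naming rule this comment motivates (implemented by _build_region_name below): legacy regions keep their short historical names, and every other region identifier is CamelCased part by part. The SDK path argument is illustrative:

    updater = GoSDKRegionsUpdater('/path/to/oci-go-sdk')  # illustrative path
    print(updater._build_region_name('us-phoenix-1'))  # 'PHX' (legacy special case)
    print(updater._build_region_name('ap-sydney-1'))   # 'APSydney1' (CamelCased)
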
- special_region_names = { - 'us-phoenix-1': 'PHX', - 'us-ashburn-1': 'IAD', - 'us-sanjose-1': 'SJC1', - 'eu-frankfurt-1': 'FRA', - 'uk-london-1': 'LHR' - } - - def __init__(self, sdk_path): - self.logger = logging.getLogger("OCI-GoSDK-Regions-Updater") - logging.basicConfig(level=logging.INFO) - self.go_sdk_path = sdk_path - - def _build_region_name(self, region_id): - if region_id.lower() in self.special_region_names: - return self.special_region_names[region_id.lower()] - else: - region_name_parts = region_id.lower().split('-') - region_name = region_name_parts[0].upper() - for part in region_name_parts[1:]: - region_name += part.title() - return region_name - - def _build_region_comment(self, region_id): - region_name = self._build_region_name(region_id) - parts = region_id.lower().split('-') - if len(parts) == 4 and parts[1] == 'gov': - return self.gov_region_definition_comment_template.format(region_name, parts[2].title()) - else: - return self.region_definition_comment_template.format(region_name, parts[-2].title()) - - def _update_regions_file(self, regions): - regions_source_file_full_path = os.path.join(self.go_sdk_path, self.regions_source_file_path) - processed_regions = [] - processed_realms = [] - short_name_regions = '' - region_realm_mappings = '' - realms_definitions = '' - regions_definitions = '' - regions_by_realm_number = get_regions_by_realms(regions) - # Sort realms so that the regions can be added by realm - realm_numbers = list(regions_by_realm_number.keys()) - realm_numbers.sort() - for number in realm_numbers: - for region in regions_by_realm_number[number]: - if region['realmKey'].lower() not in processed_realms: - realms_definitions += self.realm_definition_template.format(region['realmKey'].lower(), region['realmDomainComponent'].lower()) - processed_realms.append(region['realmKey'].lower()) - if region['regionIdentifier'] not in processed_regions: - region_name = self._build_region_name(region['regionIdentifier']) - # Region key (short code) is optional. 
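The updaters in this package share one substitution mechanism: each .tpl template carries uppercase %TOKEN% placeholders that are replaced wholesale with generated definition blocks. A minimal sketch with invented template and content strings:

    # Invented strings; the real template lives in templates/go-sdk-regions.tpl.
    template = 'var realm = map[string]string{\n%REALMS_DEFINITIONS%\n}\n'
    realms_definitions = '\t"oc1": "oraclecloud.com",'
    print(template.replace('%REALMS_DEFINITIONS%', realms_definitions))
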
- if 'regionKey' in region: - short_name_regions += self.short_name_region_mapping_entry_template.format(region['regionKey'].lower(), region_name) - region_realm_mappings += self.region_realm_mapping_entry_template.format(region_name, region['realmKey'].lower()) - regions_definitions += self._build_region_comment(region['regionIdentifier']) - regions_definitions += self.region_definition_template.format(region_name, region['regionIdentifier'].lower()) - processed_regions.append(region['regionIdentifier']) - region_realm_mappings += '\n' - short_name_regions = short_name_regions.rstrip() - region_realm_mappings = region_realm_mappings.rstrip() - regions_definitions = regions_definitions.rstrip() - realms_definitions = realms_definitions.rstrip() - with open(self.regions_template_path, 'r') as ft, open(regions_source_file_full_path, 'w') as fw: - regions_template = ft.read() - regions_code = (regions_template.replace('%REGIONS_DEFINITIONS%', regions_definitions) - .replace('%REALMS_DEFINITIONS%', realms_definitions) - .replace('%SHORT_NAME_REGIONS%', short_name_regions) - .replace('%REGION_REALM_MAPPINGS%', region_realm_mappings) - ) - self.logger.log(logging.INFO, 'Writing regions source code to {}'.format(regions_source_file_full_path)) - fw.write(regions_code) - - def update_regions(self, new_regions): - regions_json_file_path = os.path.join(self.go_sdk_path, self.regions_json_file_path) - update_regions_json_file(new_regions, regions_json_file_path) - regions = get_regions_from_json_file(regions_json_file_path) - self._update_regions_file(regions) diff --git a/scripts/auto_gen_utils/sdk_regions_updater/java_sdk_region_updater.py b/scripts/auto_gen_utils/sdk_regions_updater/java_sdk_region_updater.py deleted file mode 100644 index 6536dc793f..0000000000 --- a/scripts/auto_gen_utils/sdk_regions_updater/java_sdk_region_updater.py +++ /dev/null @@ -1,131 +0,0 @@ -import logging -import os -from .region_updater_utils import get_regions_by_realms, update_regions_json_file - - -class JavaSDKRegionsUpdater: - realms_file_path = os.path.join("bmc-common", "src", "main", "java", "com", "oracle", "bmc", "Realm.java") - regions_file_path = os.path.join("bmc-common", "src", "main", "java", "com", "oracle", "bmc", "Region.java") - regions_json_file_path = os.path.join("bmc-common", "src", "test", "resources", "regions.json") - realm_line_prefix = "public static final Realm " - region_line_prefix = "public static final Region " - realm_line_template = ' public static final Realm {} = new Realm("{}", "{}");\n' - region_line_template = ' public static final Region {} = register("{}", Realm.{}, "{}");\n' - region_no_short_code_line_template = ' public static final Region {} = register("{}", Realm.{};\n' - - def __init__(self, sdk_path): - self.logger = logging.getLogger("OCI-JavaSDK-Regions-Updater") - logging.basicConfig(level=logging.DEBUG) - self.java_sdk_path = sdk_path - - # We are assuming that realms are added in the right orders. - # New realm numbers are always larger than existing ones. - # If the assumption is not correct, realms will be organized out of order. - # This does not affect code functionalities though. 
- def _update_realms_file(self, new_regions_by_realm): - realms_file_full_path = os.path.join(self.java_sdk_path, self.realms_file_path) - existing_realms = [] - should_inject_realms = False - new_realms_file_content = '' - with open(realms_file_full_path) as f: - for line in f.readlines(): - if line.strip().startswith(self.realm_line_prefix): - should_inject_realms = True - realm_id = line.strip()[len(self.realm_line_prefix):].split('=')[0].strip() - existing_realms.append(realm_id) - else: - if should_inject_realms: - # need to inject new realm(s) here - realm_numbers = list(new_regions_by_realm.keys()) - realm_numbers.sort() - for number in realm_numbers: - new_region = new_regions_by_realm[number][0] - if new_region['realmKey'].upper() in existing_realms: - self.logger.log(logging.INFO, "realm {} already exists. Will not add again.".format(new_region['realmKey'])) - else: - self.logger.log(logging.INFO, "Will add new realm {} .".format(new_region['realmKey'])) - new_line = self.realm_line_template.format(new_region['realmKey'].upper(), new_region['realmKey'].lower(), new_region['realmDomainComponent']) - new_realms_file_content += new_line - should_inject_realms = False - if (new_realms_file_content == ''): - new_realms_file_content = line - else: - new_realms_file_content += line - with open(realms_file_full_path, 'w') as fw: - fw.write(new_realms_file_content) - - def _update_regions_file(self, new_regions_by_realm): - regions_file_full_path = os.path.join(self.java_sdk_path, self.regions_file_path) - existing_regions = [] - should_inject_regions = False - new_regions_file_content = '' - current_realm = 0 - is_region_definition_complete = True - with open(regions_file_full_path) as f: - for line in f.readlines(): - line_stripped = line.strip() - if line_stripped.startswith('// OC'): - # This is the beginning of the code block defining regions for a realm. - current_realm = int(line_stripped.split(' ')[1][2:]) - elif line_stripped.startswith(self.region_line_prefix): - # This line is region definition. - should_inject_regions = True - region_name = line_stripped[len(self.region_line_prefix):].split('=')[0].strip() - existing_regions.append(region_name) - # The region definition can be broken into multiple lines. If the current line - # does not end with ";", then this is not a complete region definition statement. - if not line_stripped.endswith(';'): - is_region_definition_complete = False - else: - # Check if the we have a region definition statement that is not complete. - # The first ";" should complete the defintion statement. - if not is_region_definition_complete: - if line_stripped.endswith(';'): - is_region_definition_complete = True - # If line_stripped != '' and not line_stripped.startswith('// OC') and should_inject_regions: - elif should_inject_regions: - if line_stripped == '': - # At an empty line, inject new region definitions for this realm, if any - if current_realm in new_regions_by_realm: - # need to inject new region(s) here - for region in new_regions_by_realm[current_realm]: - region_name = region['regionIdentifier'].upper().replace('-', '_') - if region_name in existing_regions: - self.logger.log(logging.INFO, "Region {} already exists. 
Will not add again.".format(region['regionIdentifier'])) - else: - self.logger.log(logging.INFO, "Will add new region {} under realm OC{}.".format(region['regionIdentifier'], current_realm)) - new_line = self.region_line_template.format(region_name, region['regionIdentifier'].lower(), region['realmKey'].upper(), region['regionKey'].lower()) - new_regions_file_content += new_line - del new_regions_by_realm[current_realm] - # Check if there are additional regions left in new_regions_by_realm. - if len(new_regions_by_realm) == 0: - should_inject_regions = False - else: - # This is not a region defintion, and not an empty space, and not a region block comment. - # This must be the first line of code after the whole region definiton block. - # If there are new regions under new realms, add them here. - realm_numbers = list(new_regions_by_realm.keys()) - realm_numbers.sort() - for number in realm_numbers: - comment_line = ' // OC{}\n'.format(number) - new_regions_file_content += comment_line - for region in new_regions_by_realm[number]: - self.logger.log(logging.INFO, "Will add new region {} for new realm OC{}.".format(region['regionIdentifier'], number)) - new_line = self.region_line_template.format(region['regionIdentifier'].upper().replace('-', '_'), region['regionIdentifier'].lower(), region['realmKey'].upper(), region['regionKey'].lower()) - new_regions_file_content += new_line - new_regions_file_content += '\n' - should_inject_regions = False - - if (new_regions_file_content == ''): - new_regions_file_content = line - else: - new_regions_file_content += line - with open(regions_file_full_path, 'w') as fw: - fw.write(new_regions_file_content) - - def update_regions(self, new_regions): - regions_json_file_path = os.path.join(self.java_sdk_path, self.regions_json_file_path) - update_regions_json_file(new_regions, regions_json_file_path) - new_regions_by_realm = get_regions_by_realms(new_regions) - self._update_realms_file(new_regions_by_realm) - self._update_regions_file(new_regions_by_realm) diff --git a/scripts/auto_gen_utils/sdk_regions_updater/python_sdk_region_updater.py b/scripts/auto_gen_utils/sdk_regions_updater/python_sdk_region_updater.py deleted file mode 100644 index 9180dcdad2..0000000000 --- a/scripts/auto_gen_utils/sdk_regions_updater/python_sdk_region_updater.py +++ /dev/null @@ -1,62 +0,0 @@ -import logging -import os -from .region_updater_utils import get_regions_by_realms, update_regions_json_file, get_regions_from_json_file - - -class PythonSDKRegionsUpdater: - script_dir = os.path.dirname(os.path.realpath(__file__)) - regions_json_file_path = os.path.join("tests", "resources", "regions.json") - regions_source_file_path = os.path.join("src", "oci", "regions_definitions.py") - regions_template_path = os.path.join(script_dir, "templates", "python-sdk-regions.tpl") - mapping_entry_template = ' \'{}\': \'{}\',\n' - list_item_template = ' \'{}\',\n' - - def __init__(self, sdk_path): - self.logger = logging.getLogger("OCI-PythonSDK-Regions-Updater") - logging.basicConfig(level=logging.INFO) - self.python_sdk_path = sdk_path - - def _update_regions_file(self, regions): - regions_source_file_full_path = os.path.join(self.python_sdk_path, self.regions_source_file_path) - processed_regions = [] - processed_realms = [] - short_name_regions = '' - region_realm_mappings = '' - realms_definitions = '' - regions_definitions = '' - regions_by_realm_number = get_regions_by_realms(regions) - # Sort realms so that the regions can be added by realm - realm_numbers = 
list(regions_by_realm_number.keys()) - realm_numbers.sort() - for number in realm_numbers: - for region in regions_by_realm_number[number]: - if region['realmKey'].lower() not in processed_realms: - realms_definitions += self.mapping_entry_template.format(region['realmKey'].lower(), region['realmDomainComponent'].lower()) - processed_realms.append(region['realmKey'].lower()) - if region['regionIdentifier'] not in processed_regions: - # Region key (short code) is optional. - if 'regionKey' in region: - short_name_regions += self.mapping_entry_template.format(region['regionKey'].lower(), region['regionIdentifier'].lower()) - region_realm_mappings += self.mapping_entry_template.format(region['regionIdentifier'].lower(), region['realmKey'].lower()) - regions_definitions += self.list_item_template.format(region['regionIdentifier'].lower()) - processed_regions.append(region['regionIdentifier']) - region_realm_mappings += '\n' - short_name_regions = short_name_regions.rstrip().rstrip(',') - region_realm_mappings = region_realm_mappings.rstrip().rstrip(',') - regions_definitions = regions_definitions.rstrip().rstrip(',') - realms_definitions = realms_definitions.rstrip().rstrip(',') - with open(self.regions_template_path, 'r') as ft, open(regions_source_file_full_path, 'w') as fw: - regions_template = ft.read() - regions_code = (regions_template.replace('%REGIONS_DEFINITIONS%', regions_definitions) - .replace('%REALMS_DEFINITIONS%', realms_definitions) - .replace('%SHORT_NAME_REGIONS%', short_name_regions) - .replace('%REGION_REALM_MAPPINGS%', region_realm_mappings) - ) - self.logger.log(logging.INFO, 'Writing regions source code to {}'.format(regions_source_file_full_path)) - fw.write(regions_code) - - def update_regions(self, new_regions): - regions_json_file_path = os.path.join(self.python_sdk_path, self.regions_json_file_path) - update_regions_json_file(new_regions, regions_json_file_path) - regions = get_regions_from_json_file(regions_json_file_path) - self._update_regions_file(regions) diff --git a/scripts/auto_gen_utils/sdk_regions_updater/region_updater_utils.py b/scripts/auto_gen_utils/sdk_regions_updater/region_updater_utils.py deleted file mode 100644 index 885a382e7e..0000000000 --- a/scripts/auto_gen_utils/sdk_regions_updater/region_updater_utils.py +++ /dev/null @@ -1,211 +0,0 @@ -import json -import os -import skclient -import sys -import time - -from requests.exceptions import ConnectionError -from skclient.rest import ApiException -from functools import wraps - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '..')) -import config # noqa: ignore=F402 -import util # noqa: ignore=F402 - -_STOREKEEPER_CLIENT = None - - -def setup_endpoint(endpoint): - if not endpoint: - raise Exception('Endpoint is not configured: %s' % endpoint) - skclient.Configuration().host = endpoint - - -def setup_skclient(r0=False): - if not r0: - sk_endpoint = os.environ.get('SK_ENDPOINT') - else: - sk_endpoint = 'https://storekeeper.oci.oraclecorp.com/v1' - - setup_endpoint(sk_endpoint) - print('storekeeper endpoint:', skclient.Configuration().host) - setup_skuser(r0) - - -def setup_skuser(r0=False): - tenant_id = os.environ.get('TENANT_ID') - user_id = os.environ.get('USER_ID') - user_fingerprint = os.environ.get('USER_FINGERPRINT') - private_key_filename = os.environ.get('PRIVATE_KEY_FILENAME') - print('tenant_id: {}'.format(tenant_id)) - print('user_id: {}'.format(user_id)) - 
print('user_fingerprint: {}'.format(user_fingerprint)) - print('private_key_filename: {}'.format(private_key_filename)) - - if tenant_id and user_id and user_fingerprint and private_key_filename: - skclient.setup_user(tenant_id, user_id, user_fingerprint, - private_key_filename) - else: - raise ValueError('Please provide values for environment variables TENANT_ID, USER_ID, USER_FINGERPRINT, and PRIVATE_KEY_FILENAME.') - - -def retry(exceptions, tries=3, delay=5, backoff=2, logger=None): - def deco_retry(f): - @wraps(f) - def f_retry(*args, **kwargs): - nextretry, nextdelay = tries, delay - while nextretry > 0: - try: - return f(*args, **kwargs) - except exceptions as e: - print("Retrying in {} seconds because of '{}' ".format( - nextdelay, e)) - if logger: - msg = "Retrying in {} seconds because of '{}' " \ - "...".format(nextdelay, e) - logger.warning(msg) - time.sleep(nextdelay) - nextretry -= 1 - nextdelay *= backoff - return f(*args, **kwargs) - return f_retry - return deco_retry - - -@retry(ConnectionError, tries=3, delay=30) -def sk_api_retry(func, *args, **kwargs): - try: - return func(*args, **kwargs) - except ApiException as ex: - if 599 > ex.status >= 500: - raise ConnectionError("SK api failed due to {}".format(ex)) - raise - - -def STOREKEEPER_CLIENT(): - global _STOREKEEPER_CLIENT - - if _STOREKEEPER_CLIENT: - return _STOREKEEPER_CLIENT - - setup_skclient(True) - _STOREKEEPER_CLIENT = skclient.StoreKeeperApi() - return _STOREKEEPER_CLIENT - - -def list_regions_from_storekeeper(): - page_token = 0 - regions = [] - while (page_token is not None and int(page_token) >= 0): - response = sk_api_retry( - STOREKEEPER_CLIENT().list_regions, - num_results=50, page_token=page_token) - page_token = response.next_page_token - print("page_token is {}".format(page_token)) - for region in response.regions: - if region.realm_name is not None and region.realm_name != "OC0" and region.region_state == "active": - print("canonical_name: {}; name: {}; realm: {}; status: {}".format(region.canonical_name, region.name, region.realm_name, region.region_state)) - regions.append(region) - return regions - - -def is_region_type_in_special_list(region, special_region_types): - print("Region: {} has region_service_types: {}".format(region.canonical_name, region.region_service_types)) - for rtype in region.region_service_types: - for special_region_type in special_region_types: - if special_region_type in rtype.region_service_type: - print('Region has special type {}'.format(special_region_type)) - return True - return False - - -def get_region_from_storekeeper(region_id, region_types_to_ignore=[]): - try: - region = STOREKEEPER_CLIENT().get_region(region_id) - except ApiException as e: - print("Caught exception {}".format(e)) - region = None - # There must be some error either in the DEX ticket (maybe mis-typed region id) - # or Storekeeper is not behaving correctly. We cannot proceed. - if region is None: - return "" - # Before returning the region information, check if the region is supposed to be - # processed. region_types_to_ignore contains a list of types that should not be - # included in SDK. Return None for those types. The caller should handle None - # properly. 
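Per the comment above, get_region_from_storekeeper has three distinct outcomes a caller must handle: an empty string (lookup failed, cannot proceed), None (special region type to ignore), or a region dict. A hedged usage sketch with an invented region id and type list:

    region = get_region_from_storekeeper('xx-example-1', region_types_to_ignore=['sometype'])
    if region == "":
        print('invalid region or Storekeeper error; cannot proceed')
    elif region is None:
        print('special region type; skipping')
    else:
        print('processing', region['regionIdentifier'])
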
- if region.region_service_types is not None: - if is_region_type_in_special_list(region, region_types_to_ignore): - print('region {} with special type will be ignored'.format(region_id)) - return None - else: - # The absense of region_service_types in the region returned from Storekeeper - # will be treated as normal region and it will be processed. - print('region_service_types not found') - region_json = {} - region_json['regionKey'] = region.canonical_short_code - region_json['realmKey'] = region.realm_name.lower() - region_json['regionIdentifier'] = region.canonical_name - region_json['realmDomainComponent'] = region.realm_domain_name - return region_json - - -def get_regions_by_realms(regions): - regions_by_realm_number = {} - for region in regions: - realm_number = int(region['realmKey'][2:]) - if realm_number not in regions_by_realm_number: - regions_by_realm_number[realm_number] = [region] - else: - regions_by_realm_number[realm_number].append(region) - return regions_by_realm_number - - -def get_regions_from_json_file(regions_json_file_full_path): - regions = None - if not os.path.isfile(regions_json_file_full_path): - raise ValueError('{} is not a valid file.'.format(regions_json_file_full_path)) - with open(regions_json_file_full_path, 'r') as f: - regions = json.load(f) - return regions - - -def update_regions_json_file(new_regions, regions_json_file_full_path, json_indent=4): - data = get_regions_from_json_file(regions_json_file_full_path) - existing_region_ids = [] - for item in data: - existing_region_ids.append(item['regionIdentifier']) - for new_region in new_regions: - if new_region['regionIdentifier'] not in existing_region_ids: - data.append(new_region) - with open(regions_json_file_full_path, 'w') as f: - json.dump(data, f, indent=json_indent) - - -def get_new_regions_info_for_branch(branch='preview'): - issues = util.get_region_support_tickets_to_process(branch) - return get_new_regions_info_from_issues(issues) - - -def get_new_regions_info_from_issues(issues): - ids = util.get_region_id_from_dex_tickets(issues) - new_regions = [] - for id in ids: - print('Getting region details for {}'.format(id)) - new_regions.append(get_region_from_storekeeper(id)) - return new_regions - - -def get_issues_with_special_regions_to_be_ignored(issues, region_types_to_ignore): - issues_to_ignore = [] - issues_with_invalid_regions = [] - for issue in issues: - region_id = issue.raw['fields']['summary'].split()[-1] - print('Getting region details for {}'.format(region_id)) - new_region = get_region_from_storekeeper(region_id, region_types_to_ignore) - if new_region is None: - issues_to_ignore.append(issue) - elif new_region == "": - issues_with_invalid_regions.append(issue) - return issues_to_ignore, issues_with_invalid_regions \ No newline at end of file diff --git a/scripts/auto_gen_utils/sdk_regions_updater/ruby_sdk_region_updater.py b/scripts/auto_gen_utils/sdk_regions_updater/ruby_sdk_region_updater.py deleted file mode 100644 index b4581d5ed3..0000000000 --- a/scripts/auto_gen_utils/sdk_regions_updater/ruby_sdk_region_updater.py +++ /dev/null @@ -1,63 +0,0 @@ -import logging -import os -from .region_updater_utils import get_regions_by_realms, update_regions_json_file, get_regions_from_json_file - - -class RubySDKRegionsUpdater: - script_dir = os.path.dirname(os.path.realpath(__file__)) - regions_json_file_path = os.path.join("test", "resources", "regions.json") - regions_source_file_path = os.path.join("lib", "oci", "regions_definitions.rb") - regions_template_path = 
os.path.join(script_dir, "templates", "ruby-sdk-regions.tpl") - mapping_entry_template = ' \'{}\': \'{}\'.freeze,\n' - short_name_region_mapping_entry_template = ' \'{}\': REGION_{},\n' - region_enum_item_template = ' REGION_{} = \'{}\'.freeze,\n' - - def __init__(self, sdk_path): - self.logger = logging.getLogger("OCI-RubySDK-Regions-Updater") - logging.basicConfig(level=logging.INFO) - self.ruby_sdk_path = sdk_path - - def _update_regions_file(self, regions): - regions_source_file_full_path = os.path.join(self.ruby_sdk_path, self.regions_source_file_path) - processed_regions = [] - processed_realms = [] - short_name_regions = '' - region_realm_mappings = '' - realms_definitions = '' - regions_definitions = '' - regions_by_realm_number = get_regions_by_realms(regions) - # Sort realms so that the regions can be added by realm - realm_numbers = list(regions_by_realm_number.keys()) - realm_numbers.sort() - for number in realm_numbers: - for region in regions_by_realm_number[number]: - if region['realmKey'].lower() not in processed_realms: - realms_definitions += self.mapping_entry_template.format(region['realmKey'].lower(), region['realmDomainComponent'].lower()) - processed_realms.append(region['realmKey'].lower()) - if region['regionIdentifier'] not in processed_regions: - # Region key (short code) is optional. - if 'regionKey' in region: - short_name_regions += self.short_name_region_mapping_entry_template.format(region['regionKey'].lower(), region['regionIdentifier'].upper().replace('-', '_')) - region_realm_mappings += self.mapping_entry_template.format(region['regionIdentifier'].lower(), region['realmKey'].lower()) - regions_definitions += self.region_enum_item_template.format(region['regionIdentifier'].upper().replace('-', '_'), region['regionIdentifier'].lower()) - processed_regions.append(region['regionIdentifier']) - region_realm_mappings += '\n' - short_name_regions = short_name_regions.rstrip().rstrip(',') - region_realm_mappings = region_realm_mappings.rstrip().rstrip(',') - regions_definitions = regions_definitions.rstrip().rstrip(',') - realms_definitions = realms_definitions.rstrip().rstrip(',') - with open(self.regions_template_path, 'r') as ft, open(regions_source_file_full_path, 'w') as fw: - regions_template = ft.read() - regions_code = (regions_template.replace('%REGIONS_DEFINITIONS%', regions_definitions) - .replace('%REALMS_DEFINITIONS%', realms_definitions) - .replace('%SHORT_NAME_REGIONS%', short_name_regions) - .replace('%REGION_REALM_MAPPINGS%', region_realm_mappings) - ) - self.logger.log(logging.INFO, 'Writing regions source code to {}'.format(regions_source_file_full_path)) - fw.write(regions_code) - - def update_regions(self, new_regions): - regions_json_file_path = os.path.join(self.ruby_sdk_path, self.regions_json_file_path) - update_regions_json_file(new_regions, regions_json_file_path) - regions = get_regions_from_json_file(regions_json_file_path) - self._update_regions_file(regions) diff --git a/scripts/auto_gen_utils/sdk_regions_updater/sdk_regions_updater.py b/scripts/auto_gen_utils/sdk_regions_updater/sdk_regions_updater.py deleted file mode 100644 index 451500bfcd..0000000000 --- a/scripts/auto_gen_utils/sdk_regions_updater/sdk_regions_updater.py +++ /dev/null @@ -1,106 +0,0 @@ -import argparse -import configparser -import logging -import os -import sys -from .dotnet_sdk_region_updater import DotNetSDKRegionsUpdater -from .go_sdk_region_updater import GoSDKRegionsUpdater -from .java_sdk_region_updater import JavaSDKRegionsUpdater -from 
.python_sdk_region_updater import PythonSDKRegionsUpdater -from .ruby_sdk_region_updater import RubySDKRegionsUpdater -from .typescript_sdk_region_updater import TypescriptSDKRegionsUpdater -from itertools import chain -from .region_updater_utils import get_new_regions_info_for_branch - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '..')) -import config # noqa: ignore=F402 - - -class SdkRegionsUpdater: - def __init__(self, sdk=None, branch='preview'): - self._sdk_updater_configuration(sdk, branch) - - def _sdk_updater_configuration(self, sdk, branch): - if sdk is None: - args_parser = argparse.ArgumentParser() - args_parser.add_argument('--SDK', - required=True, - type=str, - choices=config.SDKS_SUPPORTING_REGION_UPDATE + ['All'], - dest="sdk") - args_parser.add_argument('--Branch', - default='preview', - type=str, - dest='branch', - help='The branch to update (default = preview).') - args = args_parser.parse_args() - self.sdk = args.sdk - self.branch = args.branch - else: - self.sdk = sdk - self.branch = branch - - config_parser = configparser.ConfigParser() - script_dir = os.path.dirname(os.path.realpath(__file__)) - with open(os.path.join(script_dir, "config")) as lines: - lines = chain(("[default]",), lines) - config_parser.read_file(lines) - self.config = config_parser['default'] - self.logger = logging.getLogger("OCI-SDK-Regions-Updater") - logging.basicConfig(level=logging.DEBUG) - - def _get_config_value(self, key): - val = os.environ.get(key) - if val is None: - self.logger.log(logging.INFO, 'Unable to get value for {} from env variable. Will try to get value from config file.'.format(key)) - val = self.config.get(key) - if val is None or val == '': - raise ValueError('Unable to get value for {} from env variable or config file.'.format(key)) - else: - return val - - def _validate_path_exists(self, path): - if os.path.exists(path): - self.logger.log(logging.INFO, '{} is a valid path'.format(path)) - else: - raise ValueError('{} is not a valid path'.format(path)) - - def _update_sdk(self, sdk, new_regions): - self.logger.log(logging.INFO, 'Updating {}'.format(sdk)) - sdk_path = self._get_config_value('{}_path'.format(sdk)) - self._validate_path_exists(sdk_path) - if sdk == config.DOTNET_SDK_NAME: - sdk_updater = DotNetSDKRegionsUpdater(sdk_path) - elif sdk == config.JAVA_SDK_NAME: - sdk_updater = JavaSDKRegionsUpdater(sdk_path) - elif sdk == config.TYPESCRIPT_SDK_NAME: - sdk_updater = TypescriptSDKRegionsUpdater(sdk_path) - elif sdk == config.PYTHON_SDK_NAME: - sdk_updater = PythonSDKRegionsUpdater(sdk_path) - elif sdk == config.GO_SDK_NAME: - sdk_updater = GoSDKRegionsUpdater(sdk_path) - elif sdk == config.RUBY_SDK_NAME: - sdk_updater = RubySDKRegionsUpdater(sdk_path) - else: - raise ValueError('{} is not supported.'.format(sdk)) - sdk_updater.update_regions(new_regions) - - def update(self, new_regions=None): - if not new_regions: - new_regions = get_new_regions_info_for_branch(self.branch) - if len(new_regions) == 0: - self.logger.log(logging.INFO, 'No new regions to update.') - elif self.sdk.lower() == 'all': - for supported_sdk in config.SDKS_SUPPORTING_REGION_UPDATE: - self._update_sdk(supported_sdk, new_regions) - else: - self._update_sdk(self.sdk, new_regions) - return len(new_regions) - - -if __name__ == "__main__": - updater = SdkRegionsUpdater() - num_regions_updated = updater.update() - print('{} regions updated.'.format(num_regions_updated)) diff --git 
a/scripts/auto_gen_utils/sdk_regions_updater/templates/dotnet-sdk-realms.tpl b/scripts/auto_gen_utils/sdk_regions_updater/templates/dotnet-sdk-realms.tpl deleted file mode 100644 index f918b7a6a4..0000000000 --- a/scripts/auto_gen_utils/sdk_regions_updater/templates/dotnet-sdk-realms.tpl +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) 2020, 2024, Oracle and/or its affiliates. All rights reserved. - * This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. - */ - -namespace Oci.Common -{ - /// - /// Class containing all Identity realms. - ///
- /// Accounts (tenancies) are per Realm. - /// </summary>
    - public partial class Realm - { -%REALMS_DEFINITIONS% - } -} \ No newline at end of file diff --git a/scripts/auto_gen_utils/sdk_regions_updater/templates/dotnet-sdk-regions.tpl b/scripts/auto_gen_utils/sdk_regions_updater/templates/dotnet-sdk-regions.tpl deleted file mode 100644 index bb55e07ad7..0000000000 --- a/scripts/auto_gen_utils/sdk_regions_updater/templates/dotnet-sdk-regions.tpl +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) 2020, 2024, Oracle and/or its affiliates. All rights reserved. - * This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. - */ - -namespace Oci.Common -{ - /// - /// Class containing all of the known Regions that can be contacted. - ///
- /// Note, not all services may be available in all regions. - /// </summary>
    - public partial class Region - { -%REGIONS_DEFINITIONS% - } -} \ No newline at end of file diff --git a/scripts/auto_gen_utils/sdk_regions_updater/templates/go-sdk-regions.tpl b/scripts/auto_gen_utils/sdk_regions_updater/templates/go-sdk-regions.tpl deleted file mode 100644 index 0d950dd7fc..0000000000 --- a/scripts/auto_gen_utils/sdk_regions_updater/templates/go-sdk-regions.tpl +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright (c) 2016, 2018, 2024, Oracle and/or its affiliates. All rights reserved. -// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. - -package common - -const ( -%REGIONS_DEFINITIONS% -) - -var shortNameRegion = map[string]Region{ -%SHORT_NAME_REGIONS% -} - -var realm = map[string]string{ -%REALMS_DEFINITIONS% -} - -var regionRealm = map[Region]string{ -%REGION_REALM_MAPPINGS% -} diff --git a/scripts/auto_gen_utils/sdk_regions_updater/templates/python-sdk-regions.tpl b/scripts/auto_gen_utils/sdk_regions_updater/templates/python-sdk-regions.tpl deleted file mode 100644 index fa9cb3b506..0000000000 --- a/scripts/auto_gen_utils/sdk_regions_updater/templates/python-sdk-regions.tpl +++ /dev/null @@ -1,16 +0,0 @@ -# coding: utf-8 -# Copyright (c) 2016, 2024, Oracle and/or its affiliates. All rights reserved. -# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. - -REGIONS_SHORT_NAMES = { -%SHORT_NAME_REGIONS% -} -REGION_REALMS = { -%REGION_REALM_MAPPINGS% -} -REALMS = { -%REALMS_DEFINITIONS% -} -REGIONS = [ -%REGIONS_DEFINITIONS% -] diff --git a/scripts/auto_gen_utils/sdk_regions_updater/templates/ruby-sdk-regions.tpl b/scripts/auto_gen_utils/sdk_regions_updater/templates/ruby-sdk-regions.tpl deleted file mode 100644 index 74d885abf6..0000000000 --- a/scripts/auto_gen_utils/sdk_regions_updater/templates/ruby-sdk-regions.tpl +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (c) 2016, 2024, Oracle and/or its affiliates. All rights reserved. -# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. 
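Both the Python and Ruby updaters trim the separators they accumulate before substituting into templates like the one below: entries are emitted with a trailing ',\n', and the final newline and comma are stripped so the generated literal ends cleanly. A minimal sketch with invented entries:

    entries = ''
    for key, value in [('oc1', 'oraclecloud.com'), ('oc2', 'oraclegovcloud.com')]:
        entries += "    '{}': '{}',\n".format(key, value)
    print(entries.rstrip().rstrip(','))
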
- -module OCI - # rubocop:disable Style/MutableConstant - - # Module defining available regions - module Regions - REGION_ENUM = [ -%REGIONS_DEFINITIONS% - ] - - REGION_SHORT_NAMES_TO_LONG_NAMES = { -%SHORT_NAME_REGIONS% - } - - # --- Start of region realm mapping --- - REGION_REALM_MAPPING = { -%REGION_REALM_MAPPINGS% - } - # --- end of region realm mapping --- - - # --- Start of realm domain mapping --- - REALM_DOMAIN_MAPPING = { -%REALMS_DEFINITIONS% - } - # --- end of realm domain mapping --- - end - # rubocop:enable Style/MutableConstant -end diff --git a/scripts/auto_gen_utils/sdk_regions_updater/typescript_sdk_region_updater.py b/scripts/auto_gen_utils/sdk_regions_updater/typescript_sdk_region_updater.py deleted file mode 100644 index 594e92c3e5..0000000000 --- a/scripts/auto_gen_utils/sdk_regions_updater/typescript_sdk_region_updater.py +++ /dev/null @@ -1,128 +0,0 @@ -import logging -import os -from .region_updater_utils import get_regions_by_realms, update_regions_json_file - - -class TypescriptSDKRegionsUpdater: - realms_file_path = os.path.join("lib", "common", "lib", "realm.ts") - regions_file_path = os.path.join("lib", "common", "lib", "region.ts") - regions_json_file_path = os.path.join("lib", "common", "tests", "resources", "regions.json") - developer_tool_config_json_file_path = os.path.join("lib", "common", "tests", "resources", "developer-tool-configuration.json") - realm_line_substr = "Realm = Realm.register" - region_line_substr = "Region = Region.register" - realm_line_template = ' public static {}: Realm = Realm.register("{}", "{}");\n' - region_line_template = ' public static {}: Region = Region.register("{}", Realm.{}, "{}");\n' - region_no_short_code_line_template = ' public static {}: Region = Region.register("{}", Realm.{});\n' - - def __init__(self, sdk_path): - self.logger = logging.getLogger("OCI-TypescriptSDK-Regions-Updater") - logging.basicConfig(level=logging.DEBUG) - self.typescript_sdk_path = sdk_path - - # We are assuming that realms are added in the right orders. - # New realm numbers are always larger than existing ones. - # If the assumption is not correct, realms will be organized out of order. - # This does not affect code functionalities though. - def _update_realms_file(self, new_regions_by_realm): - realms_file_full_path = os.path.join(self.typescript_sdk_path, self.realms_file_path) - existing_realms = [] - should_inject_realms = False - new_realms_file_content = '' - with open(realms_file_full_path) as f: - for line in f.readlines(): - if self.realm_line_substr in line: - should_inject_realms = True - realm_id = line.strip().split()[2][:-1] - existing_realms.append(realm_id) - else: - if should_inject_realms: - # Need to inject new realm(s) here - # When multiple new realms are added, they need to be organized in order. - realm_numbers = list(new_regions_by_realm.keys()) - realm_numbers.sort() - for number in realm_numbers: - new_region = new_regions_by_realm[number][0] - if new_region['realmKey'].upper() in existing_realms: - self.logger.log(logging.INFO, "realm {} already exists. 
Will not add again.".format(new_region['realmKey'])) - else: - self.logger.log(logging.INFO, "Will add new realm {} .".format(new_region['realmKey'])) - new_line = self.realm_line_template.format(new_region['realmKey'].upper(), new_region['realmKey'].lower(), new_region['realmDomainComponent']) - new_realms_file_content += new_line - should_inject_realms = False - if (new_realms_file_content == ''): - new_realms_file_content = line - else: - new_realms_file_content += line - with open(realms_file_full_path, 'w') as fw: - fw.write(new_realms_file_content) - - def _update_regions_file(self, new_regions_by_realm): - regions_file_full_path = os.path.join(self.typescript_sdk_path, self.regions_file_path) - existing_regions = [] - should_inject_regions = False - new_regions_file_content = '' - current_realm = 0 - with open(regions_file_full_path) as f: - for line in f.readlines(): - line_stripped = line.strip() - if line_stripped.startswith('// OC'): - # This is the beginning of the code block defining regions for a realm. - current_realm = int(line_stripped.split(' ')[1][2:]) - elif self.region_line_substr in line_stripped: - # This line is region definition. - should_inject_regions = True - region_name = line_stripped.split()[2][:-1] - existing_regions.append(region_name) - else: - # if line_stripped != '' and not line_stripped.startswith('// OC') and should_inject_regions: - if should_inject_regions: - if line_stripped == '': - # At an empty line, inject new region definitions for this realm, if any - if current_realm in new_regions_by_realm: - # need to inject new region(s) here - for region in new_regions_by_realm[current_realm]: - region_name = region['regionIdentifier'].upper().replace('-', '_') - if region_name in existing_regions: - self.logger.log(logging.INFO, "Region {} already exists. Will not add again.".format(region['regionIdentifier'])) - else: - self.logger.log(logging.INFO, "Will add new region {} under realm OC{}.".format(region['regionIdentifier'], current_realm)) - new_line = self.region_line_template.format(region_name, region['regionIdentifier'].lower(), region['realmKey'].upper(), region['regionKey'].lower()) - new_regions_file_content += new_line - del new_regions_by_realm[current_realm] - # Check if there are additional regions left in new_regions_by_realm. - if len(new_regions_by_realm) == 0: - should_inject_regions = False - else: - # This is not a region definition, and not an empty space, and not a region block comment. - # This must be the first line of code after the whole region definition block. - # If there are new regions under new realms, add them here. 
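The scanning loop above keys its state off the '// OC<n>' block comments that delimit each realm's region definitions; a minimal sketch of that parsing:

    line_stripped = '// OC21'
    current_realm = int(line_stripped.split(' ')[1][2:])  # drop the 'OC' prefix
    print(current_realm)  # 21
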
- realm_numbers = list(new_regions_by_realm.keys()) - realm_numbers.sort() - for number in realm_numbers: - comment_line = ' // OC{}\n'.format(number) - new_regions_file_content += comment_line - for region in new_regions_by_realm[number]: - self.logger.log(logging.INFO, "Will add new region {} for new realm OC{}.".format(region['regionIdentifier'], number)) - new_line = self.region_line_template.format(region['regionIdentifier'].upper().replace('-', '_'), region['regionIdentifier'].lower(), region['realmKey'].upper(), region['regionKey'].lower()) - new_regions_file_content += new_line - new_regions_file_content += '\n' - should_inject_regions = False - - if (new_regions_file_content == ''): - new_regions_file_content = line - else: - new_regions_file_content += line - with open(regions_file_full_path, 'w') as fw: - fw.write(new_regions_file_content) - - def update_regions(self, new_regions): - regions_json_file_path = os.path.join(self.typescript_sdk_path, self.regions_json_file_path) - developer_tool_config_json_file_path = os.path.join(self.typescript_sdk_path, self.developer_tool_config_json_file_path) - update_regions_json_file(new_regions, regions_json_file_path) - try: - update_regions_json_file(new_regions, developer_tool_config_json_file_path, json_indent=2) - except: - pass - new_regions_by_realm = get_regions_by_realms(new_regions) - self._update_realms_file(new_regions_by_realm) - self._update_regions_file(new_regions_by_realm) diff --git a/scripts/auto_gen_utils/setup.cfg b/scripts/auto_gen_utils/setup.cfg deleted file mode 100644 index 1c0423a855..0000000000 --- a/scripts/auto_gen_utils/setup.cfg +++ /dev/null @@ -1,4 +0,0 @@ -# http://flake8.pycqa.org/en/latest/user/error-codes.html -[flake8] -ignore=E501,E231,E124,E128,W291,W293,E241 -exclude=temp diff --git a/scripts/auto_gen_utils/shared/__init__.py b/scripts/auto_gen_utils/shared/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/scripts/auto_gen_utils/shared/bitbucket_utils.py b/scripts/auto_gen_utils/shared/bitbucket_utils.py deleted file mode 100644 index 293d201258..0000000000 --- a/scripts/auto_gen_utils/shared/bitbucket_utils.py +++ /dev/null @@ -1,603 +0,0 @@ -import getpass -import json -import requests -import os -import re -import tempfile -import datetime -import time -import sys -import config - -from git import Repo, GitCommandError -from six import text_type - - -verbose = True -dry_run = False - - -# -# Bitbucket - -BITBUCKET_PR_QUERY_URL_WITHOUT_START_ADDITIONAL_PARAMS = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/{project}/repos/{repo}/pull-requests?state={state}&direction={direction}{at_str}' -BITBUCKET_PR_QUERY_URL_WITH_START_ADDITIONAL_PARAMS = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/{project}/repos/{repo}/pull-requests?state={state}&direction={direction}&start={start}{at_str}' -BITBUCKET_PR_QUERY_URL_WITHOUT_START = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/{}/repos/{}/pull-requests?state={}' -BITBUCKET_PR_QUERY_URL_WITH_START = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/{}/repos/{}/pull-requests?state={}&start={}' - - -def printv(s): - global verbose - if verbose: - if s: - if not isinstance(s, str) and not isinstance(s, text_type): - # we only need to turn it into a string if it's not a string already - s = str(s) - if sys.version.startswith("2"): - s = s.encode('utf-8') - print(s) - - -def setup_bitbucket(args): - global bitbucket_auth - username = os.environ.get('BITBUCKET_USERNAME') - if not 
username: - print("BITBUCKET_USERNAME not set, falling back to using JIRA_USERNAME.") - username = os.environ.get('JIRA_USERNAME') - if not username: - print("JIRA_USERNAME not set either.") - username = getpass.getpass("Bitbucket API USERNAME:") - - password = os.environ.get('BITBUCKET_PASSWORD') - if not password: - print("BITBUCKET_PASSWORD not set, falling back to using JIRA_PASSWORD.") - password = os.environ.get('JIRA_PASSWORD') - if not password: - print("JIRA_PASSWORD not set either.") - password = getpass.getpass("Bitbucket API Password:") - - bitbucket_auth = (username, password) - - -def filter_for_pr_matching_branch_suffix(pr, branch_suffix): - if 'fromRef' not in pr or 'id' not in pr['fromRef']: - return False - - source_branch = pr['fromRef']['id'] - return branch_suffix in source_branch - - -def filter_for_pr_matching_substring(pr, substring): - substring = substring.lower() - result = ('title' in pr and substring in pr['title'].lower()) or ('description' in pr and substring in pr['description'].lower()) or ('fromRef' in pr and 'id' in pr['fromRef'] and substring in pr['fromRef']['id'].lower()) - # printv("#### filter_for_pr_matching_substring: substring '{}' in {} pr {}: result {}, title '{}', description '{}', fromRef '{}'".format(substring, pr['toRef']['repository']['name'], - # pr['id'], result, pr['title'].encode('utf8') if 'title' in pr else "", pr['description'].encode('utf8') if 'description' in pr else "", pr['fromRef']['id'].encode('utf8'))) - return result - - -def stop_listing_if_pr_older_than_date(pr, dt): - # need to convert date in "2019-11-13T03:06:09.000+0000" (JIRA) form to - # "1573763297726" (Bitbucket) milliseconds form - utc_time = datetime.datetime.strptime(dt, "%Y-%m-%dT%H:%M:%S.%f+0000") - epoch = datetime.datetime.utcfromtimestamp(0) - milliseconds = int((utc_time - epoch).total_seconds() * 1000.0) - # If the PR was last updated before the DEXREQ ticket was opened, it can't belong to the DEXREQ ticket - if 'updatedDate' in pr and int(pr['updatedDate']) < milliseconds: - # printv("#### stop_listing_if_pr_older_than_date, dt {} ({}), pr updatedDate {}".format(milliseconds, dt, pr['updatedDate'])) - return True - else: - return False - - -def get_newest_pullrequest_with_string_after(project, repo, title_substring, dt, use_cache=True): - return get_newest_pullrequest_matching(project, repo, lambda pr: filter_for_pr_matching_substring(pr, title_substring), - stop_listing=lambda pr: stop_listing_if_pr_older_than_date(pr, dt), use_cache=use_cache) - - -def get_spec_pr_branch_reference(pipeline, branch_suffix): - return "refs/heads/spec-{debug_dexreq_branch_prefix}auto-v2-{pipeline}-{suffix}-diff".format( - debug_dexreq_branch_prefix=config.get_debug_dexreq_branch_prefix(), - pipeline=pipeline, - suffix=branch_suffix) - - -def get_newest_pullrequest_matching_branch_suffix(project, repo, branch_suffix, pipeline="preview", use_cache=True): - return get_newest_pullrequest_matching(project, repo, lambda pr: filter_for_pr_matching_branch_suffix(pr, branch_suffix), - direction="outgoing", at=get_spec_pr_branch_reference(pipeline, branch_suffix), use_cache=use_cache) - - -def get_newest_pullrequest_matching(project, repo, filter, stop_listing=lambda pr: False, direction="incoming", state="ALL", at=None, use_cache=True): - prs = get_all_pullrequest_matching(project, repo, filter, stop_listing, direction, state, at, use_cache=use_cache) - candidate = None - - # We want the newest as in "most recently created" - # But Bitbucket returns in order of "most recently 
updated" - # So we keep track of the highest matching id - for pr in prs: - if filter(pr): - if not candidate or candidate['id'] < pr['id']: - candidate = pr - - return candidate - - -def get_all_pullrequest_with_string_after(project, repo, title_substring, dt, use_cache=True): - return get_all_pullrequest_matching(project, repo, lambda pr: filter_for_pr_matching_substring(pr, title_substring), - stop_listing=lambda pr: stop_listing_if_pr_older_than_date(pr, dt), use_cache=use_cache) - - -def get_all_pullrequest_matching(project, repo, filter, stop_listing=lambda pr: False, direction="incoming", state="ALL", at=None, use_cache=True): - prs = get_all_pullrequest_unfiltered(project, repo, stop_listing, direction, state, at, use_cache=use_cache) - - # Search the PRs to see if there are any that matches the filter - return [pr for pr in prs if filter(pr)] - - -pullrequest_cache = {} - - -# Note: If stop_listing is being used to stop listing PRs older than a particular DEXREQ ticket, make sure to prime the cache. -# Request once with the oldest DEXREQ ticket. -def get_all_pullrequest_unfiltered(project, repo, stop_listing=lambda pr: False, direction="incoming", state="ALL", at=None, use_cache=True): - global bitbucket_auth - url = BITBUCKET_PR_QUERY_URL_WITHOUT_START_ADDITIONAL_PARAMS.format( - project=project, - repo=repo, - state=state, - direction=direction, - at_str="&at={}".format(at) if at else "") - - initial_url = url - - if use_cache and initial_url in pullrequest_cache: - printv("Returning cached PRs for {}".format(initial_url)) - return pullrequest_cache[initial_url] - - headers = { - "X-Atlassian-Token": "no-check", - "Content-Type": "application/json" - } - - prs = [] - - is_last_page = False - while not is_last_page: - printv("Querying PRs from {}".format(url)) - response = requests.get(url, verify=False, auth=bitbucket_auth, headers=headers) - - print("***************************************************") - print(response) - print(response.status_code) - if len(response.content) == 0: - print("no response content returned") - else: - requested_truncated_length = 250 - truncated_response_content_length = requested_truncated_length if requested_truncated_length < len(response.content) else len(response.content) - print(response.content[0:truncated_response_content_length - 1]) - print("***************************************************") - - count_of_429s = 0 - max_retries = 10 - if response.status_code == 429: - print("Received 429, waiting before resuming fetching pull requests") - time.sleep(30) - count_of_429s = count_of_429s + 1 - - if count_of_429s < max_retries: - continue - - is_last_page = 'isLastPage' in response.json() and response.json()['isLastPage'] - if not is_last_page: - url = BITBUCKET_PR_QUERY_URL_WITH_START_ADDITIONAL_PARAMS.format( - project=project, - repo=repo, - state=state, - direction=direction, - at_str="&at={}".format(at) if at else "", - start=response.json()['nextPageStart']) - - if 'values' not in response.json(): - continue - - for pr in response.json()['values']: - printv("Query returned: {}/{} PR {}".format(project, repo, pr['id'])) - prs.append(pr) - - if stop_listing(pr): - printv("Stop listing") - is_last_page = True - break - - pullrequest_cache[initial_url] = prs - - return prs - - -def get_pullrequest(project, repo, pullrequest_id): - global bitbucket_auth - url = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/{}/repos/{}/pull-requests/{}'.format( - project, repo, pullrequest_id) - - headers = { - "X-Atlassian-Token": 
"no-check", - "Content-Type": "application/json" - } - - response = requests.get(url, verify=False, auth=bitbucket_auth, headers=headers) - return response - - -def get_pullrequest_from_url(url): - m = re.search("^.*bitbucket.*/projects/([^/]*)/repos/([^/]*)/pull-requests/([0-9]*).*$", url) - if m: - return get_pullrequest(m.group(1), m.group(2), m.group(3)) - else: - return None - - -def get_pullrequest_commits(project, repo, pullrequest_id): - global bitbucket_auth - url = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/{}/repos/{}/pull-requests/{}/commits'.format( - project, repo, pullrequest_id) - - headers = { - "X-Atlassian-Token": "no-check", - "Content-Type": "application/json" - } - - response = requests.get(url, verify=False, auth=bitbucket_auth, headers=headers) - return response - - -def get_pullrequest_merge_status(project, repo, pullrequest_id): - global bitbucket_auth - url = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/{}/repos/{}/pull-requests/{}/merge'.format( - project, repo, pullrequest_id) - - headers = { - "X-Atlassian-Token": "no-check", - "Content-Type": "application/json" - } - - response = requests.get(url, verify=False, auth=bitbucket_auth, headers=headers) - return response - - -def get_file_content_from_commit_id_and_path(project, repo, file_path, commit_id): - global bitbucket_auth - url = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/{}/repos/{}/raw/{}?at={}'.format( - project, repo, file_path, commit_id) - - headers = { - "X-Atlassian-Token": "no-check", - "Content-Type": "application/json" - } - - response = requests.get(url, verify=False, auth=bitbucket_auth, headers=headers) - return response - - -def get_pullrequest_changes(project, repo, pullrequest_id): - global bitbucket_auth - url = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/{}/repos/{}/pull-requests/{}/changes'.format( - project, repo, pullrequest_id) - - headers = { - "X-Atlassian-Token": "no-check", - "Content-Type": "application/json" - } - params = {"start": "0", "limit": "1000"} - response = requests.get(url, verify=False, auth=bitbucket_auth, headers=headers, params=params) - return response - - -def get_pullrequest_diff(project, repo, pullrequest_id): - global bitbucket_auth - url = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/{}/repos/{}/pull-requests/{}/diff'.format( - project, repo, pullrequest_id) - - headers = { - "X-Atlassian-Token": "no-check", - "Content-Type": "application/json" - } - - response = requests.get(url, verify=False, auth=bitbucket_auth, headers=headers) - return response - - -def get_commit_diff(project, repo, commit_hash, commit_path): - global bitbucket_auth - url = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/{}/repos/{}/commits/{}/diff'.format( - project, repo, commit_hash) - if commit_path: - url = url + "/{}".format(commit_path) - - headers = { - "X-Atlassian-Token": "no-check", - "Content-Type": "application/json" - } - - response = requests.get(url, verify=False, auth=bitbucket_auth, headers=headers) - return response - - -def get_repo(project, repo): - global bitbucket_auth - url = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/{}/repos/{}'.format( - project, repo) - - printv("get_repo URL: {}".format(url)) - - headers = { - "X-Atlassian-Token": "no-check", - "Content-Type": "application/json" - } - - response = requests.get(url, verify=False, auth=bitbucket_auth, headers=headers) - return response - - -MAX_COMMENT_LENGTH = 32000 - - -DEFAULT_COMMENT_HEADER = 
"(continued from comment above)\n\n" -DEFAULT_COMMENT_FOOTER = "\n\n(continued in comment below)" - - -def make_general_comment(project, repo, pullrequest_id, text, parent_id=None, comment_header=DEFAULT_COMMENT_HEADER, comment_footer=DEFAULT_COMMENT_FOOTER): - # $ curl -k -u mricken https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/SDK/repos/bmc-sdk-swagger/pull-requests/257/comments -X POST -H "X-Atlassian-Token: no-check" -H "Content-Type: application/json" -d '{"text":"A test comment."}' - # Enter host password for user 'mricken': - # {"properties":{"repositoryId":3103},"id":122246,"version":0,"text":"A test comment.","author":{"name":"mricken","emailAddress":"mathias.ricken@oracle.com","id":1776,"displayName":"Mathias Ricken","active":true,"slug":"mricken","type":"NORMAL","links":{"self":[{"href":"https://bitbucket.oci.oraclecorp.com/users/mricken"}]}},"createdDate":1524504470238,"updatedDate":1524504470238,"comments":[],"tasks":[],"permittedOperations":{"editable":true,"deletable":true}} - - url = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/{}/repos/{}/pull-requests/{}/comments'.format( - project, repo, pullrequest_id) - - continued = False - first_response = None - in_code_block = False - while text: - if len(text) <= MAX_COMMENT_LENGTH: - first_text = "{}{}{}".format( - comment_header if continued else "", - "```\n" if in_code_block else "", - text) - text = "" - else: - break_pos = text[:MAX_COMMENT_LENGTH].rfind("-----\n") - if break_pos == -1: - break_pos = text[:MAX_COMMENT_LENGTH].rfind("\n") - if break_pos == -1: - break_pos = text[:MAX_COMMENT_LENGTH].rfind(" ") - if break_pos == -1: - break_pos = MAX_COMMENT_LENGTH - - partial_text = text[0:break_pos] - ends_in_code_block = in_code_block - for line in partial_text.split("\n"): - if "```" in line: - ends_in_code_block = not ends_in_code_block - - first_text = "{}{}{}{}{}".format( - comment_header if continued else "", - "```\n" if in_code_block else "", - partial_text, - "\n```\n" if ends_in_code_block else "", - comment_footer) - text = text[break_pos + 1:] - continued = True - in_code_block = ends_in_code_block - - if parent_id: - data = '''{{ - "text": "{}", - "parent": {{ - "id": {} - }} - }}'''.format(json.dumps(first_text)[1:-1], parent_id) # JSON escape, but this encloses in "" -- get rid of first and last character - else: - data = '''{{ - "text": "{}" - }}'''.format(json.dumps(first_text)[1:-1]) # JSON escape, but this encloses in "" -- get rid of first and last character - - headers = { - "X-Atlassian-Token": "no-check", - "Content-Type": "application/json" - } - - if dry_run: - print("DRY-RUN: {}".format(first_text)) - else: - response = requests.post(url, data=data, verify=False, auth=bitbucket_auth, headers=headers) - - printv(response.text) - if response.status_code >= 400: - printv(response.text) - - if not first_response: - first_response = response - - printv(first_response.text) - - if not parent_id: - parent_id = first_response.json()['id'] - - return first_response - - -def get_pr_target_branch(pr): - base_ref = pr.json()['toRef'] - base_branch = base_ref['displayId'] - - return base_branch - - -def get_pr_source_branch(pr): - change_ref = pr.json()['fromRef'] - change_branch = change_ref['displayId'] - - return change_branch - - -def get_pr_source_project(pr): - return pr.json()['fromRef']['repository']['project']['key'] - - -def get_pr_source_repo(pr): - return pr.json()['fromRef']['repository']['name'] - - -def get_pr_source_self_url(pr): - links = 
pr.json()['fromRef']['repository']['links'] - - for self_link in links['self']: - return self_link['href'] - raise ValueError('No self URL found') - - -def get_repo_permissions_url(pr): - self_url = get_pr_source_self_url(pr) - return self_url.replace('/browse', '/permissions') - - -def get_pr_source_clone_ssh_url(pr): - links = pr.json()['fromRef']['repository']['links'] - - if 'clone' not in links: - return None - - for clone_link in links['clone']: - if clone_link['name'] == 'ssh': - return clone_link['href'] - raise ValueError('No clone URL found') - - -def get_bitbucket_build_status_for_commit(commit): - # https://bitbucket.oci.oraclecorp.com/rest/build-status/1.0/commits/97a464dfa9933e6842fa9eb3e954d603df502e78 - url = 'https://bitbucket.oci.oraclecorp.com/rest/build-status/1.0/commits/{commit}'.format(commit=commit) - r = requests.get(url, auth=bitbucket_auth) - - if r.status_code >= 300: - printv(r.json()) - - raise ValueError("Failed to get Bitbucket build status: {}".format(r.json())) - - return r.json() - - -def get_bitbucket_build_status_for_pr(pr): - change_ref = pr['fromRef'] - latest_commit = change_ref['latestCommit'] - - return get_bitbucket_build_status_for_commit(latest_commit) - - -# This is currently being used in PythonCLI PR builder. -# Ref: https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/python-cli/browse/scripts/comment_on_pr.py -def get_pr_from_branch(project, repo, branch, state="OPEN"): - global bitbucket_auth - url = BITBUCKET_PR_QUERY_URL_WITHOUT_START.format(project, repo, state) - headers = { - "X-Atlassian-Token": "no-check", - "Content-Type": "application/json" - } - - # Search the PRs to see if there are any that matches the filter - is_last_page = False - while not is_last_page: - response = requests.get(url, verify=False, auth=bitbucket_auth, headers=headers) - printv(response.json()) - - for pr in response.json()['values']: - if pr['fromRef']['displayId'] == branch: - return pr['id'] - - is_last_page = response.json()['isLastPage'] - - if not is_last_page: - url = BITBUCKET_PR_QUERY_URL_WITH_START.format(project, repo, state, response.json()['nextPageStart']) - - return None - - -def get_git_clone_url_from_ref(ref): - for clone_link in ref['repository']['links']['clone']: - if clone_link['name'] == 'ssh': - return clone_link['href'] - raise ValueError('No clone URL found') - - -def git_clone(clone_url, branch, to_dir): - printv("Cloning {}".format(clone_url)) - repo = Repo.clone_from(clone_url, to_dir) - printv("Checking out {}".format(branch)) - repo.git.checkout(branch) - return repo - - -def clone_target_branch(pr_id, repo): - pr = get_pullrequest("SDK", repo, pr_id) - printv(pr.text) - base_ref = pr.json()['toRef'] - base_clone_url = get_git_clone_url_from_ref(base_ref) - base_branch = base_ref['displayId'] - - output_dir = tempfile.mkdtemp(prefix='warn_about_backward_incompatible_changes') - base_dir = '{}/base'.format(output_dir) - - printv("Checking out base in {}".format(base_dir)) - - try: - git_clone(base_clone_url, base_branch, base_dir) - except GitCommandError as e: - print(e) - print("ERROR, could not check out commit or branch '{}' from {}".format(base_branch, base_clone_url)) - return None - return base_dir - - -def decline_pr(project, repo, pullrequest_id, version): - url = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/{}/repos/{}/pull-requests/{}/decline?version={}'.format( - project, repo, pullrequest_id, version) - - headers = { - "X-Atlassian-Token": "no-check", - "Content-Type": "application/json" - } - - 
response = None
-    if dry_run:
-        print("DRY-RUN: not declining {}/{} PR {}".format(project, repo, pullrequest_id))
-    else:
-        response = requests.post(url, verify=False, auth=bitbucket_auth, headers=headers)
-
-        # Only inspect the response when a request was actually made;
-        # in dry-run mode `response` stays None.
-        printv(response)
-        if response.status_code >= 400:
-            printv(response.json())
-        else:
-            print("Declined {}/{} PR {}".format(project, repo, pullrequest_id))
-
-    return response
-
-
-def merge_pr(project, repo, pullrequest_id, version):
-    url = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/{}/repos/{}/pull-requests/{}/merge?version={}'.format(
-        project, repo, pullrequest_id, version)
-
-    headers = {
-        "X-Atlassian-Token": "no-check",
-        "Content-Type": "application/json"
-    }
-
-    response = None
-    if dry_run:
-        print("DRY-RUN: not merging {}/{} PR {}".format(project, repo, pullrequest_id))
-    else:
-        response = requests.post(url, verify=False, auth=bitbucket_auth, headers=headers)
-
-        printv(response)
-        if response.status_code >= 200 and response.status_code < 300:
-            print("Merged {}/{} PR {}".format(project, repo, pullrequest_id))
-        else:
-            print(response.json())
-
-    return response
diff --git a/scripts/auto_gen_utils/shared/buildsvc_tc_compatibility.py b/scripts/auto_gen_utils/shared/buildsvc_tc_compatibility.py
deleted file mode 100644
index a63bf27853..0000000000
--- a/scripts/auto_gen_utils/shared/buildsvc_tc_compatibility.py
+++ /dev/null
@@ -1,93 +0,0 @@
-import os
-import re
-
-
-def parse_build_id(build_id):
-    is_build_service = False
-    buildsvc_build_number = None
-    buildsvc_branch = None
-
-    search_result = re.search("^buildsvc(-(.*))?-([0-9]+)*$", build_id)
-    if search_result:
-        is_build_service = True
-        # - group 2 is branch (optional)
-        # - group 3 is build number
-        buildsvc_build_number = search_result.group(3)
-        if search_result.group(2):
-            buildsvc_branch = search_result.group(2)
-
-    return is_build_service, build_id, buildsvc_branch, buildsvc_build_number
-
-
-def parse_vcs_root(vcs_root):
-    # looks like ssh://git@bitbucket.oci.oraclecorp.com:7999/sdk/dexreq-surfaces.git
-    project = None
-    repo = None
-    if vcs_root.startswith("ssh://git@bitbucket.oci.oraclecorp.com:7999/"):
-        search_result = re.search("^ssh://git@bitbucket.oci.oraclecorp.com:7999/([^/]*)/([^/]*).git$", vcs_root, re.IGNORECASE)
-
-        if search_result:
-            project = search_result.group(1)
-            repo = search_result.group(2)
-
-    if project:
-        project = project.upper()
-
-    return project, repo
-
-
-# build_step_name=None means use value from os.environ.get('BLD_STEP')
-# If you don't want to provide a build_step_name, use build_step_name=""
-#
-# project=None or repo=None means use the value from os.environ.get('BLD_VCS_ROOT')
-# or os.environ.get('BLD_VSC_ROOT') (yes, VSC_ROOT... 
https://jira.oci.oraclecorp.com/browse/BLD-3445) -def build_log_link(build_id, text="here", project=None, repo=None, default_branch="main", build_step_name=None, additional_url_parts=""): - vcs_root = os.environ.get('BLD_VCS_ROOT') or os.environ.get('BLD_VSC_ROOT') - - if project is None: - if vcs_root: - project, r = parse_vcs_root(vcs_root) - - if not project: - project = "SDK" - - if repo is None: - if vcs_root: - p, repo = parse_vcs_root(vcs_root) - - if not repo: - repo = "dexreq-surfaces" - - is_build_service, build_id, buildsvc_branch, buildsvc_build_number = parse_build_id(build_id) - if is_build_service: - if buildsvc_branch is None: - buildsvc_branch = default_branch - build_step_part = "" - if build_step_name is None: - build_step_name = os.environ.get('BLD_STEP') - if build_step_name: - build_step_part = "/steps/{build_step_name}".format(build_step_name=build_step_name) - return "[{text}|https://devops.oci.oraclecorp.com/build/teams/{project}/projects/{repo}/branches/{buildsvc_branch}/builds/{buildsvc_build_number}{build_step_part}{additional_url_parts}]".format( - text=text, - project=project, - repo=repo, - buildsvc_branch=buildsvc_branch, - buildsvc_build_number=buildsvc_build_number, - build_step_part=build_step_part, - additional_url_parts=additional_url_parts - ) - - return "[{text}|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId={build_id}{additional_url_parts}]".format( - text=text, - build_id=build_id, - additional_url_parts=additional_url_parts - ) - - -# build_step_name=None means use value from os.environ.get('BLD_STEP') -# If you don't want to provide a build_step_name, use build_step_name="" -# -# project=None or repo=None means use the value from os.environ.get('BLD_VCS_ROOT') -# or os.environ.get('BLD_VSC_ROOT') (yes, VSC_ROOT... https://jira.oci.oraclecorp.com/browse/BLD-3445) -def build_artifacts_link(build_id, text="here", project=None, repo=None, default_branch="main", build_step_name=None): - return build_log_link(build_id, text=text, project=project, repo=repo, default_branch=default_branch, build_step_name=build_step_name, additional_url_parts="&tab=artifacts") diff --git a/scripts/auto_gen_utils/shared/version_utils.py b/scripts/auto_gen_utils/shared/version_utils.py deleted file mode 100644 index 4fd2df019c..0000000000 --- a/scripts/auto_gen_utils/shared/version_utils.py +++ /dev/null @@ -1,160 +0,0 @@ -import re - - -def is_int(s): - try: - int(s) - return True - except ValueError: - return False - - -preview_markers = ["-RELEASEPREVIEW", "-PREVIEW"] - - -def is_version_increasing(old_version, version): - # Versioning schemes we have to deal with: - # 0.1.9 - # 0.0.15-releasePreview < 0.0.15 (database) - # 0.2.17-PREVIEW < 0.2.17 (objectstorage, identity) - - old_v = old_version.upper() - new_v = version.upper() - - old_has_preview_marker = False - new_has_preview_marker = False - for marker in preview_markers: - if marker in old_v: - old_has_preview_marker = True - old_v = old_v.replace(marker, '') - if marker in new_v: - new_has_preview_marker = True - new_v = new_v.replace(marker, '') - - # Now none of the versions have a preview marker anymore - - old_v = old_v.replace('-', '.') - new_v = new_v.replace('-', '.') - - print('Old version: {} (preview marker? {})'.format(old_v, old_has_preview_marker)) - print('New version: {} (preview marker? 
{})'.format(new_v, new_has_preview_marker))
-
-    old_parts = old_v.split('.')
-    new_parts = new_v.split('.')
-
-    common_length = min(len(old_parts), len(new_parts))
-
-    for index in range(0, common_length):
-        old_part = old_parts[index]
-        new_part = new_parts[index]
-
-        if is_int(old_part) and is_int(new_part):
-            old_part = int(old_part)
-            new_part = int(new_part)
-
-        if old_part < new_part:
-            # Old version is less
-            return True
-        elif new_part < old_part:
-            # New version is less, this is a regression in version.
-            return False
-
-    # If we are back here, then the common segment has been all the same.
-    # If the lengths are the same, then it comes down to the preview flag.
-    if len(old_parts) == len(new_parts):
-        if old_has_preview_marker and not new_has_preview_marker:
-            # old version was a preview, new version is not.
-            return True
-        elif new_has_preview_marker and not old_has_preview_marker:
-            # old version was not a preview, new version is. This is a regression in version.
-            return False
-        else:
-            # either both were previews or both were not -- same version
-            return False
-
-    # Otherwise, one of them is longer. The longer version is the newer one.
-    if len(old_parts) > len(new_parts):
-        # The old version was longer. This is a regression in version.
-        return False
-
-    return True
-
-
-# Result is None if the version _is_ acceptable;
-# otherwise, the error message is returned
-def is_version_not_acceptable(version):
-    # see https://confluence.oci.oraclecorp.com/display/DEX/Creating+a+Spec+Artifact+in+Artifactory#CreatingaSpecArtifactinArtifactory-VersionNumberSchemes
-    #
-    # Recommended:
-    # 0.0.1
-    #
-    # Also acceptable:
-    #
-    # 1
-    # 1.2
-    # 1-2
-    # 1.2.3-PREVIEW
-    # 1.2.3-releasePreview
-    # 1.2.3-4
-    # 1.2.3-4-PREVIEW
-    #
-    # Not acceptable:
-    # -SNAPSHOT
-    #
-    # But acceptable form for all the above:
-    # -20180409.232938-5
-    original = version
-
-    if version.upper().endswith("-SNAPSHOT"):
-        return 'Version not acceptable for SDK generation: "{}" is a snapshot version.'.format(original)
-
-    timed_snapshot_match = re.search("-[0-9]{4}[01][0-9][0123][0-9].[012][0-9][0-5][0-9][0-5][0-9]-[0-9]*$", version)
-    if timed_snapshot_match:
-        return 'Version not acceptable for SDK generation: "{}" is a timed snapshot version.'.format(original)
-
-    dash_count = version.count("-")
-    if dash_count > 2:
-        return 'Version not acceptable for SDK generation: "{}" has more than 2 dashes'.format(version)
-
-    suffix = None
-    last_dash = version.rfind("-")
-    if last_dash != -1:
-        suffix = version[last_dash:]
-
-        if not suffix.upper() in preview_markers and not is_int(suffix[1:]):
-            # We do want to use the version without the timed snapshot here, not `original`
-            return 'Version not acceptable for SDK generation: "{}" does not end in {}, or a build number'.format(version,
-                                                                                                                  ", ".join(preview_markers))
-
-        second_to_last_dash = version.rfind("-", 0, last_dash)
-        if second_to_last_dash != -1:
-            # If there are two dashes, then the suffix has to be "-PREVIEW"
-            # and the part between dashes has to be an integer build number
-            between_dashes = version[second_to_last_dash + 1:last_dash]
-            if not suffix.upper() == "-PREVIEW":
-                return 'Version not acceptable for SDK generation: "{}" has two dashes, but it does not end with "-PREVIEW"'.format(version)
-            if not is_int(between_dashes):
-                return 'Version not acceptable for SDK generation: "{}" has two dashes, but the part between the dashes is not a build number'.format(version)
-            version = version[0:version.find("-")]
-
-    if dash_count == 1:
-        dash_comment = " (after 
removing anything after the dash)"
-    elif dash_count > 1:
-        dash_comment = " (after removing anything after the first dash)"
-    else:
-        dash_comment = ""
-
-    if not re.match(r"^[0-9]{1,}(\.[0-9]{1,}(\.[0-9]{1,})?)?$", version):
-        return 'Version not acceptable for SDK generation: "{}"{} does not fit the patterns for one-, two-, or three-part numeric versions ("123", "1.2" or "1.2.3")'.format(version,
-                                                                                                                                                                             dash_comment)
-
-    if suffix and suffix.upper() in preview_markers:
-        # If we had a preview marker, it has to be a three-part version
-        # 1.2.3-PREVIEW
-        # 1.2.3-releasePreview
-        # 1.2.3-4-PREVIEW
-        if version.count(".") != 2:
-            return 'Version not acceptable for SDK generation: "{}"{} has suffix "{}", but is not a three-part numeric version ("1.2.3")'.format(version,
-                                                                                                                                                 dash_comment, suffix)
-
-    return None
diff --git a/scripts/auto_gen_utils/team_city_scripts/api_review/GATE_CHECK_README.md b/scripts/auto_gen_utils/team_city_scripts/api_review/GATE_CHECK_README.md
deleted file mode 100644
index eac9111daf..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/api_review/GATE_CHECK_README.md
+++ /dev/null
@@ -1,66 +0,0 @@
-# API Review Ticket Readiness Validation Check
-
-## Introduction
-The API Review Ticket Readiness Validation Check automation aims to reduce some of the manual load on the API Review Board
-TPMs by performing initial checks on an API Review Ticket raised for review.
-
-The automation currently finds all tickets that are in `Needs triage` state and runs the following checks:-
-1. There should be a UDX ticket linked to the ticket via the issue links field.
-2. The API review PR link to review the spec changes is present.
-3. The API Review PR is in `OPEN` state and does not have any merge conflicts.
-4. The API Review PR has been peer-reviewed by at least 1 member of the team requesting a review.
-5. The API Review PR has a valid spec validation output file included in the PR.
-6. The API Review PR should have at least one file with `cond.yaml` in its name in the PR, to make sure the requestor has attached a spec file to review.
-7. The Spec Validation Output included should use a Spec Validator tool version that is at most 3 minor versions behind the latest version.
-8. The Spec Validation Output shouldn't have any `Errors` or `Warnings`.
-9. If there is an increase in either the `Suppressed errors` or `Suppressed warnings`, then a corresponding spec validation config yaml file should also be present.
-
-
-### Automation Check Fail Scenario
-If the ticket fails any of the above checks, we assign the ticket back to the reporter, move its status to `More Information Needed`,
-and add a comment for the reporter with all issues detected for them to review and fix.
-
-### Automation Check Pass Scenario
-If the ticket passes all the above checks, then the automation assigns it to the API Review Board TPM for triaging.
-The automation also adds a comment on the ticket listing all the checks performed, which the TPM can reference to cross-check the results.
-
-PS: The automation adds a label `APIRB-Ready-Check-Pass` to all tickets that pass the check to make sure it doesn't re-pick
-already tested tickets again.
-
-#### Some Caveats for Pass scenario
-- As with all automation, there can be some scenarios where the tool doesn't work as intended. As such, the TPM or the
-  requestor may request to bypass the automation check in such scenarios by adding the label `ByPass-APIRB-Ready-Check`
-  to the ticket. 
-- There might be cases where the automation tool may not be able to do some checks. In such scenarios, the automation tool
-  will add the reason in comments for the TPM and add a label `manual-check-required` to identify such tickets.
-- For internal tickets, where there is no UDX assigned, the service team members can bypass this check by adding the
-  label `Bypass-APIRB-Ready-Check-UDX` to the APIRB Jira ticket.
-
-
-## Configuring the TC job without modifying the script
-As requirements change, the script may need to behave differently without code changes. The script supports overriding its defaults by setting the following environment variables:-
-- `GATE_CHECK_PASS_LABEL` The label that will be added to tickets when the automation passes all checks.
-- `BYPASS_GATE_CHECK_LABEL` The label that the TPM will use to bypass the automation check
-- `MANUAL_CHECK_LABEL` The label that will be added to tickets when additional manual checks may be required.
-- `QUERY_TEMPLATE` The JQL query used to decide which tickets are considered for the automation check.
-- `DEFAULT_SPEC_VALIDATOR_TOOL_VERSION` Fallback version of the Spec Validator tool
-- `VALIDATOR_TOOL_VERSION_CUTOFF` Default minor version cutoff allowed for the tool
-- `API_REVIEW_PR_PREFIX` The prefix used to provide API Review PR links to the automation
-- `DEFAULT_TICKET_TRIAGE_ASSIGNEE` The TPM username who will be assigned this ticket when the automation check passes.
-- `BYPASS_UDX_CHECK_LABEL` The label that users can use to bypass the UDX check
-
-## Running the script locally
-You can run the script via the following command:-
-```
-python ./team_city_scripts/api_review/gate_check_api_review_tickets.py
-```
-If you wish to run the script in dry run mode, please use:-
-```
-python ./team_city_scripts/api_review/gate_check_api_review_tickets.py --dry-run
-```
-PS: You will need to be connected to the Oracle network and set at least the following environment variables for this to work:-
-- JIRA_USERNAME
-- JIRA_PASSWORD
-- BITBUCKET_USERNAME
-- BITBUCKET_PASSWORD
diff --git a/scripts/auto_gen_utils/team_city_scripts/api_review/gate_check_api_review_tickets.py b/scripts/auto_gen_utils/team_city_scripts/api_review/gate_check_api_review_tickets.py
deleted file mode 100644
index 20ccc78684..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/api_review/gate_check_api_review_tickets.py
+++ /dev/null
@@ -1,540 +0,0 @@
-import os
-import sys
-import argparse
-import re
-import requests
-import logging
-
-# Add the root of the package, two directories up, to the sys.path
-dir_path = os.path.dirname(os.path.realpath(__file__))
-sys.path.append(os.path.join(dir_path, '../..'))
-
-import util  # noqa: ignore=F402
-import config  # noqa: ignore=F402
-from shared.bitbucket_utils import get_pullrequest, setup_bitbucket, get_pullrequest_diff, get_pullrequest_changes, get_pullrequest_merge_status, get_file_content_from_commit_id_and_path  # noqa: ignore=F402
-
-# Constants
-API_SPEC_VALIDATION_OUTPUT_FILE_PATTERN = 'validation-output'
-API_SPEC_CONFIG_FILE_PATTERN = 'validation-config'
-API_SEPC_FILE_PATTERN = 'cond.yaml'
-PR_SEARCH_PATTERN = '^.*bitbucket.*/projects/([^/]*)/repos/([^/]*)/pull-requests/([0-9]*).*$'
-SPEC_VALIDATOR_PATTERN = '`(.*)`'
-SPEC_VALIDATOR_VERSION = 'Validator version'
-SPEC_ERRORS = 'Errors'
-SPEC_WARNINGS = 'Warnings'
-SPEC_SUPPRESSED_ERRORS = 'Suppressed errors'
-SPEC_SUPPRESSED_WARNINGS = 'Suppressed warnings'
-GATE_CHECK_PASSED = True 
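Editor's illustration, not part of the deleted file: with the documented default labels (`APIRB-Ready-Check-Pass`, `ByPass-APIRB-Ready-Check`), the ticket-selection template `TICKETS_IN_TRIAGE_QUERY` assembled from the constants just below resolves to the following JQL string.

```python
# Worked example (assumes the default label values above; not from the source file).
resolved_query = (
    'project = APIReviewBoard AND status in ("Needs Triage") and '
    '(labels not in (APIRB-Ready-Check-Pass, ByPass-APIRB-Ready-Check) '
    'or labels is EMPTY)'
)
```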
-MANUAL_CHECK_REQUIRED = False -GATE_CHECK_FAIL_COMMENTS = [] -GATE_CHECK_PASS_COMMENTS = [] - -# Configurable TC Job environment variables -GATE_CHECK_PASS_LABEL = os.environ.get('GATE_CHECK_PASS_LABEL') or 'APIRB-Ready-Check-Pass' -BYPASS_GATE_CHECK_LABEL = os.environ.get('BYPASS_GATE_CHECK_LABEL') or 'ByPass-APIRB-Ready-Check' -BYPASS_UDX_CHECK_LABEL = os.environ.get('BYPASS_UDX_CHECK_LABEL') or 'Bypass-APIRB-Ready-Check-UDX' -MANUAL_CHECK_LABEL = os.environ.get('MANUAL_CHECK_LABEL') or 'manual-check-required' -TICKETS_IN_TRIAGE_QUERY = 'project = APIReviewBoard AND status in ("Needs Triage") and (labels not in ({}, {}) or labels is EMPTY)'.format(GATE_CHECK_PASS_LABEL, BYPASS_GATE_CHECK_LABEL) -QUERY_TEMPLATE = os.environ.get('QUERY') or TICKETS_IN_TRIAGE_QUERY -DEFAULT_SPEC_VALIDATOR_TOOL_VERSION = os.environ.get('DEFAULT_SPEC_VALIDATOR_TOOL_VERSION') or '1.95.0' -VALIDATOR_TOOL_VERSION_CUTOFF = int(os.environ.get('VALIDATOR_TOOL_VERSION_CUTOFF')) if 'VALIDATOR_TOOL_VERSION_CUTOFF' in os.environ else 3 -API_REVIEW_PR_PREFIX = os.environ.get('API_REVIEW_PR_PREFIX') or 'API Review PR:' -DEFAULT_TICKET_TRIAGE_ASSIGNEE = os.environ.get('DEFAULT_TICKET_TRIAGE_ASSIGNEE') or 'arshanm' -AUTOMATION_USER = os.environ.get('BITBUCKET_USERNAME') or 'gear-dexreq-automation' - -# create logger -logging.basicConfig() -logger = logging.getLogger('APIRB_TICKET_CHECK') -logger.setLevel(logging.DEBUG) - - -def init_gate_check(): - global GATE_CHECK_PASSED - global MANUAL_CHECK_REQUIRED - global GATE_CHECK_FAIL_COMMENTS - global GATE_CHECK_PASS_COMMENTS - GATE_CHECK_PASSED = True - MANUAL_CHECK_REQUIRED = False - GATE_CHECK_FAIL_COMMENTS = [] - GATE_CHECK_PASS_COMMENTS = [] - - -def query_actionable_tickets(): - query = QUERY_TEMPLATE - actionable_tickets = util.jira_search_issues(query) - print('Total of {} actionable tickets found matching query `{}`.'.format(len(actionable_tickets), query)) - return actionable_tickets - - -def is_udx_check_bypassed(api_ticket): - labels = api_ticket.fields.labels - if BYPASS_UDX_CHECK_LABEL in labels: - return True - return False - - -def udx_ticket_check(api_ticket): - if is_udx_check_bypassed(api_ticket): - print('Skipping UDX ticket check for ticket:{}'.format(api_ticket.key)) - return - - links = [] - for link in api_ticket.fields.issuelinks: - if hasattr(link, 'outwardIssue'): - links.append(link.outwardIssue) - if hasattr(link, 'inwardIssue'): - links.append(link.inwardIssue) - udx_tickets = list(filter(lambda x: 'UDX' in x.key, links)) - if len(udx_tickets) == 0: - comment = 'There is no UDX ticket linked to this ticket. Please add the link to the UDX ticket this API Review' \ - ' is for, under the *issue links* section of this ticket. 
If this review is for an internal API,' \
-                  ' then you may skip the UDX check by adding the label: {} to the ticket.'.format(BYPASS_UDX_CHECK_LABEL)
-        add_gate_check_fail_comment(comment)
-    else:
-        print("Found related UDX ticket:{}\n".format(udx_tickets[0]))
-
-
-def get_api_spec_pr_link_from_ticket(api_ticket):
-    """
-    Get PR link from ticket
-    @param api_ticket: API Review Board ticket being checked for API Review ticket readiness
-    @return: pr_link
-    """
-    pr_link = None
-    description = api_ticket.fields.description
-    if description is None:
-        return pr_link
-    # Parse description to get bitbucket API PR Review link
-    for line in description.splitlines():
-        line = line.strip().replace('*', '')
-        if API_REVIEW_PR_PREFIX in line:
-            try:
-                pr_link = line.split(API_REVIEW_PR_PREFIX, 1)[1].strip()
-                print('PR link found: {}'.format(pr_link))
-                return pr_link
-            except IndexError:
-                break
-    return None
-
-
-def is_pr_approved(pr):
-    """
-    Check if PR has been peer-reviewed by at least one person
-    @param pr: Bitbucket Pull Request
-    @return: bool
-    """
-    if 'reviewers' not in pr.json():
-        # No reviewers assigned to PR
-        return False
-    for reviewer in pr.json()['reviewers']:
-        if reviewer['status'] == 'APPROVED':
-            add_get_check_pass_comment('PR has been approved by {}'.format(reviewer['user']['displayName']))
-            return True
-    return False
-
-
-def get_pr_info(url):
-    m = re.search(PR_SEARCH_PATTERN, url)
-    if m:
-        return m.group(1), m.group(2), m.group(3)
-    else:
-        return None, None, None
-
-
-def get_latest_spec_validation_tool_version():
-    global MANUAL_CHECK_REQUIRED
-    version = DEFAULT_SPEC_VALIDATOR_TOOL_VERSION
-    object_storage_path = 'https://objectstorage.us-phoenix-1.oraclecloud.com'
-    par = os.environ.get('PAR') or 'bfg3NAGcV8PHbO6teLBE9NWSVDZG99DHTF0o6SrQ6RU9Y-5hSJ_HKD0ghNhSAQBL'
-    namespace = 'dex-us-phoenix-1'
-    bucket = 'generated_markdown'
-    version_file_path = 'bmc-sdk-swagger-validator-latest-non-snapshot-version.txt'
-    url = object_storage_path + '/p/' + par + '/n/' + namespace + '/b/' + bucket + '/o/' + version_file_path
-    response = requests.get(url)
-    if response.status_code == 200:
-        version = re.search(SPEC_VALIDATOR_PATTERN, response.text.split(" ").pop()).group(1)
-        print('Found the latest spec validator version: {}'.format(version))
-    else:
-        comment = 'Manual Check Required: Call to get latest version of the Spec validator tool failed, defaulting to: {}'.format(version)
-        add_get_check_pass_comment(comment)
-        MANUAL_CHECK_REQUIRED = True
-    return version
-
-
-def get_spec_prefix(value):
-    return 'INFO ' + value
-
-
-def parse_spec_validator_file(spec_validator_file):
-    """
-    Parse Spec validator file for getting different values like Spec validator tool version, Errors, etc.
-    @param spec_validator_file: Spec validation file contents
-    @return: mapping for parsed contents
-    """
-    spec_validator_mapping = dict()
-    for line in spec_validator_file:
-        if get_spec_prefix(SPEC_VALIDATOR_VERSION) in line:
-            spec_validator_mapping[SPEC_VALIDATOR_VERSION] = line.split(" ").pop()
-        elif get_spec_prefix(SPEC_ERRORS) in line:
-            spec_validator_mapping[SPEC_ERRORS] = line.split(" ").pop()
-        elif get_spec_prefix(SPEC_WARNINGS) in line:
-            spec_validator_mapping[SPEC_WARNINGS] = line.split(" ").pop()
-        elif get_spec_prefix(SPEC_SUPPRESSED_ERRORS) in line:
-            spec_validator_mapping[SPEC_SUPPRESSED_ERRORS] = line.split(" ").pop()
-        elif SPEC_SUPPRESSED_WARNINGS in line:
-            spec_validator_mapping[SPEC_SUPPRESSED_WARNINGS] = line.split(" ").pop()
-
-    return spec_validator_mapping
-
-
-def 
is_spec_validation_tool_version_valid(version_used, latest_version):
-    print('Spec Validation tool version used: {} Latest tool version: {}'.format(version_used, latest_version))
-    version_used = version_used.split('.')
-    latest_version = latest_version.split('.')
-    if version_used[0] == latest_version[0] \
-            and (int(version_used[1]) + VALIDATOR_TOOL_VERSION_CUTOFF) >= int(latest_version[1]):
-        return True
-    return False
-
-
-def is_spec_key_value_zero(file_under_test, spec_mapping, key):
-    check = True
-    try:
-        if key in spec_mapping and int(spec_mapping[key]) > 0:
-            comment = 'For file {}, API Review ticket readiness check found {} {} in Spec Validation output which needs to be fixed ' \
-                      'or suppressed before a review by the API Review Board'.format(file_under_test, spec_mapping[key], key)
-            add_gate_check_fail_comment(comment)
-            check = False
-        else:
-            comment = 'For file {}, API Review ticket readiness check found no {}'.format(file_under_test, key)
-            add_get_check_pass_comment(comment)
-    except ValueError:
-        comment = 'ERROR: Parsing the Spec Validator mapping content {}'.format(key)
-        add_gate_check_fail_comment(comment)
-        check = False
-    return check
-
-
-def is_suppressed_warnings_or_suppressed_errors_increased(file_under_test, original_mappings, updated_mappings):
-    check = False
-    try:
-        if SPEC_SUPPRESSED_ERRORS in original_mappings and SPEC_SUPPRESSED_ERRORS in updated_mappings:
-            if int(original_mappings[SPEC_SUPPRESSED_ERRORS]) < int(updated_mappings[SPEC_SUPPRESSED_ERRORS]):
-                print('For file {}, Increase in {} was found'.format(file_under_test, SPEC_SUPPRESSED_ERRORS))
-                check = True
-            else:
-                print('For file {}, No Increase in {} was found'.format(file_under_test, SPEC_SUPPRESSED_ERRORS))
-        else:
-            print('Error: For file {}, Spec Validator Mapping key {} not found'.format(file_under_test, SPEC_SUPPRESSED_ERRORS))
-
-        if SPEC_SUPPRESSED_WARNINGS in original_mappings and SPEC_SUPPRESSED_WARNINGS in updated_mappings:
-            if int(original_mappings[SPEC_SUPPRESSED_WARNINGS]) < int(updated_mappings[SPEC_SUPPRESSED_WARNINGS]):
-                print('For file {}, Increase in {} was found'.format(file_under_test, SPEC_SUPPRESSED_WARNINGS))
-                check = True
-            else:
-                print('For file {}, No Increase in {} was found'.format(file_under_test, SPEC_SUPPRESSED_WARNINGS))
-        else:
-            print('Error: For file {}, Spec Validator Mapping key {} not found'.format(file_under_test, SPEC_SUPPRESSED_WARNINGS))
-
-    except ValueError:
-        print('Error: For file {}, Error parsing the Spec Validator Mapping values {}/{}'.format(file_under_test, SPEC_SUPPRESSED_ERRORS, SPEC_SUPPRESSED_WARNINGS))
-    return check
-
-
-def get_validation_config_file_from_output_file(output_file):
-    file_name = os.path.splitext(output_file)[0]
-    return file_name.replace(API_SPEC_VALIDATION_OUTPUT_FILE_PATTERN, API_SPEC_CONFIG_FILE_PATTERN) + '.yaml'
-
-
-def get_pr_change_nodes_for_pattern(pr_changes, pattern):
-    api_spec_node_list = []
-    for node in pr_changes.json()['values']:
-        if node and node.get('type') != 'DELETE' and node.get('path') and pattern in node['path'].get('name'):
-            api_spec_node_list.append(node)
-    return api_spec_node_list
-
-
-def is_spec_validation_output_valid(project, repo, pr_id, pr_changes):
-    global MANUAL_CHECK_REQUIRED
-    check = True
-    api_spec_nodes = get_pr_change_nodes_for_pattern(pr_changes, API_SPEC_VALIDATION_OUTPUT_FILE_PATTERN)
-    if not api_spec_nodes:
-        check = False
-        comment = 'API Spec Validation output file was not found. 
' + \
-                  'Please make sure that the validation output file name contains the {} pattern. '.format(API_SPEC_VALIDATION_OUTPUT_FILE_PATTERN) + \
-                  'You may need to copy the validator output file from the target directory if your local build only sometimes copies it.'
-        add_gate_check_fail_comment(comment)
-        return check
-    else:
-        comment = 'API Spec Validation output file is present'
-        add_get_check_pass_comment(comment)
-
-    # 1. Check if API spec diff contains valid spec-validation-output
-    for api_spec_node in api_spec_nodes:
-        file_under_test = api_spec_node['path']['name']
-        print("Validating Spec validation output file:{}".format(file_under_test))
-        updated_spec_validator_mappings = None
-        original_spec_validator_mappings = None
-        pr_diff = get_pullrequest_diff(project, repo, pr_id)
-        if pr_diff and pr_diff.json() and not hasattr(pr_diff.json(), 'errors'):
-            updated_pr_spec_validator_file = get_file_content_from_commit_id_and_path(project, repo, api_spec_node['path']['toString'], pr_diff.json()['toHash'])
-            if updated_pr_spec_validator_file and updated_pr_spec_validator_file.content:
-                updated_spec_validator_mappings = parse_spec_validator_file(updated_pr_spec_validator_file.content.split('\n'))
-
-            original_pr_spec_validator_file = get_file_content_from_commit_id_and_path(project, repo, api_spec_node['path']['toString'], pr_diff.json()['fromHash'])
-            if original_pr_spec_validator_file and original_pr_spec_validator_file.content:
-                original_spec_validator_mappings = parse_spec_validator_file(original_pr_spec_validator_file.content.split('\n'))
-
-        if updated_spec_validator_mappings:
-            # 1.1 Check if the validation tool used is within VALIDATOR_TOOL_VERSION_CUTOFF of the latest version
-            print('For file {}, Spec validation tool results found: {}'.format(file_under_test, updated_spec_validator_mappings))
-            latest_spec_validation_tool_version = get_latest_spec_validation_tool_version()
-            if is_spec_validation_tool_version_valid(updated_spec_validator_mappings[SPEC_VALIDATOR_VERSION], latest_spec_validation_tool_version):
-                comment = 'For file {}, Spec Validation tool version used passes check, ' \
-                          'Version used: {} and Latest Version is: {}'.format(file_under_test, updated_spec_validator_mappings[SPEC_VALIDATOR_VERSION], latest_spec_validation_tool_version)
-                add_get_check_pass_comment(comment)
-            else:
-                check = False
-                comment = 'For file {}, Spec Validation tool version used is older than allowed. 
' \
-                          'Please update it to version *{}* or later, re-run the tool and add its output to the PR '.format(file_under_test, latest_spec_validation_tool_version)
-                add_gate_check_fail_comment(comment)
-
-            # 1.2 Check if there are non-zero errors in Spec Validation output
-            if not is_spec_key_value_zero(file_under_test, updated_spec_validator_mappings, SPEC_ERRORS):
-                check = False
-
-            # 1.3 Check if there are non-zero warnings in Spec Validation output
-            if not is_spec_key_value_zero(file_under_test, updated_spec_validator_mappings, SPEC_WARNINGS):
-                check = False
-
-        else:
-            print('Error: For file {}, Unable to parse updated spec validation output file to mappings!'.format(file_under_test))
-
-        # 1.4 Check if there was an increase in suppressed warnings and/or errors
-        if original_spec_validator_mappings:
-            if is_suppressed_warnings_or_suppressed_errors_increased(file_under_test, original_spec_validator_mappings, updated_spec_validator_mappings):
-                print('For file {}, Spec Validation tool detected an increase in suppressed warnings and suppressed errors'.format(file_under_test))
-                # 1.4.1 Check if the Spec Validator yaml file is present
-                validation_config_file_to_search = get_validation_config_file_from_output_file(file_under_test)
-                print('Searching for file {} in PR'.format(validation_config_file_to_search))
-                validation_config_files = get_pr_change_nodes_for_pattern(pr_changes, validation_config_file_to_search)
-                if not validation_config_files:
-                    comment = 'There was an increase in suppressed warnings/errors in Spec Validation output file {}, '\
-                              'however, spec validation config file {} was not included in the PR. ' \
-                              'Please add the corresponding spec validation config files to the PR. ' \
-                              'If the validation config file is present with some other name, then you will need to ' \
-                              'rename that file to match this pattern and update the PR'.format(file_under_test, validation_config_file_to_search)
-                    add_gate_check_fail_comment(comment)
-                    check = False
-                else:
-                    comment = 'There was an increase in suppressed warnings/errors in Spec Validation output file {} and ' \
-                              'corresponding Spec Validation config file {} was found'.format(file_under_test, validation_config_file_to_search)
-                    add_get_check_pass_comment(comment)
-            else:
-                comment = 'For file {}, No increase in suppressed warnings or suppressed errors was found'.format(file_under_test)
-                add_get_check_pass_comment(comment)
-
-        else:
-            comment = 'Manual Check Required: For file {}, Unable to find the original version ' \
-                      'to check if there was an increase in Suppressed errors/warnings'.format(file_under_test)
-            add_get_check_pass_comment(comment)
-            MANUAL_CHECK_REQUIRED = True
-    return check
-
-
-def add_get_check_pass_comment(comment):
-    global GATE_CHECK_PASS_COMMENTS
-    print(comment)
-    GATE_CHECK_PASS_COMMENTS.append(comment)
-
-
-def add_gate_check_fail_comment(comment):
-    global GATE_CHECK_PASSED
-    global GATE_CHECK_FAIL_COMMENTS
-    print(comment)
-    GATE_CHECK_PASSED = False
-    GATE_CHECK_FAIL_COMMENTS.append(comment)
-
-
-def api_spec_pr_check(api_ticket):
-    """
-    Check if the API ticket has an API Spec change PR which conforms to the requirements for API Review.
-    @param api_ticket:
-    @return: None
-    """
-    global GATE_CHECK_PASSED
-    global GATE_CHECK_FAIL_COMMENTS
-
-    pr_link = get_api_spec_pr_link_from_ticket(api_ticket)
-    if pr_link is None:
-        comment = 'API Review PR with prefix *{}* is missing in description. 
' \
-                  'Please add the PR link with this prefix in a separate line in the ticket description'.format(API_REVIEW_PR_PREFIX)
-        add_gate_check_fail_comment(comment)
-
-    else:
-        project, repo, pr_id = get_pr_info(pr_link)
-        # Check if PR is valid for our use-case
-        if not project or not repo or not pr_id:
-            comment = 'PR link: {} is invalid. The PR link should match the pattern described in the issue template'.format(pr_link)
-            add_gate_check_fail_comment(comment)
-            return
-
-        pr = get_pullrequest(project, repo, pr_id)
-        # Check if the automation user has access to the PR repo
-        if not pr or not pr.json() or hasattr(pr.json(), 'errors'):
-            logger.error('Cannot access PR: {}'.format(pr))
-            if pr.json():
-                logger.error(pr.json())
-            comment = 'The automation user *{}* does not have access to the repo {}. ' \
-                      'Kindly grant temporary read-only access to this user for API Review ticket readiness check automation validation'.format(AUTOMATION_USER, repo)
-            add_gate_check_fail_comment(comment)
-            return
-
-        # Check if the PR is open; if not, comment that it is closed
-        if pr.json()['state'] != 'OPEN':
-            comment = 'PR link: {} is not in *Open* state. The PR should be in *Open* state for the API review process.'.format(pr_link)
-            add_gate_check_fail_comment(comment)
-
-        # Check if PR can be merged
-        merge_status = get_pullrequest_merge_status(project, repo, pr_id)
-        if not merge_status or not merge_status.json() or hasattr(merge_status.json(), 'errors') or merge_status.json().get('conflicted'):
-            comment = 'PR link {} has a merge conflict or has already been merged. Please resolve the merge conflict, ' \
-                      'update the PR and make sure that it is in *OPEN* state'.format(pr_link)
-            add_gate_check_fail_comment(comment)
-
-        # Check if the PR has at least one approval
-        if not is_pr_approved(pr):
-            comment = 'PR link {} has not been approved by at least 1 member of your team. ' \
-                      'The PR should be peer-approved by at least 1 person from your team before the API Review Board will review'.format(pr_link)
-            add_gate_check_fail_comment(comment)
-
-        # Check if a valid diff is present
-        pr_changes = get_pullrequest_changes(project, repo, pr_id)
-        if not pr_changes or not pr_changes.json() or pr_changes.json().get('values') is None:
-            comment = 'No changes found in the PR link attached to the ticket! Please update it with a valid PR link'
-            add_gate_check_fail_comment(comment)
-            return
-
-        # Check if at least one spec file is present
-        spec_files = get_pr_change_nodes_for_pattern(pr_changes=pr_changes, pattern=API_SEPC_FILE_PATTERN)
-        if spec_files:
-            add_get_check_pass_comment('A spec file with file name pattern:{} was found!'.format(API_SEPC_FILE_PATTERN))
-        else:
-            comment = 'No spec file found matching the file name pattern {}. '.format(API_SEPC_FILE_PATTERN) + \
-                      'Please make sure you have a spec file attached in the PR under review. 
' + \
-                      'Please request a manual bypass if no spec file can match this pattern in your PR'
-            add_gate_check_fail_comment(comment)
-
-        # Check if the API spec is valid
-        if is_spec_validation_output_valid(project, repo, pr_id, pr_changes):
-            print('Spec validation is valid for this PR')
-        else:
-            print('Spec validation is invalid for this PR')
-
-
-def decorate_comment_for_jira(api_ticket, comment_list, gate_check_status, reporter=None):
-    reporter = api_ticket.fields.reporter.name if reporter is None else reporter
-    reporter_greeting = 'Hi [~{}],\n'.format(reporter)
-    if comment_list:
-        comment_list[0] = '\n* ' + comment_list[0]
-    if gate_check_status:
-        return reporter_greeting + 'The API Review ticket readiness check has passed. ' \
-            + 'Specific things checked are mentioned below, but this is FYI only and does not require action from your side:-\n' \
-            + '\n* '.join(comment_list) \
-            + '\n\nThe ticket is ready for assignment to an API Review Board member.'
-    else:
-        return reporter_greeting \
-            + 'The API Review ticket readiness check has failed! ' \
-            + 'Transitioning the ticket status to *More information Needed* and re-assigning it back to you. ' \
-            + '\n\nPlease fix the issues mentioned below, update the PR if needed and put the ticket back into ' \
-            + '*Needs Triage* status so that the API Review ticket readiness check can run again.' \
-            + '\n\nIssues found:-' \
-            + '\n* '.join(comment_list)
-
-
-def gate_check_passed(api_ticket):
-    # Reassign the ticket to the default triage reviewer and add the API Review ticket readiness check pass comment
-    print('DEFAULT_TICKET_TRIAGE_ASSIGNEE is {}'.format(DEFAULT_TICKET_TRIAGE_ASSIGNEE))
-    user = util.JIRA_CLIENT().user(DEFAULT_TICKET_TRIAGE_ASSIGNEE)
-    if user:
-        print('Re-assigning ticket {} to {}'.format(api_ticket.key, user.displayName))
-        labels = api_ticket.fields.labels
-        print('Adding label {} to ticket {} with existing labels {}'.format(GATE_CHECK_PASS_LABEL, api_ticket.key, labels))
-        global MANUAL_CHECK_REQUIRED
-        if MANUAL_CHECK_REQUIRED:
-            print('Adding label {} to ticket {}'.format(MANUAL_CHECK_LABEL, api_ticket.key))
-        global GATE_CHECK_PASS_COMMENTS
-        comment = decorate_comment_for_jira(api_ticket, GATE_CHECK_PASS_COMMENTS, True, user.name)
-        print('\nComment for {}:\n\n{}\n\n'.format(api_ticket.key, comment))
-        if not config.IS_DRY_RUN:
-            util.JIRA_CLIENT().assign_issue(api_ticket.key, user.name)
-            labels.append(GATE_CHECK_PASS_LABEL)
-            if MANUAL_CHECK_REQUIRED:
-                labels.append(MANUAL_CHECK_LABEL)
-            api_ticket.update(fields={"labels": labels})
-            util.add_jira_comment(api_ticket.key, comment)
-    else:
-        print('Error: User {} not found!'.format(DEFAULT_TICKET_TRIAGE_ASSIGNEE))
-
-
-def gate_check_failed(api_ticket):
-    issue = api_ticket.key
-    # Comment on the ticket that failed the API Review ticket readiness check, with the list of errors
-    global GATE_CHECK_FAIL_COMMENTS
-    comment = decorate_comment_for_jira(api_ticket, GATE_CHECK_FAIL_COMMENTS, False)
-    print('\nComment for {}:\n\n{}\n\n'.format(api_ticket.key, comment))
-    if not config.IS_DRY_RUN:
-        util.add_jira_comment(api_ticket.key, comment)
-
-    # Transition issue to More Information Needed. 
- print('Transitioning {} to More Information Needed'.format(issue)) - if not config.IS_DRY_RUN: - util.transition_issue_overall_status(util.JIRA_CLIENT(), api_ticket, config.STATUS_MORE_INFORMATION_NEEDED) - - # Reassign ticket back to reporter - print('Re-assigning {} to {}'.format(issue, api_ticket.fields.reporter.displayName)) - if not config.IS_DRY_RUN: - util.JIRA_CLIENT().assign_issue(api_ticket.key, api_ticket.fields.reporter.name) - - -def gate_check(api_ticket): - print('Clearing API Review ticket readiness check counters') - init_gate_check() - print('Running API Review ticket readiness check on ticket {}'.format(api_ticket.key)) - print("=" * 30) - setup_bitbucket(None) - # Check for UDX ticket in related issues - udx_ticket_check(api_ticket) - # Check for API SPEC PR - api_spec_pr_check(api_ticket) - global GATE_CHECK_PASSED - if GATE_CHECK_PASSED: - gate_check_passed(api_ticket) - else: - gate_check_failed(api_ticket) - print('=' * 30) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='API Review Reminder') - parser.add_argument('--dry-run', - default=False, - action='store_true', - help='Perform a dry-run') - - args = parser.parse_args() - config.IS_DRY_RUN = args.dry_run - - if config.IS_DRY_RUN: - print('Running in dry-run mode') - - # Find all tickets to API Review ticket readiness check - tickets_in_triage = query_actionable_tickets() - if len(tickets_in_triage) == 0: - print('No actionable tickets found for Query: {}'.format(QUERY_TEMPLATE)) - else: - # API Review ticket readiness check all tickets - for ticket in tickets_in_triage: - try: - gate_check(ticket) - except Exception as err: - logger.error('Exception occurred while processing ticket:{} exception:{}'.format(ticket.key, err)) - print('API Review ticket readiness check Job finished!') diff --git a/scripts/auto_gen_utils/team_city_scripts/api_review/process_api_review_tickets.py b/scripts/auto_gen_utils/team_city_scripts/api_review/process_api_review_tickets.py deleted file mode 100644 index 6270493640..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/api_review/process_api_review_tickets.py +++ /dev/null @@ -1,80 +0,0 @@ -import os -import sys -import argparse -from datetime import datetime - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -import util # noqa: ignore=F402 -import config # noqa: ignore=F402 - -cutoff = os.environ.get('CUTOFF') or '1d' -REMINDER_TEMPLATE = """\ -Hello {stakeholders}, -This ticket has not been updated in the past {cutoff}. 
Can you let us know the latest status on this API Review?""" -QUERY_TEMPLATE = 'project = "Developer Experience" AND labels in (APIConsistency) AND (status = "In Progress" OR (status = "Ready for Work" AND assignee != aarthurs)) AND updated < -{cutoff} ORDER BY updated ASC' - - -def query_actionable_tickets(): - query = QUERY_TEMPLATE.format(cutoff=cutoff) - actionable_tickets = util.jira_search_issues(query) - - print('Total of {} actionable tickets found matching query `{}`.'.format(len(actionable_tickets), query)) - for ticket in actionable_tickets: - print('{} - `{}` was last updated on {}'.format(ticket.key, ticket.fields.summary, ticket.fields.updated)) - - return actionable_tickets - - -def find_stakeholders(ticket): - stakeholders = find_all_assignees(ticket.key) - stakeholders.add(ticket.fields.creator.name) - return stakeholders - - -def post_reminder(ticket): - print('Posting reminder on ticket {}:'.format(ticket.key)) - stakeholders = find_stakeholders(ticket) - reminder = REMINDER_TEMPLATE.format(stakeholders=decorate(stakeholders), cutoff=cutoff) - print(reminder) - if not config.IS_DRY_RUN: - util.add_jira_comment(ticket.key, reminder) - - -def find_all_assignees(key): - assignees = set() - issue = util.JIRA_CLIENT().issue(key, expand='changelog') - changelog = issue.changelog - for history in changelog.histories: - for item in history.items: - if item.field == 'assignee': - assignees.add(getattr(item, 'from')) - assignees.add(item.to) - assignees.discard(None) - return assignees - - -def decorate(names): - return ', '.join('[~{}]'.format(name) for name in names) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='API Review Reminder') - parser.add_argument('--dry-run', - default=False, - action='store_true', - help='Perform a dry-run') - - args = parser.parse_args() - config.IS_DRY_RUN = args.dry_run - - if config.IS_DRY_RUN: - print('Running in dry-run mode') - print('Current time is {}'.format(datetime.now())) - print('Using cutoff {}'.format(cutoff)) - - actionable_tickets = query_actionable_tickets() - for ticket in actionable_tickets: - post_reminder(ticket) diff --git a/scripts/auto_gen_utils/team_city_scripts/bug_bash/process_bug_bash_tickets.py b/scripts/auto_gen_utils/team_city_scripts/bug_bash/process_bug_bash_tickets.py deleted file mode 100644 index 7a0d8810bc..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/bug_bash/process_bug_bash_tickets.py +++ /dev/null @@ -1,273 +0,0 @@ - -import os -import sys -import argparse - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -from sdk_regions_updater.region_updater_utils import get_issues_with_special_regions_to_be_ignored, get_region_from_storekeeper # noqa: ignore=F402 -import util # noqa: ignore=F402 -import config # noqa: ignore=F402 - -ALL_TEST_PASS_PREFIX = 'All tests passed' -REGION_TYPES_TO_IGNORE = ['(ONSR)', 'Development'] -USER_OCID_PREFIX = os.environ.get('USER_OCID_PREFIX') or 'User OCID -' -TENANCY_OCID_PREFIX = os.environ.get('TENANCY_OCID_PREFIX') or 'Tenancy OCID -' -ENV_FILE_LOCATION = os.environ.get('ENV_FILE_LOCATION') or r'/tmp/region_info.txt' -USER_AUTH_RESULT_FILE = os.environ.get('USER_AUTH_RESULT_FILE') or r'/tmp/user_auth_test.txt' -INSTANCE_AUTH_RESULT_FILE = os.environ.get('INSTANCE_AUTH_RESULT_FILE') or r'/tmp/instance_auth_test.txt' -FORMAT_BLOCK = os.environ.get('FORMAT_BLOCK') or '{noformat}' -OCI_SDK_KEY_URL = 
os.environ.get('OCI_SDK_KEY_URL') or 'https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/oci-sdk-keys/browse/oci_sdk_user_key_public.pem' - -ISSUE_ID = 'ISSUE_ID' -REGION_TO_TEST = 'REGION_TO_TEST' -REALM_DOMAIN = 'REALM_DOMAIN' -OCI_TENANCY_ID = 'OCI_TENANCY_ID' -OCI_USER_OCID = 'OCI_USER_OCID' -PRE_RUN = 'pre-run' -POST_RUN = 'post-run' -MAX_COMMENT_LENGTH = 32000 -BUILD_COUNTER = os.environ.get('BUILD_COUNTER') or 'Latest Build ID' -TEAM_CITY_URL = os.environ.get('TEAM_CITY_URL') or 'https://teamcity.oci.oraclecorp.com/buildConfiguration/Sdk_SelfService_BugBash' -TEAM_CITY_LOG = 'Team city URL: {} Build-Counter: {}.'.format(TEAM_CITY_URL, BUILD_COUNTER) -USER_AUTH_TEST_ALT_COMMENT = 'API key authentication tests have passed. The result log was too big to be added as comment, please see Team City for detailed logs. ' -INSTANCE_AUTH_TEST_ALT_COMMENT = 'Instance Principals Authentication Test has passed. The result log was too big to be added as comment, please see Team City for detailed logs. ' -CLI_TEST_PREFIX = 'Testing Python-CLI' -CLI_ALT_COMMENT = 'The result log was too big to be added as comment, please see Team City for detailed logs for CLI result.' -CLI_TEST_SUFFIX = 'Please check teamcity logs for detailed result.' -DEFAULT_BUGBASH_AUTOMATION_OWNER = os.environ.get('DEFAULT_BUGBASH_AUTOMATION_OWNER') or 'anurggar' - -def get_jira_test_comment(prefix, test_results): - return '{}\n{}\n{}\n{}'.format(prefix, FORMAT_BLOCK, test_results, FORMAT_BLOCK) - - -def format_env_variable(name, value): - return '{}={}{}'.format(name, value, os.linesep) - - -def get_issue_id_from_env_file(): - issue_id = '' - if os.path.exists(ENV_FILE_LOCATION): - with open(ENV_FILE_LOCATION, 'r') as env_file: - for line in env_file: - if ISSUE_ID in line: - issue_id = line.split('{}{}'.format(ISSUE_ID, "="), 1)[1].strip() - break - else: - print('ERROR: File {} not found'.format(ENV_FILE_LOCATION)) - return issue_id - - -def build_env_file(issue_id, region_id, user_ocid, tenancy_ocid, realm): - if os.path.exists(ENV_FILE_LOCATION): - print("File exists.. 
Removing old env file")
-        os.remove(ENV_FILE_LOCATION)
-
-    with open(ENV_FILE_LOCATION, 'w') as env_file:
-        env_file.write(format_env_variable(ISSUE_ID, issue_id))
-        env_file.write(format_env_variable(REGION_TO_TEST, region_id))
-        env_file.write(format_env_variable(REALM_DOMAIN, realm))
-        env_file.write(format_env_variable(OCI_USER_OCID, user_ocid))
-        env_file.write(format_env_variable(OCI_TENANCY_ID, tenancy_ocid))
-
-
-def get_user_ocid_tenancy_ocid_from_description(description):
-    user_ocid = ""
-    tenancy_ocid = ""
-    if description:
-        # Parse the description to get the user OCID and tenancy OCID
-        for line in description.splitlines():
-            line = line.strip()
-            if TENANCY_OCID_PREFIX in line:
-                tenancy_ocid = line.split(TENANCY_OCID_PREFIX, 1)[1].strip()
-            if USER_OCID_PREFIX in line:
-                user_ocid = line.split(USER_OCID_PREFIX, 1)[1].strip()
-    return user_ocid, tenancy_ocid
-
-
-def setup_issue_for_bugbash_testing(issue):
-    region_id = issue.raw['fields']['summary'].split()[-1]
-    reporter = issue.fields.reporter.name
-    assignee = issue.fields.assignee.name
-    description = issue.fields.description
-    fail_comment = 'Hi [~{}], [~{}]\n'.format(reporter, assignee)
-
-    if region_id and description:
-        user_ocid, tenancy_ocid = get_user_ocid_tenancy_ocid_from_description(description)
-        region_info = get_region_from_storekeeper(region_id)
-        realm = region_info['realmDomainComponent'] if region_info else None
-        if user_ocid and tenancy_ocid and realm:
-            build_env_file(issue.key, region_id, user_ocid, tenancy_ocid, realm)
-            comment = 'Starting Bug Bash testing for region: {}'.format(region_id)
-            print(comment)
-            if not config.IS_DRY_RUN:
-                util.add_jira_comment(issue.key, comment)
-        else:
-            fail_comment += "ERROR: Description is missing either {}:{} or {}:{} or {}:{}".format(USER_OCID_PREFIX, user_ocid, TENANCY_OCID_PREFIX, tenancy_ocid, REALM_DOMAIN, realm)
-            print(fail_comment)
-            if not config.IS_DRY_RUN:
-                util.add_jira_comment(issue.key, fail_comment)
-    else:
-        fail_comment += 'ERROR: Region id: {} or description: {} is invalid. Please make sure to use the full region identifier in the ticket title, preceded by a -'.format(region_id, description)
-        print(fail_comment)
-        if not config.IS_DRY_RUN:
-            util.add_jira_comment(issue.key, fail_comment)
-
-    if not config.IS_DRY_RUN:
-        # Transition the issue to In-Progress so that the ticket is not picked up again
-        print('INFO: Transitioning issue {} to IN-PROGRESS.'.format(issue.key))
-        util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_IN_PROGRESS)
-
-
-def process_new_tickets_with_invalid_regions(issues_with_invalid_regions):
-    if not issues_with_invalid_regions:
-        print('No invalid bug bash tickets found')
-        return
-    for issue in issues_with_invalid_regions:
-        region_id = issue.raw['fields']['summary'].split()[-1]
-        contact = issue.fields.assignee.name if issue.fields.assignee else DEFAULT_BUGBASH_AUTOMATION_OWNER
-        comment = '[~{}],\nRegion {} is invalid. 
Please check with Region build team to see what is the issue'.format(contact, region_id) - print(comment) - if not config.IS_DRY_RUN: - util.add_jira_comment(issue.key, comment) - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_IN_REVIEW) - - -def process_new_tickets_with_special_regions(issues_to_ignore): - if not issues_to_ignore: - print('No new Special regions bug bash tickets found') - return - for issue in issues_to_ignore: - region_id = issue.raw['fields']['summary'].split()[-1] - comment = 'Region {} is a secret region that will not be tested in Bug Bash. Closing the issue directly.'.format( - region_id) - print(comment) - if not config.IS_DRY_RUN: - util.add_jira_comment(issue.key, comment) - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_CLOSED) - - -def post_bug_bash_processing(): - issue_id = get_issue_id_from_env_file() - print('Found issue id:{} from env file:{}'.format(issue_id, ENV_FILE_LOCATION)) - if issue_id: - issue = util.JIRA_CLIENT().issue(id=issue_id) - if issue: - print('Issue:{} exists'.format(issue.key)) - user_auth_comment, instance_auth_comment = '', '' - sdk_user_auth_comment, cli_user_auth_comment = '', '' - user_auth_test_passed, instance_auth_test_passed = False, False - - if os.path.exists(USER_AUTH_RESULT_FILE): - with open(USER_AUTH_RESULT_FILE, 'r') as user_auth_file: - #user_auth_comment = get_jira_test_comment(prefix='API Key Authentication tests results:-', test_results=user_auth_file.read()) - user_auth_comment = user_auth_file.read() - print('*' * 30, '\n', 'User Auth comment', '\n', '*' * 30) - print(user_auth_comment) - sdk_user_auth_comment, cli_user_auth_comment = user_auth_comment.split(CLI_TEST_PREFIX, 1) - sdk_user_auth_comment = get_jira_test_comment(prefix='API Key Authentication tests results:-', - test_results=sdk_user_auth_comment) - add_result_log_comment(issue_key=issue_id, comment=sdk_user_auth_comment, - alt_comment=USER_AUTH_TEST_ALT_COMMENT) - # CLI test result - cli_user_auth_comment = get_jira_test_comment(prefix=CLI_TEST_PREFIX, - test_results=cli_user_auth_comment) - add_result_log_comment(issue_key=issue_id, - comment=cli_user_auth_comment + CLI_TEST_SUFFIX + TEAM_CITY_LOG, - alt_comment=CLI_ALT_COMMENT) - - else: - print('WARN: User auth result file not found at {}'.format(USER_AUTH_RESULT_FILE)) - - if os.path.exists(INSTANCE_AUTH_RESULT_FILE): - with open(INSTANCE_AUTH_RESULT_FILE, 'r') as instance_auth_file: - instance_auth_comment = get_jira_test_comment(prefix='Instance Principals Authentication Test Results:-', - test_results=instance_auth_file.read()) - print('*' * 30, '\n', 'Instance Auth comment', '\n', '*' * 30) - print(instance_auth_comment) - add_result_log_comment(issue_key=issue_id, comment=instance_auth_comment, alt_comment=INSTANCE_AUTH_TEST_ALT_COMMENT) - else: - print('WARN: Instance auth result file not found at {}'.format(INSTANCE_AUTH_RESULT_FILE)) - - user_auth_test_passed = ALL_TEST_PASS_PREFIX in user_auth_comment - region_id = issue.raw['fields']['summary'].split()[-1] - instance_auth_test_passed = region_id in instance_auth_comment - - if user_auth_test_passed and instance_auth_test_passed: - if not config.IS_DRY_RUN: - util.add_jira_comment(issue_key=issue_id, comment=ALL_TEST_PASS_PREFIX) - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_CLOSED) - else: - fail_comment = '' - if not user_auth_test_passed: - fail_comment += '* API key authentication tests have failed! 
Make sure the following details are correct:-\n' \ - + '** The region identifier in the summary is correct\n' \ - + '** The Tenancy in {} is correct and matches the value for the user mentioned in the description\n'.format(TENANCY_OCID_PREFIX) \ - + '** The User in {} is correct and matches the USER OCID value for the user mentioned in the description\n'.format(USER_OCID_PREFIX) \ - + '** Check if the User has the OCI SDK public key listed under them. If not please add the key under this user from here: {}\n'.format(OCI_SDK_KEY_URL) - if not instance_auth_test_passed: - fail_comment += '* Instance Principals Authentication Test Failed! Please make sure that the Regions Build team has added capacity in this tenancy to create new Instances! \n' - - add_fail_comment(issue, fail_comment) - else: - print('ERROR: issue-id:{} not found by JIRA client'.format(issue_id)) - - -def add_result_log_comment(issue_key, comment, alt_comment): - if not config.IS_DRY_RUN: - if len(comment) >= MAX_COMMENT_LENGTH: - util.add_jira_comment(issue_key=issue_key, comment=alt_comment + TEAM_CITY_LOG) - else: - util.add_jira_comment(issue_key=issue_key, comment=comment) - - -def add_fail_comment(issue, fail_comment): - reporter = issue.fields.reporter.name - assignee = issue.fields.assignee.name - comment = 'Hi [~{}], [~{}]\nThe following tests have failed:-\n'.format(reporter, assignee) - comment += fail_comment - comment += '\nSee Team City for more details on this failure. ' + TEAM_CITY_LOG - print(comment) - if not config.IS_DRY_RUN: - util.add_jira_comment(issue_key=issue.key, comment=comment) - - -def process_new_tickets(): - issues = util.get_unprocessed_bug_bash_tickets() - if not issues: - print('No new unprocessed bug bash tickets found') - return - issues_to_ignore, issues_with_invalid_regions = get_issues_with_special_regions_to_be_ignored(issues, REGION_TYPES_TO_IGNORE) - process_new_tickets_with_special_regions(issues_to_ignore) - process_new_tickets_with_invalid_regions(issues_with_invalid_regions) - for issue in [i for i in issues if i not in issues_to_ignore and i not in issues_with_invalid_regions]: - setup_issue_for_bugbash_testing(issue) - # TODO find a way to run this for multiple tickets - break - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='API Review Reminder') - parser.add_argument('--dry-run', - default=False, - action='store_true', - help='Perform a dry-run') - parser.add_argument('--run-type', - required=True, - choices=[PRE_RUN, POST_RUN], - help='Run type for processing bug bash tickets, {}: before processing ticket, {}: for closing the ticket'.format(PRE_RUN, POST_RUN)) - - args = parser.parse_args() - config.IS_DRY_RUN = args.dry_run - run_type = args.run_type - - if config.IS_DRY_RUN: - print('Running in dry-run mode') - - if run_type == PRE_RUN: - # Find new tickets to process and do bug bash testing - process_new_tickets() - elif run_type == POST_RUN: - post_bug_bash_processing() diff --git a/scripts/auto_gen_utils/team_city_scripts/cli/public/10_report_cli_and_python_sdk_generation_success.sh b/scripts/auto_gen_utils/team_city_scripts/cli/public/10_report_cli_and_python_sdk_generation_success.sh deleted file mode 100755 index 836132857a..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/cli/public/10_report_cli_and_python_sdk_generation_success.sh +++ /dev/null @@ -1,28 +0,0 @@ -set -e -set -x - -pwd -ls -la - -cd python-cli -# activate python env -source scripts/common_build_functions.sh -f_activate_virtualenv - -if [[ `git branch|grep 
"\*"|grep "bulk"` ]]; then - echo "Found bulk preview" - export BUILD_TYPE_ARG="--build-type bulk_pending_merge_public" -else - export BUILD_TYPE_ARG="--build-type individual_public" -fi -cd .. - -# commit changes from generation and build.sh for python-cli and python-sdk -# this step will run no matter what happened before it so it can report success / failure to the JIRA tickets -cd autogen -# This should be defined within the TC job configuration: export BUILD_ID=%teamcity.build.id% -python ./3_report_generation_status.py --build-id $BUILD_ID $BUILD_TYPE_ARG -cd .. - -ls -la ./python-cli -ls -la ./python-sdk \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/cli/public/11_commit_and_push_generated_changes.sh b/scripts/auto_gen_utils/team_city_scripts/cli/public/11_commit_and_push_generated_changes.sh deleted file mode 100755 index bfb8d65a0c..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/cli/public/11_commit_and_push_generated_changes.sh +++ /dev/null @@ -1,35 +0,0 @@ -set -x -set -e - -# this is ONLY for master -# earlier in order to get the CLI artifact to work we updated the CLI setup.py to reference a specific SDK build -# we DO NOT want to check this in since the master version of setup.py cannot reference a specific internal build of the SDK -# so here before we commit the changes, we revert back to the original SDK version without the team city build number -SDK_VERSION=$(cat ~/.SDK_VERSION) -echo "SDK version: $SDK_VERSION" - -cd python-cli - -# activate python env -source scripts/common_build_functions.sh -f_activate_virtualenv - -./scripts/replace_oci_version.py $SDK_VERSION -cd .. - -# get the branch we're on -cd python-cli -branch=`git branch|grep "^\*"|cut -c3-` -if [[ ${branch} == *"bulk"* ]]; then - build_type="bulk_pending_merge_public" -else - build_type="individual_public" -fi -cd .. - -# commit changes from generation and build.sh for python-cli and python-sdk -cd autogen -ls -la -# This should be defined within the TC job configuration: export BUILD_ID=%teamcity.build.id% -python ./4_on_generation_complete.py --build-id $BUILD_ID --build-type ${build_type} -cd .. \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/cli/public/1_setup_cli_and_python_sdk_public_branches.sh b/scripts/auto_gen_utils/team_city_scripts/cli/public/1_setup_cli_and_python_sdk_public_branches.sh deleted file mode 100755 index ce493b5f70..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/cli/public/1_setup_cli_and_python_sdk_public_branches.sh +++ /dev/null @@ -1,61 +0,0 @@ -set -e -set -x - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - -# Old way of doing that: -# ls -la ~/.ssh -# -# cat ~/.ssh/config -# -# printf "\n\nHost * \n StrictHostKeyChecking no\n" >> ~/.ssh/config -# -# cat ~/.ssh/config - -# Setup Python env -cd python-cli -source scripts/common_build_functions.sh -f_install_and_activate_python3 -cd .. - -## AUTOGEN ## -cd autogen -pip install -U pip -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt -cd .. 
- -## CLI ## -cd python-cli -git pull -git fetch -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt -# pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements-internal.txt -cd .. - -## SDK ## -cd python-sdk -git pull -git fetch -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements-internal.txt -cd .. - -# run the setup job for step 2 in ./autogen -cd autogen -ls -la -pwd - -ls -la - -pip install pyOpenSSL==22.1.0 - -# checks out CLI branch with same name as SDK branch that triggered this build -python ./2_pre_generation_set_up.py --build-id $BUILD_ID - -# back out into root directory -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/cli/public/3_record_python_sdk_generation_success.sh b/scripts/auto_gen_utils/team_city_scripts/cli/public/3_record_python_sdk_generation_success.sh deleted file mode 100755 index 7540402beb..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/cli/public/3_record_python_sdk_generation_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch python_sdk_generation_success.txt \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/cli/public/5_record_cli_generation_success.sh b/scripts/auto_gen_utils/team_city_scripts/cli/public/5_record_cli_generation_success.sh deleted file mode 100755 index a3a875a917..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/cli/public/5_record_cli_generation_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch cli_generation_success.txt \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/cli/public/6_python_sdk_build_and_publish_artifacts.sh b/scripts/auto_gen_utils/team_city_scripts/cli/public/6_python_sdk_build_and_publish_artifacts.sh deleted file mode 100755 index ad6e04194d..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/cli/public/6_python_sdk_build_and_publish_artifacts.sh +++ /dev/null @@ -1,119 +0,0 @@ -set -e -set -x - -touch sdk_build_start.txt - -# activate python environment -cd python-cli -source scripts/common_build_functions.sh -f_activate_virtualenv -cd .. 
-
-pip ${PIP_TIMEOUT_PARAMETER} install -e ./python-sdk
-
-# must disable StrictHostKeyChecking so that we don't get an interactive
-# prompt later asking to confirm the host key
-# Must disable -e (fail on non-zero exit code) because ssh returns 255
-set +e
-ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999
-set -e
-
-# Old way of doing that:
-# ls -la ~/.ssh
-#
-# cat ~/.ssh/config
-#
-# printf "\n\nHost * \n StrictHostKeyChecking no\n" >> ~/.ssh/config
-#
-# cat ~/.ssh/config
-
-# configure git for this commit
-git config --global user.email "$GIT_USER_EMAIL"
-git config --global user.name "$GIT_USER_NAME"
-
-cd python-sdk
-
-# the below commands are copied from:
-# Sdk_PythonSdk_PreviewTestsDocsWheel
-# at some point we should consolidate the Python SDK preview build to use build.sh
-
-SDK_VERSION=$(tail -1 src/oci/version.py | cut -d '"' -f2)
-
-# TODO: come up with a better versioning scheme for these artifacts
-# for right now, we are using +0.{build number} so that it is always behind the +{build number} that
-# the official python build wheel job publishes
-DEV_VERSION=$SDK_VERSION+0.$BUILD_NUMBER.selfservice
-echo SDK Version Number $SDK_VERSION
-echo Build Version Number $DEV_VERSION
-
-echo Rewriting version from $SDK_VERSION to $DEV_VERSION
-# Replace the version with the DEV_VERSION (SDK_VERSION + Build Number) so that we can make
-# referencing and declaring dependencies on preview CLIs more explicit
-rm src/oci/version.py
-cat > src/oci/version.py <<EOF
-# coding: utf-8
-# Copyright (c) 2016, 2017, Oracle and/or its affiliates. All rights reserved.
-
-__version__ = "$DEV_VERSION"
-
-EOF
-
-# Echo out the version to confirm
-cat src/oci/version.py
-
-echo Building Docs
-pip install sphinx --timeout 120
-pip ${PIP_TIMEOUT_PARAMETER} install sphinx_rtd_theme
-
-echo Running Tests
-
-if [ "$TEST_ENABLE" = "false" ]; then
-    echo "TESTS HAVE BEEN DISABLED."
-else
-    pip ${PIP_TIMEOUT_PARAMETER} install tox
-    tox -e flake8,py27,py35
-fi
-
-echo Building Wheel
-# Redirect STDOUT and STDERR to a file to avoid a resource-unavailable error in TeamCity jobs.
-mkdir -p docs/_build/html
-make build >> build_output.txt 2>&1
-
-# Create a dev directory that will contain versions of the whl, zip, and docs meant for
-# the dev pypi artifactory. Each artifact includes the build number in the version to avoid
-# conflicts.
-
-mkdir -p dist/dev/
-if [ -f "dist/oci-$DEV_VERSION-py3-none-any.whl" ]; then
-    cp dist/oci-$DEV_VERSION-py3-none-any.whl dist/dev/oci-$DEV_VERSION-py3-none-any.whl
-else
-    cp dist/oci-$DEV_VERSION-py2.py3-none-any.whl dist/dev/oci-$DEV_VERSION-py2.py3-none-any.whl
-fi
-cp dist/oci-python-sdk-$DEV_VERSION.zip dist/dev/oci-python-sdk-$DEV_VERSION.zip
-
-
-echo Contents of dist folder
-ls -la dist
-
-# the build script creates a virtualenv inside this folder which we need to remove or it will be checked in
-# commenting this out since we are not invoking build.sh
-# rm -rf ./.sdk-venv
-
-# AFTER building the wheel, reset src/oci/version.py back to the regular version; we don't want to check in the TC version
-git checkout -- src/oci/version.py
-
-# Delete build_output.txt.
-rm build_output.txt
-
-cd .. 
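The "+0.{build number}" claim above can be sanity-checked with the third-party packaging library; under PEP 440 local-version ordering a leading numeric segment of 0 always sorts below the bare build number the official wheel job publishes. A sketch, with a hypothetical base version and build number:

    from packaging.version import Version

    sdk_version = '2.18.0'   # hypothetical
    build_number = 4242      # hypothetical

    dev = Version('{}+0.{}.selfservice'.format(sdk_version, build_number))
    official = Version('{}+{}'.format(sdk_version, build_number))
    assert dev < official    # first local segment decides: 0 < 4242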
- -# write DEV_VERSION to a text file so next step can use it -echo $DEV_VERSION >> ~/.DEV_VERSION - -# write SDK_VERSION to a text file so next step can use it -echo $SDK_VERSION >> ~/.SDK_VERSION - -# DEV_VERSION should contain the version string for this build of the Python SDK -# we need this in the next step to know which version the CLI should depend on -echo "Dev version: $DEV_VERSION" -echo "SDK version: $SDK_VERSION" \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/cli/public/7_record_python_sdk_build_success.sh b/scripts/auto_gen_utils/team_city_scripts/cli/public/7_record_python_sdk_build_success.sh deleted file mode 100755 index 1d5e110ec8..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/cli/public/7_record_python_sdk_build_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch python_sdk_build_success.txt \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/cli/public/8_cli_build_and_publish_artifacts.sh b/scripts/auto_gen_utils/team_city_scripts/cli/public/8_cli_build_and_publish_artifacts.sh deleted file mode 100755 index 793df737b2..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/cli/public/8_cli_build_and_publish_artifacts.sh +++ /dev/null @@ -1,47 +0,0 @@ -set -e -set -x - -touch cli_build_start.txt - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - -# Old way of doing that: -# ls -la ~/.ssh -# -# cat ~/.ssh/config -# -# printf "\n\nHost * \n StrictHostKeyChecking no\n" >> ~/.ssh/config -# -# cat ~/.ssh/config - -# configure git for this commit -git config --global user.email "$GIT_USER_EMAIL" -git config --global user.name "$GIT_USER_NAME" - -DEV_VERSION=$(cat ~/.DEV_VERSION) -SDK_VERSION=$(cat ~/.SDK_VERSION) - -cd python-cli -source scripts/common_build_functions.sh -f_activate_virtualenv -./scripts/replace_oci_version.py $DEV_VERSION - -# sleep to allow time for Python SDK from last step to be published -sleep 300 - -# run regular python build to produce artifacts -# TEST_ENABLE is set to 'false' so tests are skipped -source scripts/build_preview.sh "individual_public" - -# the build script creates a virtualenv inside this folder which we need to remove or it will be checked in -rm -rf ./.sdk-venv - -cd .. 
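scripts/replace_oci_version.py, invoked here and again in step 11, is not included in this patch; it presumably rewrites the CLI's pinned oci dependency to the given version. A hypothetical sketch, where the 'oci==' pattern in setup.py is an assumption:

    import re
    import sys

    def replace_oci_version(setup_py_path, new_version):
        # assumes setup.py declares the dependency as 'oci==X.Y.Z...'
        with open(setup_py_path) as f:
            text = f.read()
        text = re.sub(r"oci==[^'\"]*", 'oci==' + new_version, text)
        with open(setup_py_path, 'w') as f:
            f.write(text)

    if __name__ == '__main__':
        replace_oci_version('setup.py', sys.argv[1])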
- -# DEV_VERSION should contain the version string for this build of the CLI -echo "Dev version: $DEV_VERSION" \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/cli/public/9_record_cli_build_success.sh b/scripts/auto_gen_utils/team_city_scripts/cli/public/9_record_cli_build_success.sh deleted file mode 100755 index bf3d97fd40..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/cli/public/9_record_cli_build_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch cli_build_success.txt \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/cli/public/constants.py b/scripts/auto_gen_utils/team_city_scripts/cli/public/constants.py deleted file mode 100644 index 6139a34af7..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/cli/public/constants.py +++ /dev/null @@ -1,36 +0,0 @@ -ENV_RELEASE_DATE = 'release_date' -ENV_SLACK_WEBHOOK = 'slack_webhook' -ENV_VERSION = 'version' - -JIRA_DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S.000+0000' - -PROJECT = 'SDK' -BULK_PUBLIC_PR_TITLE = 'Auto Generated Bulk Public' -PR_DESCRIPTION = 'description' -DEXREQ_TICKET_PATTERN = '(DEXREQ-[\d]+)' # noqa: W605 -DEXREQ_TICKET_LABELS = 'labels' - -SPEC_REPO = 'dexreq' -SPEC_PR_LINK = '(https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/dexreq/pull-requests/.*)\]' # noqa: W605 -TITLE_FORMAT = '(.*)(API)*' # noqa: W605 - -CHANGELOG_TITLE_PATTERN = '[^\w]*changelog[^\w]*' # noqa: W605 -CLI_COMMAND_PATTERN = '[-*`\s]*(oci[^`]*).*' # noqa: W605 -CLI_COMMAND_FORMAT = ' * ``{command}``' -DESCRIPTION_PATTERN = '[^\w[]*' # noqa: W605 -SDK_DESCRIPTION_FORMAT = '* {text}' -CLI_DESCRIPTION_FORMAT = '\n* {text}\n' - -SDK_SECTION = 'SDK' -CLI_SECTION = 'CLI' -CLI_MANUAL_CHANGE_SECTION = 'CLI manual change' -JIRA_DOMAIN = 'https://jira.oci.oraclecorp.com/browse/' - -PR_SUMMARY_TEMPLATE = '>{count} issue{plural} found for {section}: {issue_list}\n' -SDK_CHANGELOG_REPORT = '>SDK Change Log {mentions}\n{summary}{changelog_entries}\n' -CLI_CHANGELOG_HEADER_TEMPLATE = '{version} - {release_date}\n--------------------\n' -CLI_CHANGELOG_REPORT = '>CLI Change Log {mentions}\n{summary}\n{cli_header}{cli}' -CHANGELOG_ENTRY = '\n\n<{link}|{dexreq_issue}> by {service}\n{content}' - -SDK_MENTIONS = ['<@WFABDC812>'] -CLI_MENTIONS = ['<@U03KTDB0T8V>'] diff --git a/scripts/auto_gen_utils/team_city_scripts/cli/public/gather_cli_changelog_entry.py b/scripts/auto_gen_utils/team_city_scripts/cli/public/gather_cli_changelog_entry.py deleted file mode 100644 index 1bcc5117c1..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/cli/public/gather_cli_changelog_entry.py +++ /dev/null @@ -1,266 +0,0 @@ -from collections import defaultdict -from datetime import date, timedelta -from slack_sdk.webhook import WebhookClient - -import argparse -import re -import sys -import os -import yaml - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../../..')) -sys.path.append(os.path.join(dir_path, '../../')) - -from set_changed_service_env_variable import get_service_name_from_issue # noqa: ignore=F402 -from shared import bitbucket_utils # noqa: ignore=F402 - -import config # noqa: ignore=F402 -import constants # noqa: ignore=F402 -import util # noqa: ignore=F402 - - -def get_tool_name(branch): - words = re.findall('(\w*)', branch) # noqa: W605 - tool_name = set(words) & set(config.TOOL_NAMES) - return tool_name.pop() if len(tool_name) > 0 else config.CLI_NAME - - 
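For illustration, get_tool_name above reduces to a set intersection between the word tokens of the branch name and the configured tool names, falling back to the CLI when nothing matches. The values below are stand-ins for config.TOOL_NAMES:

    import re

    TOOL_NAMES = {'CLI', 'DotNetSDK', 'TestDataGen'}  # hypothetical values
    branch = 'auto-public-DotNetSDK-DEXREQ-1234'      # hypothetical branch

    words = set(re.findall(r'(\w*)', branch))
    print(words & TOOL_NAMES or {'CLI'})  # -> {'DotNetSDK'}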
-def get_monday_date():
-    today = date.today()
-    monday = today - timedelta(days=today.weekday())
-    return monday.strftime(constants.JIRA_DATETIME_FORMAT)
-
-
-def get_release_date():
-    release = os.environ.get(constants.ENV_RELEASE_DATE)
-    today = date.today()
-
-    try:
-        release = date.fromisoformat(release)
-
-        if release > today:
-            return release
-
-    except (TypeError, ValueError):
-        pass
-
-    return today + timedelta(days=8 - today.weekday())
-
-
-def get_issues_from_newest_bulk_public_pr(tool_name, cut_off_date):
-    pr = get_bulk_public_pr(tool_name, cut_off_date)
-    return re.findall(constants.DEXREQ_TICKET_PATTERN, pr[constants.PR_DESCRIPTION])
-
-
-def get_bulk_public_pr(tool_name, cut_off_date):
-    pr = bitbucket_utils.get_newest_pullrequest_with_string_after(
-        constants.PROJECT, config.REPO_NAMES_FOR_TOOL[tool_name][-1], constants.BULK_PUBLIC_PR_TITLE, cut_off_date
-    )
-
-    if pr:
-        return pr
-
-    sys.exit('No PRs found for {} matching the title \'{}\' after {}'.format(
-        config.REPO_NAMES_FOR_TOOL[tool_name][-1],
-        constants.BULK_PUBLIC_PR_TITLE,
-        cut_off_date.split('T')[0]
-    ))
-
-
-def get_changelog_entries(issues):
-    output = defaultdict(str)
-
-    for issue in issues:
-        jira_issue = get_jira_issue(issue)
-        service = get_service_name(jira_issue)
-        sdk_entry, cli_entry = get_raw_changelog_entries(jira_issue)
-
-        if not is_manual_change_ticket(jira_issue):
-            cli_issues.append(issue)
-
-        output[constants.SDK_SECTION] += format_entry(issue, service, sdk_entry)
-        output[constants.CLI_SECTION] += format_entry(issue, service, cli_entry, True)
-
-    return output
-
-
-def get_jira_issue(issue):
-    return jira.issue(issue, fields='{},{},{},{},comment'.format(
-        config.CUSTOM_FIELD_ID_PREVIEW_ISSUE,
-        config.CUSTOM_FIELD_ID_CHANGELOG,
-        config.CUSTOM_FIELD_ID_CLI_CHANGELOG,
-        constants.DEXREQ_TICKET_LABELS
-    ))
-
-
-def get_service_name(jira_issue):
-    try:
-        pr = bitbucket_utils.get_pullrequest_from_url(get_spec_pr_link(jira_issue)).json()
-        return get_title_from_yaml_path(pr['id'], pr['fromRef']['latestCommit'])
-    except Exception:
-        return get_service_name_from_ticket(jira_issue)
-
-
-def get_spec_pr_link(jira_issue):
-    for comment in reversed(jira_issue.fields.comment.comments):
-        match = re.search(constants.SPEC_PR_LINK, comment.body)
-        if match:
-            return match[1]
-
-
-def get_title_from_yaml_path(id, commit_id):
-    file = bitbucket_utils.get_file_content_from_commit_id_and_path(
-        constants.PROJECT,
-        constants.SPEC_REPO,
-        get_yaml_path(id),
-        commit_id
-    ).content
-    y = yaml.safe_load(file)
-    # strip a trailing " api" from the title; rstrip(' api') would instead strip any trailing run of the characters ' ', 'a', 'p', 'i'
-    return re.sub(r'\s*api$', '', y['info']['title'].lower()).title()
-
-
-def get_yaml_path(id):
-    pr_diff = bitbucket_utils.get_pullrequest_diff(constants.PROJECT, constants.SPEC_REPO, id).json()
-    for diff in pr_diff['diffs']:
-        destination = diff['destination']['toString']
-        if destination.endswith('.yaml'):
-            return destination
-
-
-def get_service_name_from_ticket(jira_issue):
-    preview_issue = getattr(jira_issue.fields, config.CUSTOM_FIELD_ID_PREVIEW_ISSUE)
-
-    preview = jira.issue(preview_issue, fields=config.CUSTOM_FIELD_ID_SPEC_FRIENDLY_NAME)
-    service = getattr(preview.fields, config.CUSTOM_FIELD_ID_SPEC_FRIENDLY_NAME)
-    service_from_path = get_service_name_from_issue(jira_issue.id)
-
-    if service == service_from_path:
-        return service
-
-    if service:
-        return '{} from {}'.format(service, service_from_path)
-
-    return service_from_path
-
-
-def get_raw_changelog_entries(jira_issue):
-    return getattr(jira_issue.fields, config.CUSTOM_FIELD_ID_CHANGELOG), getattr(jira_issue.fields, 
config.CUSTOM_FIELD_ID_CLI_CHANGELOG) - - -def is_manual_change_ticket(jira_issue): - labels = getattr(jira_issue.fields, constants.DEXREQ_TICKET_LABELS) - return any(label in config.CLI_PR_REQUIRED_LABELS for label in labels) - - -def format_entry(issue, service, entry, is_cli=False): - return constants.CHANGELOG_ENTRY.format( - link=constants.JIRA_DOMAIN + issue, - dexreq_issue=issue, - service=service, - content=parse(entry, is_cli) - ) - - -def parse(entry, is_cli): - cleaned_entry = previous_line = '' - for line in entry.splitlines(): - line = line.strip() - - # exclude empty lines and lines that only say "changelog" - if len(line) == 0 or re.match(constants.CHANGELOG_TITLE_PATTERN, line.lower()): - continue - - match = re.search(constants.DESCRIPTION_PATTERN, line) - line = line[match.end():] - - # exclude lines with no words, except if it's a heading underline - if len(line) == 0: - if is_cli and len(previous_line) > 0 and not previous_line.startswith('~'): - line = '~' * len(previous_line) - else: - continue - - # try to format the line if it has more than 1 word - if len(line.split()) > 1: - command = re.fullmatch(constants.CLI_COMMAND_PATTERN, line.lower()) - if command: - line = constants.CLI_COMMAND_FORMAT.format(command=command[1]) - else: - line = constants.CLI_DESCRIPTION_FORMAT.format(text=line) if is_cli else \ - constants.SDK_DESCRIPTION_FORMAT.format(text=line) - - cleaned_entry += line + '\n' - previous_line = line - - return cleaned_entry - - -def compile_reports(entries, issues, version, release_date): - manual_change_issues = list(filter(lambda issue: issue not in cli_issues, issues)) - - sdk_report = constants.SDK_CHANGELOG_REPORT.format( - mentions=' '.join(constants.SDK_MENTIONS), - summary=fill_summary_template(issues, constants.SDK_SECTION), - changelog_entries=entries.get(constants.SDK_SECTION) - ) - - cli_summary = fill_summary_template(cli_issues, constants.CLI_SECTION) + \ - fill_summary_template(manual_change_issues, constants.CLI_MANUAL_CHANGE_SECTION) - header = constants.CLI_CHANGELOG_HEADER_TEMPLATE.format(version=version, release_date=release_date) - - cli_report = constants.CLI_CHANGELOG_REPORT.format( - mentions=' '.join(constants.CLI_MENTIONS), - summary=cli_summary, - cli_header=header, - cli=entries.get(constants.CLI_SECTION) - ) - - return [sdk_report, cli_report] - - -def fill_summary_template(issue_list, section): - return constants.PR_SUMMARY_TEMPLATE.format( - count=len(issue_list), - plural='' if len(issue_list) == 1 else 's', - section=section, - issue_list=issue_list - ) - - -def post_reports_on_slack(reports): - url = os.environ.get(constants.ENV_SLACK_WEBHOOK) - webhook = WebhookClient(url) - for report in reports: - webhook.send(text=report) - - -if __name__ == '__main__': - """ - This script will be used to gather CHANGELOG entries from DEXREQ tickets included in SDK and CLI's - bulk public PRs. - """ - parser = argparse.ArgumentParser(description='Changelog entry collection post bulk public PR generation.') - parser.add_argument('--branch', - required=True, - help='The branch that triggered this job') - parser.add_argument('--tool', - default=config.CLI_NAME, - help='The tool for which to generate the preview. 
Accepted values: {}'.format(', '.join(config.TOOL_NAMES)))
-
-    args = parser.parse_args()
-
-    global cli_issues
-    tool_name = get_tool_name(args.branch)
-    version = os.environ.get(constants.ENV_VERSION)
-    release_date = get_release_date()
-    cli_issues = []
-
-    bitbucket_utils.setup_bitbucket(args)
-    jira = util.JIRA_CLIENT()
-
-    issues = get_issues_from_newest_bulk_public_pr(tool_name, get_monday_date())
-    changelog_entries = get_changelog_entries(issues)
-    reports = compile_reports(changelog_entries, issues, version, release_date)
-    post_reports_on_slack(reports)
diff --git a/scripts/auto_gen_utils/team_city_scripts/cli/public/set_changed_service_env_variable.py b/scripts/auto_gen_utils/team_city_scripts/cli/public/set_changed_service_env_variable.py
deleted file mode 100644
index 0c2cbaea8f..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/cli/public/set_changed_service_env_variable.py
+++ /dev/null
@@ -1,48 +0,0 @@
-import sys
-import os
-import re
-
-# Add the root of the package, two directories up, to the sys.path
-dir_path = os.path.dirname(os.path.realpath(__file__))
-sys.path.append(os.path.join(dir_path, '../../..'))
-sys.path.append(os.path.join(dir_path, '../../'))
-
-import config  # noqa: ignore=F402
-import util  # noqa: ignore=F402
-from add_or_update_scripts.cli_add_or_update_spec import determine_pom_location  # noqa: ignore=F402
-
-CHANGED_SERVICE = 'env.CHANGED_SERVICE'
-SERVICES_DIR = "services"
-POM_LOCATION_PATTERN = "services/(.*)/pom.xml"
-
-
-def _get_service_name_from_path(file_path):
-    result = re.search(POM_LOCATION_PATTERN, file_path)
-    return result.group(1)  # group(1) returns the first capture group, i.e. the service name
-
-
-def get_service_name_from_issue(dexreq_issue):
-    jira_obj = util.get_dexreq_issue(dexreq_issue)
-    spec_name = getattr(jira_obj.fields, config.CUSTOM_FIELD_ID_SPEC_FRIENDLY_NAME)
-    artifact_id = getattr(jira_obj.fields, config.CUSTOM_FIELD_ID_ARTIFACT_ID)
-    services_root_dir = os.path.join(config.CLI_REPO_RELATIVE_LOCATION, SERVICES_DIR)
-
-    path = determine_pom_location(artifact_id, spec_name, services_root_dir)
-    return _get_service_name_from_path(path)
-
-
-if __name__ == '__main__':
-    """
-    This script will be used in the Preview/Public CLI pipeline to determine the changed service directory.
-    We use this information to run make gen and make docs for the changed service.
-    """
-    tool_name = config.CLI_NAME
-    last_commit_message = util.get_last_commit_message(tool_name)
-    issue_keys = util.parse_issue_keys_from_commit_message(last_commit_message)
-    if len(issue_keys) != 1:
-        print('Expected exactly one DEXREQ issue, found: {}'.format(issue_keys))
-        sys.exit(0)
-
-    service_name = get_service_name_from_issue(issue_keys[0])
-    print("Changed service: " + service_name)
-    print("##teamcity[setParameter name='{}' value='{}']".format(CHANGED_SERVICE, service_name))
diff --git a/scripts/auto_gen_utils/team_city_scripts/datagen/preview/1_setup_testdata_preview_branch.sh b/scripts/auto_gen_utils/team_city_scripts/datagen/preview/1_setup_testdata_preview_branch.sh
deleted file mode 100755
index b929aa2e31..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/datagen/preview/1_setup_testdata_preview_branch.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-set -e
-set -x
-
-echo Creating venv to install sdk locally
-. /opt/odo/tox_sic/venv/bin/activate
-virtualenv .sdk-venv
-. 
.sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# If `set -e`, must disable "fail on non-zero exit code" using `set +e` -# because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - - -## AUTOGEN ## -cd autogen -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt --quiet - -# run the setup job for step 2 in ./autogen -ls -la -pwd - -ls -la - - - -python ./2_pre_generation_set_up.py --build-id $BUILD_ID --tool TestDataGen - -# back out into root directory -cd .. \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/datagen/preview/6_report_gen_and_build_status.sh b/scripts/auto_gen_utils/team_city_scripts/datagen/preview/6_report_gen_and_build_status.sh deleted file mode 100755 index ba9f9c2755..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/datagen/preview/6_report_gen_and_build_status.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -e -set -x - -pwd -ls -la - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - -cd sdk-client-test-data -branch=`git branch|grep "^\*"|cut -c3-` -if [[ ${branch} == *"bulk"* ]]; then - build_type="bulk_pending_merge_preview" -else - build_type="individual_preview" -fi - -cd .. -# commit changes from generation and build for typescript-sdk -# this step will run no matter what happened before it so it can report success / failure to the JIRA tickets -cd autogen -python ./3_report_generation_status.py --build-id $BUILD_ID --tool TestDataGen --build-type ${build_type} -cd .. - -ls -la ./oci-dotnet-sdk diff --git a/scripts/auto_gen_utils/team_city_scripts/datagen/preview/9_mark_done_post_bulk_merge.sh b/scripts/auto_gen_utils/team_city_scripts/datagen/preview/9_mark_done_post_bulk_merge.sh deleted file mode 100755 index 149f41d81c..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/datagen/preview/9_mark_done_post_bulk_merge.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash - -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - - -# run the job to pull JIRAs from the queue and update POM -# this job expects to be run from ./autogen -cd auto-gen -ls -la -pwd - -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt - -# $BUILD_ID should be defined within the TC job configuration: export BUILD_ID=%teamcity.build.id% -FULL_VERSION=`cat ../sdk-client-test-data/release-version.txt` -python ./5_mark_preview_tickets_done_post_merge.py --build-id $BUILD_ID --tool TestDataGen --full-version $FULL_VERSION --allow-transition-overall-issue-to-done --build-conf-name Sdk_SdkTestDataAndExamples_GenerateSdkExamplesPreview - -# back out into root directory -cd .. 
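Step 9 above passes --full-version straight from a file in the test-data checkout. A sketch, assuming release-version.txt holds a bare version string:

    with open('sdk-client-test-data/release-version.txt') as f:
        full_version = f.read().strip()
    print(full_version)  # handed to 5_mark_preview_tickets_done_post_merge.py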
diff --git a/scripts/auto_gen_utils/team_city_scripts/datagen/public/1_setup_testdata_branch.sh b/scripts/auto_gen_utils/team_city_scripts/datagen/public/1_setup_testdata_branch.sh deleted file mode 100755 index 239718a621..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/datagen/public/1_setup_testdata_branch.sh +++ /dev/null @@ -1,31 +0,0 @@ -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - - -## AUTOGEN ## -cd autogen -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt --quiet - -# run the setup job for step 2 in ./autogen -ls -la -pwd - -ls -la - - -python ./2_pre_generation_set_up.py --build-id $BUILD_ID --tool TestDataGen - -# back out into root directory -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/datagen/public/6_report_gen_and_build_status.sh b/scripts/auto_gen_utils/team_city_scripts/datagen/public/6_report_gen_and_build_status.sh deleted file mode 100755 index 73302c13f0..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/datagen/public/6_report_gen_and_build_status.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -e -set -x - -pwd -ls -la - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - -cd sdk-client-test-data -if [[ `git branch|grep "\*"|grep "bulk"` ]]; then - echo "Found bulk public" - export BUILD_TYPE_ARG="--build-type bulk_pending_merge_public" -else - export BUILD_TYPE_ARG="--build-type individual_public" -fi -cd .. - -# commit changes from generation and build for typescript-sdk -# this step will run no matter what happened before it so it can report success / failure to the JIRA tickets -cd autogen -python ./3_report_generation_status.py --build-id $BUILD_ID --tool TestDataGen $BUILD_TYPE_ARG -cd .. - -ls -la ./oci-dotnet-sdk diff --git a/scripts/auto_gen_utils/team_city_scripts/datagen/public/9_mark_done_post_bulk_merge.sh b/scripts/auto_gen_utils/team_city_scripts/datagen/public/9_mark_done_post_bulk_merge.sh deleted file mode 100755 index 89a342b6d5..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/datagen/public/9_mark_done_post_bulk_merge.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash - -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. 
.sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - - -# run the job to pull JIRAs from the queue and update POM -# this job expects to be run from ./autogen -cd auto-gen -ls -la -pwd - -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt - -# $BUILD_ID should be defined within the TC job configuration: export BUILD_ID=%teamcity.build.id% -FULL_VERSION=`cat ../sdk-client-test-data/release-version.txt` -python ./5_mark_preview_tickets_done_post_merge.py --build-id $BUILD_ID --tool TestDataGen --full-version $FULL_VERSION --allow-transition-overall-issue-to-deploy --build-conf-name Sdk_SdkTestDataAndExamples_GenerateSdkExamples - -# back out into root directory -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/1_setup_dotnet_sdk_preview_branch.sh b/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/1_setup_dotnet_sdk_preview_branch.sh deleted file mode 100755 index dd9d51c9b5..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/1_setup_dotnet_sdk_preview_branch.sh +++ /dev/null @@ -1,33 +0,0 @@ -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# If `set -e`, must disable "fail on non-zero exit code" using `set +e` -# because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - - -## AUTOGEN ## -cd autogen -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt --quiet - -# run the setup job for step 2 in ./autogen -ls -la -pwd - -ls -la - - - -python ./2_pre_generation_set_up.py --build-id $BUILD_ID --tool DotNetSDK - -# back out into root directory -cd .. 
\ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/3_record_sdk_generation_success.sh b/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/3_record_sdk_generation_success.sh deleted file mode 100755 index 6dedbb7737..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/3_record_sdk_generation_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch dotnet_sdk_generation_success.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/5_record_sdk_build_success.sh b/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/5_record_sdk_build_success.sh deleted file mode 100755 index 28798bd89c..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/5_record_sdk_build_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch dotnet_sdk_build_success.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/6_report_gen_and_build_status.sh b/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/6_report_gen_and_build_status.sh deleted file mode 100755 index 6a2d79a6d2..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/6_report_gen_and_build_status.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -e -set -x - -pwd -ls -la - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - -cd oci-dotnet-sdk -branch=`git branch|grep "^\*"|cut -c3-` -if [[ ${branch} == *"bulk"* ]]; then - build_type="bulk_pending_merge_preview" -else - build_type="individual_preview" -fi - -cd .. -# commit changes from generation and build for typescript-sdk -# this step will run no matter what happened before it so it can report success / failure to the JIRA tickets -cd autogen -python ./3_report_generation_status.py --build-id $BUILD_ID --tool DotNetSDK --build-type ${build_type} -cd .. - -ls -la ./oci-dotnet-sdk diff --git a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/7_commit_generated_changes.sh b/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/7_commit_generated_changes.sh deleted file mode 100755 index bdbb1d2e34..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/7_commit_generated_changes.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -x -set -e - -# configure git for this commit -git config --global user.email "$GIT_USER_EMAIL" -git config --global user.name "$GIT_USER_NAME" - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - -# get the branch we're on -cd oci-dotnet-sdk -branch=`git branch|grep "^\*"|cut -c3-` -if [[ ${branch} == *"bulk"* ]]; then - build_type="bulk_pending_merge_preview" -else - build_type="individual_preview" -fi -cd .. - -# commit changes from generation and build -cd autogen -ls -la -python ./4_on_generation_complete.py --build-id $BUILD_ID --tool DotNetSDK --build-type ${build_type} -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/9_mark_done_post_bulk_merge.sh b/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/9_mark_done_post_bulk_merge.sh deleted file mode 100755 index ac1b752e7e..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/preview/9_mark_done_post_bulk_merge.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash - -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. 
.sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - - -# run the job to pull JIRAs from the queue and update POM -# this job expects to be run from ./autogen -cd auto-gen -ls -la -pwd - -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt - -# Generate version string for .NET SDK -cd ../oci-dotnet-sdk -./nuget-package.sh version -cd ../auto-gen -# $BUILD_ID should be defined within the TC job configuration: export BUILD_ID=%teamcity.build.id% -FULL_VERSION=`cat ../oci-dotnet-sdk/version.txt`.$BUILD_ID -python ./5_mark_preview_tickets_done_post_merge.py --build-id $BUILD_ID --tool DotNetSDK --full-version $FULL_VERSION --allow-transition-overall-issue-to-done --build-conf-name Sdk_DotNetSdk_BuildDotNetSdkPreview - -# back out into root directory -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/1_setup_dotnet_sdk_branch.sh b/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/1_setup_dotnet_sdk_branch.sh deleted file mode 100755 index 41432a51ab..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/1_setup_dotnet_sdk_branch.sh +++ /dev/null @@ -1,31 +0,0 @@ -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - - -## AUTOGEN ## -cd autogen -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt --quiet - -# run the setup job for step 2 in ./autogen -ls -la -pwd - -ls -la - - -python ./2_pre_generation_set_up.py --build-id $BUILD_ID --tool DotNetSDK - -# back out into root directory -cd .. 
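The FULL_VERSION computation in step 9 above appends the TeamCity build id to the version emitted by ./nuget-package.sh version. A Python sketch of the same arithmetic, using the file and environment variable the script relies on:

    import os

    with open('oci-dotnet-sdk/version.txt') as f:
        base = f.read().strip()
    full_version = '{}.{}'.format(base, os.environ['BUILD_ID'])
    print(full_version)  # e.g. 12.34.0.56789 for base 12.34.0 and build 56789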
diff --git a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/3_record_sdk_generation_success.sh b/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/3_record_sdk_generation_success.sh deleted file mode 100755 index 6dedbb7737..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/3_record_sdk_generation_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch dotnet_sdk_generation_success.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/5_record_sdk_build_success.sh b/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/5_record_sdk_build_success.sh deleted file mode 100755 index 28798bd89c..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/5_record_sdk_build_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch dotnet_sdk_build_success.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/6_report_gen_and_build_status.sh b/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/6_report_gen_and_build_status.sh deleted file mode 100755 index 21b6718caa..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/6_report_gen_and_build_status.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -e -set -x - -pwd -ls -la - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - -cd oci-dotnet-sdk -if [[ `git branch|grep "\*"|grep "bulk"` ]]; then - echo "Found bulk public" - export BUILD_TYPE_ARG="--build-type bulk_pending_merge_public" -else - export BUILD_TYPE_ARG="--build-type individual_public" -fi -cd .. - -# commit changes from generation and build for typescript-sdk -# this step will run no matter what happened before it so it can report success / failure to the JIRA tickets -cd autogen -python ./3_report_generation_status.py --build-id $BUILD_ID --tool DotNetSDK $BUILD_TYPE_ARG -cd .. - -ls -la ./oci-dotnet-sdk diff --git a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/7_commit_generated_changes.sh b/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/7_commit_generated_changes.sh deleted file mode 100755 index 500a9dc0fb..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/7_commit_generated_changes.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -x -set -e - -# configure git for this commit -git config --global user.email "$GIT_USER_EMAIL" -git config --global user.name "$GIT_USER_NAME" - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - -# get the branch we're on -cd oci-dotnet-sdk -branch=`git branch|grep "^\*"|cut -c3-` -if [[ ${branch} == *"bulk"* ]]; then - build_type="bulk_pending_merge_public" -else - build_type="individual_public" -fi -cd .. - -# commit changes from generation and build -cd autogen -ls -la -python ./4_on_generation_complete.py --build-id $BUILD_ID --tool DotNetSDK --build-type ${build_type} -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/9_mark_done_post_bulk_merge.sh b/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/9_mark_done_post_bulk_merge.sh deleted file mode 100755 index 6028719963..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/dotnet_sdk/public/9_mark_done_post_bulk_merge.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash - -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. 
.sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - - -# run the job to pull JIRAs from the queue and update POM -# this job expects to be run from ./autogen -cd auto-gen -ls -la -pwd - -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt - -# Generate version string for .NET SDK -cd ../oci-dotnet-sdk -./nuget-package.sh version -cd ../auto-gen -# $BUILD_ID should be defined within the TC job configuration: export BUILD_ID=%teamcity.build.id% -FULL_VERSION=`cat ../oci-dotnet-sdk/version.txt`.$BUILD_ID -python ./5_mark_preview_tickets_done_post_merge.py --build-id $BUILD_ID --tool DotNetSDK --full-version $FULL_VERSION --allow-transition-overall-issue-to-deploy --build-conf-name Sdk_DotNetSdk_BuildDotNetSdkMaster - -# back out into root directory -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/github_issues/github_issue_summary_header.py b/scripts/auto_gen_utils/team_city_scripts/github_issues/github_issue_summary_header.py deleted file mode 100644 index e1c1dee518..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/github_issues/github_issue_summary_header.py +++ /dev/null @@ -1,100 +0,0 @@ -import re - - -class GitHubIssueSummaryHeader: - """Class that maps a Github issue to a specific Jira issue and vice-versa. - - The contract for mapping is based on the jira issue summary being in a specific format. - TODO: This contract is brittle. In the future, consider adding specific Jira fields - so that the contract is explicit. - """ - - # Format is repo_name, github_issue_num, github_issue_summary - # Example: '[GitHub Issue (oci-java-sdk #66)]: Empty compartment id in audit events' - JIRA_ISSUE_SUMMARY_TEMPLATE = '[GitHub Issue ({} #{})]: {}' - - def __init__(self, repo_name, issue_num, summary=''): - if not repo_name: - raise ValueError('A GitHub repository name must be defined') - if not issue_num: - raise ValueError('An issue number must be defined') - - self.repo_name = repo_name.encode('utf-8').strip() - self.issue_num = int(issue_num) - self.summary = summary.encode('utf-8').strip() - - def get_repo_name(self): - return self.repo_name - - def get_issue_num(self): - return self.issue_num - - def get_summary(self): - return self.summary - - def __str__(self): - return 'GitHubIssueSummaryHeader: [repo_name: {}, issue_num: {}, summary: {}]'.format(self.repo_name, - self.issue_num, - self.summary) - - def __eq__(self, other): - if not isinstance(other, self.__class__): - return False - - if self.repo_name != other.repo_name: - return False - - if self.issue_num != other.issue_num: - return False - - # Explicitly ignore the summary in case the customer edited it between script executions - - return True - - def __ne__(self, other): - return not self.__eq__(other) - - def __hash__(self): - return hash(self.repo_name) + hash(self.issue_num) - - def to_jira_issue_summary(self): - return self.JIRA_ISSUE_SUMMARY_TEMPLATE.format(self.repo_name, self.issue_num, self.summary) - - @staticmethod - def from_jira_issue_summary(jira_summary): - """Creates a new GitHubIssueSummaryHeader from the given jira issue summary. 
- - :param str jira_summary: The jira summary formatted as defined by JIRA_ISSUE_SUMMARY_TEMPLATE - :return: the header, or None if the summary could not be parsed - :rtype: GitHubIssueSummaryHeader - """ - repo_name = None - search = re.search(r'GitHub Issue \((.*?) #', jira_summary) - if search: - repo_name = search.group(1) - - issue_num = None - search = re.search(r'#(.*?)\)', jira_summary) - if search: - issue_num = search.group(1) - - summary = None - search = re.search(r']: (.*)$', jira_summary) - if search: - summary = search.group(1) - - if repo_name and issue_num and summary: - return GitHubIssueSummaryHeader(repo_name, issue_num, summary) - else: - return None - - @staticmethod - def from_github_issue(repo_name, github_issue): - """Creates a new GitHubIssueSummaryHeader from the given repository name and github issue. - - :param str repo_name: The Github repository name - :param github.Issue github_issue: The issue retrieved from Github - :return: The header - :rtype: GitHubIssueSummaryHeader - """ - return GitHubIssueSummaryHeader(repo_name, github_issue.number, github_issue.title) diff --git a/scripts/auto_gen_utils/team_city_scripts/github_issues/github_label_routing_config.yaml b/scripts/auto_gen_utils/team_city_scripts/github_issues/github_label_routing_config.yaml deleted file mode 100644 index c890149968..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/github_issues/github_label_routing_config.yaml +++ /dev/null @@ -1,757 +0,0 @@ -# Configuration for defining supported labels in OCI SDK GitHub Repos. - ---- -# Defines the mapping between service labels on GitHub --> Jira Project owned by the respective service team. -# Mapping configuration supports defining more than one team teamRoutingInfo mapping, but isn't supported for MVP. -# For example, compute has multiple teams for different operations. Allowing the schema to account for this. -# However, all service labels will map to the default value for now. 
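As a rough sketch of how a consumer might resolve the Jira project for a GitHub label under the structure documented above (PyYAML assumed; jira_project_for and the file path are illustrative):

    import yaml

    with open('github_label_routing_config.yaml') as f:
        cfg = yaml.safe_load(f)

    def jira_project_for(label):
        for entry in cfg['serviceLabels']:
            if entry['label'] == label:
                # only the 'default' teamRoutingInfo mapping is used for MVP
                return entry['teamRoutingInfo'][0]['jira']['project']

    print(jira_project_for('Object Storage'))  # -> CASPER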
-serviceLabels: -- label: API Gateway - color: e21f29 - description: Issue pertains to the API Gateway service - teamRoutingInfo: - - name: default - jira: - project: APIGW - email: oci_apigw_ww_grp@oracle.com - githubUsers: - - owainlewis - - udhanshu-gargs -- label: OS Management - color: e21f29 - description: Issue pertains to the OS Management service - teamRoutingInfo: - - name: default - jira: - project: OSMS - email: oci_osms_us_grp@oracle.com - githubUsers: - - wbora -- label: Analytics - color: e21f29 - description: Issue pertains to the Analytics service - teamRoutingInfo: - - name: default - jira: - project: OAC - email: oci_oac_ww_grp@oracle.com - githubUsers: - - alanwlee - - olouchart -- label: Announcements - color: e21f29 - description: Issue pertains to the Announcements service - teamRoutingInfo: - - name: default - jira: - project: AS - email: announcement_service_engg_team_us_grp@oracle.com - githubUsers: null -- label: Archive Storage - color: e21f29 - description: Issue pertains to the Archive Storage service - teamRoutingInfo: - - name: default - jira: - project: CASPER - email: opc_casper_us_grp@oracle.com - githubUsers: - - jfriedly - - billywan - - skunjuma -- label: Audit - color: e21f29 - description: Issue pertains to the Audit service - teamRoutingInfo: - - name: default - jira: - project: SHERLOCK - email: hydra_dev_us_grp@oracle.com - githubUsers: null -- label: Block Volume - color: e21f29 - description: Issue pertains to the Block Volume service - teamRoutingInfo: - - name: default - jira: - project: BLOCK - email: sic_block_storage_us_grp@oracle.com - githubUsers: - - maxverun-oci - - zkkong-oci -- label: Compute - color: e21f29 - description: Issue pertains to the Compute service - teamRoutingInfo: - - name: default - jira: - project: BMI - email: compute_dev_us_grp@oracle.com - githubUsers: - - olegd - - nguynguy -- label: Container Engine - color: e21f29 - description: Issue pertains to the Container Engine for Kubernetes service - teamRoutingInfo: - - name: default - jira: - project: OKE - email: ken.caruso@oracle.com - githubUsers: null -- label: Data Transfer - color: e21f29 - description: Issue pertains to the Data Transfer service - teamRoutingInfo: - - name: default - jira: - project: OTEC - email: - - data_transfer_platform_dev_ww_grp@oracle.com - githubUsers: - - abhayap-oci -- label: Database - color: e21f29 - description: Issue pertains to the Database service - teamRoutingInfo: - - name: default - jira: - project: DBAAS - email: sic_dbaas_cp_us_grp@oracle.com - githubUsers: - - prafullsinghal - - ssingla2k -- label: DNS - color: e21f29 - description: Issue pertains to the DNS and Traffic Management service - teamRoutingInfo: - - name: default - jira: - project: PD - email: oci_pubdns_dev_us_grp@oracle.com - githubUsers: - - marchowes-work - - jyoung-work -- label: Email - color: e21f29 - description: Issue pertains to the Email Delivery service - teamRoutingInfo: - - name: default - jira: - project: ED - email: email-dev_us_grp@oracle.com - githubUsers: - - wallison - - pandrusz -- label: Events - color: e21f29 - description: Issue pertains to the Events service - teamRoutingInfo: - - name: default - jira: - project: CLEV - email: oci_events_dev_grp@oracle.com - githubUsers: - - miahuang - - rohabhat - - rovashis - - dkyes - - gkwen -- label: File Storage - color: e21f29 - description: Issue pertains to the File Storage service - teamRoutingInfo: - - name: default - jira: - project: FFSW - email: sic_ffsw_us_grp@oracle.com - githubUsers: - - 
jwmcclai - - vvoloboi -- label: Functions - color: e21f29 - description: Issue pertains to the Functions service - teamRoutingInfo: - - name: default - jira: - project: FAAS - email: serverless_grp@oracle.com - githubUsers: - - rikgibson - - tteggel -- label: Health Checks - color: e21f29 - description: Issue pertains to the Health Checks service - teamRoutingInfo: - - name: default - jira: - project: OHC - email: groan-chomskies_us_grp@oracle.com - githubUsers: - - fredtibbitts - - ascarb-oci - - stewarttfrancis -- label: IAM - color: e21f29 - description: Issue pertains to the Identity service - teamRoutingInfo: - - name: default - jira: - project: ID - email: oci_identity_team_us_grp@oracle.com - githubUsers: - - etwebste - - gpremach -- label: Key Management - color: e21f29 - description: Issue pertains to the Key Management service - teamRoutingInfo: - - name: default - jira: - project: KMS - email: sparta_kms_us_grp@oracle.com - githubUsers: - - frolinek - - asiow -- label: Load Balancing - color: e21f29 - description: Issue pertains to the Load Balancing service - teamRoutingInfo: - - name: default - jira: - project: LBCP - email: oci_lbaas_dev_us_grp@oracle.com - githubUsers: - - chencche-oci - - ajiva-oci - - gopigopa-oci -- label: Marketplace - color: e21f29 - description: Issue pertains to the Marketplace service - teamRoutingInfo: - - name: default - jira: - project: MAR - email: oci_marketplace_seattle_us_grp@oracle.com - githubUsers: - - pnichols9000 - - acato -- label: Monitoring - color: e21f29 - description: Issue pertains to the Monitoring service - teamRoutingInfo: - - name: default - jira: - project: TEL - email: pic_ion_dev_grp@oracle.com - githubUsers: - - sudha.raghavan@oracle.com - - dana.cozmei@oracle.com -- label: Networking - color: e21f29 - description: Issue pertains to the Networking service - teamRoutingInfo: - - name: default - jira: - project: VCN - email: bmc_vcn_cp_us_grp@oracle.com - githubUsers: - - aurashb - - balasiv -- label: Notifications - color: e21f29 - description: Issue pertains to the Notifications service - teamRoutingInfo: - - name: default - jira: - project: ONS - email: team_oci_ons_us_grp@oracle.com - githubUsers: - - qllioci - - paulkimoracle -- label: Object Storage - color: e21f29 - description: Issue pertains to the Object Storage service - teamRoutingInfo: - - name: default - jira: - project: CASPER - email: opc_casper_us_grp@oracle.com - githubUsers: - - jfriedly - - billywan - - skunjuma -- label: Registry - color: e21f29 - description: Issue pertains to the Registry service - teamRoutingInfo: - - name: default - jira: - project: OCIR - email: justin.ko@oracle.com - githubUsers: null -- label: Resource Manager - color: e21f29 - description: Issue pertains to the Resource Manager service - teamRoutingInfo: - - name: default - jira: - project: ORCH - email: team_oci_orm_us_grp@oracle.com - githubUsers: - - sn-narang -- label: Search - color: e21f29 - description: Issue pertains to the Search service - teamRoutingInfo: - - name: default - jira: - project: RQS - email: rqs_engg_team_us_grp@oracle.com - githubUsers: - - priteshkp - - mojiezhong -- label: SDK - color: e21f29 - description: Issue pertains to the SDK itself and not specific to any service - teamRoutingInfo: - - name: default - jira: - project: DEX - email: jalevy_org_ww@oracle.com - githubUsers: - - jodoglevy -- label: Streaming - color: e21f29 - description: Issue pertains to the Streaming service - teamRoutingInfo: - - name: default - jira: - project: STREAMSTR - email: 
opc_streaming_us_grp@oracle.com - githubUsers: - - JulienDollon - - tbdSomnath -- label: WAF - color: e21f29 - description: Issue pertains to the WAF service - teamRoutingInfo: - - name: default - jira: - project: WAAS - email: oci_waas_dev_us_grp@oracle.com - githubUsers: - - admtnnr - - gibson042 -- label: Application Migration - color: e21f29 - description: Issue pertains to the Application Migration service - teamRoutingInfo: - - name: default - jira: - project: MIGRATE - email: oci-ams-dev_ww_grp@oracle.com - githubUsers: null -- label: Data Flow - color: e21f29 - description: Issue pertains to the Data Flow service - teamRoutingInfo: - - name: default - jira: - project: SSS - email: sss_dev_ww_grp@oracle.com - githubUsers: - - thlau - - alexandp -- label: Data Catalog - color: e21f29 - description: Issue pertains to the Data Catalog service - teamRoutingInfo: - - name: default - jira: - project: DCAT - email: datacatalog_ww_grp@oracle.com - githubUsers: - - rvelisar - - tshephar - - gseethar - - jspeidel -- label: Data Science - color: e21f29 - description: Issue pertains to the Data Science service - teamRoutingInfo: - - name: default - jira: - project: ODSC - email: datascience_grp@oracle.com - githubUsers: - - jason_slepicka - - janie_chen - - hcavalle - - brandon_ayers -- label: Limits - color: e21f29 - description: Issue pertains to the Limits service - teamRoutingInfo: - - name: default - jira: - project: LIM - email: platform_limits_grp@oracle.com - githubUsers: - - philneworacle - - marekczajka -- label: Cloud Shell - color: e21f29 - description: Feedback/Issue related to the Cloud Shell service - teamRoutingInfo: - - name: default - jira: - project: CLOUDSH - email: oci_cloud_shell_ww_grp@oracle.com - githubUsers: - - ckasso -- label: Blockchain - color: e21f29 - description: Issue pertains to the Blockchain service - teamRoutingInfo: - - name: default - jira: - project: OBP - email: bcs_devops_ww_grp@oracle.com - githubUsers: - - maurice.gamanho@oracle.com - - carlo.innocenti@oracle.com -- label: Budget - color: e21f29 - description: Issue pertains to the Budget service - teamRoutingInfo: - - name: default - jira: - project: COMP - email: plat_compartments_us_grp@oracle.com - githubUsers: - - philneworacle - - gagankarora -- label: Content and Experience - color: e21f29 - description: Issue pertains to the Content and Experience service - teamRoutingInfo: - - name: default - jira: - project: CEC - email: cec_devops_ww_grp@oracle.com - githubUsers: - - nbshah - - sfrankli -- label: MySQL Database - color: e21f29 - description: Issue pertains to the MySQL Database service - teamRoutingInfo: - - name: default - jira: - project: MY - email: mysqlaas-ops_ww_grp@oracle.com - githubUsers: - - mark.leith@oracle.com - - airton.lastori@oracle.com - - josh.sled@oracle.com -- label: NoSQL Database - color: e21f29 - description: Issue pertains to the NoSQL Database service - teamRoutingInfo: - - name: default - jira: - project: NOSQL - email: andc_ops_ww_grp@oracle.com - githubUsers: - - chlamb - - mbrey -- label: Support Management - color: e21f29 - description: Issue pertains to the Support Management service - teamRoutingInfo: - - name: default - jira: - project: CIMS - email: oci_ops_cims_dev_us_grp@oracle.com - githubUsers: - - jayeshgangadharan - - chinmai-padalkar - - vaseer45 -- label: VMWare Solution - color: e21f29 - description: Issue pertains to the VMWare Solution service - teamRoutingInfo: - - name: default - jira: - project: OCVP - email: sic_ocvp_us_grp@oracle.com - 
githubUsers: - - pritams21 - - paykin -- label: Data Integration - color: e21f29 - description: Issue pertains to the Data Integration service - teamRoutingInfo: - - name: default - jira: - project: DIS - email: dis_ops_alerts_ww_grp@oracle.com - githubUsers: - - githubUser1 - - githubUser2 -- label: Data Safe - color: e21f29 - description: Issue pertains to the Data Safe service - teamRoutingInfo: - - name: default - jira: - project: DS - email: datasafe_dex_ww_grp@oracle.com - githubUsers: - - CHLIANG - - DGRAJ -- label: Digital Assistant - color: e21f29 - description: Issue pertains to the Digital Assistant service - teamRoutingInfo: - - name: default - jira: - project: ODA - email: ibcs_infra_ww_grp@oracle.com - githubUsers: - - cbroadbe - - srikantoracle -- label: Secret Management - color: e21f29 - description: Issue pertains to the Secret Management service - teamRoutingInfo: - - name: default - jira: - project: SECSVC - email: team_oci_vault_us_grp@oracle.com - githubUsers: - - frolinek - - asiow -- label: Usage - color: e21f29 - description: Issue pertains to the Usage service - teamRoutingInfo: - - name: default - jira: - project: METER - email: oci_metering_team_us_grp@oracle.com - githubUsers: - - brent.eyler@oracle.com - - xingchi.cheng@oracle.com -- label: Work Request - color: e21f29 - description: Issue pertains to the Work Request service - teamRoutingInfo: - - name: default - jira: - project: WORKREQ - email: kewilke_org_ww@oracle.com - githubUsers: - - Kenwilk13 - - josh-potter -- label: Big Data - color: e21f29 - description: Issue pertains to the Big Data service - teamRoutingInfo: - - name: default - jira: - project: BDSV2 - email: obds-devops_ww_grp@oracle.com - githubUsers: - - vit-kotacka - - pblahaorcl -- label: Cloud Advisor - color: e21f29 - description: Issue pertains to the Cloud Advisor (Optimizer) service - teamRoutingInfo: - - name: default - jira: - project: OPTIMIZER - email: oracle_cloud_optimizer_us_grp@oracle.com - githubUsers: - - null - - null -- label: Cloud Guard - color: e21f29 - description: Issue pertains to the Cloud Guard service - teamRoutingInfo: - - name: default - jira: - project: SECCEN - email: seccen-engg_ww_grp@oracle.com - githubUsers: - - prakashyamuna - - kmapprity -- label: GoldenGate - color: e21f29 - description: Issue pertains to the GoldenGate service - teamRoutingInfo: - - name: default - jira: - project: GGS - email: ggs_team_ww_grp@oracle.com - githubUsers: - - sbalousek -- label: Logging - color: e21f29 - description: Issue pertains to the Logging service - teamRoutingInfo: - - name: default - jira: - project: HYD - email: hydra_dev_us_grp@oracle.com - githubUsers: - - dherasko - - sbhaktav -- label: Management Agent - color: e21f29 - description: Issue pertains to the Management Agent service - teamRoutingInfo: - - name: default - jira: - project: MGMTAGENT - email: team_oci_mgmtagent_macs_ww_grp@oracle.com - githubUsers: - - null - - null -- label: Management Dashboard - color: e21f29 - description: Issue pertains to the Management Dashboard service - teamRoutingInfo: - - name: default - jira: - project: MGMTUI - email: em_target_analytics_grp@oracle.com - githubUsers: - - ora-jerry - - fmorshed -- label: Vulnerability Scanning - color: e21f29 - description: Issue pertains to the Vulnerability Scanning service - teamRoutingInfo: - - name: default - jira: - project: VSS - email: team_oci_scanning_us_grp@oracle.com - githubUsers: - - sanjaychadda - - JFairbairnSmith -- label: Web Application Acceleration and Security - 
color: e21f29 - description: Issue pertains to the Web Application Acceleration and Security service - teamRoutingInfo: - - name: default - jira: - project: WAAS - email: oci_waas_dev_us_grp@oracle.com - githubUsers: - - admtnnr - - gibson042 -- label: Operations Insights - color: e21f29 - description: Issue pertains to the Operations Insights service - teamRoutingInfo: - - name: default - jira: - project: DBX - email: dbx_dev_ww_grp@oracle.com - githubUsers: - - girbalac - - gurleyc1 -- label: Service Connector Hub - color: e21f29 - description: Issue pertains to the Service Connector Hub - teamRoutingInfo: - - name: default - jira: - project: OCH - email: och_us-grp@oracle.com - githubUsers: - - JulienDollon -- label: Media Services - color: e21f29 - description: Issue pertains to the Media Services - teamRoutingInfo: - - name: default - jira: - project: DMP - email: oci_dig_media_svc_us_grp@oracle.com - githubUsers: - - null -- label: Application Dependency Management - color: e21f29 - description: Issue pertains to the Application Dependency Management - teamRoutingInfo: - - name: default - jira: - project: AMW - email: amw-dev_ww_grp@oracle.com - githubUsers: - - HGuiroux - - douglasclarke -- label: Logging Analytics - color: e21f29 - description: Issue pertains to the Logging Analytics - teamRoutingInfo: - - name: default - jira: - project: LOGAN - email: omc_loganalytics_dev_ww_grp@oracle.com - githubUsers: - - ora-jerry - - nimakaveh -- label: Tenant Manager - color: e21f29 - description: Issue pertains to the Tenant Manager - teamRoutingInfo: - - name: default - jira: - project: ACCMGMT - email: acc_customer_tools_us_grp@oracle.com - githubUsers: - - didijain - - philneworacle -- label: Certificates Management - color: e21f29 - description: Issue pertains to the Certificates Management - teamRoutingInfo: - - name: default - jira: - project: OCICERT - email: team_oci_certificates_us_grp@oracle.com - githubUsers: - - mmganes - - nkelley -# Project to create a ticket to keep track of unlabeled Github customer issues -unlabeledRoutingInfo: - jira: - project: DEX - -# Standard GitHub labels that are shared between all of the SDK GitHub repositories. -standardLabels: - - label: bug - color: ee0701 - - label: done pending release - color: d5f29b - - label: duplicate - color: cccccc - - label: enhancement - color: 84b6eb - - label: help wanted - color: 128A0C - - label: invalid - color: e6e6e6 - - label: pending implementation and release - color: fbca04 - - label: question - color: cc317c - - label: under consideration for backlog - color: bfdadc - - label: wontfix - color: ffffff diff --git a/scripts/auto_gen_utils/team_city_scripts/github_issues/github_repo_config.yaml b/scripts/auto_gen_utils/team_city_scripts/github_issues/github_repo_config.yaml deleted file mode 100644 index f853681038..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/github_issues/github_repo_config.yaml +++ /dev/null @@ -1,12 +0,0 @@ -# Configuration for defining supported OCI SDK GitHub Repos. 
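-# Note: each entry under supportedRepos is the short repository name; IssueConfig.get_full_repo_name
-# joins it with the owner value below to form the full "owner/repo" name used against the GitHub API.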
---- -owner: oracle -supportedRepos: -- oci-java-sdk -- oci-python-sdk -- oci-cli -- oci-go-sdk -- oci-ruby-sdk -- oci-typescript-sdk -- oci-dotnet-sdk -- oci-powershell-modules \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/github_issues/issue_config.py b/scripts/auto_gen_utils/team_city_scripts/github_issues/issue_config.py deleted file mode 100644 index 4e68fe7682..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/github_issues/issue_config.py +++ /dev/null @@ -1,128 +0,0 @@ -# Manages the configuration for all of the GitHub issue routing related scripts. -import os -import yaml -from pprint import PrettyPrinter - -pp = PrettyPrinter(indent=2) - - -def load_yaml(yaml_file): - """Loads a given YAML file. - - :param str yaml_file: - The path of the yaml file to load. - :return: The dictionary containing the parsed yaml file - :rtype: dict - """ - with open(yaml_file, 'r') as stream: - return yaml.safe_load(stream) - - -class IssueConfig: - """Loads the configuration yaml files in order to access the configuration for GitHub issue routing. - - :param str issue_routing_config_file: (optional) - The path to the issue routing configuration yaml file. If undefined, then the default configuration file - bundled with this script will be loaded. - :param str repo_config_file: (optional) - The path to the supported GitHub issues configuration yaml file. If undefined, then the default configuration - file bundled with this script will be loaded. - """ - def __init__(self, issue_routing_config_file=None, repo_config_file=None): - base_path = os.path.dirname(os.path.realpath(__file__)) - if issue_routing_config_file is None: - self.issue_routing_config = load_yaml(os.path.join(base_path, "github_label_routing_config.yaml")) - else: - self.issue_routing_config = load_yaml(issue_routing_config_file) - - if repo_config_file is None: - self.repo_config = load_yaml(os.path.join(base_path, "github_repo_config.yaml")) - else: - self.repo_config = load_yaml(repo_config_file) - - def get_issue_routing_config(self): - """Gets the parsed issue routing configuration. - - :return: The parsed issue routing configuration. - :rtype: dict - """ - return self.issue_routing_config - - def get_repo_config(self): - """Gets the parsed GitHub repository configuration. - - :return: The parsed repository configuration. - :rtype: dict - """ - return self.repo_config - - def get_standard_labels(self): - """Gets the list of standard labels. - - :return: the list of configured standard labels. - :rtype: list str - """ - labels = [] - for label_entry in self.get_issue_routing_config().get('standardLabels'): - labels.append(label_entry.get('label')) - return labels - - def get_service_labels(self): - """Gets the list of service labels. - - :return: the list of configured service labels. - :rtype: list str - """ - labels = [] - for label_entry in self.get_issue_routing_config().get('serviceLabels'): - labels.append(label_entry.get('label')) - return labels - - def get_service_teams_jira_projects(self): - """Gets the list of service team JIRA projects. 
- - :return: the list of JIRA projects - :rtype: list str - """ - jira_projects = [] - for service_label_cfg in self.get_issue_routing_config().get('serviceLabels'): - for team_routing_info in service_label_cfg.get('teamRoutingInfo'): - jira_projects.append(team_routing_info.get('jira').get('project')) - return jira_projects - - def print_config(self): - """Pretty prints the configuration to stdout.""" - print('Issue Routing Config:') - pp.pprint(self.get_issue_routing_config()) - - print('GitHub Repo Config:') - pp.pprint(self.get_repo_config()) - - def verify_github_user_access(self, github): - """Verifies that the current GitHub instance has access to the configured list of supported repositories. - - :param Github github: the GitHub instance - :raises ValueError: if the configured GitHub instance does not have access to all of the configured repositories. - """ - gh_user_repo_names = [] - for repo in github.get_user().get_repos(): - gh_user_repo_names.append(repo.name) - - inaccessible_repos = [] - for configured_repo_name in self.get_repo_config().get('supportedRepos'): - if configured_repo_name not in gh_user_repo_names: - inaccessible_repos.append(configured_repo_name) - - if inaccessible_repos: - raise ValueError( - "The configured GitHub access credentials do not have access to the following configured " - "repos: {}".format(inaccessible_repos)) - - def get_full_repo_name(self, short_repo_name): - """Gets the full GitHub repository name based on the configured owner and provided repository name - - :param str short_repo_name: the repository name - :return: the full repository name - :rtype: str - """ - return "{}/{}".format(self.get_repo_config().get('owner'), short_repo_name) diff --git a/scripts/auto_gen_utils/team_city_scripts/github_issues/jira_wrapper.py b/scripts/auto_gen_utils/team_city_scripts/github_issues/jira_wrapper.py deleted file mode 100644 index 1752a11e7f..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/github_issues/jira_wrapper.py +++ /dev/null @@ -1,230 +0,0 @@ -import traceback -import six - -from github_issue_summary_header import GitHubIssueSummaryHeader -from jira import JIRA -from jira.exceptions import JIRAError -from util import debug_print, get_partial_debug_print_func, get_partial_print_func - - -class JiraTransitionError(ValueError): - """Raised when unable to transition a jira issue to a specific status.""" - pass - - -class JiraWrapper: - """Class to wrap calls to Jira""" - JIRA_LABEL = 'Customer_Github_Issue' - JIRA_EXISTING_ISSUES_FOR_PROJECT_QUERY = 'project in ({}) and labels = ' + JIRA_LABEL - JIRA_EXISTING_ISSUES_QUERY = 'labels = ' + JIRA_LABEL - JIRA_OPTIONS = { - 'server': 'https://jira.oci.oraclecorp.com', - 'rest_api_version': 2, - 'verify': True - } - # Different service team projects configure different transition states in their work flows. - JIRA_CLOSED_STATUS_LIST = ['Done', 'Resolved', 'Resolve Issue', 'Closed', 'Close Issue'] - JIRA_OPEN_STATUS_LIST = ['Backlog', 'Needs Triage', 'To Do', 'Reopen'] - - JIRA_COMMENT_TYPE_INFO = "INFO" - JIRA_COMMENT_TYPE_ERROR = "ERROR" - JIRA_COMMENT_TYPE_SUCCESS = "SUCCESS" - JIRA_COMMENT_TYPE_TO_COLOR = { - JIRA_COMMENT_TYPE_INFO: '#707070', # gray - JIRA_COMMENT_TYPE_ERROR: '#FF0000', # red - JIRA_COMMENT_TYPE_SUCCESS: '#14892c', # green - } - - def __init__(self, username=None, password=None, session_id=None, commit=False): - if username and password: - self.jira_client = JIRA(self.JIRA_OPTIONS, basic_auth=(username, password)) - elif session_id: - cookie_options = dict(self.
JIRA_OPTIONS) - cookie_options['cookies'] = {'JSESSIONID': session_id} - self.jira_client = JIRA(cookie_options) - else: - raise ValueError("Either JIRA credentials or a session ID must be provided") - self.commit = commit - - def get_jira_client(self): - return self.jira_client - - def get_all_jira_issues(self): - """Queries Jira for all existing issues that have the 'Customer_Github_Issue' label. - - :return: dictionary of summaries to jira issues - :rtype: dict([GitHubIssueSummaryHeader|str], jira.Issue) - """ - # When searching for existing issues from Jira, ensure that all of the results are paginated and combined - # into a list for processing. - returned_issue_count_from_search = 50 - total_issue_count_from_search = 0 - all_jira_issues = [] - while returned_issue_count_from_search == 50: - returned_issues_page = self.jira_client.search_issues(self.JIRA_EXISTING_ISSUES_QUERY, - startAt=total_issue_count_from_search, - maxResults=50, - fields='summary,description,status,resolution,type', - expand='expand') - all_jira_issues.extend(returned_issues_page) - returned_issue_count_from_search = len(returned_issues_page) - total_issue_count_from_search += returned_issue_count_from_search - - summary_to_jira_issue_dict = dict() - for jira_issue in all_jira_issues: - jira_issue_summary_text = six.text_type(jira_issue.fields.summary).strip() - jira_summary = GitHubIssueSummaryHeader.from_jira_issue_summary(jira_issue_summary_text) - if jira_summary: - summary_to_jira_issue_dict[jira_summary] = jira_issue - else: - # If the issue summary isn't for a service team issue, it could be for a DEX filed unlabeled issue. - # If so, add the issue to the dictionary if the issue has a non-closed status. - jira_issue_status = str(jira_issue.fields.status) - if jira_issue_status not in self.JIRA_CLOSED_STATUS_LIST: - debug_print('Found non-service team jira issue [{}] ' - 'with non-closed status [{}]'.format(jira_issue, - jira_issue_status)) - summary_to_jira_issue_dict[jira_issue_summary_text] = jira_issue - else: - print('[WARN] Skipping jira issue. Unable to parse service team Jira issue summary for ' - '[Key: {}, Summary: {}]'.format(jira_issue.key, jira_issue.fields.summary)) - - return summary_to_jira_issue_dict - - def create_jira_issue(self, project, summary, description, service_issue=True, indent=0): - """ Creates a new jira issue. - - :param str project: the jira project - :param str summary: the summary for the issue - :param str description: the description for the issue - :param (optional) bool service_issue: if True (default), the issue is also labeled as service-owned - :param (optional) int indent: the number of spaces to indent debug logging - """ - jira_summary_max_length = 254 - if len(summary) > jira_summary_max_length: - summary = summary[0: jira_summary_max_length - 4] + '...'
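-            # The slice above keeps the first 250 characters and appends '...' so truncation
-            # is visible in the ticket while staying under the 254-character summary cap.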
- - if self.commit: - labels = [self.JIRA_LABEL, 'PP'] - if service_issue: - labels.append('Service-Owned-PP') - - new_issue = self.jira_client.create_issue(project=project, - summary=summary, - description=description, - issuetype={'name': 'Task'}, - labels=labels) - print_indent = get_partial_print_func(indent) - print_indent('Created new jira issue [project: {}, ' - 'key: {}, summary: {}]: {}'.format(project, - new_issue.key, - summary, - new_issue.permalink())) - else: - debug_print_indent = get_partial_debug_print_func(indent) - debug_print_indent('**** DRY RUN **** Not creating a new jira issue: ' - '[project: {}, summary: {}, description:\n{}]'.format(project, - summary, - description)) - - def transition_jira_issue_status(self, issue, status_list, do_add_resolution=False, indent=0): - """Transitions a jira issue to a status contained within the given status list. A list of status values is - used as different jira projects define different issue work flows. Finding the first matching status provides - more coverage to ensure that a given jira issue can be transitioned to a viable status. - - :param jira.Issue issue: the jira issue - :param list(str) status_list: the list of possible status values to transition to - :param (optional) boolean do_add_resolution: if True, then a Done resolution will be applied; else, False. - :param (optional) int indent: the number of spaces to indent debug logging - :raises JiraTransitionError: if no matching status was found for the issue's project work flow - """ - - transitions_from_jira = [] - transition_names_from_jira = [] # Used for error handling - transitions = self.jira_client.transitions(issue) - for transition in transitions: - transitions_from_jira.append(transition) - transition_names_from_jira.append(transition['name']) - - transition_to_apply = None - for status in status_list: - for transition in transitions_from_jira: - if transition['name'].lower() == status.lower(): - transition_to_apply = transition - break - if transition_to_apply: - break - - debug_print_indent = get_partial_debug_print_func(indent) - try: - if transition_to_apply and issue.fields.status.name == transition_to_apply['name']: - debug_print_indent('Not transitioning issue {} to status "{}" ({}) ' - 'because it already has that status'.format(issue.key, - transition_to_apply['name'], - transition_to_apply['id'])) - return - except (AttributeError, ValueError): - debug_print_indent('Unable to retrieve the current status of issue {}; ' - 'continuing with the transition'.format(issue.key)) - traceback.print_exc() - - if transition_to_apply: - if self.commit: - debug_print_indent('Transitioning {} to {} using transition {} (do_add_resolution: {})'.format( - issue.key, - transition_to_apply['name'], - transition_to_apply['id'], - do_add_resolution) - ) - try: - if do_add_resolution: - self.jira_client.transition_issue(issue, transition_to_apply['id'], resolution={'name': 'Done'}) - else: - self.jira_client.transition_issue(issue, transition_to_apply['id']) - except JIRAError as e: - raise JiraTransitionError("An error occurred while transitioning issue {} to '{}' ({}): {}".format( - issue.key, - transition_to_apply['name'], - transition_to_apply['id'], - e) - ) - else: - debug_print_indent('**** DRY RUN 
**** Not transitioning [{}] to "{}" ' - 'using transition {}'.format(issue.key, - transition_to_apply['name'], - transition_to_apply['id'])) - else: - raise JiraTransitionError( - "Don't know how to transition this issue to any of the status values [{}]. Jira returned: [{}]".format( - ', '.join(status_list), - ', '.join(transition_names_from_jira)) - ) - - def add_jira_comment(self, issue_key, comment, comment_type=None, indent=0): - """Adds a comment to the given issue - - :param number issue_key: the identifier for the jira issue - :param str comment: The comment to add - :param str comment_type: The comment type to control the color rendering in the jira comment. - See JIRA_COMMENT_TYPE_TO_COLOR - :param (optional) int indent: the number of spaces to indent debug logging - """ - debug_print_indent = get_partial_debug_print_func(indent) - color = self.JIRA_COMMENT_TYPE_TO_COLOR.get(comment_type, None) - if color: - comment = '{{color:{color}}}{comment}{{color}}'.format(color=color, comment=comment) - - if self.commit: - debug_print_indent("Making the following comment for {}".format(issue_key)) - debug_print_indent(comment) - self.jira_client.add_comment(issue_key, comment) - else: - debug_print_indent("**** DRY RUN **** Not making the following comment for {}\n{}".format(issue_key, - comment)) diff --git a/scripts/auto_gen_utils/team_city_scripts/github_issues/route_issues_from_github.py b/scripts/auto_gen_utils/team_city_scripts/github_issues/route_issues_from_github.py deleted file mode 100644 index f618ec7913..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/github_issues/route_issues_from_github.py +++ /dev/null @@ -1,442 +0,0 @@ -import argparse -import six -import sys -import util - -from github import Github -from jira_wrapper import JiraWrapper, JiraTransitionError -from jira.exceptions import JIRAError -from issue_config import IssueConfig -from util import debug_print, get_partial_print_func, get_partial_debug_print_func -from github_issue_summary_header import GitHubIssueSummaryHeader - -# Service Team Jira configuration for closing an issue -# Each service team has different work flows configured; thus, different field requirements -# upon transitioning a jira issue to a resolved/closed state. -JIRA_SERVICE_TEAM_CLOSE_TRANSITION_CONFIG = { - 'BMI': { - 'do_add_resolution': False - }, - 'DEX': { - 'do_add_resolution': False - } -} - -LINE_SEP_SIZE = 80 -CLOSED_JIRA_COMMENT_TEMPLATE = """ -Closing this issue since the Github issue has been closed. - -{} -""" - -REOPEN_JIRA_COMMENT_TEMPLATE = """ -Reopening this issue since the Github issue is still in an open state. - -If this issue has been resolved, ensure that the Github issue has been closed. - -{} -""" - -FALLBACK_TO_DEX_JIRA_DESCRIPTION_TEMPLATE = """ -Failed to create a JIRA ticket for Github issue with {labels} label. -Move this ticket to the {project} queue. - -Please review and respond to the customer issue reported via Github: - -Customer Github issue: - SDK Repository: {repo_name} - Issue #: {issue_num} - Title: {title} - Labels: {labels} - URL: {url} - - -Note that you'll need to close the Github issue prior to closing this Jira issue to prevent it from being automatically reopened. 
-""" - -NEW_JIRA_ISSUE_DESCRIPTION_TEMPLATE = """ -Please review and respond to the customer issue reported via Github: - -Customer Github issue: - SDK Repository: {repo_name} - Issue #: {issue_num} - Title: {title} - Labels: {labels} - URL: {url} - - -Note that you'll need to close the Github issue prior to closing this Jira issue to prevent it from being automatically reopened. -""" - -UNLABELED_JIRA_ISSUE_DESCRIPTION_TEMPLATE = """ -The following customer Github issues need to be labeled for proper issue routing or answered + closed: - -{} -""" - -closed_issue_date_threshold = 30 -jira_wrapper = None -# Indented print/debug print functions -print_indent_4 = get_partial_print_func(4) -print_indent_8 = get_partial_print_func(8) -print_indent_12 = get_partial_print_func(12) -debug_print_indent_4 = get_partial_debug_print_func(4) -debug_print_indent_8 = get_partial_debug_print_func(8) -debug_print_indent_12 = get_partial_debug_print_func(12) - - -def get_github_issues(gh_repo): - """Retrieves the list of customer filed GitHub issues from a given repository. - - :param Repository gh_repo: the Github repository instance - :return: the list of closed customer issues and the list of open customer issues - :rtype: list github.Issue, list github.Issue - """ - all_issues = gh_repo.get_issues(state='all') - # Filter out all PRs as GitHub considers PRs as issues. - customer_issues = [] - for gh_issue in all_issues: - try: - if not gh_issue.pull_request: - customer_issues.append(gh_issue) - except AttributeError: - continue - - closed_issues = [] - open_issues = [] - for c_issue in customer_issues: - if c_issue.state == 'closed': - closed_issues.append(c_issue) - elif c_issue.state == 'open': - open_issues.append(c_issue) - else: - raise ValueError("Unrecognized GitHub issue state: {}".format(c_issue)) - return closed_issues, open_issues - - -def handle_existing_jira_issue_for_closed_issue(jira_issue, gh_issue): - """Handles an existing jira issue for the given closed customer Github issue by closing the jira issue. - - :param jira.Issue jira_issue: the jira issue that corresponds to the customer issue - :param github.Issue gh_issue: the customer Github issue - """ - - # First determine if the 'do_add_resolution' boolean flag needs to be set - do_add_resolution = True - for project_prefix, jira_service_config in JIRA_SERVICE_TEAM_CLOSE_TRANSITION_CONFIG.items(): - if jira_issue.key.startswith(project_prefix): - do_add_resolution = jira_service_config.get('do_add_resolution', True) - - # Check the state to ensure that the issue status values are in sync. GitHub is source of truth. 
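-    # Jira state is only ever updated to match GitHub here; nothing is pushed back to GitHub.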
- jira_status = str(jira_issue.fields.status) - if jira_status not in jira_wrapper.JIRA_CLOSED_STATUS_LIST: - # Close jira issue with comment - try: - print_indent_12('Closing jira issue: {} (Opts: do_add_resolution: {})'.format(jira_issue.key, - do_add_resolution)) - jira_wrapper.transition_jira_issue_status(jira_issue, - jira_wrapper.JIRA_CLOSED_STATUS_LIST, - do_add_resolution=do_add_resolution, - indent=12) - jira_wrapper.add_jira_comment(jira_issue, - CLOSED_JIRA_COMMENT_TEMPLATE.format(gh_issue.html_url), - comment_type=jira_wrapper.JIRA_COMMENT_TYPE_INFO, - indent=12) - except JiraTransitionError as e: - print_indent_12('Unable to close jira issue {}: {}'.format(jira_issue, six.text_type(e))) - else: - print_indent_12('Skipping jira issue [{}] as it is already closed [{}]'.format(jira_issue, jira_status)) - - -def process_closed_issues_for_repo(gh_repo_name, closed_gh_issues, all_jira_issues_dict): - """Processes all closed customer Github issues by closing any jira issue that is open, if it exists. - - :param str gh_repo_name: the Github repository name - :param list(github.Issue) closed_gh_issues: the list of closed customer Github issues - :param dict(GitHubIssueSummaryHeader, jira.Issue) all_jira_issues_dict: the dictionary of jira issues - """ - print_indent_4('Closed Github issues for {}'.format(gh_repo_name)) - for gh_issue in closed_gh_issues: - gh_issue_summary = GitHubIssueSummaryHeader.from_github_issue(gh_repo_name, gh_issue) - jira_issue = all_jira_issues_dict.get(gh_issue_summary) - if jira_issue: - debug_print_indent_8('Found existing Jira issue: {}'.format(jira_issue.key)) - handle_existing_jira_issue_for_closed_issue(jira_issue, gh_issue) - else: - debug_print_indent_8('Skipping. No jira issue found for {}'.format(gh_issue_summary.to_jira_issue_summary())) - print_indent_4('Finished processing closed Github issues for {}'.format(gh_repo_name)) - debug_print('-' * LINE_SEP_SIZE) - - -def get_labels_from_github_issue(gh_issue): - """Gets all of the labels from the given Github issue. - - :param github.Issue gh_issue: the customer Github issue - :return: the list of labels - :rtype: list(str) - """ - labels = [] - for label in gh_issue.labels: - labels.append(label.name) - return labels - - -def find_intersecting_service_routing_configs(labels_list, routing_config): - """Finds all intersecting issue routing configurations based on the list of labels. - - :param list(str) labels_list: the list of labels - :param dict routing_config: the issue routing configuration - :return: the list of issue routing configurations that match - :rtype: list(dict) - """ - intersecting_routing_info_list = [] - for service_label_cfg in routing_config.get('serviceLabels'): - if service_label_cfg.get('label') in labels_list: - intersecting_routing_info_list.append(service_label_cfg) - return intersecting_routing_info_list - - -def create_new_jira_for_issue(gh_issue, gh_issue_summary, intersecting_cfgs, routing_config): - """Creates a new jira issue for the given Github customer issue. - - :param github.Issue gh_issue: the customer issue - :param GitHubIssueSummaryHeader gh_issue_summary: the issue summary - :param list(dict) intersecting_cfgs: the list of issue routing configurations that map by label to the customer - Github issue - :param dict routing_config: the issue routing configuration - """ - cfg = intersecting_cfgs[0] - if len(intersecting_cfgs) > 1: - print_indent_8('Found more than one issue routing config. 
Using: {}'.format(cfg)) - debug_print_indent_12('Configurations: {}'.format(', '.join(map(str, intersecting_cfgs)))) - jira_routing_info = cfg.get('teamRoutingInfo')[0].get('jira') - - gh_issue_labels = [] - for gh_label in gh_issue.labels: - gh_issue_labels.append(gh_label.name) - - # If there is an issue while creating a new JIRA ticket, try to create a JIRA ticket to the SDK team (DEX queue) - # as fallback and then manually move the ticket to the service team. - try: - description = NEW_JIRA_ISSUE_DESCRIPTION_TEMPLATE.format(repo_name=gh_issue_summary.get_repo_name(), - issue_num=gh_issue_summary.get_issue_num(), - title=gh_issue_summary.get_summary(), - labels=', '.join(gh_issue_labels), - url=gh_issue.html_url) - jira_wrapper.create_jira_issue(jira_routing_info.get('project'), - gh_issue_summary.to_jira_issue_summary(), - description, - indent=8) - except JIRAError: - print('Failed to create JIRA ticket in {} queue. As a fallback option, creating a ticket in {} queue'.format( - jira_routing_info.get('project'), routing_config.get('unlabeledRoutingInfo').get('jira').get('project'))) - description = FALLBACK_TO_DEX_JIRA_DESCRIPTION_TEMPLATE.format(project=jira_routing_info.get('project'), - labels=', '.join(gh_issue_labels), - repo_name=gh_issue_summary.get_repo_name(), - issue_num=gh_issue_summary.get_issue_num(), - title=gh_issue_summary.get_summary(), - url=gh_issue.html_url) - jira_wrapper.create_jira_issue(routing_config.get('unlabeledRoutingInfo').get('jira').get('project'), - gh_issue_summary.to_jira_issue_summary(), - description, - indent=8) - - -def create_or_update_jira_for_unlabeled_issues(repository_name, unlabeled_gh_issues, jira_issues_dict, routing_config): - """Creates a new Jira ticket, or adds a comment to an already open Jira ticket, so that all unlabeled customer - Github issues can be manually labeled for the next execution. - - :param str repository_name: the Github repository name - :param dict(GitHubIssueSummaryHeader, github.Issue) unlabeled_gh_issues: the customer Github issues missing matching routing labels - :param dict([GitHubIssueSummaryHeader|str], jira.Issue) jira_issues_dict: the dictionary of all existing jira issues - :param dict routing_config: the issue routing configuration - """ - jira_issue_summary = 'Unlabeled Github issues for {}'.format(repository_name) - jira_project = routing_config.get('unlabeledRoutingInfo').get('jira').get('project') - issue_list_str = [] - for gh_summary, gh_issue in unlabeled_gh_issues.items(): - issue_list_str.append('{}: {}\n'.format(gh_summary.to_jira_issue_summary(), gh_issue.html_url)) - content_to_add_or_update = UNLABELED_JIRA_ISSUE_DESCRIPTION_TEMPLATE.format("\n".join(issue_list_str)) - - existing_open_jira_issue_for_repo = jira_issues_dict.get(jira_issue_summary) - if existing_open_jira_issue_for_repo: - debug_print_indent_12('Found existing open jira issue [{}]'.format(existing_open_jira_issue_for_repo)) - jira_wrapper.add_jira_comment(existing_open_jira_issue_for_repo, - content_to_add_or_update, - comment_type=jira_wrapper.JIRA_COMMENT_TYPE_INFO, - indent=12) - else: - debug_print_indent_12('No existing open jira issue for repo [{}]'.format(repository_name)) - jira_wrapper.create_jira_issue(jira_project, jira_issue_summary, content_to_add_or_update, indent=12) - - -def handle_existing_jira_issue_for_open_gh_issue(jira_issue, gh_issue): - """ Handles an existing jira issue for the given open Github customer issue. - - Logic is as follows: - If the existing jira issue is in a closed state, then try to reopen the jira issue. 
- Else, do nothing. - - :param jira.Issue jira_issue: the existing jira issue that corresponds to the customer Github issue - :param github.Issue gh_issue: the associated customer Github issue - """ - # Next check state to ensure that they are in sync. GitHub is source of truth. - jira_status = str(jira_issue.fields.status) - if jira_status in jira_wrapper.JIRA_CLOSED_STATUS_LIST: - # Reopen with comment - try: - print_indent_12('Reopening jira issue: {}'.format(jira_issue)) - jira_wrapper.transition_jira_issue_status(jira_issue, jira_wrapper.JIRA_OPEN_STATUS_LIST, indent=12) - jira_wrapper.add_jira_comment(jira_issue, - REOPEN_JIRA_COMMENT_TEMPLATE.format(gh_issue.html_url), - comment_type=jira_wrapper.JIRA_COMMENT_TYPE_INFO, - indent=12) - except JiraTransitionError as e: - print_indent_12('Unable to reopen jira issue {}: {}'.format(jira_issue, six.text_type(e))) - else: - print_indent_12('Skipping jira issue [{}] as it is already in a non-closed status [{}]'.format(jira_issue, - jira_status)) - - -def process_open_issues_for_repo(gh_repo_name, open_gh_issues, existing_jira_issues_dict, routing_config): - """ Processes all open customer Github issues. - Logic is as follows: - For every open Github issue, check to see if it exists in Jira. - If it exists, then process the existing Jira. See handle_existing_jira_issue_for_open_gh_issue. - If it does not exist, then resolve the routing config based on the labels. - If the routing config can't be resolved, then include the issue in a batch ticket for the DEX team to label GH issues. - If the routing config is resolved, then create a new Jira for the service team. - Finally, create a jira for the DEX team for all open issues that couldn't be routed to the service teams. - - :param str gh_repo_name: the Github repository name - :param list(github.Issue) open_gh_issues: the list of open customer Github issues - :param dict([GitHubIssueSummaryHeader|str], jira.Issue) existing_jira_issues_dict: the dictionary of all existing jira issues - :param dict routing_config: the issue routing configuration (from IssueConfig) - """ - print_indent_4('Open Github issues for {}'.format(gh_repo_name)) - unlabeled_issues = dict() - for gh_issue in open_gh_issues: - gh_issue_summary = GitHubIssueSummaryHeader.from_github_issue(gh_repo_name, gh_issue) - jira_issue = existing_jira_issues_dict.get(gh_issue_summary) - if jira_issue: - debug_print_indent_8('Found existing Jira issue: {}'.format(jira_issue.key)) - handle_existing_jira_issue_for_open_gh_issue(jira_issue, gh_issue) - else: - jira_issue_summary = gh_issue_summary.to_jira_issue_summary() - debug_print_indent_8('No existing jira issue found for: {}'.format(jira_issue_summary)) - gh_issue_labels = get_labels_from_github_issue(gh_issue) - intersecting_configs = find_intersecting_service_routing_configs(gh_issue_labels, routing_config) - # Handle unlabeled issues later to create a single Jira issue assigned to DEX. - # Note: Issues with a label that isn't recognized will also be treated the same. - if not gh_issue_labels or not intersecting_configs: - debug_print_indent_12('No github issue label or intersecting issue routing configurations') - unlabeled_issues[gh_issue_summary] = gh_issue - continue - - # Create a new Jira issue. 
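-            # The first routing config whose label matches the issue decides the target Jira project.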
- create_new_jira_for_issue(gh_issue, gh_issue_summary, intersecting_configs, routing_config) - - # Now process all unlabeled issues - if unlabeled_issues: - debug_print_indent_8('Processing unlabeled issues ({})'.format(len(unlabeled_issues))) - create_or_update_jira_for_unlabeled_issues(gh_repo_name, - unlabeled_issues, - existing_jira_issues_dict, - routing_config) - else: - debug_print_indent_8('No unlabeled issues to process') - print_indent_4('Finished processing open Github issues for {}'.format(gh_repo_name)) - debug_print('-' * LINE_SEP_SIZE) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Queries for issues from the configured repositories and ' - 'creates Jira issues for the corresponding configured project') - parser.add_argument('--issue-routing-config', - default=None, - help='(Optional) Path to the issue routing configuration yaml file') - parser.add_argument('--repo-config', - default=None, - help='(Optional) Path to the GitHub repository configuration yaml file') - parser.add_argument('--github-access-token', - required=True, - help='[Required] The GitHub access token that has access rights to the configured repositories') - parser.add_argument('--jira-username', - default=None, - help='(Optional if --jira-session-id is defined) The JIRA username') - parser.add_argument('--jira-password', - default=None, - help='(Optional if --jira-session-id is defined) The JIRA password') - parser.add_argument('--jira-session-id', - default=None, - help='(Optional if --jira-username and --jira-password are defined) ' - 'The active JIRA session ID used for client auth') - parser.add_argument('--debug', - default=False, - action='store_true', - help='(Optional) Enable debugging console logging') - parser.add_argument('--commit', - default=False, - action='store_true', - help='(Optional) True if JIRA issues are to be created; else, False (default)') - args = parser.parse_args() - util.debug = args.debug - commit = args.commit - - # Load Config - config = IssueConfig(args.issue_routing_config, args.repo_config) - if util.debug: - config.print_config() - print('=' * LINE_SEP_SIZE) - sys.stdout.flush() - - # GitHub - gh = Github(args.github_access_token) - config.verify_github_user_access(gh) - issue_routing_config = config.get_issue_routing_config() - - # Jira - jira_wrapper = JiraWrapper(args.jira_username, args.jira_password, args.jira_session_id, commit) - all_existing_jira_issues_dict = jira_wrapper.get_all_jira_issues() - if util.debug: - print('Existing Jira issues:') - for summary, issue in all_existing_jira_issues_dict.items(): - issue_summary = six.text_type(issue.fields.summary).encode('utf-8', 'ignore') - print(' [{}]:\n' - ' Link: {}\n' - ' Status: {},\n' - ' Resolution: {},\n' - ' Summary: {}'.format(issue.key, - issue.permalink(), - issue.fields.status, - issue.fields.resolution, - issue_summary)) - print('=' * LINE_SEP_SIZE) - sys.stdout.flush() - - for repo_name in config.get_repo_config().get('supportedRepos'): - try: - print('Processing GitHub issues for [{}]...'.format(repo_name)) - full_repo_name = config.get_full_repo_name(repo_name) - repo = gh.get_repo(full_repo_name) - - closed_github_issues, open_github_issues = get_github_issues(repo) - if closed_github_issues: - process_closed_issues_for_repo(repo_name, closed_github_issues, all_existing_jira_issues_dict) - else: - debug_print_indent_4('No closed issues to process for repo: {}'.format(repo_name)) - debug_print('+' * LINE_SEP_SIZE) - - if open_github_issues: - 
process_open_issues_for_repo(repo_name, - open_github_issues, - all_existing_jira_issues_dict, - issue_routing_config) - else: - debug_print_indent_4('No open issues to process for repo: {}'.format(repo_name)) - except Exception: - print('Error while processing GitHub issues for [{}]. Continuing with github issues for the other SDKs, if any.'.format(repo_name)) - continue - debug_print('+' * LINE_SEP_SIZE) - debug_print('=' * LINE_SEP_SIZE) - print('Finished') diff --git a/scripts/auto_gen_utils/team_city_scripts/github_issues/update_github_labels.py b/scripts/auto_gen_utils/team_city_scripts/github_issues/update_github_labels.py deleted file mode 100644 index 92d872d9ea..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/github_issues/update_github_labels.py +++ /dev/null @@ -1,116 +0,0 @@ -# This script parses the issue routing configuration and updates the configured GitHub repositories to ensure -# that the defined labels exist in each repository, respectively. -import argparse -import util - -from github import Github, GithubObject -from issue_config import IssueConfig -from util import get_partial_print_func, get_partial_debug_print_func - -print_indent_4 = get_partial_print_func(4) -debug_print_indent_4 = get_partial_debug_print_func(4) - - -def determine_missing_service_labels_for_repo(gh_repo, configured_labels): - """ Queries a given GitHub repository's configured labels and determines which labels from the given list are - missing. - - :param gh_repo: the GitHub repository instance - :param configured_labels: the list of service labels - :return: the list of missing service labels - :rtype: list str - """ - labels_from_repo = [] - for label_from_repo in gh_repo.get_labels(): - labels_from_repo.append(label_from_repo.name) - - missing_labels = [] - for configured_label in configured_labels: - if configured_label not in labels_from_repo: - missing_labels.append(configured_label) - - return missing_labels - - -def add_missing_labels_to_repo(gh_repo, missing_labels, label_config): - """ Adds the list of missing labels to the given GitHub repository. - - :param gh_repo: the GitHub repository instance. - :param missing_labels: the list of missing labels. 
- :param label_config: the label configuration as a dict - """ - for missing_label in missing_labels: - label_cfg_to_add = None - for label_cfg in label_config: - if label_cfg.get('label') == missing_label: - label_cfg_to_add = label_cfg - if label_cfg_to_add is None: - continue - - label_name = label_cfg_to_add.get('label') - label_color = label_cfg_to_add.get('color') - label_description = GithubObject.NotSet - if 'description' in label_cfg_to_add: - label_description = label_cfg_to_add.get('description') - - status_msg = "Creating label ['{}', #{}, {}] in repo [{}]".format(label_name, - label_color, - label_description, - gh_repo.name) - - if commit: - print_indent_4(status_msg) - gh_repo.create_label(label_name, label_color, description=label_description) - else: - print_indent_4("**** DRY RUN **** " + status_msg) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Parses the issue routing and GitHub repository configuration to ' - 'update the labels in GitHub') - parser.add_argument('--issue-routing-config', - default=None, - help='Path to the issue routing configuration yaml file') - parser.add_argument('--repo-config', - default=None, - help='Path to the GitHub repository configuration yaml file') - parser.add_argument('--github-access-token', - required=True, - help="The GitHub access token that has access rights to the configured repositories") - parser.add_argument('--debug', - default=False, - action='store_true', - help="Enable debugging console logging") - parser.add_argument('--commit', - default=False, - action='store_true', - help="True if labels in GitHub are to be updated; else, False (default)") - args = parser.parse_args() - util.debug = args.debug - commit = args.commit - - # Load Config - config = IssueConfig(args.issue_routing_config, args.repo_config) - if util.debug: - config.print_config() - - gh = Github(args.github_access_token) - config.verify_github_user_access(gh) - issue_routing_config = config.get_issue_routing_config() - - standard_labels = config.get_standard_labels() - standard_label_config = issue_routing_config.get('standardLabels') - service_labels = config.get_service_labels() - service_label_config = issue_routing_config.get('serviceLabels') - - for repo_name in config.get_repo_config().get('supportedRepos'): - print("Processing labels for [{}]...".format(repo_name)) - full_repo_name = config.get_full_repo_name(repo_name) - debug_print_indent_4("Fetching repo for [{}]".format(full_repo_name)) - repo = gh.get_repo(full_repo_name) - - missing_service_labels_for_repo = determine_missing_service_labels_for_repo(repo, service_labels) - missing_standard_labels_for_repo = determine_missing_service_labels_for_repo(repo, standard_labels) - - add_missing_labels_to_repo(repo, missing_service_labels_for_repo, service_label_config) - add_missing_labels_to_repo(repo, missing_standard_labels_for_repo, standard_label_config) diff --git a/scripts/auto_gen_utils/team_city_scripts/github_issues/util.py b/scripts/auto_gen_utils/team_city_scripts/github_issues/util.py deleted file mode 100644 index 2ccad997a3..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/github_issues/util.py +++ /dev/null @@ -1,36 +0,0 @@ -import sys - -from functools import partial -from pprint import PrettyPrinter - -debug = False -pp = PrettyPrinter(indent=2) - - -def debug_print(msg, additional='', pretty=False): - """ Prints a message to stdout. 
- - :param str msg: the message to display - :param str additional: additional text to print - :param boolean pretty: True if the message is to be pretty-printed; else, False (default) - """ - if not debug: - return - - if pretty: - pp.pprint(msg + additional) - else: - print(msg + additional) - sys.stdout.flush() - - -def get_partial_debug_print_func(indent): - return partial(debug_print, ' ' * indent) - - -def print_wrapper(msg, additional=''): - print(msg + additional) - - -def get_partial_print_func(indent): - return partial(print_wrapper, ' ' * indent) diff --git a/scripts/auto_gen_utils/team_city_scripts/go/public/1_setup_go_public_branch.sh b/scripts/auto_gen_utils/team_city_scripts/go/public/1_setup_go_public_branch.sh deleted file mode 100755 index 410b2241db..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/go/public/1_setup_go_public_branch.sh +++ /dev/null @@ -1,41 +0,0 @@ -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - -# Old way of doing that: -# ls -la ~/.ssh -# -# cat ~/.ssh/config -# -# printf "\n\nHost * \n StrictHostKeyChecking no\n" >> ~/.ssh/config -# -# cat ~/.ssh/config - -## AUTOGEN ## -cd autogen -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt -cd .. - -# run the setup job for step 2 in ./autogen -cd autogen -ls -la -pwd - -ls -la - -# checks out CLI branch with same name as SDK branch that triggered this build -python ./2_pre_generation_set_up.py --build-id $BUILD_ID --tool GoSDK - -# back out into root directory -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/go/public/3_record_sdk_generation_success.sh b/scripts/auto_gen_utils/team_city_scripts/go/public/3_record_sdk_generation_success.sh deleted file mode 100755 index 337aa82688..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/go/public/3_record_sdk_generation_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch go_sdk_generation_success.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/go/public/5_record_sdk_build_success.sh b/scripts/auto_gen_utils/team_city_scripts/go/public/5_record_sdk_build_success.sh deleted file mode 100755 index fd3b46751a..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/go/public/5_record_sdk_build_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch go_sdk_build_success.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/go/public/6_report_gen_and_build_status.sh b/scripts/auto_gen_utils/team_city_scripts/go/public/6_report_gen_and_build_status.sh deleted file mode 100755 index f0f686e843..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/go/public/6_report_gen_and_build_status.sh +++ /dev/null @@ -1,42 +0,0 @@ -set -e -set -x - -pwd -ls -la - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. 
./.sdk-venv/bin/activate - -cd src/github.com/oracle/oci-go-sdk -if [[ `git branch|grep "\*"|grep "bulk"` ]]; then - echo "Found bulk preview" - export BUILD_TYPE_ARG="--build-type bulk_pending_merge_public" - export PUSH_SPEC_DIFF_ARG="" - export PUSH_SPEC_DIFF_UNPROTECTED_ARG="" -else - export BUILD_TYPE_ARG="--build-type individual_public" -fi -cd ../../../.. - -if [ -n "$PUSH_SPEC_DIFF_UNPROTECTED_ARG" ]; then - set +e - diff=`diff -r specs-without-any-changes specs-without-conditional-groups/ | grep -v "^Only in specs-without-conditional-groups/"` - diff_lines=`diff -r specs-without-any-changes specs-without-conditional-groups/ | grep -v "^Only in specs-without-conditional-groups/" | wc -l` - if [ "$diff_lines" -gt "0" ]; then - echo "WARNING: Detected changes in the spec that were not protected by conditional groups." >&2 - echo "$diff" - else - echo "Did not detect any changes that were not protected by conditional groups." - export PUSH_SPEC_DIFF_UNPROTECTED_ARG="" - fi - set -e -fi - -# commit changes from generation and build for go-sdk -# this step will run no matter what happened before it so it can report success / failure to the JIRA tickets -cd autogen -python ./3_report_generation_status.py --build-id $BUILD_ID --tool GoSDK $BUILD_TYPE_ARG $PUSH_SPEC_DIFF_ARG $PUSH_SPEC_DIFF_UNPROTECTED_ARG -cd .. - -ls -la ./src/github.com/oracle/oci-go-sdk diff --git a/scripts/auto_gen_utils/team_city_scripts/go/public/7_commit_generated_changes.sh b/scripts/auto_gen_utils/team_city_scripts/go/public/7_commit_generated_changes.sh deleted file mode 100755 index d6591e2485..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/go/public/7_commit_generated_changes.sh +++ /dev/null @@ -1,27 +0,0 @@ -set -x -set -e - -# configure git for this commit -git config --global user.email "$GIT_USER_EMAIL" -git config --global user.name "$GIT_USER_NAME" - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - - -# get the branch we're on -cd src/github.com/oracle/oci-go-sdk -branch=`git branch|grep "^\*"|cut -c3-` -if [[ ${branch} == *"bulk"* ]]; then - build_type="bulk_pending_merge_public" -else - build_type="individual_public" -fi -cd ../../../.. - -# commit changes from generation and build -cd autogen -ls -la -python ./4_on_generation_complete.py --build-id $BUILD_ID --tool GoSDK --build-type ${build_type} -cd .. 
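-# Usage sketch (hypothetical values; in practice TeamCity supplies these as build parameters):
-#   GIT_USER_EMAIL=bot@example.com GIT_USER_NAME="SDK CI" BUILD_ID=12345 ./7_commit_generated_changes.sh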
diff --git a/scripts/auto_gen_utils/team_city_scripts/java/check_for_pom_version_mismatch.py b/scripts/auto_gen_utils/team_city_scripts/java/check_for_pom_version_mismatch.py deleted file mode 100644 index 7c58c66fc9..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/java/check_for_pom_version_mismatch.py +++ /dev/null @@ -1,110 +0,0 @@ -import argparse -import os -import xml.etree.ElementTree as ET -import sys -from packaging import version - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -from shared.bitbucket_utils import setup_bitbucket, make_general_comment, clone_target_branch # noqa: ignore=F402 -import shared.bitbucket_utils # noqa: ignore=F402 - -ns = {"ns":"http://maven.apache.org/POM/4.0.0"} - - -def get_pom_version(sdk_dir): - pom_path = os.path.join(sdk_dir, 'pom.xml') - pom = ET.parse(pom_path) - xpath = './ns:version' - return pom.find(xpath, ns).text - - -def truncate_pom_version(pom_version): - dash_pos = pom_version.find("-") - if dash_pos >= 0: - pom_version = pom_version[:dash_pos] - return pom_version - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Warn if the pom version does not match the target') -parser.add_argument('--build-branch', required=False, help="The value of the teamcity.build.branch variable") -parser.add_argument('--username', required=False, help='LDAP username ("firstname.lastname@oracle.com"; within TeamCity, use "%%system.teamcity.auth.userId%%")') -parser.add_argument('--password', required=False, help='LDAP password (within TeamCity, use "%%system.teamcity.auth.password%%")') -parser.add_argument('--source_branch_root_dir', required=False, help='Root directory of the source branch') -parser.add_argument('--target_branch_root_dir', required=False, help='Root directory of the target branch') -parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output') -parser.add_argument('--dry-run', action='store_true', default=False, required=False, help='Dry-run, do not post comment to Bitbucket') - -verbose = False -args = parser.parse_args() -setup_bitbucket(args) - -if args.build_branch: - pr_id = None - try: - # If the teamcity.build.branch variable is just something like "1234", then this is a - # validation build for pull request "1234" - pr_id = int(args.build_branch) - except ValueError: - print("Not a pull request validation build.") - sys.exit(0) - -dry_run = True if args.dry_run else False - -if args.verbose: - verbose = True - shared.bitbucket_utils.verbose = True - -source_branch_root_dir = os.path.abspath(args.source_branch_root_dir) if args.source_branch_root_dir else os.getcwd() -target_branch_root_dir = os.path.abspath(args.target_branch_root_dir) if args.target_branch_root_dir else clone_target_branch(pr_id, "java-sdk") - -source_pom_version = get_pom_version(source_branch_root_dir) -trunc_source_pom_version = source_pom_version -target_pom_version = get_pom_version(target_branch_root_dir) -trunc_target_pom_version = target_pom_version -if verbose: - print("Source branch pom version is {source_pom_version}".format(source_pom_version=source_pom_version)) - print("Target branch pom version is {target_pom_version}".format(target_pom_version=target_pom_version)) - - -# Ensure that pom versions are not mismatched between public and preview. 
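-# A '-preview1' suffix in the pom version marks a preview build; the two checks below fail the
-# pull request when exactly one of the source and target branches is a preview build.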
-if '-preview1' in source_pom_version and '-preview1' not in target_pom_version: - text = "Source branch pom version ({source_pom_version}) is for preview while the target branch version ({target_pom_version}) is not.".format(source_pom_version=source_pom_version, target_pom_version=target_pom_version) - if verbose: - print(text) - if not dry_run: - make_general_comment("SDK", "java-sdk", pr_id, text) - sys.exit(1) - - -if '-preview1' in target_pom_version and '-preview1' not in source_pom_version: - text = "Source branch pom version ({source_pom_version}) is not for preview while the target branch version ({target_pom_version}) is.".format(source_pom_version=source_pom_version, target_pom_version=target_pom_version) - if verbose: - print(text) - if not dry_run: - make_general_comment("SDK", "java-sdk", pr_id, text) - sys.exit(1) - -# Truncate any trailing text after the version numbers for comparison -trunc_source_pom_version = truncate_pom_version(source_pom_version) -trunc_target_pom_version = truncate_pom_version(target_pom_version) - -if verbose: - print("Truncated source branch pom version is {trunc_source_pom_version}".format(trunc_source_pom_version=trunc_source_pom_version)) - print("Truncated target branch pom version is {trunc_target_pom_version}".format(trunc_target_pom_version=trunc_target_pom_version)) - - -if version.parse(trunc_source_pom_version) < version.parse(trunc_target_pom_version): - text = "The pom version of the source branch ({source_pom_version}) is out of date with the target branch ({target_pom_version}). Please re-fetch from the remote and rebase your changes on top of the target branch.".format(source_pom_version=source_pom_version, target_pom_version=target_pom_version) - if verbose: - print(text) - if not dry_run: - make_general_comment("SDK", "java-sdk", pr_id, text) - sys.exit(1) -elif verbose: - print("The source and target branch pom versions are in sync.") diff --git a/scripts/auto_gen_utils/team_city_scripts/java/checkout_source_branch.py b/scripts/auto_gen_utils/team_city_scripts/java/checkout_source_branch.py deleted file mode 100644 index b21569fbb7..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/java/checkout_source_branch.py +++ /dev/null @@ -1,88 +0,0 @@ -from __future__ import print_function -import argparse -import os -import ssl -import sys -import urllib3 -import re -from git import Repo - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -from shared.bitbucket_utils import setup_bitbucket, make_general_comment, get_pullrequest, get_pr_source_branch # noqa: ignore=F402 -import shared.bitbucket_utils # noqa: ignore=F402 - -KEEP_TEMP_FILES = True - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -verbose = False -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - - -def printv(str): - global verbose - if verbose: - print(str) - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Check out the source branch, as indicated in the PR. Warning: this is destructive to the contents in the Java SDK directory.') -parser.add_argument('--build-id', required=False, help="The TeamCity build id for the build that is running this script. 
This is used to update the relevant Bitbucket PRs with links to the TeamCity build")
-parser.add_argument('--build-branch', required=True, help="The value of the teamcity.build.branch variable")
-parser.add_argument('--java-sdk-dir', required=False, help='Directory of the Java SDK')
-parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output')
-
-args = parser.parse_args()
-setup_bitbucket(args)
-
-if args.verbose:
-    verbose = True
-    shared.bitbucket_utils.verbose = True
-
-if args.java_sdk_dir:
-    java_sdk_dir = os.path.abspath(args.java_sdk_dir)
-else:
-    java_sdk_dir = os.getcwd()
-
-pr_id = None
-try:
-    # If the teamcity.build.branch variable is just something like "1234", then this is a
-    # validation build for pull request "1234"
-    pr_id = int(args.build_branch)
-except ValueError:
-    print("Not a pull request validation build. Not warning.")
-    sys.exit(0)
-
-pr = get_pullrequest("SDK", "java-sdk", pr_id)
-printv(pr.text)
-
-try:
-    repo = Repo.init(java_sdk_dir)
-
-    current_commit = None
-    current_branch = [branch.strip()[2:] for branch in repo.git.branch().split('\n') if branch.startswith('* ')][0]
-    printv("current branch: {}".format(current_branch))
-    result = re.search(r'\(HEAD detached at ([^)]*)\)', current_branch)
-    if not result:
-        # this is what it looks like in Team City
-        result = re.search(r'\(detached from ([^)]*)\)', current_branch)
-    if result:
-        current_commit = result.group(1)
-
-    source_branch = get_pr_source_branch(pr)
-    printv("source branch: {}".format(source_branch))
-
-    repo.git.fetch("origin")
-    repo.git.checkout(source_branch)
-
-    if current_commit:
-        repo.git.reset('--hard', current_commit)
-        printv("resetting to commit: {}".format(current_commit))
-except Exception as e:
-    print('EXCEPTION: {}'.format(str(e)))
-    print('Failed to change to source branch.')
diff --git a/scripts/auto_gen_utils/team_city_scripts/java/compatibility/compare-all-versions-using-clirr.sh b/scripts/auto_gen_utils/team_city_scripts/java/compatibility/compare-all-versions-using-clirr.sh
deleted file mode 100755
index eee7634c46..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/java/compatibility/compare-all-versions-using-clirr.sh
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/bin/bash
-
-# Syntax: $0 [--startWithVersion <version>] [--javaSdkDir <dir>] <maven-metadata.xml file>
-#
-# --startWithVersion <version>
-#     Skips versions older than <version>, meaning the first version compared is
-#     <version> to the version just preceding it.
-#     If not provided, will start comparing the oldest version to the 2nd oldest version.
-#
-# --javaSdkDir <dir>
-#     The directory where the GitHub version of the OCI Java SDK has been checked out
-#     If not provided, uses the current directory.
-#
-
-# If you are running this on macOS `brew install gsed`.
-
-if uname -a | grep -i "Darwin" > /dev/null; then
-    if ! which gsed > /dev/null; then
-        echo "If you are running this on macOS, please install gsed: 'brew install gsed'."
-        exit 1
-    fi
-fi
-
-set -e
-#set -x
-
-# Some operating systems (including macOS) do not have
-# the correct `sed` version that supports `-i` (without space).
-# There, it is called `gsed`. The if statement below makes the script
-# sed/gsed-agnostic.
-if [ -z ${SED+x} ]; then
-    if which gsed > /dev/null; then
-        SED=`which gsed`
-    else
-        SED=`which sed`
-    fi
-fi
-
-java_sdk_root_dir="."
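-
-# Example invocation (hypothetical version and paths; maven-metadata.xml is assumed
-# to have been downloaded from Artifactory beforehand):
-#
-#   ./compare-all-versions-using-clirr.sh --startWithVersion 1.17.0 \
-#       --javaSdkDir ~/oci-java-sdk maven-metadata.xml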
-
-while [[ $# -gt 1 ]]; do
-    key="$1"
-
-    case $key in
-        --startWithVersion)
-            start_with_version="$2"
-            shift # past argument
-            shift # past value
-            ;;
-        --javaSdkDir)
-            java_sdk_root_dir="$2"
-            shift # past argument
-            shift # past value
-            ;;
-        *) # unknown option
-            echo "Unknown option: $key"
-            echo "Syntax: $0 [--startWithVersion <version>] [--javaSdkDir <dir>] <maven-metadata.xml file>"
-            exit 1
-            ;;
-    esac
-done
-
-if [ $# -lt 1 ]; then
-    echo "Syntax: $0 [--startWithVersion <version>] [--javaSdkDir <dir>] <maven-metadata.xml file>"
-    exit 1
-fi
-if [ $# -gt 1 ]; then
-    echo "Syntax: $0 [--startWithVersion <version>] [--javaSdkDir <dir>] <maven-metadata.xml file>"
-    exit 1
-fi
-
-clirr="clirr:2.9.2-oracle-SNAPSHOT"
-maven_metadata_xml_file="$1"
-
-tempfileprefix=`basename $0`
-# filtered_maven_metadata_xml_file=$(mktemp -t ${tempfileprefix})
-filtered_maven_metadata_xml_file="${maven_metadata_xml_file}.filtered"
-
-# skip the first line, it's the latest version: $SED '1,1d'
-grep -o "<version>.*</version>" ${maven_metadata_xml_file} \
-    | $SED 's_<version>\([^<]*\)</version>_\1_' \
-    | $SED '1,1d' \
-    | grep -v -i "beta" \
-    | grep -v -i "experimental" \
-    | grep -v -i "snapshot" \
-    | grep -v -i "preview" \
-    | grep -v -i "401stream" \
-    > ${filtered_maven_metadata_xml_file}
-
-temp_file=$(mktemp -t "${tempfileprefix}-temp")
-
-run_dir=`pwd`
-cd $java_sdk_root_dir
-
-for version in `cat ${filtered_maven_metadata_xml_file}`; do
-    if [ -z ${previous_version+x} ]; then
-        # previous version unset
-        previous_version="${version}"
-        version_range_beginning="${version}"
-        continue
-    fi
-    if [ -n "${start_with_version}" ]; then
-        # start_with_version set, skip until ${version} is ${start_with_version}
-        if [[ "${version}" != "${start_with_version}" ]]; then
-            # not the desired version yet
-            echo "Skipping ${version}"
-            previous_version="${version}"
-            version_range_beginning="${version}"
-            continue
-        fi
-        echo "Starting with version ${start_with_version}"
-        unset start_with_version
-    fi
-
-    echo "##################################################"
-    echo "##################################################"
-    echo "Comparing ${previous_version} to ${version}"
-
-    git_tag="v${version}"
-    echo -e "\tChecking out git tag ${git_tag}..."
-    if ! git checkout "${git_tag}" > ${temp_file} 2>&1; then
-        echo -e "ERROR: \tgit checkout ${git_tag} failed"
-        cat ${temp_file}
-        echo -e "\tCompatible version range: ${version_range_beginning} to ${previous_version}"
-        unset previous_version
-        continue
-    fi
-
-    git_date=`git log -n 1 --pretty=format:"%cd"`
-    echo -e "\tVersion ${version} created ${git_date}"
-
-    echo -e "\tRunning clirr..."
-    clirr_output_file="${run_dir}/clirr-${version}.txt"
-    if ! mvn ${clirr}:check \
-        -f bmc-common/pom.xml \
-        -DcomparisonVersion="${previous_version}" \
-        -DtextOutputFile="${clirr_output_file}" \
-        -DfailOnError="false" \
-        -DfailOnWarning="false" \
-        > ${temp_file} 2>&1; then
-        echo "mvn clirr:check failed"
-        cat ${temp_file}
-        rm ${temp_file}
-        exit 1
-    fi
-    echo -e "\tGenerated ${clirr_output_file}"
-    if [ -s ${clirr_output_file} ]; then
-        echo -e "\tClirr detected changes between ${previous_version} and ${version}"
-        echo -e "\tCompatible version range: ${version_range_beginning} to ${previous_version}"
-        version_range_beginning="${version}"
-    fi
-
-    previous_version="${version}"
-done
-
-echo -e "\tCompatible version range: ${version_range_beginning} to ${version}"
-
-cd $run_dir
-
-rm ${temp_file}
\ No newline at end of file
diff --git a/scripts/auto_gen_utils/team_city_scripts/java/compatibility/compare-all-versions-using-codegen-version.sh b/scripts/auto_gen_utils/team_city_scripts/java/compatibility/compare-all-versions-using-codegen-version.sh
deleted file mode 100755
index fbe7de8843..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/java/compatibility/compare-all-versions-using-codegen-version.sh
+++ /dev/null
@@ -1,158 +0,0 @@
-#!/bin/bash
-
-# Syntax: $0 [--startWithVersion <version>] [--javaSdkDir <dir>] <maven-metadata.xml file>
-#
-# --startWithVersion <version>
-#     Skips versions older than <version>, meaning the first version compared is
-#     <version> to the version just preceding it.
-#     If not provided, will start comparing the oldest version to the 2nd oldest version.
-#
-# --javaSdkDir <dir>
-#     The directory where the Bitbucket version of the OCI Java SDK has been checked out
-#     If not provided, uses the current directory.
-#
-
-# If you are running this on macOS `brew install gsed`.
-
-if uname -a | grep -i "Darwin" > /dev/null; then
-    if ! which gsed > /dev/null; then
-        echo "If you are running this on macOS, please install gsed: 'brew install gsed'."
-        exit 1
-    fi
-fi
-
-set -e
-#set -x
-
-# Some operating systems (including macOS) do not have
-# the correct `sed` version that supports `-i` (without space).
-# There, it is called `gsed`. The if statement below makes the script
-# sed/gsed-agnostic.
-if [ -z ${SED+x} ]; then
-    if which gsed > /dev/null; then
-        SED=`which gsed`
-    else
-        SED=`which sed`
-    fi
-fi
-
-java_sdk_root_dir="."
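-
-# Example invocation (hypothetical paths): walk every release listed in
-# maven-metadata.xml from oldest to newest and report the ranges of releases
-# that were generated with the same codegen version:
-#
-#   ./compare-all-versions-using-codegen-version.sh --javaSdkDir ~/oci-java-sdk maven-metadata.xml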
-
-while [[ $# -gt 1 ]]; do
-    key="$1"
-
-    case $key in
-        --startWithVersion)
-            start_with_version="$2"
-            shift # past argument
-            shift # past value
-            ;;
-        --javaSdkDir)
-            java_sdk_root_dir="$2"
-            shift # past argument
-            shift # past value
-            ;;
-        *) # unknown option
-            echo "Unknown option: $key"
-            echo "Syntax: $0 [--startWithVersion <version>] [--javaSdkDir <dir>] <maven-metadata.xml file>"
-            exit 1
-            ;;
-    esac
-done
-
-if [ $# -lt 1 ]; then
-    echo "Syntax: $0 [--startWithVersion <version>] [--javaSdkDir <dir>] <maven-metadata.xml file>"
-    exit 1
-fi
-if [ $# -gt 1 ]; then
-    echo "Syntax: $0 [--startWithVersion <version>] [--javaSdkDir <dir>] <maven-metadata.xml file>"
-    exit 1
-fi
-
-maven_metadata_xml_file="$1"
-
-tempfileprefix=`basename $0`
-# filtered_maven_metadata_xml_file=$(mktemp -t ${tempfileprefix})
-filtered_maven_metadata_xml_file="${maven_metadata_xml_file}.filtered"
-
-# skip the first line, it's the latest version: $SED '1,1d'
-grep -o "<version>.*</version>" ${maven_metadata_xml_file} \
-    | $SED 's_<version>\([^<]*\)</version>_\1_' \
-    | $SED '1,1d' \
-    | grep -v -i "beta" \
-    | grep -v -i "experimental" \
-    | grep -v -i "snapshot" \
-    | grep -v -i "preview" \
-    | grep -v -i "401stream" \
-    > ${filtered_maven_metadata_xml_file}
-
-temp_file=$(mktemp -t "${tempfileprefix}-temp")
-
-run_dir=`pwd`
-cd $java_sdk_root_dir
-
-for version in `cat ${filtered_maven_metadata_xml_file}`; do
-    if [ -z ${previous_version+x} ]; then
-        # previous version unset
-        previous_version="${version}"
-        version_range_beginning="${version}"
-        continue
-    fi
-    if [ -n "${start_with_version}" ]; then
-        # start_with_version set, skip until ${version} is ${start_with_version}
-        if [[ "${version}" != "${start_with_version}" ]]; then
-            # not the desired version yet
-            echo "Skipping ${version}"
-            previous_version="${version}"
-            version_range_beginning="${version}"
-            continue
-        fi
-        echo "Starting with version ${start_with_version}"
-        unset start_with_version
-    fi
-
-    echo "##################################################"
-    echo "##################################################"
-    echo "Comparing ${previous_version} to ${version}"
-
-    git_tag="${previous_version}"
-    echo -e "\tChecking out git tag ${git_tag}..."
-    if ! git checkout "${git_tag}" > ${temp_file} 2>&1; then
-        echo -e "ERROR: \tgit checkout ${git_tag} failed"
-        cat ${temp_file}
-        echo -e "\tCompatible version range: ${version_range_beginning} to ${previous_version}"
-        unset previous_version
-        continue
-    fi
-
-    old_codegen_version=`grep "<codegen.version>" bmc-codegen/pom.xml | sed 's/^.*>\(.*\)<.*$/\1/'`
-
-    git_tag="${version}"
-    echo -e "\tChecking out git tag ${git_tag}..."
-    if ! git checkout "${git_tag}" > ${temp_file} 2>&1; then
git checkout "${git_tag}" > ${temp_file} 2>&1; then - echo -e "ERROR: \tgit checkout ${git_tag} failed" - cat ${temp_file} - echo -e "\tCompatible version range: ${version_range_beginning} to ${previous_version} (codegen ${old_codegen_version})" - unset previous_version - continue - fi - - git_date=`git log -n 1 --pretty=format:"%cd"` - echo -e "\tVersion ${version} created ${git_date}" - new_codegen_version=`grep "codegen.version>" bmc-codegen/pom.xml | sed 's/^.*>\(.*\)<.*$/\1/'` - echo -e "\tVersion ${version} uses codegen ${new_codegen_version}" - - if [[ "${old_codegen_version}" != "${new_codegen_version}" ]]; then - echo -e "\tCodegen version changed from ${old_codegen_version} to ${new_codegen_version}" - echo -e "\tCompatible version range: ${version_range_beginning} to ${previous_version} (codegen ${old_codegen_version})" - version_range_beginning="${version}" - fi - - previous_version="${version}" -done - -echo -e "\tCompatible version range: ${version_range_beginning} to ${version}" - -cd $run_dir - -rm ${temp_file} \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/java/determine_build_profile.py b/scripts/auto_gen_utils/team_city_scripts/java/determine_build_profile.py deleted file mode 100644 index c967706e16..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/java/determine_build_profile.py +++ /dev/null @@ -1,229 +0,0 @@ -from __future__ import print_function -import argparse -import os -import requests -import urllib3 -import re -import ssl -import sys -import traceback -from xml.etree import ElementTree -import getpass - -import shared.bitbucket_utils # noqa: ignore=F402 - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -verbose = False -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - - -def printv(str): - global verbose - if verbose: - print(str) - - -def setup_auth(): - global args, username, password, auth - username = args.username - if args.password is not None: - password = args.password - else: - password = getpass.getpass("LDAP password:") - - auth = (username, password) - - -def get_changed_files_for_change(change_id): - url = "https://teamcity.oci.oraclecorp.com/app/rest/changes/id:{}".format(change_id) - r = requests.get(url, verify=False, auth=auth) - - root = ElementTree.fromstring(r.content) - files_node = root.find("files") - - files = [] - - for child in files_node: - if child.tag.lower() == "file": - files.append(child.attrib['file']) - - return files - - -def get_commit_hashes(pr_id): - pr_commits = shared.bitbucket_utils.get_pullrequest_commits("SDK", "java-sdk", pr_id) - - commit_hashes = [] - - for change in pr_commits.json()['values']: - commit_hashes.append(change['id']) - printv("Commit hash {}".format(change['id'])) - - return commit_hashes - - -def get_changed_files(build_id, pr_id): - commit_hashes = None - if pr_id: - commit_hashes = get_commit_hashes(pr_id) - - url = "https://teamcity.oci.oraclecorp.com/app/rest/changes?locator=build:(id:{})".format(build_id) - r = requests.get(url, verify=False, auth=auth) - - root = ElementTree.fromstring(r.content) - - files = [] - - for child in root: - if child.tag.lower() == "change": - if commit_hashes and child.attrib['version'] not in commit_hashes: - continue - change_id = child.attrib['id'] - files.extend(get_changed_files_for_change(change_id)) - - return files - - -def get_changed_files_for_pr(pr_id): - try: - 
-        pr_diff = shared.bitbucket_utils.get_pullrequest_diff("SDK", "java-sdk", pr_id)
-        json = pr_diff.json()
-        if json['truncated']:
-            printv("Diff for {} is truncated".format(pr_id))
-            return None
-
-        files = []
-
-        for diff in json['diffs']:
-            if 'destination' in diff:
-                diff_destination = diff['destination']
-                if 'toString' in diff_destination:
-                    diff_file = diff_destination['toString']
-                    printv("Diff file {}".format(diff_file))
-                    files.append(diff_file)
-
-        return files
-    except Exception as e:
-        printv("Failed to get diff for {}".format(pr_id))
-        printv("type error: {}".format(str(e)))
-        printv(traceback.format_exc())
-        return None
-
-
-#
-# Parameters variable set up
-#
-parser = argparse.ArgumentParser(description='Determine the recommended build profile for the change being built: "dev" or "quick"')
-parser.add_argument('--build-id', required=False, help='TeamCity build id')
-parser.add_argument('--username', required=False, help='LDAP username ("firstname.lastname@oracle.com"; within TeamCity, use "%%system.teamcity.auth.userId%%")')
-parser.add_argument('--password', required=False, help='LDAP password (within TeamCity, use "%%system.teamcity.auth.password%%")')
-parser.add_argument('--build-branch', required=False, help="The value of the teamcity.build.branch variable")
-parser.add_argument('--changed-modules-output-file', required=False, help="If provided, the changed modules will be written to this file")
-parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output')
-
-args = parser.parse_args()
-shared.bitbucket_utils.setup_bitbucket(args)
-
-if args.verbose:
-    verbose = True
-
-pr_id = None
-if args.build_branch:
-    try:
-        # If the teamcity.build.branch variable is just something like "1234", then this is a
-        # validation build for pull request "1234"
-        pr_id = int(args.build_branch)
-    except ValueError:
-        print("Not a pull request validation build.")
-        sys.exit(0)
-
-build_id = args.build_id
-
-if pr_id:
-    changed_files = get_changed_files_for_pr(pr_id)
-else:
-    if not args.build_id or not args.username:
-        print("--build-id and --username required if --build-branch is not set")
-        sys.exit(1)
-
-    # Fall back to TeamCity diff
-    setup_auth()
-    changed_files = get_changed_files(build_id, pr_id)
-
-if not changed_files:
-    # No changes, let's rebuild everything to be safe.
-    # This can happen when a job failed, and then we hit "Run" again
-    pom_file_change = True
-else:
-    pom_file_change = False
-
-hand_written_directories = [
-    "bmc-common/",
-    "bmc-smoketests/",
-    "bmc-addons/",
-    "bmc-circuitbreaker/",
-    "bmc-hand-written/",
-    "bmc-examples/",
-    "bmc-encryption/",
-    "bmc-objectstorage/bmc-objectstorage-extensions/",
-    "bmc-objectstorage/bmc-objectstorage-generated/src/main/java/com/oracle/bmc/objectstorage/internal/http/ObjectMetadataInterceptor.java",
-    "bmc-streaming/src/main/java/com/oracle/bmc/streaming/StreamClientBuilder.java",
-    "bmc-streaming/src/main/java/com/oracle/bmc/streaming/AbstractStreamBasedClientBuilder.java",
-    "bmc-streaming/src/main/java/com/oracle/bmc/streaming/StreamAsyncClientBuilder.java",
-    "bmc-keymanagement/src/main/java/com/oracle/bmc/keymanagement/KmsManagementClientBuilder.java",
-    "bmc-keymanagement/src/main/java/com/oracle/bmc/keymanagement/KmsManagementAsyncClientBuilder.java",
-    "bmc-keymanagement/src/main/java/com/oracle/bmc/keymanagement/KmsCryptoAsyncClientBuilder.java",
-    "bmc-keymanagement/src/main/java/com/oracle/bmc/keymanagement/AbstractVaultBasedClientBuilder.java",
-    "bmc-keymanagement/src/main/java/com/oracle/bmc/keymanagement/AbstractKmsCryptoClientBuilder.java",
-    "bmc-keymanagement/src/main/java/com/oracle/bmc/keymanagement/KmsCryptoClientBuilder.java",
-    "bmc-keymanagement/src/main/java/com/oracle/bmc/keymanagement/AbstractKmsManagementClientBuilder.java",
-    "bmc-shaded/",
-    "bmc-shaded-smoketests/"
-]
-hand_written_change = False
-truncated = False
-
-root_module_changed = False
-changed_modules = []
-
-if changed_files:
-    printv("{} changed files".format(len(changed_files)))
-
-    for file in changed_files:
-        printv(file)
-
-        if file.lower().endswith("pom.xml"):
-            if not file.lower().startswith("bmc-codegen/bmc-"):
-                pom_file_change = True
-                printv("pom file change in {}".format(file))
-        for d in hand_written_directories:
-            if file.lower().startswith(d.lower()):
-                hand_written_change = True
-                printv("Change in hand-written directory in {}".format(file))
-
-        m = re.search(r'^.*bmc-([^/]*)', file)
-        if m:
-            module_name = m.group(0)
-            if module_name not in changed_modules:
-                changed_modules.append(module_name)
-        else:
-            root_module_changed = True
-else:
-    printv("Truncated response from Bitbucket, building everything")
-    truncated = True
-
-if pom_file_change or hand_written_change or truncated:
-    print('dev')
-else:
-    print('quick')
-
-if args.changed_modules_output_file:
-    with open(args.changed_modules_output_file, 'w') as writer:
-        if not truncated and not root_module_changed and changed_modules:
-            changed_modules_output = "--projects {}".format(",".join(changed_modules))
-            printv(changed_modules_output)
-            writer.write(changed_modules_output)
-        else:
-            printv("Not writing individual changed modules, truncated? {}, root module changed? {}, changed_modules? {}".format(
-                truncated, root_module_changed, changed_modules))
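-
-# For example (hypothetical PR diff): if the only changed file is
-# "bmc-core/src/main/java/com/oracle/bmc/core/VirtualNetworkClient.java", the
-# script prints 'quick' and writes "--projects bmc-core" to the output file;
-# any pom.xml change or a change under a hand-written directory such as
-# "bmc-common/" makes it print 'dev' instead.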
{}".format( - truncated, root_module_changed, changed_modules)) diff --git a/scripts/auto_gen_utils/team_city_scripts/java/determine_codegen_projects_from_commit.py b/scripts/auto_gen_utils/team_city_scripts/java/determine_codegen_projects_from_commit.py deleted file mode 100644 index 641405a01b..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/java/determine_codegen_projects_from_commit.py +++ /dev/null @@ -1,102 +0,0 @@ -from __future__ import print_function -import argparse -import os -import urllib3 -import re -import ssl -import traceback - -import shared.bitbucket_utils # noqa: ignore=F402 - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -verbose = False -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - - -def printv(str): - global verbose - if verbose: - print(str) - - -def get_changed_files_for_commit(commit_hash, commit_path): - try: - pr_diff = shared.bitbucket_utils.get_commit_diff("SDK", "java-sdk", commit_hash, commit_path) - json = pr_diff.json() - printv(json) - if json['truncated']: - printv("Diff for {} is truncated".format(commit_hash)) - return None - - files = [] - - for diff in json['diffs']: - if 'destination' in diff: - diff_destination = diff['destination'] - if 'toString' in diff_destination: - diff_file = diff_destination['toString'] - printv("Diff file {}".format(diff_file)) - files.append(diff_file) - - return files - except Exception as e: - printv("Failed to get diff for {}".format(commit_hash)) - printv("type error: {}".format(str(e))) - printv(traceback.format_exc()) - return None - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Determine the changed codegen projects to be used for generation') -parser.add_argument('--commit', required=True, help="The commit hash with the codegen pom.xml changes") -parser.add_argument('--commit-path', required=False, help="Subpath for the commit diff, e.g. 'bmc-codegen'") -parser.add_argument('--changed-modules-output-file', required=False, help="If provided, the changed modoules will be written to this file") -parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output') - -args = parser.parse_args() -shared.bitbucket_utils.setup_bitbucket(args) - -if args.verbose: - verbose = True - -changed_files = get_changed_files_for_commit(args.commit, args.commit_path) - -should_generate_everything = False -truncated = False - -changed_modules = [] - -if changed_files: - printv("{} changed files".format(len(changed_files))) - - for file in changed_files: - printv(file) - - if file.lower().startswith("bmc-codegen/bmc-") and file.lower().endswith("pom.xml"): - m = re.search(r'^.*bmc-([^/]*)', file) - if m: - module_name = m.group(0) - if module_name not in changed_modules: - changed_modules.append(module_name) - printv("codegen pom file change in {}".format(file)) - else: - printv("couldn't extract codegen pom module for file change in {}, generating everything".format(file)) - should_generate_everything = True -else: - printv("Truncated response from Bitbucket, generating everything") - truncated = True - -changed_modules_output = "" -if not truncated and not should_generate_everything and changed_modules: - changed_modules_output = "--projects {}".format(",".join(changed_modules)) - print(changed_modules_output) -else: - printv("Not writing individual changed modules, truncated? 
diff --git a/scripts/auto_gen_utils/team_city_scripts/java/determine_full_version.py b/scripts/auto_gen_utils/team_city_scripts/java/determine_full_version.py
deleted file mode 100755
index 25bd428fd9..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/java/determine_full_version.py
+++ /dev/null
@@ -1,130 +0,0 @@
-from __future__ import print_function
-import argparse
-import os
-import requests
-import urllib3
-import ssl
-import hashlib
-import re
-import ntpath
-
-GROUP_ID = "com.oracle.oci.sdk"
-ARTIFACT_ID = "oci-java-sdk-dist"
-ARTIFACT_TYPE = "zip"
-
-urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
-verbose = False
-if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)):
-    ssl._create_default_https_context = ssl._create_unverified_context
-
-
-def printv(str):
-    global verbose
-    if verbose:
-        print(str)
-
-
-# Unfortunately, this needs to be authenticated
-def get_artifact_sha256(auth, build_name, build_number, group_id, artifact_id, version, artifact_type):
-    requested_module = "{}:{}:{}".format(group_id, artifact_id, version)
-    url = "https://artifactory.oci.oraclecorp.com/api/build/{}/{}".format(build_name, build_number)
-    printv("URL: {}".format(url))
-
-    r = requests.get(url, verify=False, auth=auth)
-
-    modules = r.json()['buildInfo']['modules']
-
-    for module in modules:
-        module_id = module['id']
-        if module_id == requested_module:
-            artifacts = module['artifacts']
-            for artifact in artifacts:
-                if artifact['type'] == artifact_type:
-                    sha256 = artifact['sha256']
-                    printv('Found artifact "{}", type "{}": sha256 is "{}"'.format(module_id, artifact_type, sha256))
-                    return sha256
-
-    printv('Not found: artifact "{}", type "{}"'.format(requested_module, artifact_type))
-
-    return None
-
-
-def get_file_sha1(artifact_file, block_size=65536):
-    sha1 = hashlib.sha1()
-    with open(artifact_file, 'rb') as f:
-        for block in iter(lambda: f.read(block_size), b''):
-            sha1.update(block)
-    return sha1.hexdigest()
-
-
-def get_snapshot_version_with_sha1(group_id, artifact_id, version, artifact_type, sha1):
-    url = "https://artifactory.oci.oraclecorp.com/api/storage/opc-public-sdk-snapshot-maven-local/{}/{}/{}".format(group_id.replace('.','/'), artifact_id, version)
-    r = requests.get(url, verify=False)
-    children = r.json()['children']
-
-    for child in reversed(children):
-        if not child['folder']:
-            candidate = child['uri'][1:]
-            if (candidate.startswith(artifact_id) and candidate.endswith(artifact_type)):
-                m = re.search(artifact_id + "-(.*)\." + artifact_type, candidate)
-                candidate_version = m.group(1)
-                printv("Candidate version: {}".format(candidate_version))
-
-                candidate_url = url + "/" + candidate
-                r = requests.get(candidate_url, verify=False)
-
-                candidate_sha1 = r.json()['checksums']['sha1']
-                printv("\tCandidate sha1: {}".format(candidate_sha1))
-                if candidate_sha1 == sha1:
-                    return candidate_version
-
-    return None
-
-
-#
-# Parameters variable set up
-#
-parser = argparse.ArgumentParser(description='Determine the full version of a Java SDK build just deployed to Artifactory')
-parser.add_argument('--group-id', default=GROUP_ID, help='Group id of the artifact, default is {}'.format(GROUP_ID))
-parser.add_argument('--artifact-id', default=ARTIFACT_ID, help='Artifact id of the artifact, default is {}'.format(ARTIFACT_ID))
-parser.add_argument('--version', required=False, help='Version that was built (e.g. "1.2.3" or "1.2.3-SNAPSHOT")')
-parser.add_argument('--artifact-type', default=ARTIFACT_TYPE, help='Artifact type of the artifact, default is {}'.format(ARTIFACT_TYPE))
-parser.add_argument('--file', required=True, help='The built {}:{} artifact whose full version should be retrieved'.format(GROUP_ID, ARTIFACT_ID))
-parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output')
-
-args = parser.parse_args()
-
-if args.verbose:
-    verbose = True
-
-group_id = args.group_id
-artifact_id = args.artifact_id
-artifact_type = args.artifact_type
-artifact_file = args.file
-
-if args.version:
-    version = args.version
-else:
-    file_name = ntpath.basename(artifact_file)
-    if not file_name.startswith(artifact_id):
-        print('File name does not start with "{}", cannot autodetect version'.format(artifact_id))
-        exit(1)
-    if not file_name.endswith(artifact_type):
-        print('File name does not end with "{}", cannot autodetect version'.format(artifact_type))
-        exit(1)
-    m = re.search(artifact_id + "-(.*)\." + artifact_type, file_name)
-    version = m.group(1)
-    printv('Autodetected version "{}"'.format(version))
-
-if not version.upper().endswith("-SNAPSHOT"):
-    # if it's not a snapshot, the full version is just the version that was built
-    print(version)
-    exit(0)
-
-printv('Recognizing version "{}" as snapshot, need to determine full version'.format(version))
-
-requested_sha1 = get_file_sha1(artifact_file)
-printv('sha1 of file "{}" is: {}'.format(artifact_file, requested_sha1))
-
-timed_snapshot = get_snapshot_version_with_sha1(group_id, artifact_id, version, artifact_type, requested_sha1)
-print(timed_snapshot)
diff --git a/scripts/auto_gen_utils/team_city_scripts/java/public/1_setup_java_public_branch.sh b/scripts/auto_gen_utils/team_city_scripts/java/public/1_setup_java_public_branch.sh
deleted file mode 100755
index 64147d088d..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/java/public/1_setup_java_public_branch.sh
+++ /dev/null
@@ -1,41 +0,0 @@
-set -e
-set -x
-
-echo Creating venv to install sdk locally
-. /opt/odo/tox_sic/venv/bin/activate
-virtualenv .sdk-venv
-. .sdk-venv/bin/activate
-
-# must disable StrictHostKeyChecking so that we don't get an interactive
-# prompt later asking to confirm the host key
-# Must disable -e (fail on non-zero exit code) because ssh returns 255
-set +e
-ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999
-set -e
-
-# Old way of doing that:
-# ls -la ~/.ssh
-#
-# cat ~/.ssh/config
-#
-# printf "\n\nHost * \n    StrictHostKeyChecking no\n" >> ~/.ssh/config
-#
-# cat ~/.ssh/config
-
-## AUTOGEN ##
-cd autogen
-pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt --quiet
-cd ..
-
-# run the setup job for step 2 in ./autogen
-cd autogen
-ls -la
-pwd
-
-ls -la
-
-# checks out the branch with the same name as the SDK branch that triggered this build
-python ./2_pre_generation_set_up.py --build-id $BUILD_ID --tool JavaSDK
-
-# back out into root directory
-cd ..
diff --git a/scripts/auto_gen_utils/team_city_scripts/java/public/3_record_sdk_generation_success.sh b/scripts/auto_gen_utils/team_city_scripts/java/public/3_record_sdk_generation_success.sh
deleted file mode 100755
index 97b53b5562..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/java/public/3_record_sdk_generation_success.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-set -x
-
-pwd
-ls -la
-touch java_sdk_generation_success.txt
diff --git a/scripts/auto_gen_utils/team_city_scripts/java/public/5_record_sdk_build_success.sh b/scripts/auto_gen_utils/team_city_scripts/java/public/5_record_sdk_build_success.sh
deleted file mode 100755
index ff499a72f3..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/java/public/5_record_sdk_build_success.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-set -x
-
-pwd
-ls -la
-touch java_sdk_build_success.txt
diff --git a/scripts/auto_gen_utils/team_city_scripts/java/public/6_report_gen_and_build_status.sh b/scripts/auto_gen_utils/team_city_scripts/java/public/6_report_gen_and_build_status.sh
deleted file mode 100755
index be6456b7a4..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/java/public/6_report_gen_and_build_status.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-set -e
-set -x
-
-pwd
-ls -la
-
-# activate venv from step 1 to run below python script
-ls -la ./.sdk-venv
-. ./.sdk-venv/bin/activate
-
-cd java-sdk
-if [[ `git branch|grep "\*"|grep "bulk"` ]]; then
-    echo "Found bulk preview"
-    export BUILD_TYPE_ARG="--build-type bulk_pending_merge_public"
-else
-    export BUILD_TYPE_ARG="--build-type individual_public"
-fi
-cd ..
-
-# report generation and build status for java-sdk
-# this step will run no matter what happened before it so it can report success / failure to the JIRA tickets
-cd autogen
-python ./3_report_generation_status.py --build-id $BUILD_ID --tool JavaSDK $BUILD_TYPE_ARG --optional-file-for-dexreq-ticket ../specvalidator.txt
-cd ..
-
-ls -la ./java-sdk
diff --git a/scripts/auto_gen_utils/team_city_scripts/java/public/7_commit_generated_changes.sh b/scripts/auto_gen_utils/team_city_scripts/java/public/7_commit_generated_changes.sh
deleted file mode 100755
index 8debb78c76..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/java/public/7_commit_generated_changes.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-set -x
-set -e
-
-# configure git for this commit
-git config --global user.email "$GIT_USER_EMAIL"
-git config --global user.name "$GIT_USER_NAME"
-
-# activate venv from step 1 to run below python script
-ls -la ./.sdk-venv
-. ./.sdk-venv/bin/activate
-
-# get the branch we're on
-cd java-sdk
-branch=`git branch|grep "^\*"|cut -c3-`
-if [[ ${branch} == *"bulk"* ]]; then
-    build_type="bulk_pending_merge_public"
-else
-    build_type="individual_public"
-fi
-cd ..
-
-# commit changes from generation and build
-cd autogen
-ls -la
-python ./4_on_generation_complete.py --build-id $BUILD_ID --tool JavaSDK --build-type ${build_type}
-cd ..
diff --git a/scripts/auto_gen_utils/team_city_scripts/java/warn_about_backward_incompatible_changes.py b/scripts/auto_gen_utils/team_city_scripts/java/warn_about_backward_incompatible_changes.py
deleted file mode 100644
index 7434b12845..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/java/warn_about_backward_incompatible_changes.py
+++ /dev/null
@@ -1,338 +0,0 @@
-from __future__ import print_function
-import argparse
-import os
-import re
-import shutil
-import ssl
-import sys
-import urllib3
-import getpass
-import xml.etree.ElementTree as ET
-from filecmp import dircmp
-from glob import glob
-from git import Repo
-
-# Add the root of the package, two directories up, to the sys.path
-dir_path = os.path.dirname(os.path.realpath(__file__))
-sys.path.append(os.path.join(dir_path, '../..'))
-
-from shared.bitbucket_utils import setup_bitbucket, get_pullrequest, make_general_comment, clone_target_branch  # noqa: ignore=F402
-from add_or_update_scripts.add_or_update_spec_utils import parse_pom  # noqa: ignore=F402
-import shared.bitbucket_utils  # noqa: ignore=F402
-import util  # noqa:E402
-import config  # noqa:E402
-from shared.buildsvc_tc_compatibility import build_log_link
-
-
-KEEP_TEMP_FILES = True
-
-CUSTOM_JIRA_ISSUE_FIELDS = [config.CUSTOM_FIELD_ID_GROUP_ID, config.CUSTOM_FIELD_ID_ARTIFACT_ID]
-
-urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
-verbose = False
-if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)):
-    ssl._create_default_https_context = ssl._create_unverified_context
-
-
-def printv(str):
-    global verbose
-    if verbose:
-        print(str)
-
-
-def setup_auth():
-    global args, username, password, auth
-    username = args.username
-    if args.password is not None:
-        password = args.password
-    else:
-        password = getpass.getpass("LDAP password:")
-
-    auth = (username, password)
-
-
-def append_diff_file(diff_files, file):
-    if not file.endswith('.bak') and not file.endswith('.iml'):
-        diff_files.append(file)
-
-
-def get_diff_files(dcmp, diff_files=None):
-    if diff_files is None:
-        diff_files = []
-    for name in dcmp.diff_files:
-        append_diff_file(diff_files, os.path.join(dcmp.left, name))
-    for name in dcmp.left_only:
-        append_diff_file(diff_files, os.path.join(dcmp.left, name))
-    for name in dcmp.right_only:
-        append_diff_file(diff_files, os.path.join(dcmp.left, name))
-    for sub_dcmp in dcmp.subdirs.values():
-        get_diff_files(sub_dcmp, diff_files)
-
-    return diff_files
-
-
-def get_changed_files(java_sdk_dir, pr_id, base_dir):
-    should_clean_up = False
-    if not base_dir:
-        should_clean_up = not KEEP_TEMP_FILES
-        base_dir = clone_target_branch(pr_id, "java-sdk")
-    if not base_dir:
-        return None
-
-    dcmp = dircmp(java_sdk_dir, base_dir, ['RCS', 'CVS', 'tags', '.git', 'target',
-                                           'pom.xml.versionsBackup', '.DS_Store', '.idea'])
-    files = get_diff_files(dcmp)
-
-    prefix = os.path.join(java_sdk_dir, "")
-
-    files = [f[len(prefix):] if f.startswith(prefix) else f for f in files]
-
-    printv("Files that differ:")
-    for f in files:
-        printv(f)
-
-    if should_clean_up:
-        shutil.rmtree(base_dir, ignore_errors=True, onerror=None)
-
-    return files
-
-
-def find_files_with_filter(dir, filter):
-    """
-    Search a path defined by dir for files with a name that matches filter
-
-    @param str dir:
-        Path to search
-
-    @param str filter:
-        filter for files to match on. Example 'clirr-*.txt'
-    """
-    files = [os.path.abspath(y) for x in os.walk(dir) for y in glob(os.path.join(x[0], filter))]
-
-    return files
-
-
-def find_clirr_files(dir):
-    clirr_files = find_files_with_filter(dir, 'clirr-*.txt')
-
-    return clirr_files
-
-
-def find_pom_files(dir):
-    pom_files = find_files_with_filter(dir, "pom.xml")
-
-    return pom_files
-
-
-def get_project(dir, group_id, artifact_id):
-    """
-    This function searches all the pom files contained in 'dir' for the group_id
-    and artifact_id
-    """
-
-    ns = {"ns":"http://maven.apache.org/POM/4.0.0"}
-
-    # allow default namespace for output, don't print ns0: prefixes everywhere
-    ET.register_namespace('',"http://maven.apache.org/POM/4.0.0")
-
-    group_id_xpath = ".//ns:properties//ns:codegen.artifactory.groupId"
-    artifact_id_xpath = ".//ns:properties//ns:codegen.artifactory.artifactId"
-
-    pom_files = find_pom_files(dir)
-
-    file_name = None
-    project = None
-
-    for pom_file in pom_files:
-        try:
-            pom = parse_pom(pom_file)
-            property = pom.findall(group_id_xpath, ns)[0]
-            pom_group_id = property.text
-            property = pom.findall(artifact_id_xpath, ns)[0]
-            pom_artifact_id = property.text
-        except IndexError:
-            # An IndexError means findall did not return anything
-            # for the path, which means this is not the pom file
-            # we want.
-            continue
-
-        if group_id == pom_group_id and artifact_id == pom_artifact_id:
-            file_name = pom_file
-            break
-
-    # If file_name is not None it should be in the form of
-    # "/folders/java-sdk/bmc-codegen/{project}-codegen/pom.xml" where {project}
-    # is the part that we want.
-    if file_name:
-        head, tail = os.path.split(file_name)
-        if head.endswith("-codegen"):
-            path_parts = head.split(os.sep)
-            project = path_parts[-1][:-len("-codegen")]
-
-    return project
-
-
-#
-# Parameters variable set up
-#
-parser = argparse.ArgumentParser(description='Warn if there are clirr errors in packages that are changed. JIRA_USERNAME, JIRA_PASSWORD, BITBUCKET_USERNAME, and BITBUCKET_PASSWORD are expected env vars.')
-parser.add_argument('--build-id', required=False, help="The TeamCity build id for the build that is running this script. This is used to update the relevant Bitbucket PRs with links to the TeamCity build")
-parser.add_argument('--build-branch', required=True, help="The value of the teamcity.build.branch variable")
-parser.add_argument('--username', required=True, help='LDAP username ("firstname.lastname@oracle.com"; within TeamCity, use "%%system.teamcity.auth.userId%%")')
-parser.add_argument('--password', required=False, help='LDAP password (within TeamCity, use "%%system.teamcity.auth.password%%")')
-parser.add_argument('--java-sdk-dir', required=False, help='Directory of the Java SDK')
-parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output')
-parser.add_argument('--base-dir', required=False, help='The directory that has the Git checkout of the target branch')
-parser.add_argument('--keep-temp-files', action='store_true', default=False, required=False, help='Keep temporary files')
-parser.add_argument('--dry-run', action='store_true', default=False, required=False, help='Dry-run, do not post comment to Bitbucket')
-parser.add_argument('--build-type', choices=['DEXREQ', 'PR'], default='PR', help='Build type for the TeamCity job')
-
-args = parser.parse_args()
-setup_bitbucket(args)
-
-if args.dry_run:
-    dry_run = True
-else:
-    dry_run = False
-
-if args.verbose:
-    verbose = True
-    shared.bitbucket_utils.verbose = True
-
-if args.java_sdk_dir:
-    java_sdk_dir = os.path.abspath(args.java_sdk_dir)
-else:
-    java_sdk_dir = os.getcwd()
-
-if args.keep_temp_files:
-    KEEP_TEMP_FILES = True
-
-clirr_files = find_clirr_files(java_sdk_dir)
-if not clirr_files:
-    print("No clirr*.txt files found. Nothing to warn about.")
-    sys.exit(0)
-
-comment_destination = "Bitbucket PR"
-if args.build_type == "DEXREQ":
-    comment_destination = "DEXREQ ticket"
-
-pr_id = args.build_branch
-if args.build_type == 'PR':
-    try:
-        # If the teamcity.build.branch variable is just something like "1234", then this is a
-        # validation build for pull request "1234"
-        pr_id = int(args.build_branch)
-    except ValueError:
-        print("Not a pull request validation build. Not warning.")
-        sys.exit(0)
-    setup_auth()
-
-warn_about_clirr_files = []
-
-changed_files = []
-issue = None
-if args.build_type == 'DEXREQ':
-    # Get DEXREQ ticket from Git log
-    repo = Repo(java_sdk_dir)
-    last_commit_message = repo.git.log(n=1, format='%s%n%b')
-    dexreq_issues = util.parse_issue_keys_from_commit_message(last_commit_message)
-
-    # Get artifact id and group id from the first DEXREQ Ticket
-    issue = None
-    project = None
-    if dexreq_issues and len(dexreq_issues) > 0:
-        issue = util.get_dexreq_issue(dexreq_issues[0], fields=(CUSTOM_JIRA_ISSUE_FIELDS))
-
-    if issue:
-        group_id = getattr(issue.fields, config.CUSTOM_FIELD_ID_GROUP_ID)
-        artifact_id = getattr(issue.fields, config.CUSTOM_FIELD_ID_ARTIFACT_ID)
-        project = get_project(os.path.join(java_sdk_dir, "bmc-codegen"),
-                              group_id,
-                              artifact_id)
-
-    if project:
-        print("Adding {0} to changed files".format(project))
-        changed_files.append(project)
-    else:
-        print("No project found, warning about all backward incompatibilities!")
-
-else:
-    try:
-        changed_files = get_changed_files(java_sdk_dir, pr_id, args.base_dir)
-    except Exception as e:
-        print(e)
-        print("Could not get changed files, warning about all backward incompatibilities!")
-
-if not changed_files:
-    # No changes, let's warn about everything to be safe.
-    # This can happen when a job failed, and then we hit "Run" again
-    warn_about_clirr_files = clirr_files
-else:
-    # Find the clirr files that correspond to changed files
-    for clirr_file in clirr_files:
-        printv("Checking {}".format(clirr_file))
-        project_dir = os.path.abspath(os.path.join(clirr_file, os.pardir, os.pardir))
-
-        prefix = os.path.join(java_sdk_dir, "")
-        if project_dir.startswith(prefix):
-            project_dir = project_dir[len(prefix):]
-
-        # See if any changed files start with this
-        for cf in changed_files:
-            if cf.startswith(project_dir):
-                printv("\tFound changed file in this module: {}".format(cf))
-                warn_about_clirr_files.append(clirr_file)
-                break
-
-messages_for_modules = {}
-
-for file in warn_about_clirr_files:
-    printv("Emit errors from {}".format(file))
-
-    m = re.match(r"^.*/clirr-(.*)\.txt$", file)
-    if m:
-        module_name = m.group(1)
-    else:
-        module_name = "Unknown module"
-
-    with open(file, 'r') as content_file:
-        content = content_file.read()
-        if content and len(content) > 0:
-            if module_name in messages_for_modules:
-                text = messages_for_modules[module_name] + "\n"
-            else:
-                text = ""
-            text = text + "\n" + content
-            messages_for_modules[module_name] = text
-
-if messages_for_modules:
-    text = "Clirr detected backward incompatibilities possibly related to this change, compared to the latest release version of the OCI Java SDK:\n\n"
-
-    if not changed_files:
-        text = text + "(Note: Could not detect changed files; including all backward incompatibilities to be safe.)\n\n"
-
-    for module, messages in messages_for_modules.items():
-        text = text + "{}:\n{}\n\n".format(module, messages)
-
-    if args.build_id:
-        text = text + "\n\nPlease use the information above to diagnose the problem. More information may also be available in the {build_log_link}.".format(
-            build_log_link=build_log_link(args.build_id, text="build log"))
-
-    if text:
-        text = text.encode('utf8')
-
-        print(text)
-
-        if not dry_run:
-            if args.build_type == "DEXREQ":
-                if issue:
-                    print("Adding backwards incompatibility comment to {}".format(issue.key))
-                    util.add_jira_comment(issue.key, text)
-                    printv("Adding '{}' label to: {}".format(config.BACKWARD_INCOMPATIBLE_CHANGES_LABEL, issue.key))
-                    issue.add_field_value('labels', config.BACKWARD_INCOMPATIBLE_CHANGES_LABEL)
-                else:
-                    print("No DEXREQ ticket identified. 
Cannot add comment") - else: - make_general_comment("SDK", "java-sdk", pr_id, text) - else: - print("DRY-RUN: Not adding comment to {}".format(comment_destination)) -else: - print("Nothing to warn about!") diff --git a/scripts/auto_gen_utils/team_city_scripts/java/warn_about_source_formatting.py b/scripts/auto_gen_utils/team_city_scripts/java/warn_about_source_formatting.py deleted file mode 100644 index e62a2283e2..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/java/warn_about_source_formatting.py +++ /dev/null @@ -1,118 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import print_function -import argparse -import os -import ssl -import sys -import urllib3 -from git import Repo - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -from shared.bitbucket_utils import setup_bitbucket, make_general_comment # noqa: ignore=F402 -import shared.bitbucket_utils # noqa: ignore=F402 -from shared.buildsvc_tc_compatibility import build_log_link - - -KEEP_TEMP_FILES = True - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -verbose = False -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - - -def printv(str): - global verbose - if verbose: - print(str) - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Warn if source formatting is incorrect (really, warn if there are changed files in this git repository).') -parser.add_argument('--build-id', required=False, help="The TeamCity build id for the build that is running this script. This is used to update the relevant Bitbucket PRs with links to the TeamCity build") -parser.add_argument('--build-branch', required=True, help="The value of the teamcity.build.branch variable") -parser.add_argument('--java-sdk-dir', required=False, help='Directory of the Java SDK') -parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output') -parser.add_argument('--dry-run', action='store_true', default=False, required=False, help='Dry-run, do not post comment to Bitbucket') - -args = parser.parse_args() -setup_bitbucket(args) - -if args.dry_run: - dry_run = True -else: - dry_run = False - -if args.verbose: - verbose = True - shared.bitbucket_utils.verbose = True - -if args.java_sdk_dir: - java_sdk_dir = os.path.abspath(args.java_sdk_dir) -else: - java_sdk_dir = os.getcwd() - -pr_id = None -try: - # If the teamcity.build.branch variable is just something like "1234", then this is a - # validation build for pull request "1234" - pr_id = int(args.build_branch) -except ValueError: - print("Not a pull request validation build. Not warning.") - sys.exit(0) - -repo = Repo.init(java_sdk_dir) -git_status = repo.git.status() -git_diff = repo.git.diff('--name-only', '--ignore-submodules') - -current_branch = [branch.strip()[2:] for branch in repo.git.branch().split('\n') if branch.startswith('* ')][0] -printv("source branch: {}".format(current_branch)) - -printv("git diff:\n{}".format(git_diff)) - -printv("git status: '{}'".format(git_status)) - -could_fix = False -if 'nothing to commit' not in git_status and "nothing added to commit" not in git_status: - # Try to fix it - - try: - repo.git.add("*.java") - repo.git.add("*.properties") - repo.git.commit("-m", "Committing source formatting changes. 
Automatically performed, see build log:\n\n{build_log_link}".format(
-            build_log_link=build_log_link(args.build_id, text="build log")))
-        if dry_run:
-            print('DRY-RUN: not pushing to branch {}'.format(current_branch))
-        else:
-            repo.git.push('-u', 'origin', current_branch)
-
-        could_fix = True
-        text = "There were source formatting problems, but we automatically committed the changes to your branch. Note that this may kick off another validation build. Changes made:\n\n" + git_diff + "\n\nMore information may also be available in the {build_log_link}.".format(build_log_link=build_log_link(args.build_id, text="build log"))
-    except Exception as e:
-        print('EXCEPTION: {}'.format(str(e)))
-        print('Failed to push source formatting changes.')
-
-    if not could_fix:
-        text = "There are source formatting problems in the files below. Run `mvn process-sources -Pdev`.\n\n" + git_diff + "\n\nDon't want to do this by hand anymore? Give the 'DEXREQ Automation' user write access to your repo ([similar instructions here](https://confluence.oci.oraclecorp.com/pages/viewpage.action?spaceKey=DEX&title=Pull+Request+Validation+Builds+for+the+Testing+Service#PullRequestValidationBuildsfortheTestingService-WhenUsingYourOwnFork))."
-
-    if args.build_id:
-        text = text + "\n\nPlease use the information above to diagnose the problem. More information may also be available in the {build_log_link}.".format(
-            build_log_link=build_log_link(args.build_id, text="build log"))
-
-    print(text)
-
-    if not dry_run:
-        make_general_comment("SDK", "java-sdk", pr_id, text)
-    else:
-        print("DRY-RUN: Not making Bitbucket comment.")
-else:
-    print("Nothing to warn about!")
-    could_fix = True
-
-if not could_fix:
-    sys.exit(1)
diff --git a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/check_for_pom_version_mismatch.py b/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/check_for_pom_version_mismatch.py
deleted file mode 100644
index fa8da366e7..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/check_for_pom_version_mismatch.py
+++ /dev/null
@@ -1,110 +0,0 @@
-import argparse
-import os
-import xml.etree.ElementTree as ET
-import sys
-from packaging import version
-
-# Add the root of the package, two directories up, to the sys.path
-dir_path = os.path.dirname(os.path.realpath(__file__))
-sys.path.append(os.path.join(dir_path, '../..'))
-
-from shared.bitbucket_utils import setup_bitbucket, make_general_comment, clone_target_branch  # noqa: ignore=F402
-import shared.bitbucket_utils  # noqa: ignore=F402
-
-ns = {"ns":"http://maven.apache.org/POM/4.0.0"}
-
-
-def get_pom_version(sdk_dir):
-    pom_path = os.path.join(sdk_dir, 'pom.xml')
-    pom = ET.parse(pom_path)
-    xpath = './ns:version'
-    return pom.find(xpath, ns).text
-
-
-def truncate_pom_version(pom_version):
-    dash_pos = pom_version.find("-")
-    if dash_pos >= 0:
-        pom_version = pom_version[:dash_pos]
-    return pom_version
-
-
-#
-# Parameters variable set up
-#
-parser = argparse.ArgumentParser(description='Warn if the pom version does not match the target')
-parser.add_argument('--build-branch', required=False, help="The value of the teamcity.build.branch variable")
-parser.add_argument('--username', required=False, help='LDAP username ("firstname.lastname@oracle.com"; within TeamCity, use "%%system.teamcity.auth.userId%%")')
-parser.add_argument('--password', required=False, help='LDAP password (within TeamCity, use "%%system.teamcity.auth.password%%")')
-parser.add_argument('--source_branch_root_dir', required=False, help='Root directory of the source 
branch') -parser.add_argument('--target_branch_root_dir', required=False, help='Root directory of the target branch') -parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output') -parser.add_argument('--dry-run', action='store_true', default=False, required=False, help='Dry-run, do not post comment to Bitbucket') - -verbose = False -args = parser.parse_args() -setup_bitbucket(args) - -if args.build_branch: - pr_id = None - try: - # If the teamcity.build.branch variable is just something like "1234", then this is a - # validation build for pull request "1234" - pr_id = int(args.build_branch) - except ValueError: - print("Not a pull request validation build.") - sys.exit(0) - -dry_run = True if args.dry_run else False - -if args.verbose: - verbose = True - shared.bitbucket_utils.verbose = True - -source_branch_root_dir = os.path.abspath(args.source_branch_root_dir) if args.source_branch_root_dir else os.getcwd() -target_branch_root_dir = os.path.abspath(args.target_branch_root_dir) if args.target_branch_root_dir else clone_target_branch(pr_id, "legacy-java-sdk") - -source_pom_version = get_pom_version(source_branch_root_dir) -trunc_source_pom_version = source_pom_version -target_pom_version = get_pom_version(target_branch_root_dir) -trunc_target_pom_version = target_pom_version -if verbose: - print("Source branch pom version is {source_pom_version}".format(source_pom_version=source_pom_version)) - print("Target branch pom version is {target_pom_version}".format(target_pom_version=target_pom_version)) - - -# Ensure that pom versions are not mismatched between public and preview. -if '-preview1' in source_pom_version and '-preview1' not in target_pom_version: - text = "Source branch pom version ({source_pom_version}) is for preview while the target branch version ({target_pom_version}) is not.".format(source_pom_version=source_pom_version, target_pom_version=target_pom_version) - if verbose: - print(text) - if not dry_run: - make_general_comment("SDK", "legacy-java-sdk", pr_id, text) - sys.exit(1) - - -if '-preview1' in target_pom_version and '-preview1' not in source_pom_version: - text = "Source branch pom version ({source_pom_version}) is not for preview while the target branch version ({target_pom_version}) is.".format(source_pom_version=source_pom_version, target_pom_version=target_pom_version) - if verbose: - print(text) - if not dry_run: - make_general_comment("SDK", "legacy-java-sdk", pr_id, text) - sys.exit(1) - -# Truncate any trailing text after the version numbers for comparison -trunc_source_pom_version = truncate_pom_version(source_pom_version) -trunc_target_pom_version = truncate_pom_version(target_pom_version) - -if verbose: - print("Truncated source branch pom version is {trunc_source_pom_version}".format(trunc_source_pom_version=trunc_source_pom_version)) - print("Truncated target branch pom version is {trunc_target_pom_version}".format(trunc_target_pom_version=trunc_target_pom_version)) - - -if version.parse(trunc_source_pom_version) < version.parse(trunc_target_pom_version): - text = "The pom version of the source branch ({source_pom_version}) is out of date with the target branch ({target_pom_version}). 
Please re-fetch from the remote and rebase your changes on top of the target branch.".format(source_pom_version=source_pom_version, target_pom_version=target_pom_version) - if verbose: - print(text) - if not dry_run: - make_general_comment("SDK", "legacy-java-sdk", pr_id, text) - sys.exit(1) -elif verbose: - print("The source and target branch pom versions are in sync.") diff --git a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/checkout_source_branch.py b/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/checkout_source_branch.py deleted file mode 100644 index 5f76cb1c8e..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/checkout_source_branch.py +++ /dev/null @@ -1,88 +0,0 @@ -from __future__ import print_function -import argparse -import os -import ssl -import sys -import urllib3 -import re -from git import Repo - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -from shared.bitbucket_utils import setup_bitbucket, make_general_comment, get_pullrequest, get_pr_source_branch # noqa: ignore=F402 -import shared.bitbucket_utils # noqa: ignore=F402 - -KEEP_TEMP_FILES = True - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -verbose = False -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - - -def printv(str): - global verbose - if verbose: - print(str) - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Check out the source branch, as indicated in the PR. Warning: this is destructive to the contents in the Legacy Java SDK directory.') -parser.add_argument('--build-id', required=False, help="The TeamCity build id for the build that is running this script. This is used to update the relevant Bitbucket PRs with links to the TeamCity build") -parser.add_argument('--build-branch', required=True, help="The value of the teamcity.build.branch variable") -parser.add_argument('--legacy-java-sdk-dir', required=False, help='Directory of the Legacy Java SDK') -parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output') - -args = parser.parse_args() -setup_bitbucket(args) - -if args.verbose: - verbose = True - shared.bitbucket_utils.verbose = True - -if args.legacy_java_sdk_dir: - legacy_java_sdk_dir = os.path.abspath(args.legacy_java_sdk_dir) -else: - legacy_java_sdk_dir = os.getcwd() - -pr_id = None -try: - # If the teamcity.build.branch variable is just something like "1234", then this is a - # validation build for pull request "1234" - pr_id = int(args.build_branch) -except ValueError: - print("Not a pull request validation build. 
Not warning.") - sys.exit(0) - -pr = get_pullrequest("SDK", "legacy-java-sdk", pr_id) -printv(pr.text) - -try: - repo = Repo.init(legacy_java_sdk_dir) - - current_commit = None - current_branch = [branch.strip()[2:] for branch in repo.git.branch().split('\n') if branch.startswith('* ')][0] - printv("current branch: {}".format(current_branch)) - result = re.search(r'\(HEAD detached at ([^)]*)\)', current_branch) - if not result: - # this is what it looks like in Team City - result = re.search(r'\(detached from ([^)]*)\)', current_branch) - if result: - current_commit = result.group(1) - - source_branch = get_pr_source_branch(pr) - printv("source branch: {}".format(source_branch)) - - repo.git.fetch("origin") - repo.git.checkout(source_branch) - - if current_commit: - repo.git.reset('--hard', current_commit) - printv("resetting to commit: {}".format(current_commit)) -except Exception as e: - print('EXCEPTION: {}'.format(str(e))) - print('Failed to change to source branch.') diff --git a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/determine_build_profile.py b/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/determine_build_profile.py deleted file mode 100644 index ff49aa4aa8..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/determine_build_profile.py +++ /dev/null @@ -1,229 +0,0 @@ -from __future__ import print_function -import argparse -import os -import requests -import urllib3 -import re -import ssl -import sys -import traceback -from xml.etree import ElementTree -import getpass - -import shared.bitbucket_utils # noqa: ignore=F402 - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -verbose = False -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - - -def printv(str): - global verbose - if verbose: - print(str) - - -def setup_auth(): - global args, username, password, auth - username = args.username - if args.password is not None: - password = args.password - else: - password = getpass.getpass("LDAP password:") - - auth = (username, password) - - -def get_changed_files_for_change(change_id): - url = "https://teamcity.oci.oraclecorp.com/app/rest/changes/id:{}".format(change_id) - r = requests.get(url, verify=False, auth=auth) - - root = ElementTree.fromstring(r.content) - files_node = root.find("files") - - files = [] - - for child in files_node: - if child.tag.lower() == "file": - files.append(child.attrib['file']) - - return files - - -def get_commit_hashes(pr_id): - pr_commits = shared.bitbucket_utils.get_pullrequest_commits("SDK", "legacy-java-sdk", pr_id) - - commit_hashes = [] - - for change in pr_commits.json()['values']: - commit_hashes.append(change['id']) - printv("Commit hash {}".format(change['id'])) - - return commit_hashes - - -def get_changed_files(build_id, pr_id): - commit_hashes = None - if pr_id: - commit_hashes = get_commit_hashes(pr_id) - - url = "https://teamcity.oci.oraclecorp.com/app/rest/changes?locator=build:(id:{})".format(build_id) - r = requests.get(url, verify=False, auth=auth) - - root = ElementTree.fromstring(r.content) - - files = [] - - for child in root: - if child.tag.lower() == "change": - if commit_hashes and child.attrib['version'] not in commit_hashes: - continue - change_id = child.attrib['id'] - files.extend(get_changed_files_for_change(change_id)) - - return files - - -def get_changed_files_for_pr(pr_id): - try: - pr_diff = 
shared.bitbucket_utils.get_pullrequest_diff("SDK", "legacy-java-sdk", pr_id) - json = pr_diff.json() - if json['truncated']: - printv("Diff for {} is truncated".format(pr_id)) - return None - - files = [] - - for diff in json['diffs']: - if 'destination' in diff: - diff_destination = diff['destination'] - if 'toString' in diff_destination: - diff_file = diff_destination['toString'] - printv("Diff file {}".format(diff_file)) - files.append(diff_file) - - return files - except Exception as e: - printv("Failed to get diff for {}".format(pr_id)) - printv("type error: {}".format(str(e))) - printv(traceback.format_exc()) - return None - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Determine the recommended build profile for the change being built: "dev" or "quick"') -parser.add_argument('--build-id', required=False, help='TeamCity build id') -parser.add_argument('--username', required=False, help='LDAP username ("firstname.lastname@oracle.com"; within TeamCity, use "%%system.teamcity.auth.userId%%")') -parser.add_argument('--password', required=False, help='LDAP password (within TeamCity, use "%%system.teamcity.auth.password%%")') -parser.add_argument('--build-branch', required=False, help="The value of the teamcity.build.branch variable") -parser.add_argument('--changed-modules-output-file', required=False, help="If provided, the changed modules will be written to this file") -parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output') - -args = parser.parse_args() -shared.bitbucket_utils.setup_bitbucket(args) - -if args.verbose: - verbose = True - -pr_id = None -if args.build_branch: - try: - # If the teamcity.build.branch variable is just something like "1234", then this is a - # validation build for pull request "1234" - pr_id = int(args.build_branch) - except ValueError: - print("Not a pull request validation build.") - sys.exit(0) - -build_id = args.build_id - -if pr_id: - changed_files = get_changed_files_for_pr(pr_id) -else: - if not args.build_id or not args.username: - print("--build-id and --username required if --build-branch is not set") - sys.exit(1) - - # Fall back to TeamCity diff - setup_auth() - changed_files = get_changed_files(build_id, pr_id) - -if not changed_files: - # No changes, let's rebuild everything to be safe.
- # This can happen when a job failed, and then we hit "Run" again - pom_file_change = True -else: - pom_file_change = False - -hand_written_directories = [ - "bmc-common/", - "bmc-smoketests/", - "bmc-addons/", - "bmc-circuitbreaker/", - "bmc-hand-written/", - "bmc-examples/", - "bmc-encryption/", - "bmc-objectstorage/bmc-objectstorage-extensions/", - "bmc-objectstorage/bmc-objectstorage-generated/src/main/java/com/oracle/bmc/objectstorage/internal/http/ObjectMetadataInterceptor.java", - "bmc-streaming/src/main/java/com/oracle/bmc/streaming/StreamClientBuilder.java", - "bmc-streaming/src/main/java/com/oracle/bmc/streaming/AbstractStreamBasedClientBuilder.java", - "bmc-streaming/src/main/java/com/oracle/bmc/streaming/StreamAsyncClientBuilder.java", - "bmc-keymanagement/src/main/java/com/oracle/bmc/keymanagement/KmsManagementClientBuilder.java", - "bmc-keymanagement/src/main/java/com/oracle/bmc/keymanagement/KmsManagementAsyncClientBuilder.java", - "bmc-keymanagement/src/main/java/com/oracle/bmc/keymanagement/KmsCryptoAsyncClientBuilder.java", - "bmc-keymanagement/src/main/java/com/oracle/bmc/keymanagement/AbstractVaultBasedClientBuilder.java", - "bmc-keymanagement/src/main/java/com/oracle/bmc/keymanagement/AbstractKmsCryptoClientBuilder.java", - "bmc-keymanagement/src/main/java/com/oracle/bmc/keymanagement/KmsCryptoClientBuilder.java", - "bmc-keymanagement/src/main/java/com/oracle/bmc/keymanagement/AbstractKmsManagementClientBuilder.java", - "bmc-shaded/", - "bmc-shaded-smoketests/" -] -hand_written_change = False -truncated = False - -root_module_changed = False -changed_modules = [] - -if changed_files: - printv("{} changed files".format(len(changed_files))) - - for file in changed_files: - printv(file) - - if file.lower().endswith("pom.xml"): - if not file.lower().startswith("bmc-codegen/bmc-"): - pom_file_change = True - printv("pom file change in {}".format(file)) - for d in hand_written_directories: - if file.lower().startswith(d): - hand_written_change = True - printv("Change in hand-written directory in {}".format(file)) - - m = re.search(r'^.*bmc-([^/]*)', file) - if m: - module_name = m.group(0) - if module_name not in changed_modules: - changed_modules.append(module_name) - else: - root_module_changed = True -else: - printv("Truncated response from Bitbucket, building everything") - truncated = True - -if pom_file_change or hand_written_change or truncated: - print('dev') -else: - print('quick') - -if args.changed_modules_output_file: - with open(args.changed_modules_output_file, 'w') as writer: - if not truncated and not root_module_changed and changed_modules: - changed_modules_output = "--projects {}".format(",".join(changed_modules)) - printv(changed_modules_output) - writer.write(changed_modules_output) - else: - printv("Not writing individual changed modules, truncated? {}, root module changed? {}, changed_modules?
{}".format( - truncated, root_module_changed, changed_modules)) diff --git a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/determine_codegen_projects_from_commit.py b/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/determine_codegen_projects_from_commit.py deleted file mode 100644 index eb543691a4..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/determine_codegen_projects_from_commit.py +++ /dev/null @@ -1,102 +0,0 @@ -from __future__ import print_function -import argparse -import os -import urllib3 -import re -import ssl -import traceback - -import shared.bitbucket_utils # noqa: ignore=F402 - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -verbose = False -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - - -def printv(str): - global verbose - if verbose: - print(str) - - -def get_changed_files_for_commit(commit_hash, commit_path): - try: - pr_diff = shared.bitbucket_utils.get_commit_diff("SDK", "legacy-java-sdk", commit_hash, commit_path) - json = pr_diff.json() - printv(json) - if json['truncated']: - printv("Diff for {} is truncated".format(commit_hash)) - return None - - files = [] - - for diff in json['diffs']: - if 'destination' in diff: - diff_destination = diff['destination'] - if 'toString' in diff_destination: - diff_file = diff_destination['toString'] - printv("Diff file {}".format(diff_file)) - files.append(diff_file) - - return files - except Exception as e: - printv("Failed to get diff for {}".format(commit_hash)) - printv("type error: {}".format(str(e))) - printv(traceback.format_exc()) - return None - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Determine the changed codegen projects to be used for generation') -parser.add_argument('--commit', required=True, help="The commit hash with the codegen pom.xml changes") -parser.add_argument('--commit-path', required=False, help="Subpath for the commit diff, e.g. 
'bmc-codegen'") -parser.add_argument('--changed-modules-output-file', required=False, help="If provided, the changed modoules will be written to this file") -parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output') - -args = parser.parse_args() -shared.bitbucket_utils.setup_bitbucket(args) - -if args.verbose: - verbose = True - -changed_files = get_changed_files_for_commit(args.commit, args.commit_path) - -should_generate_everything = False -truncated = False - -changed_modules = [] - -if changed_files: - printv("{} changed files".format(len(changed_files))) - - for file in changed_files: - printv(file) - - if file.lower().startswith("bmc-codegen/bmc-") and file.lower().endswith("pom.xml"): - m = re.search(r'^.*bmc-([^/]*)', file) - if m: - module_name = m.group(0) - if module_name not in changed_modules: - changed_modules.append(module_name) - printv("codegen pom file change in {}".format(file)) - else: - printv("couldn't extract codegen pom module for file change in {}, generating everything".format(file)) - should_generate_everything = True -else: - printv("Truncated response from Bitbucket, generating everything") - truncated = True - -changed_modules_output = "" -if not truncated and not should_generate_everything and changed_modules: - changed_modules_output = "--projects {}".format(",".join(changed_modules)) - print(changed_modules_output) -else: - printv("Not writing individual changed modules, truncated? {}, should generate everything? {}, changed_modules? {}".format( - truncated, should_generate_everything, changed_modules)) - -if args.changed_modules_output_file: - with open(args.changed_modules_output_file, 'w') as writer: - writer.write(changed_modules_output) diff --git a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/determine_full_version.py b/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/determine_full_version.py deleted file mode 100755 index 8935d094f3..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/determine_full_version.py +++ /dev/null @@ -1,130 +0,0 @@ -from __future__ import print_function -import argparse -import os -import requests -import urllib3 -import ssl -import hashlib -import re -import ntpath - -GROUP_ID = "com.oracle.oci.sdk" -ARTIFACT_ID = "oci-java-sdk-dist" -ARTIFACT_TYPE = "zip" - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -verbose = False -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - - -def printv(str): - global verbose - if verbose: - print(str) - - -# Unfortunately, this needs to be authenticated -def get_artifact_sha256(auth, build_name, build_number, group_id, artifact_id, version, artifact_type): - requested_module = "{}:{}:{}".format(group_id, artifact_id, version) - url = "https://artifactory.oci.oraclecorp.com/api/build/{}/{}".format(build_name, build_number) - printv("URL: {}".format(url)) - - r = requests.get(url, verify=False, auth=auth) - - modules = r.json()['buildInfo']['modules'] - - for module in modules: - module_id = module['id'] - if module_id == requested_module: - artifacts = module['artifacts'] - for artifact in artifacts: - if artifact['type'] == artifact_type: - sha256 = artifact['sha256'] - printv('Found artifact "{}", type "{}": sha256 is "{}"'.format(module_id, artifact_type, sha256)) - return sha256 - - printv('Not found: artifact "{}", type 
"{}"'.format(requested_module, artifact_type)) - - return None - - -def get_file_sha1(artifact_file, block_size=65536): - sha1 = hashlib.sha1() - with open(artifact_file, 'rb') as f: - for block in iter(lambda: f.read(block_size), b''): - sha1.update(block) - return sha1.hexdigest() - - -def get_snapshot_version_with_sha1(group_id, artifact_id, version, artifact_type, sha1): - url = "https://artifactory.oci.oraclecorp.com/api/storage/opc-public-sdk-snapshot-maven-local/{}/{}/{}".format(group_id.replace('.','/'), artifact_id, version) - r = requests.get(url, verify=False) - children = r.json()['children'] - - for child in reversed(children): - if not child['folder']: - candidate = child['uri'][1:] - if (candidate.startswith(artifact_id) and candidate.endswith(artifact_type)): - m = re.search(artifact_id + "-(.*)." + artifact_type, candidate) - candidate_version = m.group(1) - printv("Candidate version: {}".format(candidate_version)) - - candidate_url = url + "/" + candidate - r = requests.get(candidate_url, verify=False) - - candidate_sha1 = r.json()['checksums']['sha1'] - printv("\tCandidate sha1: {}".format(candidate_sha1)) - if candidate_sha1 == sha1: - return candidate_version - - return None - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Determine the full version of a Legacy Java SDK build just deployed to Artifactory') -parser.add_argument('--group-id', default=GROUP_ID, help='Group id of the artifact, default is {}'.format(GROUP_ID)) -parser.add_argument('--artifact-id', default=ARTIFACT_ID, help='Artifact id of the artifact, default is {}'.format(ARTIFACT_ID)) -parser.add_argument('--version', required=False, help='Version that was built (e.g. "1.2.3" or "1.2.3-SNAPSHOT")') -parser.add_argument('--artifact-type', default=ARTIFACT_TYPE, help='Artifact type of the artifact, default is {}'.format(ARTIFACT_TYPE)) -parser.add_argument('--file', required=True, help='The built {}:{} artifact whose full version should be retrieved'.format(GROUP_ID, ARTIFACT_ID)) -parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output') - -args = parser.parse_args() - -if args.verbose: - verbose = True - -group_id = args.group_id -artifact_id = args.artifact_id -artifact_type = args.artifact_type -artifact_file = args.file - -if args.version: - version = args.version -else: - file_name = ntpath.basename(artifact_file) - if not file_name.startswith(artifact_id): - print('File name does not start with "{}", cannot autodetect version'.format(artifact_id)) - exit(1) - if not file_name.endswith(artifact_type): - print('File name does not end with "{}", cannot autodetect version'.format(artifact_type)) - exit(1) - m = re.search(artifact_id + "-(.*)." 
+ artifact_type, file_name) - version = m.group(1) - printv('Autodetected version "{}"'.format(version)) - -if not version.upper().endswith("-SNAPSHOT"): - # if it's not a snapshot, the full version is just the version that was built - print(version) - exit(0) - -printv('Recognizing version "{}" as snapshot, need to determine full version'.format(version)) - -requested_sha1 = get_file_sha1(artifact_file) -printv('sha1 of file "{}" is: {}'.format(artifact_file, requested_sha1)) - -timed_snapshot = get_snapshot_version_with_sha1(group_id, artifact_id, version, artifact_type, requested_sha1) -print(timed_snapshot) diff --git a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/1_setup_legacy_java_public_branch.sh b/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/1_setup_legacy_java_public_branch.sh deleted file mode 100755 index f091af7a94..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/1_setup_legacy_java_public_branch.sh +++ /dev/null @@ -1,41 +0,0 @@ -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - -# Old way of doing that: -# ls -la ~/.ssh -# -# cat ~/.ssh/config -# -# printf "\n\nHost * \n StrictHostKeyChecking no\n" >> ~/.ssh/config -# -# cat ~/.ssh/config - -## AUTOGEN ## -cd autogen -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt --quiet -cd .. - -# run the setup job for step 2 in ./autogen -cd autogen -ls -la -pwd - -ls -la - -# checks out CLI branch with same name as SDK branch that triggered this build -python ./2_pre_generation_set_up.py --build-id $BUILD_ID --tool LegacyJavaSDK - -# back out into root directory -cd .. 
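Nearly every TeamCity script removed in this patch starts by deciding whether it is running in a pull-request validation build: the teamcity.build.branch variable holds a bare number (for example "1234") exactly when the build validates that pull request. A minimal standalone sketch of that recurring check, assuming only the convention above (the helper name parse_pr_id is ours, not part of the removed scripts):

# Recurring idiom from the removed scripts: a purely numeric
# teamcity.build.branch value names the pull request under validation.
def parse_pr_id(build_branch):
    """Return the pull request id as an int, or None for non-PR builds."""
    try:
        return int(build_branch)
    except (TypeError, ValueError):
        return None

assert parse_pr_id("1234") == 1234
assert parse_pr_id("refs/heads/master") is None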
diff --git a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/3_record_sdk_generation_success.sh b/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/3_record_sdk_generation_success.sh deleted file mode 100755 index d916cad356..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/3_record_sdk_generation_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch legacy_java_sdk_generation_success.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/5_record_sdk_build_success.sh b/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/5_record_sdk_build_success.sh deleted file mode 100755 index 2211ebd3a7..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/5_record_sdk_build_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch legacy_java_sdk_build_success.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/6_report_gen_and_build_status.sh b/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/6_report_gen_and_build_status.sh deleted file mode 100755 index fb67d4c786..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/6_report_gen_and_build_status.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -e -set -x - -pwd -ls -la - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - -cd legacy-java-sdk -if [[ `git branch|grep "\*"|grep "bulk"` ]]; then - echo "Found bulk preview" - export BUILD_TYPE_ARG="--build-type bulk_pending_merge_public" -else - export BUILD_TYPE_ARG="--build-type individual_public" -fi -cd .. - -# commit changes from generation and build for java-sdk -# this step will run no matter what happened before it so it can report success / failure to the JIRA tickets -cd autogen -python ./3_report_generation_status.py --build-id $BUILD_ID --tool LegacyJavaSDK $BUILD_TYPE_ARG --optional-file-for-dexreq-ticket ../specvalidator.txt -cd .. - -ls -la ./legacy-java-sdk diff --git a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/7_commit_generated_changes.sh b/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/7_commit_generated_changes.sh deleted file mode 100755 index ea68fd5cc4..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/public/7_commit_generated_changes.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -x -set -e - -# configure git for this commit -git config --global user.email "$GIT_USER_EMAIL" -git config --global user.name "$GIT_USER_NAME" - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - -# get the branch we're on -cd legacy-java-sdk -branch=`git branch|grep "^\*"|cut -c3-` -if [[ ${branch} == *"bulk"* ]]; then - build_type="bulk_pending_merge_public" -else - build_type="individual_public" -fi -cd .. - -# commit changes from generation and build -cd autogen -ls -la -python ./4_on_generation_complete.py --build-id $BUILD_ID --tool LegacyJavaSDK --build-type ${build_type} -cd .. 
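Steps 6 and 7 above choose between the bulk and individual build types by grepping the checked-out branch name for "bulk". A hedged Python equivalent of that shell logic, assuming the same branch-naming convention (the helper name classify_build_type is ours, not part of the removed scripts):

import subprocess

def classify_build_type(repo_dir):
    # Mirrors the shell pipeline `git branch | grep "^\*" | cut -c3-`:
    # a branch name containing "bulk" selects the bulk pending-merge
    # build type, anything else the individual public build type.
    branch = subprocess.check_output(
        ['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
        cwd=repo_dir).decode().strip()
    return 'bulk_pending_merge_public' if 'bulk' in branch else 'individual_public'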
diff --git a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/warn_about_backward_incompatible_changes.py b/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/warn_about_backward_incompatible_changes.py deleted file mode 100644 index aca8f77743..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/warn_about_backward_incompatible_changes.py +++ /dev/null @@ -1,335 +0,0 @@ -from __future__ import print_function -import argparse -import os -import re -import shutil -import ssl -import sys -import urllib3 -import getpass -import xml.etree.ElementTree as ET -from filecmp import dircmp -from glob import glob -from git import Repo - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -from shared.bitbucket_utils import setup_bitbucket, get_pullrequest, make_general_comment, clone_target_branch # noqa: ignore=F402 -from add_or_update_scripts.add_or_update_spec_utils import parse_pom # noqa: ignore=F402 -import shared.bitbucket_utils # noqa: ignore=F402 -import util # noqa:E402 -import config # noqa:E402 -from shared.buildsvc_tc_compatibility import build_log_link - - -KEEP_TEMP_FILES = True - -CUSTOM_JIRA_ISSUE_FIELDS = [config.CUSTOM_FIELD_ID_GROUP_ID, config.CUSTOM_FIELD_ID_ARTIFACT_ID] - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -verbose = False -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - - -def printv(str): - global verbose - if verbose: - print(str) - - -def setup_auth(): - global args, username, password, auth - username = args.username - if args.password is not None: - password = args.password - else: - password = getpass.getpass("LDAP password:") - - auth = (username, password) - - -def append_diff_file(diff_files, file): - if not file.endswith('.bak') and not file.endswith('.iml'): - diff_files.append(file) - - -def get_diff_files(dcmp, diff_files=[]): - for name in dcmp.diff_files: - append_diff_file(diff_files, os.path.join(dcmp.left, name)) - for name in dcmp.left_only: - append_diff_file(diff_files, os.path.join(dcmp.left, name)) - for name in dcmp.right_only: - append_diff_file(diff_files, os.path.join(dcmp.left, name)) - for sub_dcmp in dcmp.subdirs.values(): - get_diff_files(sub_dcmp, diff_files) - - return diff_files - - -def get_changed_files(legacy_java_sdk_dir, pr_id, base_dir): - should_clean_up = False - if not base_dir: - should_clean_up = not KEEP_TEMP_FILES - base_dir = clone_target_branch(pr_id, "legacy-java-sdk") - if not base_dir: - return None - - dcmp = dircmp(legacy_java_sdk_dir, base_dir, ['RCS', 'CVS', 'tags', '.git', 'target', - 'pom.xml.versionsBackup', '.DS_Store', '.idea']) - files = get_diff_files(dcmp) - - prefix = os.path.join(legacy_java_sdk_dir, "") - - files = [f[len(prefix):] if f.startswith(prefix) else f for f in files] - - printv("Files that differ:") - for f in files: - printv(f) - - if should_clean_up: - shutil.rmtree(base_dir, ignore_errors=True, onerror=None) - - return files - - -def find_files_with_filter(dir, filter): - """ - Search a path defined by dir for files with a name that matches filter - - @param str dir: - Path to search - - @param str filter: - filter for files to match on. 
Example 'clirr-*.txt' - """ - files = [os.path.abspath(y) for x in os.walk(dir) for y in glob(os.path.join(x[0], filter))] - - return files - - -def find_clirr_files(dir): - clirr_files = find_files_with_filter(dir, 'clirr-*.txt') - - return clirr_files - - -def find_pom_files(dir): - pom_files = find_files_with_filter(dir, "pom.xml") - - return pom_files - - -def get_project(dir, group_id, artifact_id): - """ - This function searches all the pom files contained in 'dir' for the group_id - and artifact_id - """ - - ns = {"ns":"http://maven.apache.org/POM/4.0.0"} - - # allow default namespace for output, don't print ns0: prefixes everywhere - ET.register_namespace('',"http://maven.apache.org/POM/4.0.0") - - group_id_xpath = ".//ns:properties//ns:codegen.artifactory.groupId" - artifact_id_xpath = ".//ns:properties//ns:codegen.artifactory.artifactId" - - pom_files = find_pom_files(dir) - - file_name = None - project = None - - for pom_file in pom_files: - try: - pom = parse_pom(pom_file) - property = pom.findall(group_id_xpath, ns)[0] - pom_group_id = property.text - property = pom.findall(artifact_id_xpath, ns)[0] - pom_artifact_id = property.text - except IndexError: - # An IndexError means findall did not return anything - # for the path, which means this is not the pom file - # we want. - continue - - if group_id == pom_group_id and artifact_id == pom_artifact_id: - file_name = pom_file - break - - # If file_name is not None it should be in the form of - # "/folders/legacy-java-sdk/bmc-codegen/{project}-codegen/pom.xml" where {project} - # is the part that we want. - if file_name: - head, tail = os.path.split(file_name) - if head.endswith("-codegen"): - path_parts = head.split(os.sep) - project = path_parts[-1][:-len("-codegen")] - - return project - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Warn if there are clirr errors in packages that are changed. JIRA_USERNAME, JIRA_PASSWORD, BITBUCKET_USERNAME, and BITBUCKET_PASSWORD are expected env vars.') -parser.add_argument('--build-id', required=False, help="The TeamCity build id for the build that is running this script.
This is used to update the relevant Bitbucket PRs with links to the TeamCity build") -parser.add_argument('--build-branch', required=True, help="The value of the teamcity.build.branch variable") -parser.add_argument('--username', required=True, help='LDAP username ("firstname.lastname@oracle.com"; within TeamCity, use "%%system.teamcity.auth.userId%%")') -parser.add_argument('--password', required=False, help='LDAP password (within TeamCity, use "%%system.teamcity.auth.password%%")') -parser.add_argument('--legacy-java-sdk-dir', required=False, help='Directory of the Legacy Java SDK') -parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output') -parser.add_argument('--base-dir', required=False, help='The directory that has the Git checkout of the target branch') -parser.add_argument('--keep-temp-files', action='store_true', default=False, required=False, help='Keep temporary files') -parser.add_argument('--dry-run', action='store_true', default=False, required=False, help='Dry-run, do not post comment to Bitbucket') -parser.add_argument('--build-type', choices=['DEXREQ', 'PR'], default='PR', help='Build type for the TeamCity job') - -args = parser.parse_args() -setup_bitbucket(args) - -if args.dry_run: - dry_run = True -else: - dry_run = False - -if args.verbose: - verbose = True - shared.bitbucket_utils.verbose = True - -if args.legacy_java_sdk_dir: - legacy_java_sdk_dir = os.path.abspath(args.legacy_java_sdk_dir) -else: - legacy_java_sdk_dir = os.getcwd() - -if args.keep_temp_files: - KEEP_TEMP_FILES = True - -clirr_files = find_clirr_files(legacy_java_sdk_dir) -if not clirr_files: - print("No clirr*.txt files found. Nothing to warn about.") - sys.exit(0) - -comment_destination = "Bitbucket PR" -if args.build_type == "DEXREQ": - comment_destination = "DEXREQ ticket" - -pr_id = args.build_branch -if args.build_type == 'PR': - try: - # If the teamcity.build.branch variable is just something like "1234", then this is a - # validation build for pull request "1234" - pr_id = int(args.build_branch) - except ValueError: - print("Not a pull request validation build. Not warning.") - sys.exit(0) - setup_auth() - -warn_about_clirr_files = [] - -changed_files = [] -issue = None -if args.build_type == 'DEXREQ': - # Get DEXREQ ticket from Git log - repo = Repo(legacy_java_sdk_dir) - last_commit_message = repo.git.log(n=1, format='%s%n%b') - dexreq_issues = util.parse_issue_keys_from_commit_message(last_commit_message) - - # Get artifact id and group id from the first DEXREQ Ticket - issue = None - project = None - if dexreq_issues and len(dexreq_issues) > 0: - issue = util.get_dexreq_issue(dexreq_issues[0], fields=(CUSTOM_JIRA_ISSUE_FIELDS)) - - if issue: - group_id = getattr(issue.fields, config.CUSTOM_FIELD_ID_GROUP_ID) - artifact_id = getattr(issue.fields, config.CUSTOM_FIELD_ID_ARTIFACT_ID) - project = get_project(os.path.join(legacy_java_sdk_dir, "bmc-codegen"), - group_id, - artifact_id) - - if project: - print("Adding {0} to changed files".format(project)) - changed_files.append(project) - else: - print("No project found, warning about all backward incompatibilities!") - -else: - try: - changed_files = get_changed_files(legacy_java_sdk_dir, pr_id, args.base_dir) - except Exception as e: - print(e) - print("Could not get changed files, warning about all backward incompatibilities!") - -if not changed_files: - # No changes, let's warn about everything to be safe.
- # This can happen when a job failed, and then we hit "Run" again - warn_about_clirr_files = clirr_files -else: - # Find the clirr files that correspond to changed files - for clirr_file in clirr_files: - printv("Checking {}".format(clirr_file)) - project_dir = os.path.abspath(os.path.join(clirr_file, os.pardir, os.pardir)) - - prefix = os.path.join(legacy_java_sdk_dir, "") - if project_dir.startswith(prefix): - project_dir = project_dir[len(prefix):] - - # See if any changed files start with this - for cf in changed_files: - if cf.startswith(project_dir): - printv("\tFound changed file in this module: {}".format(cf)) - warn_about_clirr_files.append(clirr_file) - break - -messages_for_modules = {} - -for file in warn_about_clirr_files: - printv("Emit errors from {}".format(file)) - - m = re.match(r"^.*/clirr-(.*)\.txt$", file) - if m: - module_name = m.group(1) - else: - module_name = "Unknown module" - - with open(file, 'r') as content_file: - content = content_file.read() - if content and len(content) > 0: - if module_name in messages_for_modules: - text = messages_for_modules[module_name] + "\n" - else: - text = "" - text = text + "\n" + content - messages_for_modules[module_name] = text - -if messages_for_modules: - text = "Clirr detected backward incompatibilities possibly related to this change, compared to the latest release version of the OCI Legacy Java SDK:\n\n" - - if not changed_files: - text = text + "(Note: Could not detect changed files; including all backward incompatibilities to be safe.)\n\n" - - for module, messages in messages_for_modules.items(): - text = text + "{}:\n{}\n\n".format(module, messages) - - if args.build_id: - text = text + "\n\nPlease use the information above to diagnose the problem. More information may also be available in the {build_log_link}.".format( - build_log_link=build_log_link(args.build_id, text="build log")) - - print(text) - - if not dry_run: - if args.build_type == "DEXREQ": - if issue: - print("Adding backwards incompatibility comment to {}".format(issue.key)) - util.add_jira_comment(issue.key, text) - printv("Adding '{}' label to: {}".format(config.BACKWARD_INCOMPATIBLE_CHANGES_LABEL, issue.key)) - issue.add_field_value('labels', config.BACKWARD_INCOMPATIBLE_CHANGES_LABEL) - else: - print("No DEXREQ ticket identified.
Cannot add comment") - else: - make_general_comment("SDK", "legacy-java-sdk", pr_id, text) - else: - print("DRY-RUN: Not adding comment to {}".format(comment_destination)) -else: - print("Nothing to warn about!") diff --git a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/warn_about_source_formatting.py b/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/warn_about_source_formatting.py deleted file mode 100644 index 4959bf87f8..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/legacy_java_sdk/warn_about_source_formatting.py +++ /dev/null @@ -1,117 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import print_function -import argparse -import os -import ssl -import sys -import urllib3 -from git import Repo - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -from shared.bitbucket_utils import setup_bitbucket, make_general_comment # noqa: ignore=F402 -import shared.bitbucket_utils # noqa: ignore=F402 -from shared.buildsvc_tc_compatibility import build_log_link - -KEEP_TEMP_FILES = True - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -verbose = False -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - - -def printv(str): - global verbose - if verbose: - print(str) - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Warn if source formatting is incorrect (really, warn if there are changed files in this git repository).') -parser.add_argument('--build-id', required=False, help="The TeamCity build id for the build that is running this script. This is used to update the relevant Bitbucket PRs with links to the TeamCity build") -parser.add_argument('--build-branch', required=True, help="The value of the teamcity.build.branch variable") -parser.add_argument('--legacy-java-sdk-dir', required=False, help='Directory of the Legacy Java SDK') -parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output') -parser.add_argument('--dry-run', action='store_true', default=False, required=False, help='Dry-run, do not post comment to Bitbucket') - -args = parser.parse_args() -setup_bitbucket(args) - -if args.dry_run: - dry_run = True -else: - dry_run = False - -if args.verbose: - verbose = True - shared.bitbucket_utils.verbose = True - -if args.legacy_java_sdk_dir: - legacy_java_sdk_dir = os.path.abspath(args.legacy_java_sdk_dir) -else: - legacy_java_sdk_dir = os.getcwd() - -pr_id = None -try: - # If the teamcity.build.branch variable is just something like "1234", then this is a - # validation build for pull request "1234" - pr_id = int(args.build_branch) -except ValueError: - print("Not a pull request validation build. 
Not warning.") - sys.exit(0) - -repo = Repo.init(legacy_java_sdk_dir) -git_status = repo.git.status() -git_diff = repo.git.diff('--name-only', '--ignore-submodules') - -current_branch = [branch.strip()[2:] for branch in repo.git.branch().split('\n') if branch.startswith('* ')][0] -printv("source branch: {}".format(current_branch)) - -printv("git diff:\n{}".format(git_diff)) - -printv("git status: '{}'".format(git_status)) - -could_fix = False -if 'nothing to commit' not in git_status and "nothing added to commit" not in git_status: - # Try to fix it - - try: - repo.git.add("*.java") - repo.git.add("*.properties") - repo.git.commit("-m", "Committing source formatting changes. Automatically performed, see build log:\n\n{build_log_link}".format( - build_log_link=build_log_link(args.build_id, text="build log"))) - if dry_run: - print('DRY-RUN: not pushing to branch {}'.format(current_branch)) - else: - repo.git.push('-u', 'origin', current_branch) - - could_fix = True - text = "There were source formatting problems, but we automatically committed the changes to your branch. Note that this may kick off another validation build. Changes made:\n\n" + git_diff + "\n\nMore information may also available in the {build_log_link}.".format(build_log_link=build_log_link(args.build_id, text="build log")) - except Exception as e: - print('EXCEPTION: {}'.format(str(e))) - print('Failed to push source formatting changes.') - - if not could_fix: - text = "There are source formatting problems in the files below. Run `mvn process-sources -Pdev`.\n\n" + git_diff + "\n\nDon't want to do this by hand anymore? Give the 'DEXREQ Automation' user write access to your repo ([similar instructions here](https://confluence.oci.oraclecorp.com/pages/viewpage.action?spaceKey=DEX&title=Pull+Request+Validation+Builds+for+the+Testing+Service#PullRequestValidationBuildsfortheTestingService-WhenUsingYourOwnFork))." - - if args.build_id: - text = text + "\n\nPlease use the information above to diagnose the problem. 
More information may also be available in the {build_log_link}.".format( - build_log_link=build_log_link(args.build_id, text="build log")) - - print(text) - - if not dry_run: - make_general_comment("SDK", "legacy-java-sdk", pr_id, text) - else: - print("DRY-RUN: Not making Bitbucket comment.") -else: - print("Nothing to warn about!") - could_fix = True - -if not could_fix: - sys.exit(1) diff --git a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/determine_issue_routing_info_tag.py b/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/determine_issue_routing_info_tag.py deleted file mode 100644 index 24fbb5a776..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/determine_issue_routing_info_tag.py +++ /dev/null @@ -1,211 +0,0 @@ -from __future__ import print_function -import argparse -import os -import ssl -import sys -import urllib3 -import ocits_shared -from ocits_shared import HELP_URL, TC_URL, printv, get_dexreq_tickets, get_issue_routing_info_tag, get_issue_routing_info_tag_from_description, get_package_names_from_description, get_limit_text, get_determined_text - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -from shared.bitbucket_utils import setup_bitbucket, get_pullrequest, get_pr_target_branch, make_general_comment # noqa: ignore=F402 -import shared.bitbucket_utils # noqa: ignore=F402 -import util # noqa: ignore=F402 -import config # noqa: ignore=F402 - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - - -# -# Warning messages -# - - -NO_TAGS_FOUND_RUNNING_ALL = """ -Could not find any issue routing tags, because even though DEXREQ tickets were referenced, none of them had the 'Issue Routing Tag' field filled out. - -The pull request description also didn't contain an issue routing tag in the form of `[IssueRoutingInfo.tag=sometag]`. - -This means the automation will run all tests belonging to the spec specified in the DEXREQ tickets. That may take a long time. - -{limit_text} Then restart the [TeamCity build]({tc_link}) by asking an SDK/CLI team member on #oci_public_sdks to click on the 'Run' button. - -Note: The 'Issue Routing Tag' is the same value you have chosen in your spec for the `x-obmcs-issue-routing-tag` annotation. Only tests for operations with matching issue routing tag will be run ([more info](https://confluence.oci.oraclecorp.com/display/DEX/Issue+Routing)). - -About to use the OCI testing service to run all tests in the following {class_sop}: - -{test_classes} - -{determined_text} - -The testing progress can be monitored in the log of the [TeamCity build]({tc_link}). - -For more information, see [Pull Request Validation Builds for the Testing Service]({help_url}). -""" - - -NO_DEXREQ_NO_TAGS_FOUND_RUNNING_ALL = """ -Could not find any issue routing tags, because no DEXREQ tickets were referenced. The pull request description also didn't contain an issue routing tag in the form of `[IssueRoutingInfo.tag=sometag]`. - -This is not a problem, but it means the automation will run all tests belonging to the modules specified in the pull request description in the form of `[RunTestsForModule=xyz]`. That may take a long time.
- -{limit_text} Then restart the [TeamCity build]({tc_link}) by asking an SDK/CLI team member on #oci_public_sdks to click on the 'Run' button. - -Note: The 'Issue Routing Tag' is the same value you have chosen in your spec for the `x-obmcs-issue-routing-tag` annotation. Only tests for operations with matching issue routing tag will be run ([more info](https://confluence.oci.oraclecorp.com/display/DEX/Issue+Routing)). - -About to use the OCI testing service to run all tests in the following {class_sop}: - -{test_classes} - -{determined_text} - -The testing progress can be monitored in the log of the [TeamCity build]({tc_link}). - -For more information, see [Pull Request Validation Builds for the Testing Service]({help_url}). -""" - - -RUNNING_THE_FOLLOWING_TESTS = """ -About to use the OCI testing service to run tests with matching 'Issue Routing Tag' {issue_routing_tags} in the following {class_sop}: - -{test_classes} - -{determined_text}The testing progress can be monitored in the log of the [TeamCity build]({tc_link}). - -For more information, see [Pull Request Validation Builds for the Testing Service]({help_url}). -""" - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Determine the issue routing info tag.') -parser.add_argument('--build-id', required=True, help="The TeamCity build id for the build that is running this script. This is used to update the relevant Bitbucket PRs with links to the TeamCity build") -parser.add_argument('--build-branch', required=True, help="The value of the teamcity.build.branch variable") -parser.add_argument('-o', '--output', required=False, help='Output file') -parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output') -parser.add_argument('--dry-run', action='store_true', default=False, required=False, help='Dry-run') - -args = parser.parse_args() -setup_bitbucket(args) - -ocits_shared.dry_run = args.dry_run - -if args.verbose: - ocits_shared.verbose = True - shared.bitbucket_utils.verbose = True - -pr_id = None -try: - # If the teamcity.build.branch variable is just something like "1234", then this is a - # validation build for pull request "1234" - pr_id = int(args.build_branch) -except ValueError: - # Only print this when using verbose, since we want the output to be the target branch. - printv("Not a pull request validation build.") - sys.exit(1) - -test_classes = [] -if "TEST_CLASSES" in os.environ: - test_classes = os.environ.get("TEST_CLASSES").split(",") -else: - print("TEST_CLASSES environment variable has to be set (comma-separated list of Java class names)") - sys.exit(1) - -if not test_classes: - # Empty -- "NO-DEXREQ" was set and no [RunTestsForModule=xyz] was set - # This is ok. - sys.exit(0) - -pr = get_pullrequest("SDK", "oci-testing-service", pr_id) -printv(pr.text) - -json = pr.json() - -if 'title' in json: - title = json['title'] -else: - title = "" -if 'description' in json: - description = json['description'] -else: - description = "" - -tc_link = TC_URL.format(build_id=args.build_id) -tickets, no_dexreq_marker = get_dexreq_tickets(pr, tc_link) -package_names_from_description = get_package_names_from_description(description) - -if no_dexreq_marker and not package_names_from_description: - # If the user had "NO-DEXREQ" in the PR and didn't manually add modules using [RunTestsForModule=xyz], no tests will run - # No point in looking for an issue routing tag.
- sys.exit(0) - -tags_set = set([]) -for issue_key in tickets: - tags = get_issue_routing_info_tag(issue_key) - - if tags: - for tag in tags: - tags_set.add(tag) - -tags = get_issue_routing_info_tag_from_description(description) -for tag in tags: - tags_set.add(tag) - -limit_text = get_limit_text(tickets, package_names_from_description) -determined_text = get_determined_text(tickets, package_names_from_description, test_classes) - -if not tags_set and not no_dexreq_marker: - printv("No issue routing info tags found!") - - text = NO_TAGS_FOUND_RUNNING_ALL.format( - tc_link=tc_link, - dexreq_tickets=", ".join(tickets), - ticket_sop="ticket" if len(tickets) == 1 else "tickets", - test_classes="\n".join("- `{}`".format(c) for c in test_classes), - class_sop="class" if len(test_classes) == 1 else "classes", - issue_routing_tags=", ".join("`{}`".format(t) for t in tags_set), - limit_text=limit_text, - determined_text=determined_text, - help_url=HELP_URL) -elif not tags_set and no_dexreq_marker: - printv("No issue routing info tags found, but NO-DEXREQ marker exists!") - - text = NO_DEXREQ_NO_TAGS_FOUND_RUNNING_ALL.format( - tc_link=tc_link, - dexreq_tickets=", ".join(tickets), - ticket_sop="ticket" if len(tickets) == 1 else "tickets", - test_classes="\n".join("- `{}`".format(c) for c in test_classes), - class_sop="class" if len(test_classes) == 1 else "classes", - issue_routing_tags=", ".join("`{}`".format(t) for t in tags_set), - limit_text=limit_text, - determined_text=determined_text, - help_url=HELP_URL) -else: - tags_str = ",".join(str(s) for s in tags_set) - print(tags_str) - - if args.output: - f = open(args.output, "w") - f.write(tags_str) - - text = RUNNING_THE_FOLLOWING_TESTS.format( - tc_link=tc_link, - dexreq_tickets=", ".join(tickets), - ticket_sop="ticket" if len(tickets) == 1 else "tickets", - test_classes="\n".join("- `{}`".format(c) for c in test_classes), - class_sop="class" if len(test_classes) == 1 else "classes", - this_class_was_these_classes_were="This class was" if len(test_classes) == 1 else "These classes were", - issue_routing_tags=", ".join("`{}`".format(t) for t in tags_set), - help_url=HELP_URL, - determined_text=determined_text) - -if not ocits_shared.dry_run: - make_general_comment("SDK", "oci-testing-service", pr_id, text) -else: - print("DRY-RUN: Not making BitBucket comment\n{}".format(text)) diff --git a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/determine_java_sdk_versions_used.py b/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/determine_java_sdk_versions_used.py deleted file mode 100644 index 2bae18f8d8..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/determine_java_sdk_versions_used.py +++ /dev/null @@ -1,79 +0,0 @@ -from __future__ import print_function -import argparse -import os -import ssl -import sys -import urllib3 -import xml.etree.ElementTree as ET -from glob import glob - -import ocits_shared -from ocits_shared import parse_xml - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -import util # noqa: ignore=F402 -import config # noqa: ignore=F402 - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - - -ns = {"ns":"http://maven.apache.org/POM/4.0.0"} - -# allow default 
namespace for output, don't print ns0: prefixes everywhere -ET.register_namespace('',"http://maven.apache.org/POM/4.0.0") - - -def get_group_artifact_versions_from_pom_file(path): - dependencies = [] - - group_id = "com.oracle.oci.sdk" - - pom_files = [y for x in os.walk(path) for y in glob(os.path.join(x[0], 'pom.xml'))] - for ldr_path in pom_files: - pom = parse_xml(ldr_path) - # Find all the places where <groupId> and <artifactId> both match - xpath = './/ns:dependency[ns:groupId="{}"]'.format(group_id) - dependency_nodes = pom.findall(xpath, ns) - for node in dependency_nodes: - artifact_id = None - version = None - - for child in node: - tag = child.tag.replace('{{{}}}'.format(ns['ns']), "") - if tag == "artifactId": - artifact_id = child.text - elif tag == "version": - version = child.text - - if artifact_id and version: - dependencies.append((group_id, artifact_id, version)) - - return dependencies - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Determine the Java SDK versions used.') -parser.add_argument('--oci-testing-service-path', required=True, help="Path to the root directory of the OCI Testing Service") -parser.add_argument('-o', '--output', required=False, help='Output file') -parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output') - -args = parser.parse_args() - -if args.verbose: - ocits_shared.verbose = True - -dependencies = get_group_artifact_versions_from_pom_file(args.oci_testing_service_path) - -dependencies_str = "\n".join("{}:{}:{}".format(g, a, v) for g, a, v in dependencies) -print(dependencies_str) - -if args.output: - f = open(args.output, "w") - f.write(dependencies_str) diff --git a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/determine_pr_target_branch.py b/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/determine_pr_target_branch.py deleted file mode 100644 index cabc197162..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/determine_pr_target_branch.py +++ /dev/null @@ -1,51 +0,0 @@ -from __future__ import print_function -import argparse -import os -import ssl -import sys -import urllib3 - -import ocits_shared -from ocits_shared import printv - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -from shared.bitbucket_utils import setup_bitbucket, get_pullrequest, get_pr_target_branch # noqa: ignore=F402 -import shared.bitbucket_utils # noqa: ignore=F402 - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Determine the target branch of the pull request.') -parser.add_argument('--build-branch', required=True, help="The value of the teamcity.build.branch variable") -parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output') - -args = parser.parse_args() -setup_bitbucket(args) - -if args.verbose: - ocits_shared.verbose = True - shared.bitbucket_utils.verbose = True - -pr_id = None -try: - # If the teamcity.build.branch variable is just something like "1234", then this is a - # validation build for pull request "1234" - pr_id = int(args.build_branch) -except ValueError: - # Only print
this when using verbose, since we want the output to be the target branch. - printv("Not a pull request validation build.") - sys.exit(0) - -pr = get_pullrequest("SDK", "oci-testing-service", pr_id) -printv(pr.text) - -target_branch = get_pr_target_branch(pr) -print(target_branch) diff --git a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/determine_test_classes.py b/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/determine_test_classes.py deleted file mode 100644 index 16c1aad7a9..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/determine_test_classes.py +++ /dev/null @@ -1,286 +0,0 @@ -from __future__ import print_function -import argparse -import os -import ssl -import sys -import urllib3 -import re -import xml.etree.ElementTree as ET -from glob import glob - -import ocits_shared -from ocits_shared import HELP_URL, TC_URL, printv, get_dexreq_tickets, get_group_and_artifact_ids, parse_xml, get_package_names_from_description - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -from shared.bitbucket_utils import setup_bitbucket, get_pullrequest, make_general_comment # noqa: ignore=F402 -import shared.bitbucket_utils # noqa: ignore=F402 -import util # noqa: ignore=F402 -import config # noqa: ignore=F402 - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - - -ns = {"ns":"http://maven.apache.org/POM/4.0.0"} - -# allow default namespace for output, don't print ns0: prefixes everywhere -ET.register_namespace('',"http://maven.apache.org/POM/4.0.0") - - -# -# Warning messages -# - - -NO_TESTS_FOUND_BECAUSE_UNKNOWN_SPEC = """ -Not running the OCI testing service tests. - -The referenced DEXREQ {ticket_sop} {dexreq_tickets} {ticket_has_have} 'Spec Group Id' and 'Spec Artifact Id' set ({ids_list}), but no spec with those ids was found in the Java SDK. - -{author_text}Please ensure that the 'Spec Group Id' and 'Spec Artifact Id' fields are filled out correctly, and that the SDK for {that_spec_those_specs} has been generated. Then restart the [TeamCity build]({tc_link}) by asking an SDK/CLI team member on #oci_public_sdks to click on the 'Run' button. - -For more information, see [Pull Request Validation Builds for the Testing Service]({help_url}). -""" - - -NO_TESTS_FOUND_BECAUSE_NO_TEST_CLASSES = """ -Not running the OCI testing service tests. - -The referenced DEXREQ {ticket_sop} {dexreq_tickets} {ticket_has_have} 'Spec Group Id' and 'Spec Artifact Id' set, which resulted in the Java package {name_sop} {java_package_names}. Unfortunately, {this_package_these_packages} did not contain `*AutoTest.java` files. - -{author_text}Please ensure that the 'Spec Group Id' and 'Spec Artifact Id' fields are filled out correctly, and that the SDK for that spec has been generated. Then restart the [TeamCity build]({tc_link}) by asking an SDK/CLI team member on #oci_public_sdks to click on the 'Run' button. - -For more information, see [Pull Request Validation Builds for the Testing Service]({help_url}). -""" - - -NO_TESTS_FOUND_BUT_NO_DEXREQ = """ -Not running the OCI testing service tests. - -No test classes were found, because no DEXREQ tickets were referenced in the pull request title or description.
But since the `{no_dexreq_marker}` marker was present in the description, this is not a problem; the tests just won't get run. - -If you do want to run tests, you can specify which modules should have their tests run by adding annotations in the form of `[RunTestsForModule=xyz]` to the pull request description. You may also want to limit the tests that then get run by specifying the issue routing tag in the form of `[IssueRoutingInfo.tag=sometag]` in the pull request description, - -For more information, see [Pull Request Validation Builds for the Testing Service]({help_url}). -""" - - -def get_package_name_from_pom_file(group_id, artifact_id, java_sdk_path): - codegen_dir = os.path.join(java_sdk_path, "bmc-codegen") - pom_files = [y for x in os.walk(codegen_dir) for y in glob(os.path.join(x[0], 'pom.xml'))] - for ldr_path in pom_files: - pom = parse_xml(ldr_path) - # Find all the places where and both match - xpath = './/ns:properties[ns:codegen.artifactory.groupId="{}"][ns:codegen.artifactory.artifactId="{}"]'.format(group_id, artifact_id) - properties = pom.findall(xpath, ns) - if properties: - codegen_artifact_id = pom.findall("./ns:artifactId", ns)[0].text - m = re.match("oci-java-sdk-([^-]*)-codegen", codegen_artifact_id) - if m: - return m.group(1) - - return None - - -def get_package_names(group_artifact_ids_set, pr): - global tc_link - - package_names_set = set([]) - problems = [] - for issue_key, group_id, artifact_id in group_artifact_ids_set: - package_name = get_package_name_from_pom_file(group_id, artifact_id, args.java_sdk_path) - - if package_name: - package_names_set.add(package_name) - else: - problems.append((issue_key, group_id, artifact_id)) - - if problems: - print("Had problems determining the package name for tickets: {}".format(", ".join(i for i, g, a in problems))) - - author_text = "" - json = pr.json() - if json['author'] and json['author']['user'] and json['author']['user']['name']: - author_text = "@{name}: ".format(name=json['author']['user']['name']) - - text = NO_TESTS_FOUND_BECAUSE_UNKNOWN_SPEC.format( - tc_link=tc_link, - dexreq_tickets=", ".join(i for i, g, a in problems), - ids_list=", ".join("`{}:{}`".format(g, a) for i, g, a in problems) + ("" if len(problems) == 1 else ", respectively"), - ticket_sop="ticket" if len(problems) == 1 else "tickets", - ticket_has_have="has" if len(problems) == 1 else "have", - that_spec_those_specs="that spec" if len(problems) == 1 else "those specs", - help_url=HELP_URL, - author_text=author_text) - - if not ocits_shared.dry_run: - make_general_comment("SDK", "oci-testing-service", pr_id, text) - else: - print("DRY-RUN: Not making BitBucket comment\n{}".format(text)) - - # Don't fail the build - sys.exit(1) - - printv("Package names: {}".format(", ".join(package_names_set))) - - return package_names_set - - -def get_test_classes_for_package(package_name, java_sdk_path): - # bmc-integtests/src - # +-- test - # +-- java - # +-- com - # +-- oracle - # +-- bmc - # +-- - # +-- *AutoTest.java - src_dir = os.path.join(java_sdk_path, "bmc-integtests/src/test/java") - package_dir = os.path.join(src_dir, "com/oracle/bmc/{}".format(package_name)) - printv("Looking for *AutoTest.java in {}".format(package_dir)) - test_files = [y for x in os.walk(package_dir) for y in glob(os.path.join(x[0], '*AutoTest.java'))] - - printv("Test files:\n{}".format("\n".join("\t{}".format(x) for x in test_files))) - - test_classes = [] - for test_file in test_files: - if test_file.startswith(src_dir + "/") and test_file.endswith(".java"): - 
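To make the translation in this loop concrete, the same path-to-class conversion can be sketched in isolation; the directory and file names below are invented for illustration and are not part of the original script:

```python
import os

# Hypothetical layout mirroring the tree documented above (names invented).
src_dir = "java-sdk/bmc-integtests/src/test/java"
test_file = os.path.join(src_dir, "com/oracle/bmc/email/EmailAutoTest.java")

# Strip the source root and the .java suffix, then turn path separators into dots.
if test_file.startswith(src_dir + "/") and test_file.endswith(".java"):
    within_src_dir = test_file[len(src_dir + "/"):-len(".java")]
    print(within_src_dir.replace("/", "."))  # com.oracle.bmc.email.EmailAutoTest
```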
within_src_dir = test_file[len(src_dir + "/"):-len(".java")] - test_classes.append(within_src_dir.replace('/', '.')) - - return test_classes - - -def get_test_classes(package_names_set, java_sdk_path): - global tickets, tc_link - - test_classes_set = set([]) - - for package_name in package_names_set: - test_classes = get_test_classes_for_package(package_name, java_sdk_path) - - if test_classes: - for test_class in test_classes: - test_classes_set.add(test_class) - - return test_classes_set - - -def get_test_classes_when_tickets_optional(package_names_set, java_sdk_path, pr_id): - test_classes_set = get_test_classes(package_names_set, java_sdk_path) - if not test_classes_set: - text = NO_TESTS_FOUND_BUT_NO_DEXREQ.format( - no_dexreq_marker=ocits_shared.NO_DEXREQ_MARKER, - help_url=HELP_URL) - - if not ocits_shared.dry_run: - make_general_comment("SDK", "oci-testing-service", pr_id, text) - else: - print("DRY-RUN: Not making BitBucket comment\n{}".format(text)) - - return test_classes_set - - -def get_test_classes_when_tickets_required(package_names_set, java_sdk_path, pd_id, pr, tickets, tc_link): - group_artifact_ids_set = get_group_and_artifact_ids(tickets, tc_link, pr) - package_names_set.update(get_package_names(group_artifact_ids_set, pr)) - test_classes_set = get_test_classes(package_names_set, java_sdk_path) - - if not test_classes_set: - print("No test classes found!") - - author_text = "" - json = pr.json() - if json['author'] and json['author']['user'] and json['author']['user']['name']: - author_text = "@{name}: ".format(name=json['author']['user']['name']) - - text = NO_TESTS_FOUND_BECAUSE_NO_TEST_CLASSES.format( - tc_link=tc_link, - dexreq_tickets=", ".join(tickets), - ticket_sop="ticket" if len(tickets) == 1 else "tickets", - ticket_has_have="has" if len(tickets) == 1 else "have", - java_package_names=", ".join("`com.oracle.bmc.{}`".format(x) for x in package_names_set), - name_sop="name" if len(package_names_set) == 1 else "names", - this_package_these_packages="this package" if len(package_names_set) == 1 else "these packages", - help_url=HELP_URL, - author_text=author_text) - - if not ocits_shared.dry_run: - make_general_comment("SDK", "oci-testing-service", pr_id, text) - else: - print("DRY-RUN: Not making BitBucket comment\n{}".format(text)) - - # Fail the build - sys.exit(1) - - return test_classes_set - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Determine the test classes to run.') -parser.add_argument('--build-id', required=True, help="The TeamCity build id for the build that is running this script. 
This is used to update the relevant Bitbucket PRs with links to the TeamCity build")
-parser.add_argument('--build-branch', required=True, help="The value of the teamcity.build.branch variable")
-parser.add_argument('--java-sdk-path', required=True, help="Path to the root directory of the Java SDK")
-parser.add_argument('-o', '--output', required=False, help='Output file')
-parser.add_argument('-p', '--packages-output', required=False, help='Output file for packages')
-parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output')
-parser.add_argument('--dry-run', action='store_true', default=False, required=False, help='Dry-run')
-
-args = parser.parse_args()
-setup_bitbucket(args)
-
-ocits_shared.dry_run = args.dry_run
-shared.bitbucket_utils.dry_run = args.dry_run
-
-if args.verbose:
-    ocits_shared.verbose = True
-    shared.bitbucket_utils.verbose = True
-
-pr_id = None
-try:
-    # If the teamcity.build.branch variable is just something like "1234", then this is a
-    # validation build for pull request "1234"
-    pr_id = int(args.build_branch)
-except ValueError:
-    # Only print this when using verbose, since we want the output to be just the test classes.
-    printv("Not a pull request validation build.")
-    sys.exit(2)
-
-pr = get_pullrequest("SDK", "oci-testing-service", pr_id)
-printv(pr.text)
-tc_link = TC_URL.format(build_id=args.build_id)
-
-package_names_set = set([])
-test_classes_set = set([])
-
-if 'description' in pr.json():
-    description = pr.json()['description']
-else:
-    description = ""
-
-# Start with the test classes from the PR description
-package_names_set.update(get_package_names_from_description(description))
-
-tickets, no_dexreq_marker = get_dexreq_tickets(pr, tc_link)
-if no_dexreq_marker:
-    test_classes_set = get_test_classes_when_tickets_optional(package_names_set, args.java_sdk_path, pr_id)
-else:
-    test_classes_set = get_test_classes_when_tickets_required(package_names_set, args.java_sdk_path, pr_id, pr, tickets, tc_link)
-
-test_classes_str = ",".join(str(s) for s in test_classes_set)
-print(test_classes_str)
-
-if args.output:
-    with open(args.output, "w") as f:
-        f.write(test_classes_str)
-
-if args.packages_output:
-    with open(args.packages_output, "w") as f:
-        f.write(",".join(str(s) for s in package_names_set))
diff --git a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/jira_ticket_reporter.py b/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/jira_ticket_reporter.py
deleted file mode 100644
index e5d16ca291..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/jira_ticket_reporter.py
+++ /dev/null
@@ -1,87 +0,0 @@
-from jira import JIRA, JIRAError
-import os
-
-
-class JiraTicketReporter:
-    JIRA_SERVER = 'https://jira.oci.oraclecorp.com'
-
-    JIRA_OPTIONS = {
-        'server': JIRA_SERVER,
-        'verify': True
-    }
-
-    TICKET_SEARCH_QUERY = """
-    project={project} and (Description ~ 'IssueRoutingInfo tag {tag}' or worklogComment ~ 'IssueRoutingInfo tag {tag}') and resolution = Unresolved and status not in (\"done\", \"closed\", \"duplicate\", \"won't fix\", \"resolved\") and labels in (\"{label}\")
-    """
-
-    JIRA_TICKET_LABEL = "SDK-Nightly-Run-Failure"
-
-    TICKET_UPDATE_DESCRIPTION = """
-
-    Code coverage failure:{codecoverage_text}
-    """
-
-    CODE_COVERAGE_TEXT = """
-    {package}
-    Expected code coverage: {baseline_codecoverage}, Actual code coverage: {actual_codecoverage}
-    Complete code coverage report can be found at: {report_url}
-    """
-
-    TICKET_CREATE_DESCRIPTION = """
-    This
ticket was opened because the nightly run failed to meet the code coverage criteria.{codecoverage_text} - - """ - - TICKET_CREATE_SUMMARY = """ - Java SDK Nightly Build Error - """ - - FAILURE_GENERAL_COMMENT = """ - There were failures. Please check worklog for details. - - """ - - FAILURE_CREATE_COMMENT = """ - There were failures. Please check worklog for details. - - Please do not delete this line: {{ IssueRoutingInfo tag {tag}}} - """ - - jira_client = None - - def __init__(self): - username = os.environ.get('JIRA_USERNAME') - password = os.environ.get('JIRA_PASSWORD') - self.jira_client = JIRA(self.JIRA_OPTIONS, basic_auth=(username, password)) - - def report_codecov_to_jira_ticket(self, project, tag, package_label, baseline_codecoverage, actual_codecoverage, report_url, dry_run): - codecoverage_text = self.CODE_COVERAGE_TEXT.format(package=package_label, baseline_codecoverage=baseline_codecoverage, actual_codecoverage=actual_codecoverage, report_url=report_url) - try: - query = self.TICKET_SEARCH_QUERY.format(project=project, tag=tag, label=self.JIRA_TICKET_LABEL) - print(query) - issues = self.jira_client.search_issues(query, fields=["summary","status","assignee","key"]) - if not issues: - desc = self.TICKET_CREATE_DESCRIPTION.format(codecoverage_text=codecoverage_text) - if not dry_run: - print("creating a new issue") - new_issue = self.jira_client.create_issue(project=project, summary=self.TICKET_CREATE_SUMMARY, description=self.FAILURE_CREATE_COMMENT.format(tag=tag), issuetype={'name': 'Bug'}) - print(new_issue) - self.jira_client.add_worklog(new_issue, timeSpentSeconds=60, comment=desc) - labels = [self.JIRA_TICKET_LABEL] - new_issue.update(fields={"labels": labels}) - else: - print("dry run, NOT creating ticket with details: " + desc) - else: - if len(issues) > 1: - # more than one open issue found, we log the "issue" and just pick the first one returned - print("more than one active issue found for {project}-{tag}".format(project=project, tag=tag)) - print(issues) - comm = self.TICKET_UPDATE_DESCRIPTION.format(codecoverage_text=codecoverage_text) - if not dry_run: - self.jira_client.add_worklog(issues[0], timeSpentSeconds=60, comment=comm) - self.jira_client.add_comment(issues[0], self.FAILURE_GENERAL_COMMENT) - else: - print("dry run, NOT updating ticket with details in worklog: " + comm) - print("general comments: " + self.FAILURE_GENERAL_COMMENT) - except JIRAError as e: - print("error logging issue to jira: " + str(e.status_code) + " " + e.text) diff --git a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/ocits_shared.py b/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/ocits_shared.py deleted file mode 100644 index 0dd9d9e872..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/ocits_shared.py +++ /dev/null @@ -1,499 +0,0 @@ -import json -import os -import re -import sys -import urllib3 -import six -import xml.etree.ElementTree as ET -from functools import reduce - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -from shared.bitbucket_utils import make_general_comment # noqa: ignore=F402 -import util # noqa: ignore=F402 -import config # noqa: ignore=F402 - -global verbose -verbose = False - -global dry_run -dry_run = False - - -NO_DEXREQ_MARKER = "NO-DEXREQ" - - -def printv(str): - global verbose - if verbose: - print(str) - - -DEFAULT_JIRA_ISSUE_FIELDS = ['summary', 'description', 'labels', 
'comment', 'status', 'reporter']
-CUSTOM_JIRA_ISSUE_FIELDS = [
-    config.CUSTOM_FIELD_ID_ARTIFACT_ID,
-    config.CUSTOM_FIELD_ID_GROUP_ID,
-    config.CUSTOM_FIELD_ID_ARTIFACT_VERSION,
-    config.CUSTOM_FIELD_ID_SPEC_LOCATION_IN_ARTIFACT,
-    config.CUSTOM_FIELD_ID_SPEC_FRIENDLY_NAME,
-    config.CUSTOM_FIELD_ID_SERVICE_SUBDOMAIN,
-    config.CUSTOM_FIELD_ID_FEATURE_IDS,
-    config.CUSTOM_FIELD_ID_JAVA_SDK_STATUS,
-    config.CUSTOM_FIELD_ID_PYTHON_SDK_STATUS,
-    config.CUSTOM_FIELD_ID_RUBY_SDK_STATUS,
-    config.CUSTOM_FIELD_ID_GO_SDK_STATUS,
-    config.CUSTOM_FIELD_ID_CLI_STATUS,
-    config.CUSTOM_FIELD_ID_SDK_CLI_GA_DATE,
-    config.CUSTOM_FIELD_ID_ISSUE_ROUTING_TAG
-]
-
-HELP_URL = "https://confluence.oci.oraclecorp.com/display/DEX/Pull+Request+Validation+Builds+for+the+Testing+Service"
-CODECOV_BASELINE_URL_MASTER = "https://objectstorage.us-phoenix-1.oraclecloud.com/p/5mzix52OhxjnITDKe5bdefXAwxOLpEUEta1czeh_aK4/n/dex-us-phoenix-1/b/codecov_baseline/o/codecov_master.json"
-CODECOV_BASELINE_URL_PREVIEW = "https://objectstorage.us-phoenix-1.oraclecloud.com/p/y4R2h_AwvDBBG0avDuy8ZilmHWQU8MrGP2GYadWP91Y/n/dex-us-phoenix-1/b/codecov_baseline/o/codecov_preview.json"
-
-JACOCO_XML_PATH = "{java_sdk_path}/bmc-integtests/target/jacoco.xml"
-
-NO_DEXREQ_TICKET_REFERENCED = """
-Not running the OCI testing service tests.
-
-The pull request title or description didn't reference any DEXREQ tickets, nor did it mention `{no_dexreq_marker}`.
-
-{author_text}Please edit the title or description to include a DEXREQ issue key, e.g. `DEXREQ-259`. If you are fixing a test or making other changes not related to a DEXREQ ticket, then include `{no_dexreq_marker}` in the title or description.
-
-Once you have made those changes, restart the [TeamCity build]({tc_link}) by asking an SDK/CLI team member on #oci_public_sdks to click on the 'Run' button.
-
-Example: `DEXREQ-259: {pr_title}`
-
-Example: `{no_dexreq_marker}: {pr_title}`
-
-For more information, see [Pull Request Validation Builds for the Testing Service]({help_url}).
-"""
-
-NO_TESTS_FOUND_BECAUSE_NO_GROUP_ARTIFACT_ID = """
-Not running the OCI testing service tests.
-
-The referenced DEXREQ {ticket_sop} did not have 'Spec Group Id' and 'Spec Artifact Id' set.
-
-{author_text}Please edit the referenced DEXREQ {ticket_sop} ({dexreq_tickets}) and fill out the 'Spec Group Id' and 'Spec Artifact Id' fields. Then restart the [TeamCity build]({tc_link}) by asking an SDK/CLI team member on #oci_public_sdks to click on the 'Run' button.
-
-For more information, see [Pull Request Validation Builds for the Testing Service]({help_url}).
-""" - - -# -# Extracting DEXREQ tickets from text -# - -def get_dexreq_tickets_from_text(text): - tickets = re.findall(r"DEXREQ-\d+", text) - - unique = reduce(lambda l, x: l + [x] if x not in l else l, tickets, []) - - if not unique: - printv("No DEXREQ tickets referenced in pull request title or description!") - else: - printv(unique) - - no_dexreq_marker = False - if not unique and NO_DEXREQ_MARKER in text: - # Only check if we didn't find any DEXREQ tickets - printv("Found {} marker in pull request.".format(NO_DEXREQ_MARKER)) - no_dexreq_marker = True - - return unique, no_dexreq_marker - - -def get_dexreq_tickets(pr, tc_link): - json = pr.json() - - if 'title' in json: - title = json['title'] - else: - title = "" - if 'description' in json: - description = json['description'] - else: - description = "" - - tickets, no_dexreq_marker = get_dexreq_tickets_from_text(title + "\n" + description) - - if not tickets and not no_dexreq_marker: - author_text = "" - json = pr.json() - if json['author'] and json['author']['user'] and json['author']['user']['name']: - author_text = "@{name}: ".format(name=json['author']['user']['name']) - - text = NO_DEXREQ_TICKET_REFERENCED.format( - tc_link=tc_link, - pr_title=title, - help_url=HELP_URL, - author_text=author_text, - no_dexreq_marker=NO_DEXREQ_MARKER) - - if not dry_run: - make_general_comment("SDK", "oci-testing-service", json['id'], text) - else: - print("DRY-RUN: Not making BitBucket comment\n{}".format(text)) - - # Don't fail the build - sys.exit(0) - - return tickets, no_dexreq_marker - - -# -# JIRA -# - - -def get_dev_status_info_for_issue(issue): - jira_internal_session = util.JIRA_CLIENT()._session - - issue_dev_status_url = config.JIRA_DEV_STATUS_REST_API_URL_FORMAT.format(issue.id) - return json.loads(jira_internal_session.get(issue_dev_status_url).content) - - -def get_pull_requests_for_issue(issue): - dev_status_info = get_dev_status_info_for_issue(issue) - if dev_status_info['errors']: - raise ValueError('There was an error retrieving pull request information for {}. 
Error(s): {}'.format( - issue.key, - json.dumps(dev_status_info['errors']) - )) - - return dev_status_info['detail'][0]['pullRequests'] - - -def get_jira_issue(issue_key): - return util.get_dexreq_issue(issue_key, fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS), expand=["changelog"]) - - -def filter_pull_requests(pull_requests, project, branch): - filtered = [] - for x in pull_requests: - if x['destination']['branch'] != branch: - continue - if x['destination']['repository']['avatarDescription'] != "SDK": - continue - if x['destination']['repository']['name'] != project: - continue - if x['status'] == "DECLINED": - printv("Ignoring pull request {}, it has been DECLINED".format(x['id'])) - continue - - filtered.append(x) - - return filtered - - -def get_group_and_artifact_ids_from_jira(issue_key): - issue = util.get_dexreq_issue(issue_key, fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS), expand=["changelog"]) - - group_id = getattr(issue.fields, config.CUSTOM_FIELD_ID_GROUP_ID) - if not group_id: - printv("No group id in JIRA item '{}'".format(issue_key)) - - artifact_id = getattr(issue.fields, config.CUSTOM_FIELD_ID_ARTIFACT_ID) - if not artifact_id: - printv("No artifact id in JIRA item '{}'".format(issue_key)) - - return group_id, artifact_id - - -def get_group_and_artifact_ids(tickets, tc_link, pr): - group_artifact_ids_set = set([]) - problems = [] - for issue_key in tickets: - group_id, artifact_id = get_group_and_artifact_ids_from_jira(issue_key) - - if group_id and artifact_id: - group_artifact_ids_set.add((issue_key, group_id, artifact_id)) - else: - problems.append(issue_key) - - printv("Ticket/group/artifact ids:\n{}".format("\n".join("\t{}/{}/{}".format(a,b,c) for a, b, c in group_artifact_ids_set))) - - if problems: - print("Tickets without group/artifact ids found!") - - author_text = "" - json = pr.json() - if json['author'] and json['author']['user'] and json['author']['user']['name']: - author_text = "@{name}: ".format(name=json['author']['user']['name']) - - text = NO_TESTS_FOUND_BECAUSE_NO_GROUP_ARTIFACT_ID.format( - tc_link=tc_link, - dexreq_tickets=", ".join(problems), - ticket_sop="ticket" if len(problems) == 1 else "tickets", - ticket_has_have="has" if len(problems) == 1 else "have", - help_url=HELP_URL, - author_text=author_text) - make_general_comment("SDK", "oci-testing-service", json['id'], text) - - # Don't fail the build - sys.exit(0) - - return group_artifact_ids_set - - -def get_issue_routing_info_tag(issue_key): - issue = util.get_dexreq_issue(issue_key, fields=(DEFAULT_JIRA_ISSUE_FIELDS + CUSTOM_JIRA_ISSUE_FIELDS), expand=["changelog"]) - - issue_routing_tag = getattr(issue.fields, config.CUSTOM_FIELD_ID_ISSUE_ROUTING_TAG) - - if not issue_routing_tag: - printv("No issue routing tag in JIRA item '{}'".format(issue_key)) - unique = [] - else: - tags = [x.strip() for x in issue_routing_tag.split(",")] - - unique = reduce(lambda l, x: l + [x] if x not in l else l, tags, []) - - return unique - - -def get_issue_routing_info_tag_from_description(description): - tags = re.findall("\\[IssueRoutingInfo.tag=([^]]+)\\]", description) - - unique = reduce(lambda l, x: l + [x] if x not in l else l, tags, []) - - return unique - - -def get_package_names_from_description(description): - tags = re.findall("\\[RunTestsForModule=([^]]+)\\]", description) - - unique = reduce(lambda l, x: l + [x] if x not in l else l, tags, []) - - return unique - - -def get_master_javasdk_pr_url(description): - results = re.findall("\\[master.pr.javasdk=([^]]+)\\]", 
description)
-
-    links = reduce(lambda l, x: l + [x] if x not in l else l, results, [])
-
-    if len(links) == 0:
-        return None
-    elif len(links) > 1:
-        raise ValueError("More than one Java SDK master PR URL found")
-
-    return links[0]
-
-
-class CommentedTreeBuilder(ET.XMLTreeBuilder):
-    def __init__(self, html=0, target=None):
-        ET.XMLTreeBuilder.__init__(self, html, target)
-        self._parser.CommentHandler = self.handle_comment
-
-    def handle_comment(self, data):
-        self._target.start(ET.Comment, {})
-        self._target.data(data)
-        self._target.end(ET.Comment)
-
-
-def parse_xml(file_name):
-    return ET.parse(file_name, parser=ET.XMLParser(target=CommentedTreeBuilder()))
-
-
-# given the service name and the operation name (which is the name of the test),
-# we return the xpath that maps to the code cov report xml entry for this method
-def get_method_xpath(class_name, operation):
-    method_xpath = ".//class[@name='{c}']//method[@name='{m}']".format(c=class_name, m=operation)
-    return method_xpath
-
-
-# given a service name (package) and a list of operations, aggregate the code cov values for a level (such as INSTRUCTION, LINE)
-# and category (missed or covered)
-def get_value_for_codecov(xml, class_name, operations, level, category):
-    value = 0
-    for operation in operations:
-        counter_xpath_format = "{m}/counter[@type='{lvl}']".format(m=get_method_xpath(class_name, operation), lvl=level)
-        cat_val = get_value_for_path_if_exists(xml, counter_xpath_format, category)
-        value += cat_val
-    return value
-
-
-def get_codecov_baseline_url(target_branch):
-    if target_branch == "master":
-        return CODECOV_BASELINE_URL_MASTER
-    elif target_branch == "preview":
-        return CODECOV_BASELINE_URL_PREVIEW
-    else:
-        printv("invalid target branch: " + target_branch)
-        sys.exit(2)
-
-
-def get_codecov_baseline(target_branch):
-    if not dry_run:
-        http = urllib3.PoolManager()
-        response = http.request("GET", get_codecov_baseline_url(target_branch))
-        return json.loads(response.data.decode("utf-8"))
-    else:
-        with open("./{b}_local_codecov.json".format(b=target_branch)) as data_file:
-            json_data = data_file.read()
-        return json.loads(json_data)
-
-
-def save_codecov_baseline(data, target_branch):
-    if not dry_run:
-        http = urllib3.PoolManager()
-        encoded_data = json.dumps(data).encode('utf-8')
-        response = http.request(
-            "PUT",
-            get_codecov_baseline_url(target_branch),
-            body=encoded_data,
-            headers={'Content-Type': 'application/json'})
-        return response.status
-    else:
-        file_name = os.path.join(".", "{b}_local_codecov.json".format(b=target_branch))
-        with open(file_name, 'w') as f:
-            f.write(json.dumps(data))
-            f.flush()
-
-
-def get_value_for_path_if_exists(xml, package_xpath_format, category):
-    value = 0
-    levelNode = xml.getroot().find(package_xpath_format)
-    if levelNode is not None:
-        categoryAttr = levelNode.get(category)
-        if categoryAttr is not None:
-            value += int(categoryAttr)
-        else:
-            print("for path {p}, category {c} is missing, code coverage report needs investigation".format(p=package_xpath_format, c=category))
-    else:
-        print("path {p} is missing, skipping the values".format(p=package_xpath_format))
-    return value
-
-
-# given the ops file, which has a json structure listing test class name, test method name, and issue routing tag,
-# we return a dictionary that maps {service class name, tag} => list of service methods
-# if the code coverage ops file doesn't exist, we return an empty map
-def get_class_tag_breakdown_from_ops(ops_file):
-    codecov_map = dict()
-    if os.path.isfile(ops_file):
-        with open(ops_file) as fp:
-            json_data = fp.read()
-            try:
-                ops = json.loads(json_data)
-                for entry in ops["Tests"]:
-                    tp = (entry["Class"], entry["Tag"])
-                    if tp not in codecov_map:
-                        codecov_map[tp] = set()
-                    codecov_map[tp].add(entry["Test"])
-            except ValueError:
-                print("get_class_tag_breakdown_from_ops: invalid code coverage operations file")
-    return codecov_map
-
-
-# given the ops file, we get the project key associated with tuple (class, tag). If multiple project keys are found, we return None
-# since in that case code coverage cannot be reported to a specific Jira project
-def get_project_key_for_class_tag(ops_file, class_tag_pair):
-    print(class_tag_pair)
-    project = None
-    if os.path.isfile(ops_file):
-        with open(ops_file) as fp:
-            json_data = fp.read()
-            try:
-                ops = json.loads(json_data)
-                for entry in ops["Tests"]:
-                    if entry["Class"] == class_tag_pair[0] and entry["Tag"] == class_tag_pair[1]:
-                        if project is None:
-                            project = entry["ProjectKey"]
-                        elif project != entry["ProjectKey"]:
-                            print("multiple project keys found for {}".format(class_tag_pair))
-                            print("cannot use code coverage data for cutting ticket")
-                            return None
-            except ValueError:
-                print("get_project_key_for_class_tag: invalid code coverage operations file")
-    return project
-
-
-def get_operations_from_test_methods(test_methods):
-    operations = []
-    for method in test_methods:
-        operations.append(method[:-(len("Test"))])
-    return operations
-
-
-# remove the IntegrationAutoTest suffix and append Client; for example,
-# com.oracle.bmc.email.EmailIntegrationAutoTest becomes com/oracle/bmc/email/EmailClient
-def get_class_from_test_name(test_class_name):
-    return test_class_name[:-(len("IntegrationAutoTest"))].replace(".", "/") + "Client"
-
-
-# return an array of jsons, each json will have structure something like {"class": classname, "tag": tagname, "data": {aggregated data}}
-def extract_package_codecov_data_from_reportxml(xml, ops_file):
-    codecov_map = get_class_tag_breakdown_from_ops(ops_file)
-    service_data = []
-    for tp, test_methods in six.iteritems(codecov_map):
-        operations = get_operations_from_test_methods(test_methods)
-        class_name = get_class_from_test_name(tp[0])
-        current_data = {
-            "missedInstructions": get_value_for_codecov(xml, class_name, operations, "INSTRUCTION", "missed"),
-            "coveredInstructions": get_value_for_codecov(xml, class_name, operations, "INSTRUCTION", "covered"),
-            "missedBranches": get_value_for_codecov(xml, class_name, operations, "BRANCH", "missed"),
-            "coveredBranches": get_value_for_codecov(xml, class_name, operations, "BRANCH", "covered"),
-            "missedLines": get_value_for_codecov(xml, class_name, operations, "LINE", "missed"),
-            "coveredLines": get_value_for_codecov(xml, class_name, operations, "LINE", "covered")
-        }
-        class_tag_data = {
-            "class": class_name,
-            "tag": tp[1],
-            "data": current_data,
-            "testClass": tp[0]
-        }
-        service_data.append(class_tag_data)
-    return service_data
-
-
-# return an array of jsons, each json will have structure something like {"class": classname, "tag": tagname, "data": {aggregated data}}
-# from the baseline code coverage json. we match the classname/tag from ops_file to filter the results
-def extract_package_codecov_data_from_baseline(baseline_json, ops_file):
-    codecov_map = get_class_tag_breakdown_from_ops(ops_file)
-    codecov_data = []
-    for tp in codecov_map:
-        codecov_class_tag_data = list(filter(lambda class_tag: class_tag["class"] == get_class_from_test_name(tp[0]) and class_tag["tag"] == tp[1], sum(baseline_json.values(), [])))
-        codecov_data.extend(codecov_class_tag_data)
-    return codecov_data
-
-
-# Given a list of DEXREQ tickets and a list of package names from the pull request description,
-# generate the text that tells users how they can limit the tests that get run.
-def get_limit_text(tickets, package_names_from_description):
-    if tickets:
-        return "If you want to limit the number of tests run, please edit the referenced DEXREQ {ticket_sop} ({dexreq_tickets}) and fill out the 'Issue Routing Tag' {field_sop}, or include an issue routing tag in the form of `[IssueRoutingInfo.tag=sometag]` in the description of the pull request.".format(
-            dexreq_tickets=", ".join(tickets),
-            ticket_sop="ticket" if len(tickets) == 1 else "tickets",
-            field_sop="field" if len(tickets) == 1 else "fields")
-    elif not tickets and package_names_from_description:
-        return "If you want to limit the number of tests run, please include an issue routing tag in the form of `[IssueRoutingInfo.tag=sometag]` in the description of the pull request."
-    else:
-        return None
-
-
-# Given a list of DEXREQ tickets and a list of package names from the pull request description,
-# generate the text that tells users how the tests to be run were determined.
-def get_determined_text(tickets, package_names_from_description, test_classes):
-    if tickets and package_names_from_description:
-        return "{this_class_was_these_classes_were} determined using the 'Spec Group Id' and 'Spec Artifact Id' values set in the referenced DEXREQ {ticket_sop} {dexreq_tickets} and the `[RunTestsForModule=xyz]` {annotation_sop} in the pull request description.".format(
-            this_class_was_these_classes_were="This class was" if len(test_classes) == 1 else "These classes were",
-            dexreq_tickets=", ".join(tickets),
-            ticket_sop="ticket" if len(tickets) == 1 else "tickets",
-            annotation_sop="annotation" if len(package_names_from_description) == 1 else "annotations")
-    elif tickets and not package_names_from_description:
-        return "{this_class_was_these_classes_were} determined using the 'Spec Group Id' and 'Spec Artifact Id' values set in the referenced DEXREQ {ticket_sop} {dexreq_tickets}.".format(
-            this_class_was_these_classes_were="This class was" if len(test_classes) == 1 else "These classes were",
-            dexreq_tickets=", ".join(tickets),
-            ticket_sop="ticket" if len(tickets) == 1 else "tickets")
-    elif not tickets and package_names_from_description:
-        return "{this_class_was_these_classes_were} determined using the {annotations} {annotation_sop} in the pull request description.".format(
-            this_class_was_these_classes_were="This class was" if len(test_classes) == 1 else "These classes were",
-            annotations=", ".join("`[RunTestsForModule={}]`".format(p) for p in package_names_from_description),
-            annotation_sop="annotation" if len(package_names_from_description) == 1 else "annotations")
-    else:
-        return None
diff --git a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/report_test_results.py
b/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/report_test_results.py deleted file mode 100644 index 6f32721474..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/report_test_results.py +++ /dev/null @@ -1,505 +0,0 @@ -from __future__ import print_function -import argparse -import os -import ssl -import sys -import urllib3 -from glob import glob -import ocits_shared -from ocits_shared import HELP_URL, TC_URL, JACOCO_XML_PATH, get_dexreq_tickets_from_text, printv, parse_xml, extract_package_codecov_data_from_reportxml, get_class_tag_breakdown_from_ops, get_class_from_test_name, get_package_names_from_description, get_determined_text - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -from shared.bitbucket_utils import setup_bitbucket, get_pullrequest, make_general_comment # noqa: ignore=F402 -import shared.bitbucket_utils # noqa: ignore=F402 -import util # noqa: ignore=F402 -import config # noqa: ignore=F402 - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - -# -# Warning messages -# - -NO_TESTS_RUN = """ -No tests could be run. The automation attempted to run matching tests, but none were found. - -{author_text}Please make sure that you have specified the correct 'Issue Routing Tag' in the referenced DEXREQ tickets (or in the description of this pull request). Then restart the [TeamCity build]({tc_link}) by asking an SDK/CLI team member on #oci_public_sdks to click on the 'Run' button. - -For more information, see [Pull Request Validation Builds for the Testing Service]({help_url}). -""" - -NO_TESTS_MATCHED = """ -The automation tried to run tests, but no tests matched the 'Issue Routing Tag'{issue_routing_tag_text}{test_classes_text} - -{author_text}Please make sure that you have specified the correct 'Issue Routing Tag' in the referenced DEXREQ tickets (or in the description of this pull request). Then restart the [TeamCity build]({tc_link}) by asking an SDK/CLI team member on #oci_public_sdks to click on the 'Run' button. - -For more information, see [Pull Request Validation Builds for the Testing Service]({help_url}). -""" - -PARTIAL_TEXT_TESTS_SKIPPED = """ - - -The following {skipped_tests_sop} were skipped because they were not whitelisted: - -{skipped} -""" - -ALL_TESTS_SKIPPED = """ -{summary_text} - -{author_text}No tests were run, because no tests were whitelisted. Please make sure your tests are whitelisted.{skipped_text} - -More details are available in the log of the [TeamCity build]({tc_link}). - -For more information, see [Pull Request Validation Builds for the Testing Service]({help_url}). -""" - -TESTS_SUCCESSFUL = """ -{summary_text} - -All tests were successful. The following {successful_tests_sop} succeeded: - -{successful}{skipped_text} - -More details are available in the log of the [TeamCity build]({tc_link}). - -For more information, see [Pull Request Validation Builds for the Testing Service]({help_url}).{codecov} -""" - -PARTIAL_TEXT_TESTS_SUCCEEDED = """ - - -The following {successful_tests_sop} succeeded: -{successful} -""" - -TESTS_FAILED = """ -{summary_text} - -{author_text}Please make sure the tests pass. 
The following {failed_tests_sop} failed:
-{failed}{succeeded_text}{skipped_text}
-
-More details are available in the log of the [TeamCity build]({tc_link}).
-
-For more information, see [Pull Request Validation Builds for the Testing Service]({help_url}).{codecov}
-"""
-
-BASELINE_CODECOV_LINK = """
-
-[Baseline code coverage report]({baseline_report_url})
-"""
-
-CODECOV_PACKAGE_TEXT = """
-##### `{package}`:
-- Instruction coverage = {instr}
-- Branch coverage = {branch}
-- Line coverage = {line} {baseline_codecov_link}
-
-
-"""
-
-CODECOV_TEXT = """
-
-
-###### Code Coverage Report:
-
-{packagetext}
-
-Full report available at [code coverage report](https://teamcity.oci.oraclecorp.com/repository/download/Sdk_OCI_Testing_Service_BuildPullRequest/{build_id}:id/jacocoreport/index.html).
-"""
-
-CODECOV_FAILURE_TEXT = """
-Build failed to meet the code coverage criteria (currently set to {p} percent).
-"""
-
-
-def get_summary_text(failed, skipped, successful):
-    if not failed and not skipped and not successful:
-        return "No tests were run."
-
-    total_count = len(failed) + len(skipped) + len(successful)
-
-    text = "The {test_tests_sop} finished. ".format(
-        test_tests_sop="test" if total_count == 1 else "tests")
-    previous = False
-
-    if failed:
-        text = text + "{count} {test_tests_sop} failed".format(
-            count=len(failed),
-            test_tests_sop="test" if len(failed) == 1 else "tests")
-        previous = True
-    if skipped:
-        text = text + "{comma}{count} {test_tests_sop} were skipped".format(
-            comma=", " if previous else "",
-            count=len(skipped),
-            test_tests_sop="test" if len(skipped) == 1 else "tests")
-        previous = True
-    if successful:
-        text = text + "{comma}{count} {test_tests_sop} were successful".format(
-            comma=", " if previous else "",
-            count=len(successful),
-            test_tests_sop="test" if len(successful) == 1 else "tests")
-        previous = True
-
-    text = text + "."
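As a usage illustration, the branches above compose one-line summaries like the following; this is a minimal stand-alone rendition of the same logic with invented inputs, not part of the original script:

```python
def summarize(failed, skipped, successful):
    # Mirrors get_summary_text's branching: count each bucket and join with commas.
    parts = []
    if failed:
        parts.append("{} {} failed".format(len(failed), "test" if len(failed) == 1 else "tests"))
    if skipped:
        parts.append("{} {} were skipped".format(len(skipped), "test" if len(skipped) == 1 else "tests"))
    if successful:
        parts.append("{} {} were successful".format(len(successful), "test" if len(successful) == 1 else "tests"))
    total = len(failed) + len(skipped) + len(successful)
    return "The {} finished. ".format("test" if total == 1 else "tests") + ", ".join(parts) + "."

print(summarize(["a"], [], ["b", "c"]))
# The tests finished. 1 test failed, 2 tests were successful.
```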
-
-    return text
-
-
-def get_test_xml_files(java_sdk_path):
-    junit_xml_dir = os.path.join(java_sdk_path, "bmc-integtests/target/junit-xml")
-    printv("Looking for TEST-*.xml files in {}".format(junit_xml_dir))
-    return [y for x in os.walk(junit_xml_dir) for y in glob(os.path.join(x[0], 'TEST-*.xml'))]
-
-
-def process_junit_xml(successful, failed, pom):
-    name = pom.getroot().get('name').encode('utf8')
-    errors = int(pom.getroot().get('errors').encode('utf8'))
-    failures = int(pom.getroot().get('failures').encode('utf8'))
-
-    print("errors: {} failures: {} -- {}".format(errors, failures, name))
-
-    if errors + failures == 0:
-        successful.append(name)
-        return True
-
-    # Something failed
-    short_message = None
-    message = None
-    exception_type = None
-    for test_case in pom.getroot().iter("testcase"):
-        for error in test_case.iter("error"):
-            message_node = error.get('message')
-            if message_node:
-                short_message = error.get('message').encode('utf8')
-            else:
-                short_message = "message attribute missing"
-                print("message attribute missing")
-            exception_type = error.get('type').encode('utf8')
-            message = error.text.encode('utf8')
-
-    print("{} -- {}: {}".format(name, exception_type, short_message))
-    failed.append((name, errors, failures, short_message, message, exception_type))
-
-    return False
-
-
-def get_just_stack_trace(message):
-    text = ""
-    skip = True
-
-    for line in message.split("\n"):
-        if text and not skip:
-            text = text + "\n"
-
-        if line.startswith("\tat "):
-            skip = False
-
-        if not skip:
-            text = text + line
-
-    return text.strip()
-
-
-def simplify_stack_trace(stack_trace):
-    text = ""
-    skip = False
-
-    for line in stack_trace.split("\n"):
-        if text and not skip:
-            text = text + "\n"
-
-        if line.startswith("\tat org.glassfish.jersey.server.model.internal."):
-            skip = True
-
-        if line.startswith("Caused by"):
-            skip = False
-
-        if not skip:
-            text = text + line
-
-    return text
-
-
-def get_failed_text(failed):
-    text = ""
-    for t, e, f, sm, m, et in failed:
-        if text:
-            text = text + "\n"
-
-        # Ignore unimportant stack trace in short message; find() returns -1 if the marker is absent
-        sm_ignore_pos = sm.find("\n\tat sun.reflect.GeneratedMethodAccessor54.invoke")
-        if sm_ignore_pos != -1:
-            sm = sm[:sm_ignore_pos]
-
-        sm = simplify_stack_trace(sm)
-        m = simplify_stack_trace(m)
-
-        sm = sm.replace(get_just_stack_trace(m), "")
-
-        text = text + "- `{}`: {}\n\nException:\n```{}```\n".format(t, sm, m)
-
-    return text
-
-
-# get the persisted previous build codecov numbers from object storage for comparison
-# get the baseline code coverage from branch, get the current code coverage from xml report
-# compare and report
-def get_codecov_text(target_branch, ops_file, code_coverages):
-    if not ops_file:
-        return ""
-    # if target branch is not provided, assume no codecov reporting is needed, for backward compatibility
-    if not target_branch:
-        return ""
-    # baseline_json = get_codecov_baseline(target_branch)
-    xml_file_path = JACOCO_XML_PATH.format(java_sdk_path=args.java_sdk_path)
-    report_xml = parse_xml(xml_file_path)
-    package_text = ""
-    codecov_map = get_class_tag_breakdown_from_ops(ops_file)
-    service_data = extract_package_codecov_data_from_reportxml(report_xml, ops_file)
-    # baseline_data = extract_package_codecov_data_from_baseline(baseline_json, ops_file)
-    for tp in codecov_map:
-        class_name = get_class_from_test_name(tp[0])
-        service_item = list(filter(lambda class_tag: class_tag["class"] ==
class_name and class_tag["tag"] == tp[1], baseline_data)) - # disable baseline comparison for now - package_text += get_codecov_package_text(service_item[0], None, "class: {c}, tag: {t}".format(c=class_name, t=tp[1]), code_coverages) - return CODECOV_TEXT.format(packagetext=package_text, build_id=args.build_id) - - -def get_codecov_lineitem_text(covered, missed): - baseline = covered + missed - current = (covered * 100 / baseline) if baseline != 0 else 0 - return str(current) + "% " - - -def get_codecov_lineitem_text_with_baseline(covered, missed, baseline_covered, baseline_missed): - current = get_percentage(covered, missed) - baseline = get_percentage(baseline_covered, baseline_missed) - delta = "" - if current == baseline: - delta = "**(no change)**" - elif current > baseline: - delta = "**( +" + str(current - baseline) + "%)**" - else: - delta = "**( -" + str(baseline - current) + "%)**" - return str(current) + "% " + delta - - -def get_percentage(covered, missed): - denominator = covered + missed - return int(covered * 100 / denominator) if denominator != 0 else 100 - - -# we get current and baseline structure and we compare corresponding values -def get_codecov_package_text(current_data, baseline_data, package_label, code_coverages): - current = current_data["data"] - code_coverages.append(get_percentage(current["coveredInstructions"], current["missedInstructions"])) - if baseline_data: - baseline = baseline_data["data"] - return CODECOV_PACKAGE_TEXT.format( - package=package_label, - instr=get_codecov_lineitem_text_with_baseline(current["coveredInstructions"], current["missedInstructions"], baseline["coveredInstructions"], baseline["missedInstructions"]), - branch=get_codecov_lineitem_text_with_baseline(current["coveredBranches"], current["missedBranches"], baseline["coveredBranches"], baseline["missedBranches"]), - line=get_codecov_lineitem_text_with_baseline(current["coveredLines"], current["missedLines"], baseline["coveredLines"], baseline["missedLines"]), - baseline_codecov_link=BASELINE_CODECOV_LINK.format(baseline_report_url=baseline["url"])) - else: - return CODECOV_PACKAGE_TEXT.format( - package=package_label, - instr=get_codecov_lineitem_text(current["coveredInstructions"], current["missedInstructions"]), - branch=get_codecov_lineitem_text(current["coveredBranches"], current["missedBranches"]), - line=get_codecov_lineitem_text(current["coveredLines"], current["missedLines"]), - baseline_codecov_link="") - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Report test results.') -parser.add_argument('--build-id', required=True, help="The TeamCity build id for the build that is running this script. 
This is used to update the relevant Bitbucket PRs with links to the TeamCity build")
-parser.add_argument('--build-branch', required=True, help="The value of the teamcity.build.branch variable")
-parser.add_argument('-t', '--target-branch', required=False, help="The target branch - master or preview")
-parser.add_argument('--java-sdk-path', required=True, help="Path to the root directory of the Java SDK")
-parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output')
-parser.add_argument('--dry-run', action='store_true', default=False, required=False, help='Dry-run, do not post to PR')
-parser.add_argument('--codecov-operations-file', required=True, help='list of operations to consider for code coverage')
-parser.add_argument('--expected-coverage-percentage', type=int, required=False, help='fail the PR if code coverage numbers drop')
-
-args = parser.parse_args()
-setup_bitbucket(args)
-
-ocits_shared.dry_run = args.dry_run
-shared.bitbucket_utils.dry_run = args.dry_run
-
-if args.verbose:
-    ocits_shared.verbose = True
-    shared.bitbucket_utils.verbose = True
-
-pr_id = None
-try:
-    # If the teamcity.build.branch variable is just something like "1234", then this is a
-    # validation build for pull request "1234"
-    pr_id = int(args.build_branch)
-except ValueError:
-    # Only print this when using verbose, since we want the output to be the target branch.
-    printv("Not a pull request validation build.")
-    sys.exit(2)
-
-pr = get_pullrequest("SDK", "oci-testing-service", pr_id)
-printv(pr.text)
-pr_json = pr.json()
-tc_link = TC_URL.format(build_id=args.build_id)
-
-if 'title' in pr_json:
-    title = pr_json['title']
-else:
-    title = ""
-if 'description' in pr_json:
-    description = pr_json['description']
-else:
-    description = ""
-
-author_text = ""
-pr_json = pr.json()
-if pr_json['author'] and pr_json['author']['user'] and pr_json['author']['user']['name']:
-    author_text = "@{name}: ".format(name=pr_json['author']['user']['name'])
-
-package_names_from_description = get_package_names_from_description(description)
-tickets, no_dexreq_marker = get_dexreq_tickets_from_text(title + "\n" + description)
-
-if no_dexreq_marker:
-    sys.exit(0)
-
-test_xml_files = get_test_xml_files(args.java_sdk_path)
-
-if not test_xml_files:
-    text = NO_TESTS_RUN.format(
-        tc_link=tc_link,
-        help_url=HELP_URL,
-        author_text=author_text)
-
-    make_general_comment("SDK", "oci-testing-service", pr_id, text)
-
-    sys.exit(1)
-
-successful = []
-failed = []
-
-for f in test_xml_files:
-    print("processing " + f)
-    test_xml = parse_xml(f)
-    process_junit_xml(successful, failed, test_xml)
-
-successful_text = "\n".join("- `{}`".format(t) for t in successful)
-
-# Filter out skipped tests (org.junit.AssumptionViolatedException) -- not whitelisted
-skipped = []
-for t, e, f, sm, m, et in failed:
-    if et == 'org.junit.AssumptionViolatedException':
-        # This is a "no tests matched the issue routing tag" error
-        skipped.append((t, e, f, sm, m, et))
-
-skipped_text = ""
-if skipped:
-    skipped_text = PARTIAL_TEXT_TESTS_SKIPPED.format(
-        skipped_tests_sop="test" if len(skipped) == 1 else "tests",
-        skipped="\n".join("- `{}`".format(t) for t, e, f, sm, m, et in skipped))
-
-# Remove errors that were skipped
-failed[:] = [x for x in failed if x not in skipped]
-
-# Find errors due to no matching tests
-no_matching_tests = []
-for t, e, f, sm, m, et in failed:
-    if t == 'initializationError(org.junit.runner.manipulation.Filter)' and sm.startswith('No tests found matching ProjectFilter'):
-        #
This is a "no tests matched the issue routing tag" error - no_matching_tests.append((t, e, f, sm, m, et)) - -# Remove errors due to no matching tests from list of regular failures -failed[:] = [x for x in failed if x not in no_matching_tests] - -if not failed and not successful and not skipped: - # All failures were "no tests matched the issue routing tags" -- there were no actual test failures - issue_routing_tag_text = "" - if "ISSUE_ROUTING_TAG" in os.environ: - issue_routing_tags = os.environ.get("ISSUE_ROUTING_TAG").split(",") - issue_routing_tag_text = " {}".format(", ".join("`{}`".format(t) for t in issue_routing_tags)) - - test_classes_text = "" - if "TEST_CLASSES" in os.environ: - test_classes = os.environ.get("TEST_CLASSES").split(",") - determined_text = get_determined_text(tickets, package_names_from_description, test_classes) - test_classes_text = " in the following {class_sop}:\n\n{test_classes}\n\n{determined_text}".format( - class_sop="class" if len(test_classes) == 1 else "classes", - test_classes="\n".join("- `{}`".format(c) for c in test_classes), - determined_text=determined_text) - - text = NO_TESTS_MATCHED.format( - tc_link=tc_link, - issue_routing_tag_text=issue_routing_tag_text, - test_classes_text=test_classes_text, - help_url=HELP_URL, - author_text=author_text) - - make_general_comment("SDK", "oci-testing-service", pr_id, text) - - sys.exit(1) -elif failed: - failed_text = get_failed_text(failed) - succeeded_text = "" - if successful: - succeeded_text = PARTIAL_TEXT_TESTS_SUCCEEDED.format( - successful=successful_text, - successful_tests_sop="test" if len(successful) == 1 else "tests") - code_coverages = [] - text = TESTS_FAILED.format( - tc_link=tc_link, - failed=failed_text, - failed_tests_sop="test" if len(failed) == 1 else "tests", - succeeded_text=succeeded_text, - skipped_text=skipped_text, - help_url=HELP_URL, - summary_text=get_summary_text(failed, skipped, successful), - author_text=author_text, - codecov=get_codecov_text(args.target_branch, args.codecov_operations_file, code_coverages)) - - make_general_comment("SDK", "oci-testing-service", pr_id, text) - - sys.exit(1) -elif not failed and not successful and skipped: - text = ALL_TESTS_SKIPPED.format( - tc_link=tc_link, - skipped_text=skipped_text, - help_url=HELP_URL, - summary_text=get_summary_text(failed, skipped, successful), - author_text=author_text) - - make_general_comment("SDK", "oci-testing-service", pr_id, text) - - sys.exit(1) -else: - code_coverages = [] - text = TESTS_SUCCESSFUL.format( - tc_link=tc_link, - successful=successful_text, - successful_tests_sop="test" if len(successful) == 1 else "tests", - skipped_text=skipped_text, - help_url=HELP_URL, - summary_text=get_summary_text(failed, skipped, successful), - codecov=get_codecov_text(args.target_branch, args.codecov_operations_file, code_coverages)) - fail_on_code_coverage = False - print(code_coverages) - if args.expected_coverage_percentage is not None: - for code_coverage in code_coverages: - if code_coverage < args.expected_coverage_percentage: - fail_on_code_coverage = True - break - make_general_comment("SDK", "oci-testing-service", pr_id, text) - if fail_on_code_coverage: - make_general_comment("SDK", "oci-testing-service", pr_id, CODECOV_FAILURE_TEXT.format(p=args.expected_coverage_percentage)) - sys.exit(1) diff --git a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/report_testing_service_failure.py b/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/report_testing_service_failure.py deleted file mode 
100644 index a499ea9b44..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/report_testing_service_failure.py +++ /dev/null @@ -1,98 +0,0 @@ -from __future__ import print_function -import argparse -import os -import ssl -import sys -import urllib3 -import ocits_shared -from ocits_shared import printv, HELP_URL, TC_URL - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -from shared.bitbucket_utils import setup_bitbucket, get_pullrequest, make_general_comment # noqa: ignore=F402 -import shared.bitbucket_utils # noqa: ignore=F402 -import util # noqa: ignore=F402 -import config # noqa: ignore=F402 - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - -# -# Warning messages -# - -FAILURE = """ -{author_text}{message} - -{if_log_text}More information is {if_log_also}available in the [TeamCity build log]({tc_link}). - -Once you have fixed the problem, you can restart the [TeamCity build]({tc_link}) by asking an SDK/CLI team member on #oci_public_sdks to click on the 'Run' button. That is not necessary if the fix required a code change to the OCI Testing Service. In that case, a new validation build will start automatically. - -For more information, see [Pull Request Validation Builds for the Testing Service]({help_url}).{log} -""" - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Report testing service failure.') -parser.add_argument('--build-id', required=True, help="The TeamCity build id for the build that is running this script. 
This is used to update the relevant Bitbucket PRs with links to the TeamCity build") -parser.add_argument('--build-branch', required=True, help="The value of the teamcity.build.branch variable") -parser.add_argument('--log-input', required=False, help="Path to the log file for diagnostic output") -parser.add_argument('--message', required=True, help="Message to include in the PR comment") -parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output') -parser.add_argument('--dry-run', action='store_true', default=False, required=False, help='Dry-run, do not post to PR') - -args = parser.parse_args() -setup_bitbucket(args) - -ocits_shared.dry_run = args.dry_run - -if args.verbose: - ocits_shared.verbose = True - shared.bitbucket_utils.verbose = True - -pr_id = None -try: - # If the teamcity.build.branch variable is just something like "1234", then this is a - # validation build for pull request "1234" - pr_id = int(args.build_branch) -except ValueError: - print("Not a pull request validation build.") - sys.exit(2) - -pr = get_pullrequest("SDK", "oci-testing-service", pr_id) -printv(pr.text) - -author_text = "" -json = pr.json() -if json['author'] and json['author']['user'] and json['author']['user']['name']: - author_text = "@{name}: ".format(name=json['author']['user']['name']) - -log = "" -if args.log_input and os.path.exists(args.log_input): - file = open(args.log_input, "r") - log = file.read() - log = "\n\n```\n" + log + "```" - -tc_link = TC_URL.format(build_id=args.build_id) - -text = FAILURE.format( - message=args.message, - tc_link=tc_link, - help_url=HELP_URL, - log=log, - if_log_text="Please use the information below to diagnose the problem. " if log else "", - if_log_also="also " if log else "", - author_text=author_text) - -if args.dry_run: - print("DRY-RUN: {}".format(text)) -else: - make_general_comment("SDK", "oci-testing-service", pr_id, text) - -# Don't fail the build, let that happen in the TC script -sys.exit(0) diff --git a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/report_testing_service_stderr.py b/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/report_testing_service_stderr.py deleted file mode 100644 index 3d8eef9862..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/report_testing_service_stderr.py +++ /dev/null @@ -1,171 +0,0 @@ -from __future__ import print_function -import argparse -import os -import ssl -import sys -import urllib3 -import ocits_shared -from ocits_shared import printv, HELP_URL, TC_URL - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -from shared.bitbucket_utils import setup_bitbucket, get_pullrequest, make_general_comment # noqa: ignore=F402 -import shared.bitbucket_utils # noqa: ignore=F402 -import util # noqa: ignore=F402 -import config # noqa: ignore=F402 - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - - -START_SKIPPING_SUBSTRINGS = [ - "at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)", - "at org.jvnet.hk2.internal.ServiceLocatorImpl.internalGetService" -] - - -def simplify_stack_traces(log): - simplified_log = "" - - lines = log.split("\n") - line_num = 0 - while line_num < len(lines): - 
line = lines[line_num] - line_num += 1 - - should_skip = False - for substring in START_SKIPPING_SUBSTRINGS: - if line.strip().startswith(substring): - should_skip = True - break - - if should_skip: - while line_num < len(lines): - line = lines[line_num] - line_num += 1 - - if not line.strip().startswith("at"): - # stop skipping and process this line again - line_num -= 1 - break - else: - simplified_log = simplified_log + "\n" + line - - return simplified_log - - -ERROR_SUBSTRINGS = [ - "org.glassfish.jersey.internal.Errors logErrors", - "WARN org.eclipse.jetty.server.handler.ContextHandler.ROOT - unavailable" -] - - -def check_and_summarize_log_file(log): - for e in ERROR_SUBSTRINGS: - if e in log: - summarized_log = "" - lines = log.split("\n") - line_num = 0 - while line_num < len(lines): - line = lines[line_num] - line_num += 1 - - if e in line: - summarized_log = line - while line_num < len(lines): - line = lines[line_num] - line_num += 1 - - if line.strip().startswith("["): - break - - summarized_log = summarized_log + "\n" + line - - # Only collect the first example - break - - return simplify_stack_traces(summarized_log) - - return None - - -# -# Warning messages -# - -FAILURE = """ -The OCI testing service does not appear to have started correctly. - -{author_text}Please use the information below to diagnose the problem. More information is also available in the [TeamCity build log]({tc_link}). - -Once you have fixed the problem, you can restart the [TeamCity build]({tc_link}) by asking an SDK/CLI team member on #oci_public_sdks to click on the 'Run' button. That is not necessary if the fix required a code change to the OCI Testing Service. In that case, a new validation build will start automatically. - -For more information, see [Pull Request Validation Builds for the Testing Service]({help_url}).{log} -""" - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Report testing service failure using service.stderr.log file.') -parser.add_argument('--build-id', required=True, help="The TeamCity build id for the build that is running this script. 
This is used to update the relevant Bitbucket PRs with links to the TeamCity build")
-parser.add_argument('--build-branch', required=True, help="The value of the teamcity.build.branch variable")
-parser.add_argument('--log-input', required=True, help="Path to the log file for diagnostic output")
-parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output')
-parser.add_argument('--dry-run', action='store_true', default=False, required=False, help='Dry-run, do not post to PR')
-
-args = parser.parse_args()
-setup_bitbucket(args)
-
-ocits_shared.dry_run = args.dry_run
-shared.bitbucket_utils.dry_run = args.dry_run
-
-if args.verbose:
-    ocits_shared.verbose = True
-    shared.bitbucket_utils.verbose = True
-
-pr_id = None
-try:
-    # If the teamcity.build.branch variable is just something like "1234", then this is a
-    # validation build for pull request "1234"
-    pr_id = int(args.build_branch)
-except ValueError:
-    print("Not a pull request validation build.")
-    sys.exit(2)
-
-pr = get_pullrequest("SDK", "oci-testing-service", pr_id)
-printv(pr.text)
-
-author_text = ""
-json = pr.json()
-if json['author'] and json['author']['user'] and json['author']['user']['name']:
-    author_text = "@{name}: ".format(name=json['author']['user']['name'])
-
-with open(args.log_input, "r") as log_file:
-    log = log_file.read()
-
-log = check_and_summarize_log_file(log)
-
-if not log:
-    sys.exit(0)
-
-log = "\n\n```" + log + "```"
-
-tc_link = TC_URL.format(build_id=args.build_id)
-
-text = FAILURE.format(
-    tc_link=tc_link,
-    help_url=HELP_URL,
-    log=log,
-    author_text=author_text)
-
-if args.dry_run:
-    print("DRY-RUN: {}".format(text))
-else:
-    make_general_comment("SDK", "oci-testing-service", pr_id, text)
-
-sys.exit(1)
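For reference, the pair of helpers above reduces a noisy `service.stderr.log` to the first block that matches a known error substring, with runs of `at ...` stack frames elided. A self-contained sketch of the frame-skipping idea (the prefixes and the log are toy stand-ins, not the production lists):

```python
# Stand-ins for START_SKIPPING_SUBSTRINGS; the real lists live in the script above.
SKIP_PREFIXES = ("at sun.reflect.", "at org.jvnet.hk2.")


def simplify(log):
    kept = []
    skipping = False
    for line in log.split("\n"):
        stripped = line.strip()
        if stripped.startswith(SKIP_PREFIXES):
            skipping = True   # entered a run of uninteresting stack frames
            continue
        if skipping and stripped.startswith("at"):
            continue          # still inside the frame run
        skipping = False      # first non-frame line ends the run
        kept.append(line)
    return "\n".join(kept)


log = "ERROR boom\nat sun.reflect.NativeConstructorAccessorImpl\nat com.example.Foo\nCaused by: oops"
print(simplify(log))  # keeps "ERROR boom" and "Caused by: oops"
```

The stop condition mirrors the original: skipping continues only while stripped lines still start with `at`.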
diff --git a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/save_codecov_data.py b/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/save_codecov_data.py
deleted file mode 100644
index f3b92ce838..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/save_codecov_data.py
+++ /dev/null
@@ -1,94 +0,0 @@
-#
-# we get called by the scheduled job that runs tests for a service.
-# we locate the jacoco report xml, extract the code coverage numbers and save the information
-# to a predefined object storage location
-#
-import argparse
-import json
-import re
-import datetime
-import ocits_shared
-from ocits_shared import JACOCO_XML_PATH, parse_xml, get_codecov_baseline, save_codecov_baseline, extract_package_codecov_data_from_reportxml
-from jira_ticket_reporter import JiraTicketReporter
-
-CODECOV_REPORT_URL = "https://teamcity.oci.oraclecorp.com/repository/download/{build_config_id}/{build_id}:id/jacocoreport/index.html"
-#
-# xml_file_path - code coverage xml file expected at a specific location
-# package - the name of the service, such as email
-# target_branch - the branch the tests were run against - currently master or preview
-# we get the saved baseline data from object storage, extract the current code coverage data
-# from the report xml, and update the object storage with this information
-# if build_config_id is provided, we use it as is to generate the URL for the code coverage report;
-# otherwise we try to guess the URL using build_config_name
-#
-
-
-def get_percent_tests_passed(ops_file):
-    with open(ops_file) as fp:
-        json_data = fp.read()
-    ops = json.loads(json_data)
-    summary = ops["Summary"]
-    return int((summary["Tests run"] - (summary["Skipped"] + summary["Failures"])) * 100 / summary["Tests run"])
-
-
-# apart from saving data to the PAR, we also now file a bug if code coverage drops
-def save_codecov_data_from_reportxml(xml_file_path, package, ops_file, target_branch, build_config_name, build_config_id, build_id, expected_coverage_percentage, dry_run):
-    report_xml = parse_xml(xml_file_path)
-    baseline_json = get_codecov_baseline(target_branch)
-    class_tag_list = extract_package_codecov_data_from_reportxml(report_xml, ops_file)
-    print(class_tag_list)
-    utc = datetime.datetime.utcnow()
-    for class_tag in class_tag_list:
-        current_data = class_tag["data"]
-        current_data["url"] = get_codecov_report_url(build_config_name, build_config_id, build_id)
-        current_data["timestamp"] = utc.strftime("%Y-%m-%dT%H:%M:%Sz")
-        current_data["testsPassedPercentage"] = get_percent_tests_passed(ops_file)
-        # project_key = get_project_key_for_class_tag(ops_file, (class_tag["testClass"], class_tag["tag"]))
-        # https://jira.oci.oraclecorp.com/browse/APOLLO-2549 : Change Project key to cut ticket for Apollo
-        log_jira_if_codecov_drops(class_tag, "APOLLO", expected_coverage_percentage, dry_run)
-    baseline_json[package] = class_tag_list
-    error_code = save_codecov_baseline(baseline_json, target_branch)
-    print("error code for saving baseline: " + str(error_code))
-
-
-# compare the current instruction coverage percentage; if it is below the specified bar, open/update a jira ticket for the specified project key
-def log_jira_if_codecov_drops(class_tag, project_key, expected_coverage_percentage, dry_run):
-    total = (class_tag["data"]["coveredInstructions"] + class_tag["data"]["missedInstructions"])
-    current_coverage = 100 * class_tag["data"]["coveredInstructions"] / total if total != 0 else 100
-    if current_coverage < expected_coverage_percentage:
-        reporter = JiraTicketReporter()
-        package_label = "class: {c}, tag: {t}".format(c=class_tag["class"], t=class_tag["tag"])
-        reporter.report_codecov_to_jira_ticket(project_key, class_tag["tag"], package_label, expected_coverage_percentage, current_coverage, class_tag["data"]["url"], dry_run)
-        print("current coverage number {value}, expected: {baseline}".format(value=current_coverage, baseline=expected_coverage_percentage))
-
-
-#
-# try to 
construct build configuration id using build configuration name by following rules -# replace (string1) by _string1 -# strip all non alphabet characters -# -def get_codecov_report_url(build_config_name, build_config_id, build_id): - if not build_config_id: - build_config_name = build_config_name.replace("(", "_") - build_config_name = build_config_name.replace(")", "") - regex = re.compile('[^a-zA-Z_]') - build_config_id = "Sdk_OCI_Testing_Service_" + regex.sub('', build_config_name) - - return CODECOV_REPORT_URL.format(build_config_id=build_config_id, build_id=build_id) - - -parser = argparse.ArgumentParser(description='save test results') -parser.add_argument('--java-sdk-path', required=True, help="Path to the root directory of the Java SDK") -parser.add_argument('-p', '--package', required=True, help='service package name') -parser.add_argument('--expected-coverage-percentage', default=0, type=int, help='expected code coverage percentage, opens ticket if coverage is below this number') -parser.add_argument('--codecov-operations-file', required=True, help='list of operations to consider for code coverage') -parser.add_argument('-t', '--target-branch', required=True, help='target branch - preview or master') -parser.add_argument('--build-configuration-id', required=False, help="ID of the build configuration") -parser.add_argument('--build-configuration-name', required=True, help="Name of the build configuration") -parser.add_argument('--build-id', required=True, help="build id of this build") -parser.add_argument('--dry-run', default=False, action='store_true', required=False, help='Dry-run, do not save the data') - -args = parser.parse_args() -ocits_shared.dry_run = args.dry_run -xml_file_path = JACOCO_XML_PATH.format(java_sdk_path=args.java_sdk_path) -save_codecov_data_from_reportxml(xml_file_path, args.package, args.codecov_operations_file, args.target_branch, args.build_configuration_name, args.build_configuration_id, args.build_id, args.expected_coverage_percentage, args.dry_run) diff --git a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/switch_to_matching_sdk_branch.py b/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/switch_to_matching_sdk_branch.py deleted file mode 100644 index a3ba22b8e2..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/oci_testing_service/switch_to_matching_sdk_branch.py +++ /dev/null @@ -1,313 +0,0 @@ -from __future__ import print_function -import argparse -import os -import ssl -import sys -import urllib3 -import re -from git import Repo, GitCommandError - -import ocits_shared -from ocits_shared import HELP_URL, TC_URL, get_dexreq_tickets, printv, get_jira_issue, get_pull_requests_for_issue, filter_pull_requests, get_master_javasdk_pr_url - -# Add the root of the package, two directories up, to the sys.path -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -from shared.bitbucket_utils import setup_bitbucket, get_pullrequest, get_pr_source_branch, make_general_comment, get_pr_source_clone_ssh_url, get_repo_permissions_url # noqa: ignore=F402 -import shared.bitbucket_utils # noqa: ignore=F402 -import util # noqa: ignore=F402 -import config # noqa: ignore=F402 - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)): - ssl._create_default_https_context = ssl._create_unverified_context - - -# -# Warning messages -# - -MATCHING_SDK_BRANCH_NOT_FOUND = """ 
-Not running the OCI testing service tests. - -For pull requests against the `master` branch, there must be a pull request of the {sdk_name} targeting its `master` branch, and that pull request has to be referenced from the public DEXREQ ticket. Unfortunately, no pull request of the {sdk_name} targeting `master` was found in the DEXREQ {ticket_sop} {tickets}. - -{author_text}Make sure that you reference the same public DEXREQ ticket in both the OCI Testing Service pull request and the {sdk_name} pull request. - -After you have made sure the {sdk_name} pull request is referenced from the DEXREQ {ticket_sop} {tickets}, restart the [TeamCity build]({tc_link}) by asking an SDK/CLI team member on #oci_public_sdks to click on the 'Run' button. - -For more information, see [Pull Request Validation Builds for the Testing Service]({help_url}). -""" - -COULD_NOT_DETERMINE_CLONE_URL = """ -Not running the OCI testing service tests. - -Could not determine the source repository's clone URL for the {sdk_name} pull request: {sdk_pr}. - -{author_text}Please make sure you have given the 'DEXREQ Automation' user [read access to your {sdk_name} repository]({permissions_url}), and that you have added the ['Key for OCI testing service on-PR build access']({help_url}#PullRequestValidationBuildsfortheTestingService-WhenUsingYourOwnFork) to your {sdk_name} repository's access keys. - -**Note: The read access and the access key have to be added to {sdk_name} repository, not the OCI testing service repository!** - -After you have made sure access rights and keys are correct, restart the [TeamCity build]({tc_link}) by asking an SDK/CLI team member on #oci_public_sdks to click on the 'Run' button. - -For more information, see the [When Using Your Own Fork]({help_url}#PullRequestValidationBuildsfortheTestingService-WhenUsingYourOwnFork) section of [Pull Request Validation Builds for the Testing Service]({help_url}#PullRequestValidationBuildsfortheTestingService-WhenUsingYourOwnFork). -""" - -COULD_NOT_FETCH_REPO = """ -Not running the OCI testing service tests. - -Could not fetch the source repository for the {sdk_name} pull request: {sdk_pr}. - -{author_text}Please make sure you have added the ['Key for OCI testing service on-PR build access']({help_url}#PullRequestValidationBuildsfortheTestingService-WhenUsingYourOwnFork) to your {sdk_name} repository's access keys. - -**Note: The access key has to be added to {sdk_name} repository, not the OCI testing service repository!** - -After you have made sure access rights and keys are correct, restart the [TeamCity build]({tc_link}) by asking an SDK/CLI team member on #oci_public_sdks to click on the 'Run' button. - -For more information, see the [When Using Your Own Fork]({help_url}#PullRequestValidationBuildsfortheTestingService-WhenUsingYourOwnFork) section of [Pull Request Validation Builds for the Testing Service]({help_url}#PullRequestValidationBuildsfortheTestingService-WhenUsingYourOwnFork). -""" - -USING_SDK_PR = """ -Since this change is targeting the `master` branch, the build job is using another {sdk_name} pull request to build the {sdk_name}. - -The pull request chosen to provide the code for the {sdk_name} build is: - -{sdk_pr} - -If this is not the right pull request, please see [My master testing service PR picks the wrong master Java SDK branch]({help_url}#PullRequestValidationBuildsfortheTestingService-MymastertestingservicePRpicksthewrongmasterJavaSDKbranch). - -This build is now progressing in the [TeamCity build]({tc_link}). 
For more information, see the [When Using Your Own Fork]({help_url}#PullRequestValidationBuildsfortheTestingService-WhenUsingYourOwnFork) section of [Pull Request Validation Builds for the Testing Service]({help_url}#PullRequestValidationBuildsfortheTestingService-WhenUsingYourOwnFork). -""" - -USING_MASTER = """ -Building this change against the {sdk_name} in the `master` branch. - -This change is targeting the `master` branch, the build job is using another {sdk_name} pull request to build the {sdk_name}. The pull request chosen to provide the code for the {sdk_name} build is: - -{sdk_pr} - -If this is not the right pull request, please see [My master testing service PR picks the wrong master Java SDK branch]({help_url}#PullRequestValidationBuildsfortheTestingService-MymastertestingservicePRpicksthewrongmasterJavaSDKbranch). - -Since that {sdk_name} pull request has already been merged, we are building the testing service in this pull request against the {sdk_name} in the `master` branch. - -This build is now progressing in the [TeamCity build]({tc_link}). For more information, see the [When Using Your Own Fork]({help_url}#PullRequestValidationBuildsfortheTestingService-WhenUsingYourOwnFork) section of [Pull Request Validation Builds for the Testing Service]({help_url}#PullRequestValidationBuildsfortheTestingService-WhenUsingYourOwnFork). -""" - - -# Target the master branch. The message can contain the {sdk_branch} placeholder. -# The program will exit inside this function. -def target_master(output_file, message): - sdk_branch = "master" - sdk_repo.git.fetch("origin") - sdk_repo.git.checkout(sdk_branch) - printv(message.format(sdk_branch=sdk_branch)) - - print(sdk_branch) - - if output_file: - with open(output_file, 'w') as f: - f.write(sdk_branch) - - sys.exit(0) - - -# -# Parameters variable set up -# -parser = argparse.ArgumentParser(description='Determine the matching SDK branch, and switch to it if it exists.') -parser.add_argument('--build-id', required=True, help="The TeamCity build id for the build that is running this script. This is used to update the relevant Bitbucket PRs with links to the TeamCity build") -parser.add_argument('--build-branch', required=True, help="The value of the teamcity.build.branch variable") -parser.add_argument('--sdk-path', required=True, help="Path to the root directory of the SDK") -parser.add_argument('--sdk-name', default="Java SDK", required=False, help="Name of the SDK (if not specified, uses 'Java SDK')") -parser.add_argument('--sdk-project', default="java-sdk", required=False, help="Project for the SDK (if not specified, uses 'java-sdk')") -parser.add_argument('-o', '--output', required=False, help='Output file') -parser.add_argument('-v', '--verbose', action='store_true', default=False, required=False, help='Verbose output') -parser.add_argument('--dry-run', action='store_true', default=False, required=False, help='Dry-run') - -args = parser.parse_args() - -ocits_shared.dry_run = args.dry_run - -if args.verbose: - ocits_shared.verbose = True - shared.bitbucket_utils.verbose = True - -setup_bitbucket(args) - -sdk_repo = Repo.init(args.sdk_path) - -pr_id = None -try: - # If the teamcity.build.branch variable is just something like "1234", then this is a - # validation build for pull request "1234" - pr_id = int(args.build_branch) -except ValueError: - # Only print this when using verbose, since we want the output be the target branch. 
- printv("Not a pull request validation build.") - sys.exit(2) - -tc_link = TC_URL.format(build_id=args.build_id) - -pr = get_pullrequest("SDK", "oci-testing-service", pr_id) -printv(pr.text) - -json = pr.json() - -if 'title' in json: - title = json['title'] -else: - title = "" -if 'description' in json: - description = json['description'] -else: - description = "" - -author_text = "" -json = pr.json() -if json['author'] and json['author']['user'] and json['author']['user']['name']: - author_text = "@{name}: ".format(name=json['author']['user']['name']) - -sdk_pr_url = get_master_javasdk_pr_url(description) - -if sdk_pr_url: - print("Using URL from PR description: {}".format(sdk_pr_url)) - m = re.search("^.*bitbucket.*/projects/([^/]*)/repos/([^/]*)/pull-requests/([0-9]*).*$", sdk_pr_url) - if m: - sdk_pr_id = m.group(3) - else: - raise ValueError("Unknown PR URL: {}".format(sdk_pr_url)) -else: - tickets, no_dexreq_marker = get_dexreq_tickets(pr, tc_link) - - if no_dexreq_marker: - target_master(args.output, "Since this is NO-DEXREQ, should target tip of '{sdk_branch}'") - else: - printv("Tickets: {}".format(", ".join(tickets))) - - sdk_pr_candidates = [] - - for issue_key in tickets: - issue = get_jira_issue(issue_key) - pull_requests = get_pull_requests_for_issue(issue) - master_sdk_pull_request = filter_pull_requests(pull_requests, args.sdk_project, "master") - - for pr in master_sdk_pull_request: - sdk_pr_candidates.append((pr['id'].replace('#', ''))) - - if not sdk_pr_candidates: - print("Could not find {} PR targetting master in any of the PRs referenced in the DEXREQ tickets {}".format(args.sdk_name, ", ".join(tickets))) - - text = MATCHING_SDK_BRANCH_NOT_FOUND.format( - tc_link=tc_link, - sdk_name=args.sdk_name, - tickets=", ".join(tickets), - ticket_sop="ticket" if len(tickets) == 1 else "tickets", - help_url=HELP_URL, - author_text=author_text) - - if not ocits_shared.dry_run: - make_general_comment("SDK", "oci-testing-service", pr_id, text) - else: - print("DRY-RUN: Not making BitBucket comment\n{}".format(text)) - - # Don't fail the build - sys.exit(1) - - latest_candidate = 0 - sdk_pr_url = None - for candidate in sdk_pr_candidates: - num = int(candidate) - url = "https://bitbucket.oci.oraclecorp.com/projects/SDK/repos/{sdk_project}/pull-requests/{num}/overview".format(sdk_project=args.sdk_project, num=num) - printv("Candidate PR: {}".format(url)) - if num > latest_candidate: - latest_candidate = num - sdk_pr_url = url - - sdk_pr_id = str(latest_candidate) - printv("Choosing PR: {}".format(sdk_pr_id)) - -sdk_pr = get_pullrequest("SDK", args.sdk_project, sdk_pr_id) -printv(sdk_pr.text) - -already_merged = sdk_pr.json()['state'] == 'MERGED' -if already_merged: - already_merged = True - text = USING_MASTER.format( - tc_link=tc_link, - sdk_name=args.sdk_name, - sdk_pr=sdk_pr_url, - help_url=HELP_URL) -else: - text = USING_SDK_PR.format( - tc_link=tc_link, - sdk_name=args.sdk_name, - sdk_pr=sdk_pr_url, - help_url=HELP_URL) - -make_general_comment("SDK", "oci-testing-service", pr_id, text) - -if already_merged: - target_master(args.output, "SDK PR has already been merged, should target tip of '{sdk_branch}'") - -remote_clone_ssh_url = get_pr_source_clone_ssh_url(sdk_pr) -printv("Remote Clone SSH URL: {}".format(remote_clone_ssh_url)) - -if not remote_clone_ssh_url: - print("Could not determine clone URL") - - text = COULD_NOT_DETERMINE_CLONE_URL.format( - tc_link=tc_link, - sdk_name=args.sdk_name, - sdk_pr=sdk_pr_url, - permissions_url=get_repo_permissions_url(sdk_pr), - 
help_url=HELP_URL, - author_text=author_text) - - if not ocits_shared.dry_run: - make_general_comment("SDK", "oci-testing-service", pr_id, text) - else: - print("DRY-RUN: Not making BitBucket comment\n{}".format(text)) - - # Don't fail the build - sys.exit(1) - -remote_sdk_branch = get_pr_source_branch(sdk_pr) -printv("Remote SDK branch: {}".format(remote_sdk_branch)) - -# git remote add fork ssh://git@bitbucket.oci.oraclecorp.com:7999/~XXX/java-sdk.git -sdk_repo.git.remote("add", "fork", remote_clone_ssh_url) - -try: - # git fetch fork - sdk_repo.git.fetch("fork") -except GitCommandError: - print("Could not fetch the remote repo") - - text = COULD_NOT_FETCH_REPO.format( - tc_link=tc_link, - sdk_name=args.sdk_name, - sdk_pr=sdk_pr_url, - help_url=HELP_URL, - author_text=author_text) - - if not ocits_shared.dry_run: - make_general_comment("SDK", "oci-testing-service", pr_id, text) - else: - print("DRY-RUN: Not making BitBucket comment\n{}".format(text)) - - # Don't fail the build - sys.exit(1) - -# git branch fork_generated-auto-public-JavaSDK-DEXREQ-NNN-YYYY-MM-DD-HH-mm-ss fork/fork_generated-auto-public-JavaSDK-DEXREQ-NNN-YYYY-MM-DD-HH-mm-ss -sdk_branch = "fork_{}".format(remote_sdk_branch) -sdk_repo.git.branch(sdk_branch, "fork/{}".format(remote_sdk_branch)) - -# git checkout fork_generated-auto-public-JavaSDK-DEXREQ-248-2019-01-23-19-08-17 -sdk_repo.git.checkout(sdk_branch) - -print(sdk_branch) - -if args.output: - with open(args.output, 'w') as f: - f.write(sdk_branch) diff --git a/scripts/auto_gen_utils/team_city_scripts/orm/fail_build_on_code_coverage.py b/scripts/auto_gen_utils/team_city_scripts/orm/fail_build_on_code_coverage.py deleted file mode 100644 index 0fa31ac0ca..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/orm/fail_build_on_code_coverage.py +++ /dev/null @@ -1,189 +0,0 @@ -import argparse -import json -import os -import sys -import urllib3 -import xml.etree.ElementTree as ET -import datetime - -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -from shared.bitbucket_utils import setup_bitbucket, get_pullrequest, make_general_comment # noqa: ignore=F402 -import shared.bitbucket_utils # noqa: ignore=F402 - -CODECOV_PACKAGE_TEXT = """ -{level} code coverage for {p} changed from {b} to {c} -""" - -CODECOV_FAILED_HEADER_TEXT = """ -build failed due to dropped code coverage. Here are the details. - -""" - -CODECOV_SUCCEEDED_HEADER_TEXT = """ -code coverage numbers are acceptable. Here are the details. 
-
-"""
-
-dry_run = False
-
-ORM_CODECOV_BASELINE_URL = "https://objectstorage.us-phoenix-1.oraclecloud.com/p/MTnyrnb7DssIgX9qFoIZYoeCLScR5rD2eHWAmSyg_J8/n/dex-us-phoenix-1/b/codecov_baseline/o/orm_codecov_baseline.json"
-local_json_file = "./orm_local_codecov.json"
-
-
-class CoverageData:
-    CONST_JACOCO_XML_PATH = "target/site/jacoco/jacoco.xml"
-    CONST_INSTRUCTION_XPATH = "./counter[@type='INSTRUCTION']"
-    CONST_BRANCH_XPATH = "./counter[@type='BRANCH']"
-    CONST_DATE_XPATH = "./sessioninfo"
-    CONST_DATE_FORMAT = "%Y-%m-%dT%H:%M:%Sz"
-
-    def __init__(self, project_name, project_path):
-        self.project_name = project_name
-        self.project_path = project_path
-        self.instructions_coverage = 0
-        self.branch_coverage = 0
-        utc = datetime.datetime.utcnow()
-        self.date = utc.strftime(self.CONST_DATE_FORMAT)
-
-    # given the report xml and an xpath, return code coverage as a percentage
-    def __get_coverage(self, report_xml, xpath):
-        missed = int(report_xml.getroot().find(xpath).get("missed"))
-        covered = int(report_xml.getroot().find(xpath).get("covered"))
-        total = missed + covered
-        percentage = int((covered * 100) / total) if total != 0 else 0
-        return percentage
-
-    def extract_code_coverage(self):
-        xml_path = os.path.join(self.project_path, self.CONST_JACOCO_XML_PATH)
-        report_xml = ET.parse(xml_path)
-        self.instructions_coverage = self.__get_coverage(report_xml, self.CONST_INSTRUCTION_XPATH)
-        self.branch_coverage = self.__get_coverage(report_xml, self.CONST_BRANCH_XPATH)
-        self.date = datetime.datetime.fromtimestamp(int(report_xml.getroot().find(self.CONST_DATE_XPATH).get("start")) / 1000).strftime(self.CONST_DATE_FORMAT)
-
-
-def get_codecov_baseline():
-    if not dry_run:
-        http = urllib3.PoolManager()
-        response = http.request("GET", ORM_CODECOV_BASELINE_URL)
-        return json.loads(response.data.decode("utf-8"))
-    else:
-        with open(local_json_file) as data_file:
-            json_data = data_file.read()
-        return json.loads(json_data)
-
-
-def save_baseline(baseline):
-    if not dry_run:
-        http = urllib3.PoolManager()
-        encoded_data = json.dumps(baseline).encode('utf-8')
-        response = http.request(
-            "PUT",
-            ORM_CODECOV_BASELINE_URL,
-            body=encoded_data,
-            headers={'Content-Type': 'application/json'})
-        return response.status
-    else:
-        file_name = os.path.join(local_json_file)
-        with open(file_name, 'w') as f:
-            f.write(json.dumps(baseline))
-            f.flush()
-        return 200
-
-
-parser = argparse.ArgumentParser(description='fail build on code coverage')
-parser.add_argument('--project-root', required=True, help="Path to the root directory of the ORM project")
-parser.add_argument('--dry-run', default=False, action='store_true', required=False, help='Dry-run, do not save the data')
-parser.add_argument('--build-branch', required=True, help="The value of the teamcity.build.branch variable")
-parser.add_argument('--project', required=True, help="The project for which code coverage is collected")
-parser.add_argument('--fail-on-code-coverage', default=False, type=lambda x: (str(x).lower() == 'true'), help="Whether to fail the build if code coverage drops")
-
-args = parser.parse_args()
-setup_bitbucket(args)
-dry_run = args.dry_run
-fail_on_code_coverage = args.fail_on_code_coverage
-project_root = args.project_root
-project = args.project
-
-pr_id = None
-try:
-    # If the teamcity.build.branch variable is just something like "1234", then this is a
-    # validation build for pull request "1234"
-    pr_id = int(args.build_branch)
-    pr = get_pullrequest("ORC", args.project, pr_id)
-    print(pr.text)
-except ValueError:
-    # Only print this when using verbose, since we want the output to be the target branch.
-    print("Not a pull request validation build.")
-    # sys.exit(0)
-
-orm_project_dict = {
-    "ambassador": "ambassador",
-    "work-request-management": "work-request-management",
-    "image-builder-tools": "image-builder-tools",
-    "job-management": "job-management",
-    "maestro": "maestro",
-    "orm-commons": "orm-commons",
-    "persistence-lib": "persistence-lib",
-    "terraformer-pool-management": "terraformer-pool-management",
-    "workflow": "workflow",
-    "workflow-worker": "workflow-worker"
-}
-
-terraformer_dict = {
-    "terraformer": "."
-}
-
-project_dict = None
-
-if args.project == "orm":
-    project_dict = orm_project_dict
-elif args.project == "terraformer":
-    project_dict = terraformer_dict
-else:
-    print("Not a valid project")
-    sys.exit(2)
-
-baseline = get_codecov_baseline()
-codecov_dropped = False
-
-for proj in project_dict:
-    data = CoverageData(proj, os.path.join(project_root, project_dict[proj]))
-    data.extract_code_coverage()
-    print(data.instructions_coverage)
-    if proj in baseline:
-        package_text_instr = CODECOV_PACKAGE_TEXT.format(level="instructions", p=proj, b=baseline[proj]["instructions_coverage"], c=data.instructions_coverage)
-        package_text_branch = CODECOV_PACKAGE_TEXT.format(level="branch", p=proj, b=baseline[proj]["branch_coverage"], c=data.branch_coverage)
-
-        if baseline[proj]["instructions_coverage"] > data.instructions_coverage:
-            CODECOV_FAILED_HEADER_TEXT += package_text_instr
-            codecov_dropped = True
-        else:
-            CODECOV_SUCCEEDED_HEADER_TEXT += package_text_instr
-            print("updating instructions code coverage for {p} from {b} to {c}".format(p=proj, b=baseline[proj]["instructions_coverage"], c=data.instructions_coverage))
-            baseline[proj]["instructions_coverage"] = data.instructions_coverage
-
-        if baseline[proj]["branch_coverage"] > data.branch_coverage:
-            CODECOV_FAILED_HEADER_TEXT += package_text_branch
-            codecov_dropped = True
-        else:
-            CODECOV_SUCCEEDED_HEADER_TEXT += package_text_branch
-            print("updating branch code coverage for {p} from {b} to {c}".format(p=proj, b=baseline[proj]["branch_coverage"], c=data.branch_coverage))
-            baseline[proj]["branch_coverage"] = data.branch_coverage
-    else:
-        baseline[proj] = {}
-        baseline[proj]["instructions_coverage"] = data.instructions_coverage
-        baseline[proj]["branch_coverage"] = data.branch_coverage
-        baseline[proj]["date"] = data.date
-
-print(baseline)
-codecov_message = CODECOV_FAILED_HEADER_TEXT if codecov_dropped else CODECOV_SUCCEEDED_HEADER_TEXT
-val = save_baseline(baseline)
-print(codecov_message)
-if not dry_run:
-    if pr_id:
-        make_general_comment("ORC", args.project, pr_id, codecov_message)
-if codecov_dropped and fail_on_code_coverage:
-    sys.exit(1)
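The JaCoCo arithmetic used throughout these scripts is the integer percentage `covered / (covered + missed)` per counter type. A minimal, runnable sketch of that calculation against an inline sample report (a real run parses `target/site/jacoco/jacoco.xml` instead):

```python
import xml.etree.ElementTree as ET

# Inline stand-in for a JaCoCo report file.
SAMPLE_REPORT = """
<report name="orm-commons">
    <counter type="INSTRUCTION" missed="120" covered="380"/>
    <counter type="BRANCH" missed="40" covered="60"/>
</report>
"""


def coverage_percent(root, counter_type):
    # Same formula as CoverageData.__get_coverage above.
    counter = root.find("./counter[@type='{}']".format(counter_type))
    missed = int(counter.get("missed"))
    covered = int(counter.get("covered"))
    total = missed + covered
    return int((covered * 100) / total) if total != 0 else 0


root = ET.fromstring(SAMPLE_REPORT)
print(coverage_percent(root, "INSTRUCTION"))  # 76
print(coverage_percent(root, "BRANCH"))       # 60
```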
diff --git a/scripts/auto_gen_utils/team_city_scripts/powershell/preview/1_setup_powershell_preview_branch.sh b/scripts/auto_gen_utils/team_city_scripts/powershell/preview/1_setup_powershell_preview_branch.sh
deleted file mode 100755
index b67912b6b3..0000000000
--- a/scripts/auto_gen_utils/team_city_scripts/powershell/preview/1_setup_powershell_preview_branch.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-set -e
-set -x
-
-echo Creating venv to install sdk locally
-. /opt/odo/tox_sic/venv/bin/activate
-virtualenv .sdk-venv
-. 
.sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# If `set -e`, must disable "fail on non-zero exit code" using `set +e` -# because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - - -## AUTOGEN ## -cd autogen -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt --quiet - -# run the setup job for step 2 in ./autogen -ls -la -pwd - -python ./2_pre_generation_set_up.py --build-id $BUILD_ID --tool PowerShell - -# back out into root directory -cd .. \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/powershell/preview/3_record_ps_generation_success.sh b/scripts/auto_gen_utils/team_city_scripts/powershell/preview/3_record_ps_generation_success.sh deleted file mode 100755 index 36094c2b10..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/powershell/preview/3_record_ps_generation_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch powershell_generation_success.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/powershell/preview/5_record_ps_build_success.sh b/scripts/auto_gen_utils/team_city_scripts/powershell/preview/5_record_ps_build_success.sh deleted file mode 100755 index 8d62696ee7..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/powershell/preview/5_record_ps_build_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch powershell_build_success.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/powershell/preview/6_report_gen_and_build_status.sh b/scripts/auto_gen_utils/team_city_scripts/powershell/preview/6_report_gen_and_build_status.sh deleted file mode 100755 index 7a528dd20e..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/powershell/preview/6_report_gen_and_build_status.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -e -set -x - -pwd -ls -la - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - -cd oci-powershell-modules -branch=`git branch|grep "^\*"|cut -c3-` -if [[ ${branch} == *"bulk"* ]]; then - build_type="bulk_pending_merge_preview" -else - build_type="individual_preview" -fi - -cd .. -# commit changes from generation and build for powershell -# this step will run no matter what happened before it so it can report success / failure to the JIRA tickets -cd autogen -python ./3_report_generation_status.py --build-id $BUILD_ID --tool PowerShell --build-type ${build_type} -cd .. - -ls -la ./oci-powershell-modules diff --git a/scripts/auto_gen_utils/team_city_scripts/powershell/preview/7_commit_generated_changes.sh b/scripts/auto_gen_utils/team_city_scripts/powershell/preview/7_commit_generated_changes.sh deleted file mode 100755 index d37d3e96fc..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/powershell/preview/7_commit_generated_changes.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -x -set -e - -# configure git for this commit -git config --global user.email "$GIT_USER_EMAIL" -git config --global user.name "$GIT_USER_NAME" - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. 
./.sdk-venv/bin/activate - -# get the branch we're on -cd oci-powershell-modules -branch=`git branch|grep "^\*"|cut -c3-` -if [[ ${branch} == *"bulk"* ]]; then - build_type="bulk_pending_merge_preview" -else - build_type="individual_preview" -fi -cd .. - -# commit changes from generation and build -cd autogen -ls -la -python ./4_on_generation_complete.py --build-id $BUILD_ID --tool PowerShell --build-type ${build_type} -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/powershell/preview/9_mark_done_post_bulk_merge.sh b/scripts/auto_gen_utils/team_city_scripts/powershell/preview/9_mark_done_post_bulk_merge.sh deleted file mode 100755 index fb7cead3ef..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/powershell/preview/9_mark_done_post_bulk_merge.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash - -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - - -# run the job to pull JIRAs from the queue and update POM -# this job expects to be run from ./autogen -cd auto-gen -ls -la -pwd - -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt - -# $BUILD_ID should be defined within the TC job configuration: export BUILD_ID=%teamcity.build.id% -FULL_VERSION=`cat ../oci-powershell-modules/version.txt`-preview$BUILD_ID -python ./5_mark_preview_tickets_done_post_merge.py --build-id $BUILD_ID --tool PowerShell --full-version $FULL_VERSION --allow-transition-overall-issue-to-done --build-conf-name Sdk_OciPowershellTools_BuildPreviewModules - -# back out into root directory -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/powershell/public/1_setup_powershell_branch.sh b/scripts/auto_gen_utils/team_city_scripts/powershell/public/1_setup_powershell_branch.sh deleted file mode 100755 index b67912b6b3..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/powershell/public/1_setup_powershell_branch.sh +++ /dev/null @@ -1,29 +0,0 @@ -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# If `set -e`, must disable "fail on non-zero exit code" using `set +e` -# because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - - -## AUTOGEN ## -cd autogen -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt --quiet - -# run the setup job for step 2 in ./autogen -ls -la -pwd - -python ./2_pre_generation_set_up.py --build-id $BUILD_ID --tool PowerShell - -# back out into root directory -cd .. 
\ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/powershell/public/3_record_ps_generation_success.sh b/scripts/auto_gen_utils/team_city_scripts/powershell/public/3_record_ps_generation_success.sh deleted file mode 100755 index 36094c2b10..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/powershell/public/3_record_ps_generation_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch powershell_generation_success.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/powershell/public/5_record_ps_build_success.sh b/scripts/auto_gen_utils/team_city_scripts/powershell/public/5_record_ps_build_success.sh deleted file mode 100755 index 8d62696ee7..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/powershell/public/5_record_ps_build_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch powershell_build_success.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/powershell/public/6_report_gen_and_build_status.sh b/scripts/auto_gen_utils/team_city_scripts/powershell/public/6_report_gen_and_build_status.sh deleted file mode 100755 index 3d3f35ddf3..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/powershell/public/6_report_gen_and_build_status.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -e -set -x - -pwd -ls -la - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - -cd oci-powershell-modules -branch=`git branch|grep "^\*"|cut -c3-` -if [[ ${branch} == *"bulk"* ]]; then - build_type="bulk_pending_merge_public" -else - build_type="individual_public" -fi - -cd .. -# commit changes from generation and build for powershell -# this step will run no matter what happened before it so it can report success / failure to the JIRA tickets -cd autogen -python ./3_report_generation_status.py --build-id $BUILD_ID --tool PowerShell --build-type ${build_type} -cd .. - -ls -la ./oci-powershell-modules diff --git a/scripts/auto_gen_utils/team_city_scripts/powershell/public/7_commit_generated_changes.sh b/scripts/auto_gen_utils/team_city_scripts/powershell/public/7_commit_generated_changes.sh deleted file mode 100755 index 9dc65f4e70..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/powershell/public/7_commit_generated_changes.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -x -set -e - -# configure git for this commit -git config --global user.email "$GIT_USER_EMAIL" -git config --global user.name "$GIT_USER_NAME" - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - -# get the branch we're on -cd oci-powershell-modules -branch=`git branch|grep "^\*"|cut -c3-` -if [[ ${branch} == *"bulk"* ]]; then - build_type="bulk_pending_merge_public" -else - build_type="individual_public" -fi -cd .. - -# commit changes from generation and build -cd autogen -ls -la -python ./4_on_generation_complete.py --build-id $BUILD_ID --tool PowerShell --build-type ${build_type} -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/powershell/public/9_mark_done_post_bulk_merge.sh b/scripts/auto_gen_utils/team_city_scripts/powershell/public/9_mark_done_post_bulk_merge.sh deleted file mode 100755 index 32536edc67..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/powershell/public/9_mark_done_post_bulk_merge.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash - -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. 
.sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - - -# run the job to pull JIRAs from the queue and update POM -# this job expects to be run from ./autogen -cd auto-gen -ls -la -pwd - -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt - -# $BUILD_ID should be defined within the TC job configuration: export BUILD_ID=%teamcity.build.id% -FULL_VERSION=`cat ../oci-powershell-modules/version.txt`-$BUILD_ID -python ./5_mark_preview_tickets_done_post_merge.py --build-id $BUILD_ID --tool PowerShell --full-version $FULL_VERSION --allow-transition-overall-issue-to-deploy --build-conf-name Sdk_OciPowershellTools_BuildAndPublishMasterModules - -# back out into root directory -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/process_jira_queue.sh b/scripts/auto_gen_utils/team_city_scripts/process_jira_queue.sh deleted file mode 100755 index 729fc6abfc..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/process_jira_queue.sh +++ /dev/null @@ -1,198 +0,0 @@ -#!/bin/bash - -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -# Upgrade python version to Python=3.6.5 -echo "Setup python environment" -curl -L https://raw.githubusercontent.com/yyuu/pyenv-installer/master/bin/pyenv-installer | bash -export PATH="$HOME/.pyenv/bin:$PATH" -eval "$(pyenv init -)" -eval "$(pyenv init --path)" -eval "$(pyenv virtualenv-init -)" -export PYTHON_3_VERSION=3.6.5 -pyenv install $PYTHON_3_VERSION -s -pyenv shell $PYTHON_3_VERSION -echo "Python Version" -python --version -pip ${PIP_TIMEOUT_PARAMETER} install -U pip - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - -# Old way of doing that: -# ls -la ~/.ssh -# -# cat ~/.ssh/config -# -# printf "\n\nHost * \n StrictHostKeyChecking no\n" >> ~/.ssh/config -# -# cat ~/.ssh/config - -# configure git for this commit -git config --global user.email "$GIT_USER_EMAIL" -git config --global user.name "$GIT_USER_NAME" - -echo "## CLI ##" -cd python-cli -git fetch $GIT_OPTS origin $BASE_BRANCH -git checkout $BASE_BRANCH -if [ $? -ne 0 ]; then - echo "Failed to check out base branch: $BASE_BRANCH. Exiting script." - exit 1 -fi - -pip ${PIP_TIMEOUT_PARAMETER} install --use-deprecated=legacy-resolver --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt -pip ${PIP_TIMEOUT_PARAMETER} install --use-deprecated=legacy-resolver --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements-internal.txt - -git diff --color | cat -cd .. - -echo "## Python SDK ##" -cd python-sdk -git fetch $GIT_OPTS origin $BASE_BRANCH -git checkout $BASE_BRANCH -if [ $? -ne 0 ]; then - echo "Failed to check out base branch: $BASE_BRANCH. Exiting script." 
- exit 1 -fi - -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements-internal.txt - -git diff --color | cat -cd .. - -echo "## Java SDK ##" -cd java-sdk -git fetch $GIT_OPTS origin $BASE_BRANCH -git checkout $BASE_BRANCH -if [ $? -ne 0 ]; then - echo "Failed to check out base branch: $BASE_BRANCH. Exiting script." - exit 1 -fi - -git diff --color | cat -cd .. - -echo "## GO SDK ##" -cd src/github.com/oracle/oci-go-sdk -git fetch $GIT_OPTS origin $BASE_BRANCH -git checkout $TEST_BRANCH -if [ $? -ne 0 ]; then - echo "Failed to check out base branch: $BASE_BRANCH. Exiting script." - exit 1 -fi - -git diff --color | cat -cd ../../../.. - -echo "## Ruby SDK ##" -cd ruby-sdk -git fetch $GIT_OPTS origin $BASE_BRANCH -git checkout $BASE_BRANCH -if [ $? -ne 0 ]; then - echo "Failed to check out base branch: $BASE_BRANCH. Exiting script." - exit 1 -fi - -git diff --color | cat -cd .. - -echo "## Typescript SDK ##" -cd oci-typescript-sdk -git fetch $GIT_OPTS origin $BASE_BRANCH -git checkout $BASE_BRANCH -if [ $? -ne 0 ]; then - echo "Failed to check out base branch: $BASE_BRANCH. Exiting script." - exit 1 -fi -git diff --color | cat -cd .. - -echo "## .NET SDK ##" -cd oci-dotnet-sdk -git fetch $GIT_OPTS origin $BASE_BRANCH -git checkout $BASE_BRANCH -if [ $? -ne 0 ]; then - echo "Failed to check out base branch: $BASE_BRANCH. Exiting script." - exit 1 -fi -git diff --color | cat -cd .. - -echo "## PowerShell ##" -cd oci-powershell-modules -git fetch $GIT_OPTS origin $BASE_BRANCH -git checkout $BASE_BRANCH -if [ $? -ne 0 ]; then - echo "Failed to check out base branch: $BASE_BRANCH. Exiting script." - exit 1 -fi -git diff --color | cat -cd .. - -echo "## Legacy Java SDK ##" -cd legacy-java-sdk -git fetch $GIT_OPTS origin $BASE_BRANCH -git checkout $BASE_BRANCH -if [ $? -ne 0 ]; then - echo "Failed to check out base branch: $BASE_BRANCH. Exiting script." - exit 1 -fi -git diff --color | cat -cd .. - -echo "## pom.xml Updates ##" -# run the job to pull JIRAs from the queue and update POM -# this job expects to be run from ./autogen -cd autogen -ls -la -pwd - -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt - -ls -la - -ALLOW_INDIVIDUAL_TOOL_GENERATION_ARG="" -if [ "$ALLOW_INDIVIDUAL_TOOL_GENERATION" == "true" ]; then - ALLOW_INDIVIDUAL_TOOL_GENERATION_ARG="--allow-individual-tool-generation" -fi - -ISSUE_FILTER_ARG="" -if [ ! -z "$ISSUE_FILTER" ]; then - ISSUE_FILTER_ARG="--issue "$ISSUE_FILTER -fi - - -if [ ! 
-z "$LIST_OF_GO_TICKETS" ]; then - source ./change_dexreq_to_release_approved.sh $LIST_OF_GO_TICKETS -fi - - - -# updated script can process all tools at once -python ./1_process_preview_jira_queue.py --tool ALL --build-id $BUILD_ID --build-type $BUILD_TYPE --base-branch $BASE_BRANCH $ALLOW_INDIVIDUAL_TOOL_GENERATION_ARG $DRY_RUN_ARG $ISSUE_FILTER_ARG $PUSH_SPEC_BASELINE_ARG $VERBOSE_ARG - -# # CLI -# python ./1_process_preview_jira_queue.py --build-id $BUILD_ID --build-type $BUILD_TYPE --base-branch $BASE_BRANCH $ALLOW_INDIVIDUAL_TOOL_GENERATION_ARG $DRY_RUN_ARG $ISSUE_FILTER_ARG $PUSH_SPEC_BASELINE_ARG -# # Java SDK -# python ./1_process_preview_jira_queue.py --build-id $BUILD_ID --build-type $BUILD_TYPE --tool JavaSDK --base-branch $BASE_BRANCH $ALLOW_INDIVIDUAL_TOOL_GENERATION_ARG $DRY_RUN_ARG $ISSUE_FILTER_ARG $PUSH_SPEC_BASELINE_ARG -# # Python SDK -# python ./1_process_preview_jira_queue.py --build-id $BUILD_ID --build-type $BUILD_TYPE --tool PythonSDK --base-branch $BASE_BRANCH $ALLOW_INDIVIDUAL_TOOL_GENERATION_ARG $DRY_RUN_ARG $ISSUE_FILTER_ARG $PUSH_SPEC_BASELINE_ARG -# # Go SDK -# python ./1_process_preview_jira_queue.py --build-id $BUILD_ID --build-type $BUILD_TYPE --tool GoSDK --base-branch $TEST_BRANCH $ALLOW_INDIVIDUAL_TOOL_GENERATION_ARG $DRY_RUN_ARG $ISSUE_FILTER_ARG $PUSH_SPEC_BASELINE_ARG -# # Ruby SDK -# python ./1_process_preview_jira_queue.py --build-id $BUILD_ID --build-type $BUILD_TYPE --tool RubySDK --base-branch $TEST_BRANCH $ALLOW_INDIVIDUAL_TOOL_GENERATION_ARG $DRY_RUN_ARG $ISSUE_FILTER_ARG $PUSH_SPEC_BASELINE_ARG - -# back out into root directory -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/1_setup_python_public_branch.sh b/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/1_setup_python_public_branch.sh deleted file mode 100755 index 95df458659..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/1_setup_python_public_branch.sh +++ /dev/null @@ -1,31 +0,0 @@ -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -pip ${PIP_TIMEOUT_PARAMETER} install -U pip - -## Install AUTOGEN requirements## -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r autogen/requirements.txt - -## Install SDK requirements ## -cd python-sdk -git pull -git fetch -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements-internal.txt -cd .. - -# run the setup job for step 2 in ./autogen -cd autogen -ls -la -pwd - -# checks out CLI branch with same name as SDK branch that triggered this build -python ./2_pre_generation_set_up.py --build-id $BUILD_ID --tool PythonSDK - -# back out into root directory -cd .. 
diff --git a/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/3_record_sdk_generation_success.sh b/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/3_record_sdk_generation_success.sh deleted file mode 100755 index 7540402beb..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/3_record_sdk_generation_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch python_sdk_generation_success.txt \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/4_build_and_publish_artifacts.sh b/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/4_build_and_publish_artifacts.sh deleted file mode 100755 index 76c2862cfa..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/4_build_and_publish_artifacts.sh +++ /dev/null @@ -1,111 +0,0 @@ -set -e -set -x - -# ls -la ./.sdk-venv -# . ./.sdk-venv/bin/activate - -# Set up python3 pyenv(v3.8.6) -echo "Setup python environment" -curl -L https://raw.githubusercontent.com/yyuu/pyenv-installer/master/bin/pyenv-installer | bash -export PATH="$HOME/.pyenv/bin:$PATH" -eval "$(pyenv init -)" -eval "$(pyenv init --path)" -eval "$(pyenv virtualenv-init -)" -export PYTHON_3_VERSION=3.6.5 -pyenv install $PYTHON_3_VERSION -s -pyenv shell $PYTHON_3_VERSION - -echo "Python Version" -python --version - -pip install -U pip -pip ${PIP_TIMEOUT_PARAMETER} install -e ./python-sdk - -# cat ~/.ssh/config - -# configure git for this commit -git config --global user.email "$GIT_USER_EMAIL" -git config --global user.name "$GIT_USER_NAME" - -cd python-sdk -# the below commands are copied from: -# Sdk_PythonSdk_PreviewTestsDocsWheel -# at some point we should consolidate Python SDK Preview Build to use build.sh - -SDK_VERSION=$(tail -1 src/oci/version.py | cut -d '"' -f2) -DEV_VERSION=$SDK_VERSION.$BUILD_NUMBER -echo SDK Version Number $SDK_VERSION -echo Build Version Number $DEV_VERSION - -echo Rewriting version from $SDK_VERSION to $DEV_VERSION -# Replace the version with the DEV_VERSION (SDK_VERSION + Build Number) so that we can make -# referencing and declaring dependencies on preview CLIs more explicit -rm src/oci/version.py -cat < src/oci/version.py -# coding: utf-8 -# Copyright (c) 2016, 2017, Oracle and/or its affiliates. All rights reserved. - -__version__ = "$DEV_VERSION" - -EOF - -# Echo out the version to confirm -cat src/oci/version.py - -echo Building Docs -# pip install sphinx --timeout 120 -# pip ${PIP_TIMEOUT_PARAMETER} install sphinx_rtd_theme -pip ${PIP_TIMEOUT_PARAMETER} install -r docs/requirements.txt -make docs -mkdir -p dist/oci-python-sdk-docs-$SDK_VERSION/ -cp -r docs/_build/html/* dist/oci-python-sdk-docs-$SDK_VERSION/ - -echo Running Tests - -if [ $TEST_ENABLE = "false" ]; then - echo "TESTS HAVE BEEN DISABLED." -else - pip ${PIP_TIMEOUT_PARAMETER} install tox - tox -e flake8,py36 -fi - -pip install wheel -echo Building Wheel -make build - -# Create a dev directory that will contain versions of the whl, zip, and docs meant for -# the dev pypi artifactory. Each artifact includes the build number in the version to avoid -# conflicts. 
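The version rewrite in the build script above only appends the TeamCity build number to the checked-in version and regenerates `src/oci/version.py`. A rough Python equivalent of that transformation (the path and the build number are stand-ins):

```python
import re

VERSION_FILE = "src/oci/version.py"  # the same file the shell heredoc rewrites
BUILD_NUMBER = "42"                  # stand-in for $BUILD_NUMBER

with open(VERSION_FILE) as f:
    source = f.read()

# Pull the released version out of a line like: __version__ = "2.93.0"
sdk_version = re.search(r'__version__ = "([^"]+)"', source).group(1)
dev_version = "{}.{}".format(sdk_version, BUILD_NUMBER)
print("Rewriting version from {} to {}".format(sdk_version, dev_version))

# Regenerate the module so preview artifacts carry the build number in their version.
with open(VERSION_FILE, "w") as f:
    f.write('# coding: utf-8\n\n__version__ = "{}"\n'.format(dev_version))
```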
- -mkdir -p dist/dev/ -if [ -f "dist/oci-$DEV_VERSION-py3-none-any.whl" ]; then - cp dist/oci-$DEV_VERSION-py3-none-any.whl dist/dev/oci-$DEV_VERSION-py3-none-any.whl -else - cp dist/oci-$DEV_VERSION-py2.py3-none-any.whl dist/dev/oci-$DEV_VERSION-py2.py3-none-any.whl -fi -cp dist/oci-python-sdk-$DEV_VERSION.zip dist/dev/oci-python-sdk-$DEV_VERSION.zip - -pushd dist/oci-python-sdk-docs-$SDK_VERSION -zip -r ../oci-python-sdk-docs-$SDK_VERSION.zip . -popd -cp dist/oci-python-sdk-docs-$SDK_VERSION.zip dist/dev/oci-python-sdk-docs-$DEV_VERSION.zip - - -echo Contents of dist folder -ls -la dist - -# the build script creates a virtualenv inside this folder which we need to remove or it will be checked in -# commenting this out since we are not invoking build.sh -# rm -rf ./.sdk-venv - -# AFTER building the wheel, reset src/oci/version.py back to regular version, we don't want to check in TC version -git checkout -- src/oci/version.py - -cd .. - -# write DEV_VERSION to a text file so next step can use it -echo $DEV_VERSION >> ~/.DEV_VERSION - -# DEV_VERSION should contain the version string for this build of the Python SDK -# we need this in the next step to know which version the CLI should depend on -echo "Dev version: $DEV_VERSION" diff --git a/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/5_record_sdk_build_success.sh b/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/5_record_sdk_build_success.sh deleted file mode 100755 index 1d5e110ec8..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/5_record_sdk_build_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch python_sdk_build_success.txt \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/6_report_gen_and_build_status.sh b/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/6_report_gen_and_build_status.sh deleted file mode 100755 index 3d6bff4e9a..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/6_report_gen_and_build_status.sh +++ /dev/null @@ -1,18 +0,0 @@ -set -e -set -x - -pwd -ls -la - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - -# commit changes from generation and build.sh for python-cli and python-sdk -# this step will run no matter what happened before it so it can report success / failure to the JIRA tickets -# $BUILD_TYPE_ARG is defined in the TC build step configuration -cd autogen -python ./3_report_generation_status.py --build-id $BUILD_ID --tool PythonSDK $BUILD_TYPE_ARG -cd .. - -ls -la ./python-sdk \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/7_commit_and_push_generated_changes.sh b/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/7_commit_and_push_generated_changes.sh deleted file mode 100755 index c0f3fdd925..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/7_commit_and_push_generated_changes.sh +++ /dev/null @@ -1,13 +0,0 @@ -set -x -set -e - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - -# commit changes from generation and build.sh for python-cli and python-sdk -# this step will run no matter what happened before it so it can report success / failure to the JIRA tickets -cd autogen -ls -la -python ./4_on_generation_complete.py --build-id $BUILD_ID --tool PythonSDK --build-type $BUILD_TYPE -cd .. 
\ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/disableStrictHostKeyChecking.sh b/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/disableStrictHostKeyChecking.sh deleted file mode 100755 index dd9d030b4a..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/python_sdk/shared/disableStrictHostKeyChecking.sh +++ /dev/null @@ -1,14 +0,0 @@ -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# If `set -e`, must disable "fail on non-zero exit code" using `set +e` -# because ssh returns 255 -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 - -# Old way of doing that: -# ls -la ~/.ssh -# -# cat ~/.ssh/config -# -# printf "\n\nHost * \n StrictHostKeyChecking no\n" >> ~/.ssh/config -# -# cat ~/.ssh/config \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/ruby/preview/1_setup_ruby_preview_branch.sh b/scripts/auto_gen_utils/team_city_scripts/ruby/preview/1_setup_ruby_preview_branch.sh deleted file mode 100755 index b57f11732d..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/ruby/preview/1_setup_ruby_preview_branch.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash - -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - -# Old way of doing that: -# ls -la ~/.ssh -# -# cat ~/.ssh/config -# -# printf "\n\nHost * \n StrictHostKeyChecking no\n" >> ~/.ssh/config -# -# cat ~/.ssh/config - -## AUTOGEN ## -cd autogen -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt -cd .. - -# run the setup job for step 2 in ./autogen -cd autogen -ls -la -pwd - -ls -la - -# checks out the branch with same name as SDK branch that triggered this build -python ./2_pre_generation_set_up.py --build-id $BUILD_ID --tool RubySDK - -# back out into root directory -cd .. \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/ruby/preview/2_configure_rbenv.sh b/scripts/auto_gen_utils/team_city_scripts/ruby/preview/2_configure_rbenv.sh deleted file mode 100755 index b72793e140..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/ruby/preview/2_configure_rbenv.sh +++ /dev/null @@ -1,59 +0,0 @@ -#!/bin/bash - -# NOTE: The pom.xml build step (#3) in TC needs an environment variable configured as below -# for the build step to pick up the correct version of ruby with its installed gems from -# this script: -# env.PATH => /home/teamcity/.rbenv/shims:%env.PATH% - -RUBY_VERSION_TO_INSTALL=2.7.3 - -if [ ! 
-d "$HOME/.rbenv" ]; then - echo Setting up rbenv and ruby-build to install the Ruby SDK locally - - # Set up rbenv - git clone https://github.com/rbenv/rbenv.git ~/.rbenv - export PATH="$HOME/.rbenv/bin:$PATH" - eval "$(rbenv init -)" - - # Install ruby-build so we can install Rubies - mkdir -p "$(rbenv root)"/plugins - git clone https://github.com/rbenv/ruby-build.git "$(rbenv root)"/plugins/ruby-build -fi - -if [ -d "$HOME/.rbenv" ]; then - export PATH="$HOME/.rbenv/bin:$PATH" - eval "$(rbenv init -)" -fi - -# In latest OL7.9 image, install Ruby will fail due to DTrace, temporarly disable it -if [ -z $(rbenv versions | grep "$RUBY_VERSION_TO_INSTALL") ]; then - echo "Installing Ruby Version $RUBY_VERSION_TO_INSTALL" - RUBY_CONFIGURE_OPTS="--disable-dtrace" rbenv install -s $RUBY_VERSION_TO_INSTALL -fi - -if [ ! -f $HOME/.rbenv/version ] || [ $(cat $HOME/.rbenv/version) != $RUBY_VERSION_TO_INSTALL ]; then - echo "Setting $RUBY_VERSION_TO_INSTALL as local Ruby version" - rbenv local $RUBY_VERSION_TO_INSTALL -fi - -echo "Ruby version" -which ruby -ruby --version - -# Change to the ruby-sdk and install its dev dependencies -cd ruby-sdk -echo "Installing dependencies for $(pwd)" - -# Now install the ruby dependencies -echo "Installing Ruby Dependencies" -gem install bundler -rbenv rehash -bundle install - -# Verify gems -gem list - -# Verify PATH -echo "Path: $PATH" - -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/ruby/preview/4_record_sdk_generation_success.sh b/scripts/auto_gen_utils/team_city_scripts/ruby/preview/4_record_sdk_generation_success.sh deleted file mode 100755 index 037c7c83d7..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/ruby/preview/4_record_sdk_generation_success.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -set -x - -pwd -ls -la -touch ruby_sdk_generation_success.txt \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/ruby/preview/5_build_and_publish_artifacts.sh b/scripts/auto_gen_utils/team_city_scripts/ruby/preview/5_build_and_publish_artifacts.sh deleted file mode 100755 index dc0bcb02a8..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/ruby/preview/5_build_and_publish_artifacts.sh +++ /dev/null @@ -1,85 +0,0 @@ -#!/bin/bash - -set -e -set -x - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - -# Old way of doing that: -# ls -la ~/.ssh -# -# cat ~/.ssh/config -# -# printf "\n\nHost * \n StrictHostKeyChecking no\n" >> ~/.ssh/config -# -# cat ~/.ssh/config - -# configure git for this commit -git config --global user.email "$GIT_USER_EMAIL" -git config --global user.name "$GIT_USER_NAME" - -## Maven version from build used to name the dist zip archive ## -echo Maven Version $MAVEN_VERSION - -## Verify rbenv setup ## -echo Ruby Version -ruby --version - -echo Gem Version -gem --version - -echo Gem env -gem env - -# Install extra gems -echo Install typhoeus -gem install typhoeus -v 1.0.2 --user-install - -echo Install inifile -gem install inifile -v 3.0.0 --user-install - -echo Install minitest -gem install minitest -v 5.8.3 --user-install - -echo Install rake -gem install rake -v 10.4.2 --user-install - -echo Python version -python --version - -# This will copy the code to a specific region variant (r2) to process the links for the docs -cd ruby-sdk/scripts -python 
pre_process.py r2 -echo Pre-processing complete - -# Now build the gem for the r2 variant -cd ../variants/r2 - -echo Building gem -gem build oci.gemspec || { echo 'Failed to build oci gem (probably a test failure).' ; exit 1; } - -echo Building RDocs -ruby <> ~/.ssh/config -# -# cat ~/.ssh/config - -# run the job to pull JIRAs from the queue and update POM -# this job expects to be run from ./autogen -cd auto-gen -ls -la -pwd - -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt - -# This should be defined within the TC job configuration: export BUILD_ID=%teamcity.build.id% -RUBY_VERSION=$(grep "VERSION" ../ruby-sdk/lib/oci/version.rb | cut -d "'" -f2 ) -FULL_VERSION=$RUBY_VERSION.$BUILD_NUMBER -python ./5_mark_preview_tickets_done_post_merge.py --build-id $BUILD_ID --tool RubySDK --full-version $FULL_VERSION --allow-transition-overall-issue-to-done --build-conf-name Sdk_RubySdk_BuildPreviewGemAndDocs - -# back out into root directory -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/ruby/public/1_setup_ruby_public_branch.sh b/scripts/auto_gen_utils/team_city_scripts/ruby/public/1_setup_ruby_public_branch.sh deleted file mode 100755 index b57f11732d..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/ruby/public/1_setup_ruby_public_branch.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash - -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - -# Old way of doing that: -# ls -la ~/.ssh -# -# cat ~/.ssh/config -# -# printf "\n\nHost * \n StrictHostKeyChecking no\n" >> ~/.ssh/config -# -# cat ~/.ssh/config - -## AUTOGEN ## -cd autogen -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt -cd .. - -# run the setup job for step 2 in ./autogen -cd autogen -ls -la -pwd - -ls -la - -# checks out the branch with same name as SDK branch that triggered this build -python ./2_pre_generation_set_up.py --build-id $BUILD_ID --tool RubySDK - -# back out into root directory -cd .. \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/ruby/public/2_configure_rbenv.sh b/scripts/auto_gen_utils/team_city_scripts/ruby/public/2_configure_rbenv.sh deleted file mode 100755 index cacd9b4c9e..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/ruby/public/2_configure_rbenv.sh +++ /dev/null @@ -1,62 +0,0 @@ -#!/bin/bash - -# NOTE: The pom.xml build step (#3) in TC needs an environment variable configured as below -# for the build step to pick up the correct version of ruby with its installed gems from -# this script: -# env.PATH => /home/teamcity/.rbenv/shims:%env.PATH% - -RUBY_VERSION_TO_INSTALL=2.7.3 -if [ ! -z "$1" ]; then - RUBY_VERSION_TO_INSTALL=$1 -fi - -if [ ! 
-d "$HOME/.rbenv" ]; then - echo Setting up rbenv and ruby-build to install the Ruby SDK locally - - # Set up rbenv - git clone https://github.com/rbenv/rbenv.git ~/.rbenv - export PATH="$HOME/.rbenv/bin:$PATH" - eval "$(rbenv init -)" - - # Install ruby-build so we can install Rubies - mkdir -p "$(rbenv root)"/plugins - git clone https://github.com/rbenv/ruby-build.git "$(rbenv root)"/plugins/ruby-build -fi - -if [ -d "$HOME/.rbenv" ]; then - export PATH="$HOME/.rbenv/bin:$PATH" - eval "$(rbenv init -)" -fi - -# In latest OL7.9 image, install Ruby will fail due to DTrace, temporarly disable it -if [ -z $(rbenv versions | grep "$RUBY_VERSION_TO_INSTALL") ]; then - echo "Installing Ruby Version $RUBY_VERSION_TO_INSTALL" - RUBY_CONFIGURE_OPTS="--disable-dtrace" rbenv install -s $RUBY_VERSION_TO_INSTALL -fi - -if [ ! -f $HOME/.rbenv/version ] || [ $(cat $HOME/.rbenv/version) != $RUBY_VERSION_TO_INSTALL ]; then - echo "Setting $RUBY_VERSION_TO_INSTALL as local Ruby version" - rbenv local $RUBY_VERSION_TO_INSTALL -fi - -echo "Ruby version" -which ruby -ruby --version - -# Change to the ruby-sdk and install its dev dependencies -cd ruby-sdk -echo "Installing dependencies for $(pwd)" - -# Now install the ruby dependencies -echo "Installing Ruby Dependencies" -gem install bundler -rbenv rehash -bundle install - -# Verify gems -gem list - -# Verify PATH -echo "Path: $PATH" - -cd .. \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/ruby/public/4_record_sdk_generation_success.sh b/scripts/auto_gen_utils/team_city_scripts/ruby/public/4_record_sdk_generation_success.sh deleted file mode 100755 index 037c7c83d7..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/ruby/public/4_record_sdk_generation_success.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -set -x - -pwd -ls -la -touch ruby_sdk_generation_success.txt \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/ruby/public/5_build_and_publish_artifacts.sh b/scripts/auto_gen_utils/team_city_scripts/ruby/public/5_build_and_publish_artifacts.sh deleted file mode 100755 index 78e14ee718..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/ruby/public/5_build_and_publish_artifacts.sh +++ /dev/null @@ -1,95 +0,0 @@ -#!/bin/bash - -set -e -set -x - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - -# Old way of doing that: -# ls -la ~/.ssh -# -# cat ~/.ssh/config -# -# printf "\n\nHost * \n StrictHostKeyChecking no\n" >> ~/.ssh/config -# -# cat ~/.ssh/config - -# configure git for this commit -git config --global user.email "$GIT_USER_EMAIL" -git config --global user.name "$GIT_USER_NAME" - -## Maven version from build used to name the dist zip archive ## -echo Maven Version $MAVEN_VERSION - -## Verify rbenv setup ## -echo Ruby Version -ruby --version - -echo Gem Version -gem --version - -echo Gem env -gem env - -# Install extra gems -echo Install typhoeus -gem install typhoeus -v 1.0.2 --user-install - -echo Install inifile -gem install inifile -v 3.0.0 --user-install - -echo Install minitest -gem install minitest -v 5.8.3 --user-install - -echo Install rake -gem install rake -v 10.4.2 --user-install - -echo Install bundle -gem install bundler -v 2.3.27 --user-install - -echo Python version -python --version - -cd ruby-sdk -bundle install - -# This 
will copy the code to a specific region variant (r2) to process the links for the docs -cd scripts -python pre_process.py r2 -echo Pre-processing complete - -# Now build the gem for the r2 variant -cd ../variants/r2 - -echo Building gem -gem build oci.gemspec || { echo 'Failed to build oci gem (probably a test failure).' ; exit 1; } - -version=$(grep "VERSION" lib/oci/version.rb | cut -d\' -f2) -find . -name \*.rb |xargs sed -i "s#https://docs\.cloud\.oracle\.com/en-us/iaas/tools/ruby-sdk-examples/latest/#https://docs\.cloud\.oracle\.com/en-us/iaas/tools/ruby-sdk-examples/$version/#g" - -echo Building RDocs -ruby <> ~/.ssh/config -# -# cat ~/.ssh/config - -# run the job to pull JIRAs from the queue and update POM -# this job expects to be run from ./autogen -cd auto-gen -ls -la -pwd - -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt - -# This should be defined within the TC job configuration: export BUILD_ID=%teamcity.build.id% -RUBY_VERSION=$(grep "VERSION" ../ruby-sdk/lib/oci/version.rb | cut -d "'" -f2 ) -FULL_VERSION=$RUBY_VERSION.$BUILD_NUMBER -python ./5_mark_preview_tickets_done_post_merge.py \ - --build-id $BUILD_ID \ - --tool RubySDK \ - --full-version $FULL_VERSION \ - --build-conf-name Sdk_RubySdk_BuildGemAndDocs \ - --allow-transition-overall-issue-to-deploy - -# back out into root directory -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/run_autogen_issue_advisor.sh b/scripts/auto_gen_utils/team_city_scripts/run_autogen_issue_advisor.sh deleted file mode 100755 index 4d27bcb061..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/run_autogen_issue_advisor.sh +++ /dev/null @@ -1,72 +0,0 @@ -#!/bin/bash - -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - -# Old way of doing that: -# ls -la ~/.ssh -# -# cat ~/.ssh/config -# -# printf "\n\nHost * \n StrictHostKeyChecking no\n" >> ~/.ssh/config -# -# cat ~/.ssh/config - -# configure git for this commit -git config --global user.email "$GIT_USER_EMAIL" -git config --global user.name "$GIT_USER_NAME" - -cd autogen -ls -la -pwd - -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt - -ls -la - -ISSUE_FILTER_ARG="" -if [ ! -z "$ISSUE_FILTER" ]; then - ISSUE_FILTER_ARG="--issue $ISSUE_FILTER" -fi - -BULK_PREVIEW_DATE_OVERRIDES_ARG="" -if [ ! -z "$BULK_PREVIEW_DATE_OVERRIDES" ]; then - BULK_PREVIEW_DATE_OVERRIDES_ARG="--bulk-preview-date-overrides="${BULK_PREVIEW_DATE_OVERRIDES} -fi - -PUBLIC_RELEASE_REQUESTED_CUT_OFF_DATE_OVERRIDES_ARG="" -if [ ! -z "$PUBLIC_RELEASE_REQUESTED_CUT_OFF_DATE_OVERRIDES" ]; then - PUBLIC_RELEASE_REQUESTED_CUT_OFF_DATE_OVERRIDES_ARG="--public-release-requested-cut-off-date-overrides="${PUBLIC_RELEASE_REQUESTED_CUT_OFF_DATE_OVERRIDES} -fi - -PUBLIC_RELEASE_DATE_OVERRIDES_ARG="" -if [ ! 
-z "${PUBLIC_RELEASE_DATE_OVERRIDES}" ]; then - PUBLIC_RELEASE_DATE_OVERRIDES_ARG="--public-release-date-overrides="${PUBLIC_RELEASE_DATE_OVERRIDES} -fi - -python ./autogen_issue_advisor.py \ - $PIPELINE_ARG \ - $DRY_RUN_ARG \ - $DISABLE_DATE_CHECK_ARG \ - $FORCE_ARG \ - $VERBOSE_ARG \ - $ISSUE_FILTER_ARG \ - $BULK_PREVIEW_DATE_OVERRIDES_ARG \ - $PUBLIC_RELEASE_REQUESTED_CUT_OFF_DATE_OVERRIDES_ARG \ - $PUBLIC_RELEASE_DATE_OVERRIDES_ARG \ - $IGNORE_WRONG_PIPELINE_ARG - -# back out into root directory -cd .. \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/run_autogen_issue_advisor_calendar.sh b/scripts/auto_gen_utils/team_city_scripts/run_autogen_issue_advisor_calendar.sh deleted file mode 100755 index 8cc6a2eb6a..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/run_autogen_issue_advisor_calendar.sh +++ /dev/null @@ -1,90 +0,0 @@ -#!/bin/bash - -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - -# Old way of doing that: -# ls -la ~/.ssh -# -# cat ~/.ssh/config -# -# printf "\n\nHost * \n StrictHostKeyChecking no\n" >> ~/.ssh/config -# -# cat ~/.ssh/config - -# configure git for this commit -git config --global user.email "$GIT_USER_EMAIL" -git config --global user.name "$GIT_USER_NAME" - -cd autogen -ls -la -pwd - -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt - -ls -la - -ISSUE_FILTER_ARG="" -if [ ! -z "$ISSUE_FILTER" ]; then - ISSUE_FILTER_ARG="--issue $ISSUE_FILTER" -fi - -BULK_PREVIEW_DATE_OVERRIDES_ARG="" -if [ ! -z "$BULK_PREVIEW_DATE_OVERRIDES" ]; then - BULK_PREVIEW_DATE_OVERRIDES_ARG="--bulk-preview-date-overrides="${BULK_PREVIEW_DATE_OVERRIDES} -fi - -PUBLIC_RELEASE_REQUESTED_CUT_OFF_DATE_OVERRIDES_ARG="" -if [ ! -z "$PUBLIC_RELEASE_REQUESTED_CUT_OFF_DATE_OVERRIDES" ]; then - PUBLIC_RELEASE_REQUESTED_CUT_OFF_DATE_OVERRIDES_ARG="--public-release-requested-cut-off-date-overrides="${PUBLIC_RELEASE_REQUESTED_CUT_OFF_DATE_OVERRIDES} -fi - -PUBLIC_RELEASE_DATE_OVERRIDES_ARG="" -if [ ! -z "${PUBLIC_RELEASE_DATE_OVERRIDES}" ]; then - PUBLIC_RELEASE_DATE_OVERRIDES_ARG="--public-release-date-overrides="${PUBLIC_RELEASE_DATE_OVERRIDES} -fi - -python ./autogen_issue_advisor.py \ - $PIPELINE_ARG \ - $DRY_RUN_ARG \ - $FORCE_ARG \ - $VERBOSE_ARG \ - $ISSUE_FILTER_ARG \ - $BULK_PREVIEW_DATE_OVERRIDES_ARG \ - $PUBLIC_RELEASE_REQUESTED_CUT_OFF_DATE_OVERRIDES_ARG \ - $PUBLIC_RELEASE_DATE_OVERRIDES_ARG \ - --show-ga-calendar \ - --show-ga-calendar-count $SHOW_GA_CALENDAR_COUNT | tee ../ga-dates.txt - - -python ./autogen_issue_advisor.py \ - $PIPELINE_ARG \ - $DRY_RUN_ARG \ - $FORCE_ARG \ - $VERBOSE_ARG \ - $ISSUE_FILTER_ARG \ - $BULK_PREVIEW_DATE_OVERRIDES_ARG \ - --show-preview-calendar \ - --show-preview-calendar-count $SHOW_PREVIEW_CALENDAR_COUNT | tee ../preview-dates.txt - -# back out into root directory -cd .. 
- - -# uploads ga-dates.txt -curl https://objectstorage.us-phoenix-1.oraclecloud.com/p/Lu2GYrJGOs6JCEibFpXmirYt1LAIXAWuiC6Cx8ps8KXBAxPeOJWmOb6yz0QNH3qS/n/dex-us-phoenix-1/b/generated_markdown/o/ga-dates.txt --upload-file ga-dates.txt - -# uploads preview-dates.txt -curl https://objectstorage.us-phoenix-1.oraclecloud.com/p/UXNHi_BuJ58pHnv4Zucc6oE1WE_-VqeTp5ELkoN1Wqb_WCikNwb7BaMIM0BI6eJ-/n/dex-us-phoenix-1/b/generated_markdown/o/preview-dates.txt --upload-file preview-dates.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/typescript/preview/1_setup_typescript_preview_branch.sh b/scripts/auto_gen_utils/team_city_scripts/typescript/preview/1_setup_typescript_preview_branch.sh deleted file mode 100755 index d7ea580a50..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/typescript/preview/1_setup_typescript_preview_branch.sh +++ /dev/null @@ -1,35 +0,0 @@ -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# If `set -e`, must disable "fail on non-zero exit code" using `set +e` -# because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - - -## AUTOGEN ## -cd autogen -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt --quiet -cd .. - -# run the setup job for step 2 in ./autogen -cd autogen -ls -la -pwd - -ls -la - - - -python ./2_pre_generation_set_up.py --build-id $BUILD_ID --tool TypescriptSDK - -# back out into root directory -cd .. \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/typescript/preview/3_record_sdk_generation_success.sh b/scripts/auto_gen_utils/team_city_scripts/typescript/preview/3_record_sdk_generation_success.sh deleted file mode 100755 index 48a4df452c..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/typescript/preview/3_record_sdk_generation_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch typescript_sdk_generation_success.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/typescript/preview/5_record_sdk_build_success.sh b/scripts/auto_gen_utils/team_city_scripts/typescript/preview/5_record_sdk_build_success.sh deleted file mode 100755 index 002c83d19f..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/typescript/preview/5_record_sdk_build_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch typescript_sdk_build_success.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/typescript/preview/6_report_gen_and_build_status.sh b/scripts/auto_gen_utils/team_city_scripts/typescript/preview/6_report_gen_and_build_status.sh deleted file mode 100755 index 852cd6dcd0..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/typescript/preview/6_report_gen_and_build_status.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -e -set -x - -pwd -ls -la - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - -cd oci-typescript-sdk -branch=`git branch|grep "^\*"|cut -c3-` -if [[ ${branch} == *"bulk"* ]]; then - build_type="bulk_pending_merge_preview" -else - build_type="individual_preview" -fi - -cd .. 
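(The backtick pipeline above, `git branch|grep "^\*"|cut -c3-`, is an older idiom for reading the current branch name. A sketch of an equivalent that avoids the fixed `cut` offset, assuming a reasonably modern git on the build agent; this is a possible simplification, not what the deleted script used:)

    # current branch name without the "* " decoration
    branch=$(git rev-parse --abbrev-ref HEAD)
    case "$branch" in
        *bulk*) build_type="bulk_pending_merge_preview" ;;
        *)      build_type="individual_preview" ;;
    esac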
-# commit changes from generation and build for typescript-sdk -# this step will run no matter what happened before it so it can report success / failure to the JIRA tickets -cd autogen -python ./3_report_generation_status.py --build-id $BUILD_ID --tool TypescriptSDK --build-type ${build_type} -cd .. - -ls -la ./oci-typescript-sdk diff --git a/scripts/auto_gen_utils/team_city_scripts/typescript/preview/7_commit_generated_changes.sh b/scripts/auto_gen_utils/team_city_scripts/typescript/preview/7_commit_generated_changes.sh deleted file mode 100755 index a71662ba0e..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/typescript/preview/7_commit_generated_changes.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -x -set -e - -# configure git for this commit -git config --global user.email "$GIT_USER_EMAIL" -git config --global user.name "$GIT_USER_NAME" - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - -# get the branch we're on -cd oci-typescript-sdk -branch=`git branch|grep "^\*"|cut -c3-` -if [[ ${branch} == *"bulk"* ]]; then - build_type="bulk_pending_merge_preview" -else - build_type="individual_preview" -fi -cd .. - -# commit changes from generation and build -cd autogen -ls -la -python ./4_on_generation_complete.py --build-id $BUILD_ID --tool TypescriptSDK --build-type ${build_type} -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/typescript/preview/9_mark_done_post_bulk_merge.sh b/scripts/auto_gen_utils/team_city_scripts/typescript/preview/9_mark_done_post_bulk_merge.sh deleted file mode 100755 index 4cf39a2054..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/typescript/preview/9_mark_done_post_bulk_merge.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash - -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - - -# run the job to pull JIRAs from the queue and update POM -# this job expects to be run from ./autogen -cd auto-gen -ls -la -pwd - -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt - -# This should be defined within the TC job configuration: export BUILD_ID=%teamcity.build.id% -FULL_VERSION=`cat ../oci-typescript-sdk/package_version` -python ./5_mark_preview_tickets_done_post_merge.py --build-id $BUILD_ID --tool TypescriptSDK --full-version $FULL_VERSION --allow-transition-overall-issue-to-done --build-conf-name Sdk_TypeScriptSDK_BuildSdkPreview - -# back out into root directory -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/typescript/public/1_setup_typescript_branch.sh b/scripts/auto_gen_utils/team_city_scripts/typescript/public/1_setup_typescript_branch.sh deleted file mode 100755 index e3405f3462..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/typescript/public/1_setup_typescript_branch.sh +++ /dev/null @@ -1,33 +0,0 @@ -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. 
.sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - - -## AUTOGEN ## -cd autogen -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt --quiet -cd .. - -# run the setup job for step 2 in ./autogen -cd autogen -ls -la -pwd - -ls -la - - -python ./2_pre_generation_set_up.py --build-id $BUILD_ID --tool TypescriptSDK - -# back out into root directory -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/typescript/public/3_record_sdk_generation_success.sh b/scripts/auto_gen_utils/team_city_scripts/typescript/public/3_record_sdk_generation_success.sh deleted file mode 100755 index 48a4df452c..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/typescript/public/3_record_sdk_generation_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch typescript_sdk_generation_success.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/typescript/public/5_record_sdk_build_success.sh b/scripts/auto_gen_utils/team_city_scripts/typescript/public/5_record_sdk_build_success.sh deleted file mode 100755 index 002c83d19f..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/typescript/public/5_record_sdk_build_success.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -x - -pwd -ls -la -touch typescript_sdk_build_success.txt diff --git a/scripts/auto_gen_utils/team_city_scripts/typescript/public/6_report_gen_and_build_status.sh b/scripts/auto_gen_utils/team_city_scripts/typescript/public/6_report_gen_and_build_status.sh deleted file mode 100755 index f65c362df2..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/typescript/public/6_report_gen_and_build_status.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -e -set -x - -pwd -ls -la - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. ./.sdk-venv/bin/activate - -cd oci-typescript-sdk -if [[ `git branch|grep "\*"|grep "bulk"` ]]; then - echo "Found bulk public" - export BUILD_TYPE_ARG="--build-type bulk_pending_merge_public" -else - export BUILD_TYPE_ARG="--build-type individual_public" -fi -cd .. - -# commit changes from generation and build for typescript-sdk -# this step will run no matter what happened before it so it can report success / failure to the JIRA tickets -cd autogen -python ./3_report_generation_status.py --build-id $BUILD_ID --tool TypescriptSDK $BUILD_TYPE_ARG -cd .. - -ls -la ./oci-typescript-sdk diff --git a/scripts/auto_gen_utils/team_city_scripts/typescript/public/7_commit_generated_changes.sh b/scripts/auto_gen_utils/team_city_scripts/typescript/public/7_commit_generated_changes.sh deleted file mode 100755 index 915acd3fcf..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/typescript/public/7_commit_generated_changes.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -x -set -e - -# configure git for this commit -git config --global user.email "$GIT_USER_EMAIL" -git config --global user.name "$GIT_USER_NAME" - -# activate venv from step 1 to run below python script -ls -la ./.sdk-venv -. 
./.sdk-venv/bin/activate - -# get the branch we're on -cd oci-typescript-sdk -branch=`git branch|grep "^\*"|cut -c3-` -if [[ ${branch} == *"bulk"* ]]; then - build_type="bulk_pending_merge_public" -else - build_type="individual_public" -fi -cd .. - -# commit changes from generation and build -cd autogen -ls -la -python ./4_on_generation_complete.py --build-id $BUILD_ID --tool TypescriptSDK --build-type ${build_type} -cd .. diff --git a/scripts/auto_gen_utils/team_city_scripts/typescript/public/9_mark_done_post_bulk_merge.sh b/scripts/auto_gen_utils/team_city_scripts/typescript/public/9_mark_done_post_bulk_merge.sh deleted file mode 100755 index b35356cb91..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/typescript/public/9_mark_done_post_bulk_merge.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash - -set -e -set -x - -echo Creating venv to install sdk locally -. /opt/odo/tox_sic/venv/bin/activate -virtualenv .sdk-venv -. .sdk-venv/bin/activate - -# must disable StrictHostKeyChecking so that we don't get an interactive -# prompt later asking to confirm the host key -# Must disable -e (fail on non-zero exit code) because ssh returns 255 -set +e -ssh -o StrictHostKeyChecking=no git@bitbucket.oci.oraclecorp.com -p 7999 -set -e - - -# run the job to pull JIRAs from the queue and update POM -# this job expects to be run from ./autogen -cd auto-gen -ls -la -pwd - -pip ${PIP_TIMEOUT_PARAMETER} install --trusted-host artifactory.oci.oraclecorp.com --index-url https://artifactory.oci.oraclecorp.com/api/pypi/global-dev-pypi/simple -r requirements.txt - -# This should be defined within the TC job configuration: export BUILD_ID=%teamcity.build.id% -FULL_VERSION=`cat ../oci-typescript-sdk/package_version` -python ./5_mark_preview_tickets_done_post_merge.py --build-id $BUILD_ID --allow-transition-overall-issue-to-deploy --tool TypescriptSDK --full-version $FULL_VERSION --build-conf-name Sdk_TypeScriptSDK_BuildSdkMaster - -# back out into root directory -cd .. \ No newline at end of file diff --git a/scripts/auto_gen_utils/team_city_scripts/udx_automation/README.md b/scripts/auto_gen_utils/team_city_scripts/udx_automation/README.md deleted file mode 100644 index fc311777d1..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/udx_automation/README.md +++ /dev/null @@ -1,40 +0,0 @@ -# UDX Intake Process Automation - -## Introduction -Today, when a service team wants to GA an entire new service or a new feature in an existing service, they need to file an ORM ticket, which automatically creates a UDX ticket for tracking the UDX approvals needed for this GA. Currently, the TPM manually performs the entire intake process for such new UDX tickets, which involves verifying the ticket and collecting various pieces of information from related tickets. - -This approach has several disadvantages, as detailed below: - -* This work, though trivial, takes a lot of the TPM's time and effort to manage. That time would be better spent by the TPM on higher-priority tasks. -* As this is manual work, it is prone to human error, and we should automate wherever we can. -* As the number of services keeps increasing, this workflow will become unsustainable because it does not scale well. -* If the TPM is out of office or busy with other higher-priority tasks, reviews of such tickets will pile up and the GA for services might get delayed. 
* Makes it harder to hand off this work to a new TPM. - -## Automation Goal - -* When a UDX ticket gets created for review, the automation should be the first thing to run and check whether the ticket is ready. -* Transition the ticket to the **In Design** state if it meets the bar. Otherwise, move it to **Closed** if it was manually created, or to the **More information needed** state in case of issues. -* The job will run on a regular schedule and process all the tickets needing triage. -* Make the job extensible so that existing requirements can be modified and new ones added. - -## Automation Steps -The automation performs the following steps: - -1. Get tickets in **Needs Triage** status in the **UDX** project that do not have the bypass label **Bypass-UDX-Automation**. If you want to modify the Jira query the automation uses, set the environment variable **QUERY** to the JQL you want to run instead (see the example run after this list). -2. Check if the ticket has a Root ORM ticket attached to it. If the ticket does not have a Root ORM, the UDX ticket is **closed** with a comment and a link explaining how to create the UDX ticket via the ORM process. -3. If the root ORM ticket has ***Customer facing changes*** set to **No**, transition the UDX ticket to the **Closed** state with a comment that a UDX ticket is not needed when there are no such changes. -4. Start the intake process for the UDX ticket: - 1. If the UDX ticket **reporter** is set to ***jira-automation-bot***, set the reporter to match the reporter set in the Root ORM. - 2. If the **Service Team Project/Queue** is missing, update it with the value from the Root ORM ticket. - 3. Find the TC ticket by looking at the Root ORM ticket and its child Technical Content ORM ticket, and create a link from the UDX ticket to it. - 4. Check the **Public facing API changes** field. - 1. If it is set to **Yes**, then: - - set **Surfaces Exempt from Feature Impact = NO EXEMPTIONS** - - Check if the **GA date** is a *Tuesday or Wednesday*; if it is not, add a comment asking the *Reporter* to set the **GA Date** to a *Tuesday/Wednesday* on the Root ORM. - 2. If it is set to **No**, then - - Check if either *SDK/CLI* or *Terraform* is present among the **surface exemptions**; if not, move the ticket to the **More Information Needed** status with a comment asking the *Reporter* to check whether they forgot to add the exemptions. - - If the necessary **surface exemptions** are present, check if the **GA Date** is a weekday; if it is not, ask the *Reporter* to set the **GA Date** to a weekday on the Root ORM. - - 5. Transition tickets still not transitioned to **In Design** with a comment asking the *Reporter* to provide the VPAT documentation information. 
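For a local trial run of the automation described above, something like the following should work, assuming Jira credentials are already configured the way `config`/`util` expect; it reuses the `issue = UDX-21538` example query that is left commented out in the script itself:

    # limit the run to a single ticket; --dry-run makes the IS_DRY_RUN guards skip Jira updates
    export QUERY='issue = UDX-21538'
    python ./udx_ticket_review_automation.py --dry-run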
- diff --git a/scripts/auto_gen_utils/team_city_scripts/udx_automation/__init__.py b/scripts/auto_gen_utils/team_city_scripts/udx_automation/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/scripts/auto_gen_utils/team_city_scripts/udx_automation/udx_ticket_review_automation.py b/scripts/auto_gen_utils/team_city_scripts/udx_automation/udx_ticket_review_automation.py deleted file mode 100644 index ff1bd0bb8a..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/udx_automation/udx_ticket_review_automation.py +++ /dev/null @@ -1,417 +0,0 @@ -import os -import sys -import argparse -import logging -import datetime - -dir_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(dir_path, '../..')) - -import util # noqa: ignore=F402 -import config # noqa: ignore=F402 - - -JIRA_AUTOMATION_BOT = 'jira-automation-bot' -UDX_AUTOMATION_MAINTAINER = os.environ.get('UDX_AUTOMATION_MAINTAINER') or 'anurggar' -ALLOWED_GA_WEEKDAYS = ['tuesday', 'wednesday'] -BYPASS_UDX_AUTOMATION_LABEL = 'Bypass-UDX-Automation' -MANUAL_CHECK_LABEL = 'UDX-Manually-Created' -EXEMPTIONS_NEEDED_WHEN_NO_PUBLIC_API_CHANGES = ['SDK/CLI', 'Terraform', 'UX/Console', 'Docs'] -UDX_AUTOMATION_LABEL = 'UDX-Intake-Automation' - -IN_DESIGN_COMMENT = \ - ''' - [~{}], - We have moved your ticket to *In Design* phase, however, please complete the steps mentioned below in order to proceed:- - - {} - ''' - -GA_DATE_NOT_IN_ALLOWED_DAYS_COMMENT = \ - f'* The GA Date for this ticket is not a {"/".join(ALLOWED_GA_WEEKDAYS)}, if SDK/CLI or Terraform are impacted ' \ - f'you must *update the target GA date* on the top level ORM to these days in order to align with ' \ - f'the dev tools release schedule.' - - -GA_DATE_NOT_A_WEEKDAY_COMMENT = \ - f"* You must *update the target GA date* on the top level ORM to a week day in order to align with the dev tools release schedule" - -VPAT_DOCUMENTATION_COMMENT = \ - f'* Please answer the VPAT related questions asked in the UDX ticket description. As per UDX updated GA requirement feature teams must provide the VPAT at the time of GA. Refer the [UDX Self Service On boarding|https://confluence.oci.oraclecorp.com/display/UDX/UDX+Self-Service+Onboarding+-+VPAT+Documentation] for additional details.' - -EXCEPTION_OCCURRED_COMMENT = \ - ''' - [~{}], - An error occurred in the UDX intake automation while processing this ticket. Please check the builds logs to rectify this issue. - ''' - -MISSING_ROOT_ORM_COMMENT = \ - ''' - [~{}], - Have you created an ORM ticket for this? UDX tickets should only be generated through the ORM process. Manually generated and cloned UDX tickets can't be processed and will be cancelled unless granted exception by the UDX team. See this wiki ([https://confluence.oci.oraclecorp.com/display/OCIRM/Getting+Started+with+Release+Management]) for info on creating an ORM ticket which will in turn create the UDX ticket - ''' - -NO_CUSTOMER_FACING_CHANGES_ROOT_ORM_COMMENT = \ - ''' - [~{}], - Have you created an ORM ticket with *Customer facing = No* ?, UDX tickets should only be generated through the ORM process with *Customer facing= Yes*. This UDX tickets can't be processed and will be cancelled. 
See this wiki ([https://confluence.oci.oraclecorp.com/display/OCIRM/Getting+Started+with+Release+Management]) for info on creating an ORM ticket which will in turn create the UDX ticket - ''' - -MISSING_EXCEPTIONS_COMMENT = \ - ''' - [~{}], - This ticket was submitted with "No API change to Public-Facing Endpoints" selected but no UDX Surfaces have been marked as Exempt. - - Please clarify which of these customer facing surfaces are impacted by this change: - * Console UI - * SDK/CLI - * Terraform - * Technical Content (Docs) - - Let us know which surfaces are impacted ASAP so that we can Un-Block this ticket and move it to the next step. - In the future you can mark the UDX surfaces affected as described in the second bullet of the UDX Feature Release Checklist ([https://confluence.oci.oraclecorp.com/display/UDX/UDX+Feature+Release+Checklist]) - ''' - -TICKETS_IN_TRIAGE_QUERY = f'project = UDX AND status in ("Needs Triage") AND labels not in ({BYPASS_UDX_AUTOMATION_LABEL}, {MANUAL_CHECK_LABEL})' \ - f'ORDER BY status ASC, cf[12197] ASC, cf[12196] DESC, cf[11140] ASC, key ASC, reporter ' \ - f'DESC, updated DESC, priority DESC' - -# QUERY_TEMPLATE = 'issue = UDX-21538' -QUERY_TEMPLATE = os.environ.get('QUERY') or TICKETS_IN_TRIAGE_QUERY - -TICKETS_IN_MORE_INFO = f'project = "User and Developer Experience" AND status = "More Information Needed" AND labels not in ({BYPASS_UDX_AUTOMATION_LABEL}, {MANUAL_CHECK_LABEL}) AND labels in ({UDX_AUTOMATION_LABEL})' - -# create logger -logging.basicConfig() -logger = logging.getLogger('UDX_INTAKE_AUTOMATION') -logger.setLevel(logging.DEBUG) - - -def is_root_orm_customer_facing(root_orm): - # customer facing field in root has cf_10720 - customer_facing = root_orm.fields.customfield_10720 - logger.info(f'The root orm ticket: {root_orm.key}, customer facing field: {customer_facing}') - if customer_facing and 'Yes' in customer_facing.value: - return True - return False - - -def close_not_customer_facing_ticket(udx_ticket, root_orm): - logger.info(f'Transitioning ticket {udx_ticket.key} to {config.STATUS_CLOSED} as root orm: {root_orm.key}' - f' mentions there is no customer facing changes') - if not config.IS_DRY_RUN: - add_udx_automation_label(udx_ticket) - ticket_creator = udx_ticket.fields.reporter.name if udx_ticket.fields.reporter else udx_ticket.fields.assignee.name - util.JIRA_CLIENT().transition_issue(udx_ticket, config.STATUS_CLOSED, comment=NO_CUSTOMER_FACING_CHANGES_ROOT_ORM_COMMENT.format(ticket_creator, root_orm.key)) - - -def update_manually_created_ticket(udx_ticket): - logger.info(f'Updating ticket {udx_ticket.key} with missing Root ORM comment and adding manual check label') - if not config.IS_DRY_RUN: - add_udx_automation_label(udx_ticket) - add_udx_manually_created_label(udx_ticket) - # Add comment that root ORM is Missing. 
- ticket_creator = udx_ticket.fields.reporter.name if udx_ticket.fields.reporter else udx_ticket.fields.assignee.name - util.add_jira_comment(udx_ticket.key,comment=MISSING_ROOT_ORM_COMMENT.format(ticket_creator)) - - -def transition_ticket_missing_exemptions_to_more_info_needed(udx_ticket): - logger.info(f'Transitioning ticket: {udx_ticket.key} to status: {config.STATUS_MORE_INFORMATION_NEEDED} as ticket is missing exemptions') - if not config.IS_DRY_RUN: - add_udx_automation_label(udx_ticket) - util.JIRA_CLIENT().transition_issue(udx_ticket, config.STATUS_MORE_INFORMATION_NEEDED) - # Adding comment separately as the transition to More Information needed already has an auto-comment which - # overrides the comment we provide in transition issue api - util.add_jira_comment(udx_ticket.key, comment=MISSING_EXCEPTIONS_COMMENT.format(udx_ticket.fields.reporter.name)) - - -def get_parent_tickets(udx_ticket): - root_orm, wrapper_udx = None, None - wrapper_udx_link_generator = ( - link.inwardIssue for link in udx_ticket.fields.issuelinks - if hasattr(link, 'inwardIssue') and 'ORM' in link.inwardIssue.key - ) - wrapper_udx = next(wrapper_udx_link_generator, None) - if wrapper_udx: - wrapper_udx = util.JIRA_CLIENT().issue(wrapper_udx.key) - logger.debug(f'For ticket: {udx_ticket.key}, found wrapper udx ticket: {wrapper_udx.key}') - root_orm_udx_link_generator = ( - link.outwardIssue for link in wrapper_udx.fields.issuelinks - if hasattr(link, 'outwardIssue') and 'ORM' in link.outwardIssue.key - ) - root_orm = next(root_orm_udx_link_generator, None) - if root_orm: - logger.debug(f'For wrapper UDX ticket: {wrapper_udx.key}, found Root ORM ticket: {root_orm.key}') - root_orm = util.JIRA_CLIENT().issue(root_orm.key) - else: - logger.info(f'Root ORM ticket not found under wrapper UDX: {wrapper_udx.key}, setting wrapper udx as root ORM') - root_orm = wrapper_udx - else: - logger.error(f'For ticket: {udx_ticket.key} is missing Wrapper UDX ORM ticket') - return root_orm, wrapper_udx - - -def update_udx_service_queue(udx_ticket, root_orm): - udx_ticket_service_queue = udx_ticket.fields.customfield_13254 - if udx_ticket_service_queue is None: - logger.debug(f'For ticket: {udx_ticket.key}, Service Queue is missing.') - root_orm_service_queue = root_orm.fields.customfield_13254 - if root_orm_service_queue: - logger.debug(f'For ticket: {udx_ticket.key}, Adding Service Queue: {root_orm_service_queue.name} from ' - f'root ORM ticket: {root_orm.key}') - if not config.IS_DRY_RUN: - update_service_queue_dict = { - 'customfield_13254': { - 'key': root_orm_service_queue.key - } - } - udx_ticket.update(fields=update_service_queue_dict) - else: - logger.warning(f'Service Queue not found in either UDX ticket: {udx_ticket.key}, or its root ORM ' - f'ticket: {root_orm.key}') - - -def update_udx_reporter_if_missing(udx_ticket, root_orm): - if udx_ticket.fields.reporter and udx_ticket.fields.reporter.name == JIRA_AUTOMATION_BOT: - reporter = root_orm.fields.reporter - logger.debug(f'For ticket: {udx_ticket.key}, Reporter is missing. 
Adding reporter: {reporter.displayName} with ' - f'user name: {reporter.name} from root ORM ticket: {root_orm.key}') - - if not config.IS_DRY_RUN: - update_reporter_dict = { - 'reporter': { - 'name': reporter.name - } - } - udx_ticket.update(fields=update_reporter_dict) - - -def mark_udx_ticket_with_no_exemptions(udx_ticket): - logger.debug(f'For ticket: {udx_ticket.key}, Setting Surfaces Exempt from Feature Impact = NO EXEMPTIONS') - if not config.IS_DRY_RUN: - update_surface_exemption_dict = { - 'customfield_13594': [{'id': '16504'}] - } - udx_ticket.update(fields=update_surface_exemption_dict) - - -def udx_ticket_has_public_api_changes(udx_ticket, root_orm): - has_public_api_changes = root_orm.fields.customfield_13419 - if has_public_api_changes and has_public_api_changes.value and 'yes' in has_public_api_changes.value.lower(): - logger.debug(f'root orm ticket: {root_orm.key} has public facing API changes') - logger.debug(f'Setting has public API changes for ticket: {udx_ticket.key} to "Yes"') - if not config.IS_DRY_RUN: - api_changes_update_dict = { - 'customfield_13419': {'id': '15842'} - } - udx_ticket.update(fields=api_changes_update_dict) - return True - else: - logger.debug(f'Root ORM ticket: {root_orm.key} does not have public facing API changes') - logger.debug(f'Setting has public API changes for ticket: {udx_ticket.key} to "No"') - if not config.IS_DRY_RUN: - api_changes_update_dict = { - 'customfield_13419': {'id': '15843'} - } - udx_ticket.update(fields=api_changes_update_dict) - return False - - -def udx_ticket_missing_exemptions(udx_ticket): - surface_exemptions = udx_ticket.fields.customfield_13594 - if surface_exemptions: - logger.debug(f'For udx ticket: {udx_ticket.key}, the following surfaces are marked for exemption: {[e.value for e in surface_exemptions]}') - # If no exemptions return True - if any(e.value == "No Exemptions" for e in surface_exemptions): - return True - # If SDK/CLI or Terraform exemptions are present - if any(e.value in EXEMPTIONS_NEEDED_WHEN_NO_PUBLIC_API_CHANGES for e in surface_exemptions): - return False - return True - - -def get_tc_ticket_linked_to_udx(udx_ticket): - tc_link_generator = ( - link.outwardIssue for link in udx_ticket.fields.issuelinks - if hasattr(link, 'outwardIssue') and 'TC' in link.outwardIssue.key - ) - return next(tc_link_generator, None) - - -def get_tc_ticket_from_root_orm(root_orm): - wrapper_tc_link_generator = ( - link.inwardIssue for link in root_orm.fields.issuelinks - if hasattr(link, 'inwardIssue') and hasattr(link.inwardIssue, 'fields') and 'Technical Content' in link.inwardIssue.fields.summary - ) - wrapper_tc = next(wrapper_tc_link_generator, None) - if wrapper_tc: - # Get Fill Wrapper TC ticket via its key - wrapper_tc = util.JIRA_CLIENT().issue(wrapper_tc.key) - logger.debug(f'Found wrapper tc ticket: {wrapper_tc.key} via root orm: {root_orm.key}') - - # Find the TC ticket linked to it via its outward issue links - tc_link_generator = ( - link.outwardIssue for link in wrapper_tc.fields.issuelinks - if hasattr(link, 'outwardIssue') and 'TC' in link.outwardIssue.key - ) - return next(tc_link_generator, None) - else: - logger.debug(f'For root ORM: {root_orm.key}, No wrapper TC ticket found!') - - -def link_tc_ticket_to_udx(udx_ticket, root_orm): - tc_ticket = get_tc_ticket_linked_to_udx(udx_ticket) - if tc_ticket is None: - logger.debug(f'No TC ticket linked to udx ticket: {udx_ticket.key}, checking in root orm: {root_orm.key}') - tc_ticket = get_tc_ticket_from_root_orm(root_orm) - if tc_ticket is None: - 
logger.info(f'No TC ticket found in root orm: {root_orm.key}') - else: - logger.info(f'Found TC ticket: {tc_ticket.key} via root orm: {root_orm.key}') - if not config.IS_DRY_RUN: - util.JIRA_CLIENT().create_issue_link(type='Required', inwardIssue=udx_ticket.key, outwardIssue=tc_ticket.key) - else: - logger.info(f'TC ticket: {tc_ticket.key} already linked to udx ticket: {udx_ticket.key}') - - -def is_ga_date_tuesday_or_wednesday(udx_ticket): - ga_date = udx_ticket.fields.customfield_11140 - logger.debug(f'For UDX ticket:{udx_ticket.key}, found GA date: {ga_date}') - year, month, day = (int(x) for x in ga_date.split('-')) - day_name = datetime.date(year, month, day).strftime("%A") - if day_name.lower() in ALLOWED_GA_WEEKDAYS: - logger.debug(f'The {ga_date} falls on allowed GA days {ALLOWED_GA_WEEKDAYS}') - return True - logger.debug(f'The {ga_date} does not fall on allowed GA days {ALLOWED_GA_WEEKDAYS}') - return False - - -def is_ga_date_a_weekday(udx_ticket): - ga_date = udx_ticket.fields.customfield_11140 - logger.debug(f'For UDX ticket:{udx_ticket.key}, found GA date: {ga_date}') - ga_date = datetime.datetime.strptime(ga_date, '%Y-%m-%d').date() - if ga_date.weekday() < 5: - return True - else: - return False - - -def add_label(udx_ticket, label): - if not config.IS_DRY_RUN: - labels = udx_ticket.fields.labels or [] - labels.append(label) - udx_ticket.update(fields={"labels": labels}) - - -def add_udx_automation_label(udx_ticket): - logger.debug(f'Adding label: {UDX_AUTOMATION_LABEL}, to ticket: {udx_ticket.key}') - add_label(udx_ticket, UDX_AUTOMATION_LABEL) - - -def add_udx_manually_created_label(udx_ticket): - logger.debug(f'Adding label: {MANUAL_CHECK_LABEL}, to ticket: {udx_ticket.key}') - add_label(udx_ticket, MANUAL_CHECK_LABEL) - - -def intake_ticket(udx_ticket): - try: - logger.debug(f'Processing ticket: {udx_ticket.key}') - root_orm, wrapper_udx = get_parent_tickets(udx_ticket) - - # 1. Close any ticket which are manually created - if root_orm is None: - update_manually_created_ticket(udx_ticket) - return - - # 2.Close any tickets which don't have any Customer facing changes - if not is_root_orm_customer_facing(root_orm): - close_not_customer_facing_ticket(udx_ticket, root_orm) - return - - # Start Intake Process - # 3. If the UDX reporter is Unassigned then set it to the root ORM ticket reporter - update_udx_reporter_if_missing(udx_ticket, root_orm) - - # 4. 
Update the UDX ticket Service Queue to match the value in Root ORM ticket - update_udx_service_queue(udx_ticket, root_orm) - - # 5 Link TC tickets to UDX - link_tc_ticket_to_udx(udx_ticket, root_orm) - - additional_in_design_comment = [VPAT_DOCUMENTATION_COMMENT] - - # 6 Check if the exemptions are present on the UDX ticket if there are no Public API changes - if udx_ticket_has_public_api_changes(udx_ticket, root_orm): - # 6.a If ticket has public facing api changes, set UDX Surfaces Exempt from Feature Impact = NO EXEMPTIONS - mark_udx_ticket_with_no_exemptions(udx_ticket) - - # 6.a.1 Add comment if GA date is not a Tuesday/Wednesday - if not is_ga_date_tuesday_or_wednesday(udx_ticket): - additional_in_design_comment.append(GA_DATE_NOT_IN_ALLOWED_DAYS_COMMENT) - else: - # 6.b For ticket with no public facing API changes - # 6.b.1 If no exemptions or no SDK/CLI, Terraform exemptions then transition to More information needed - if udx_ticket_missing_exemptions(udx_ticket): - transition_ticket_missing_exemptions_to_more_info_needed(udx_ticket) - return - else: - # 6.b.2 If it has SDK/CLI, Terraform exemptions, check if GA date is a weekday - if not is_ga_date_a_weekday(udx_ticket): - additional_in_design_comment.append(GA_DATE_NOT_A_WEEKDAY_COMMENT) - - # 7 Move the ticket to In-design - if not config.IS_DRY_RUN: - util.JIRA_CLIENT().transition_issue(udx_ticket, config.STATUS_IN_DESIGN) - add_udx_automation_label(udx_ticket) - util.add_jira_comment(udx_ticket.key, comment=IN_DESIGN_COMMENT.format(udx_ticket.fields.reporter.name, '\n'.join(additional_in_design_comment))) - - except Exception as e: - logger.error(f'Failed to process ticket: {udx_ticket} due to exception: {e}') - util.add_jira_comment(udx_ticket.key, EXCEPTION_OCCURRED_COMMENT.format(UDX_AUTOMATION_MAINTAINER), config.COMMENT_TYPE_INFO) - - -def process_more_info_needed_tickets(udx_ticket): - if not udx_ticket_missing_exemptions(udx_ticket): - logger.info(f'The ticket: {udx_ticket.key} has surfaces marked for exemptions') - - additional_in_design_comment = [VPAT_DOCUMENTATION_COMMENT] - - # Check if release date is weekday else add additional comment - if not is_ga_date_a_weekday(udx_ticket): - additional_in_design_comment.append(GA_DATE_NOT_A_WEEKDAY_COMMENT) - - if not config.IS_DRY_RUN: - util.JIRA_CLIENT().transition_issue(udx_ticket, config.STATUS_NEEDS_TRIAGE) - util.JIRA_CLIENT().transition_issue(udx_ticket, config.STATUS_IN_DESIGN) - util.add_jira_comment(udx_ticket.key, comment=IN_DESIGN_COMMENT.format(udx_ticket.fields.reporter.name, '\n'.join(additional_in_design_comment))) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='Udx Intake Automation') - parser.add_argument('--dry-run', - default=False, - action='store_true', - help='Perform a dry-run') - - args = parser.parse_args() - config.IS_DRY_RUN = args.dry_run - - if config.IS_DRY_RUN: - logger.info('Running in dry-run mode') - - # Process tickets in More Information needed - tickets_in_more_info_needed = util.jira_search_issues(TICKETS_IN_MORE_INFO) - if len(tickets_in_more_info_needed) == 0: - logger.info(f'No ticket found in More Info needed state') - else: - for ticket in tickets_in_more_info_needed: - process_more_info_needed_tickets(ticket) - - # Find all tickets to API Review ticket readiness check - tickets_in_triage = util.jira_search_issues(QUERY_TEMPLATE) - if len(tickets_in_triage) == 0: - logger.info(f'No actionable tickets found for Query: {QUERY_TEMPLATE}') - else: - logger.info(f'Total Tickets found: 
{len(tickets_in_triage)}') - for ticket in tickets_in_triage: - intake_ticket(ticket) diff --git a/scripts/auto_gen_utils/team_city_scripts/zip_and_delete.py b/scripts/auto_gen_utils/team_city_scripts/zip_and_delete.py deleted file mode 100644 index aedbe5f944..0000000000 --- a/scripts/auto_gen_utils/team_city_scripts/zip_and_delete.py +++ /dev/null @@ -1,41 +0,0 @@ -from __future__ import print_function - -import argparse -import os -import zipfile -import shutil - -parser = argparse.ArgumentParser(description='Script to zip and delete files') -parser.add_argument('-d', '--directory', type=str, required=True, help='Directory path to be zipped and deleted') -parser.add_argument('--verbose', default=False, action='store_true', help='Verbose logging') -parser.add_argument('--dry-run', default=False, action='store_true', help='Dry-run, do not delete files') - -args = parser.parse_args() -dir_path = args.directory -is_verbose = args.verbose -dry_run = args.dry_run - -def printv(s): - if is_verbose: - print(s) - -def zip_and_delete_files(foldername): - source_dir = foldername - zip_dir = foldername + '.zip' - zipobj = zipfile.ZipFile(zip_dir, 'w', zipfile.ZIP_DEFLATED) - rootlen = len(source_dir) + 1 - for base, dirs, files in os.walk(source_dir): - for file in files: - file_name = os.path.join(base, file) - printv("Adding file to zip: {}".format(file_name)) - zipobj.write(file_name, file_name[rootlen:]) - if not dry_run: - printv("Removing Zipped file: {}".format(file_name)) - os.remove(file_name) - zipobj.close() # close the archive so the zip central directory is actually written to disk - if not dry_run: - printv("Removing Dir: {}".format(source_dir)) - shutil.rmtree(source_dir) - -printv("Zipping directory: {}".format(dir_path)) -zip_and_delete_files(dir_path) \ No newline at end of file diff --git a/scripts/auto_gen_utils/tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/scripts/auto_gen_utils/tests/test_bitbucket_utils.py b/scripts/auto_gen_utils/tests/test_bitbucket_utils.py deleted file mode 100644 index c27e5c589e..0000000000 --- a/scripts/auto_gen_utils/tests/test_bitbucket_utils.py +++ /dev/null @@ -1,96 +0,0 @@ -import os - -import config -import shared.bitbucket_utils - - -def test_printv(): - shared.bitbucket_utils.verbose = True - shared.bitbucket_utils.printv("abc") - shared.bitbucket_utils.printv(123) - shared.bitbucket_utils.printv("") - shared.bitbucket_utils.printv(None) - shared.bitbucket_utils.printv(True) - shared.bitbucket_utils.printv(False) - - -# Note: If you have the DEBUG_DEXREQ_BRANCH_PREFIX set, this test will exit out -def test_get_spec_pr_branch_reference_no_init_branches(): - if os.environ.get('DEBUG_DEXREQ_BRANCH_PREFIX'): - return - - assert shared.bitbucket_utils.get_spec_pr_branch_reference("preview", "suffix") == "refs/heads/spec-auto-v2-preview-suffix-diff" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("public", "suffix") == "refs/heads/spec-auto-v2-public-suffix-diff" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("preview", "suffix2") == "refs/heads/spec-auto-v2-preview-suffix2-diff" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("public", "suffix2") == "refs/heads/spec-auto-v2-public-suffix2-diff" - - -def test_get_spec_pr_branch_reference(monkeypatch): - previous = config.DEBUG_DEXREQ_BRANCH_PREFIX - try: - monkeypatch.delenv("DEBUG_DEXREQ_BRANCH_PREFIX", None) - - config.init_branches() - - assert shared.bitbucket_utils.get_spec_pr_branch_reference("preview", "suffix") == "refs/heads/spec-auto-v2-preview-suffix-diff" - 
assert shared.bitbucket_utils.get_spec_pr_branch_reference("public", "suffix") == "refs/heads/spec-auto-v2-public-suffix-diff" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("preview", "suffix2") == "refs/heads/spec-auto-v2-preview-suffix2-diff" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("public", "suffix2") == "refs/heads/spec-auto-v2-public-suffix2-diff" - finally: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", previous) - config.init_branches() - monkeypatch.delenv("DEBUG_DEXREQ_BRANCH_PREFIX", None) - - -def test_get_spec_pr_branch_reference_with_debug_prefix(monkeypatch): - previous = config.DEBUG_DEXREQ_BRANCH_PREFIX - try: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", "xyz-") - - config.init_branches() - - assert config.DEBUG_DEXREQ_BRANCH_PREFIX == "xyz-" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("preview", "suffix") == "refs/heads/spec-xyz-auto-v2-preview-suffix-diff" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("public", "suffix") == "refs/heads/spec-xyz-auto-v2-public-suffix-diff" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("preview", "suffix2") == "refs/heads/spec-xyz-auto-v2-preview-suffix2-diff" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("public", "suffix2") == "refs/heads/spec-xyz-auto-v2-public-suffix2-diff" - finally: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", previous) - config.init_branches() - monkeypatch.delenv("DEBUG_DEXREQ_BRANCH_PREFIX", None) - - -def test_get_spec_pr_branch_reference_with_debug_prefix_empty_quotes(monkeypatch): - previous = config.DEBUG_DEXREQ_BRANCH_PREFIX - try: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", '""') - - config.init_branches() - - assert config.DEBUG_DEXREQ_BRANCH_PREFIX == "" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("preview", "suffix") == "refs/heads/spec-auto-v2-preview-suffix-diff" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("public", "suffix") == "refs/heads/spec-auto-v2-public-suffix-diff" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("preview", "suffix2") == "refs/heads/spec-auto-v2-preview-suffix2-diff" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("public", "suffix2") == "refs/heads/spec-auto-v2-public-suffix2-diff" - finally: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", previous) - config.init_branches() - monkeypatch.delenv("DEBUG_DEXREQ_BRANCH_PREFIX", None) - - -def test_get_spec_pr_branch_reference_with_debug_prefix_with_quotes(monkeypatch): - previous = config.DEBUG_DEXREQ_BRANCH_PREFIX - try: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", '"xyz-"') - - config.init_branches() - - assert config.DEBUG_DEXREQ_BRANCH_PREFIX == "xyz-" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("preview", "suffix") == "refs/heads/spec-xyz-auto-v2-preview-suffix-diff" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("public", "suffix") == "refs/heads/spec-xyz-auto-v2-public-suffix-diff" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("preview", "suffix2") == "refs/heads/spec-xyz-auto-v2-preview-suffix2-diff" - assert shared.bitbucket_utils.get_spec_pr_branch_reference("public", "suffix2") == "refs/heads/spec-xyz-auto-v2-public-suffix2-diff" - finally: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", previous) - config.init_branches() - monkeypatch.delenv("DEBUG_DEXREQ_BRANCH_PREFIX", None) diff --git a/scripts/auto_gen_utils/tests/test_buildsvc_tc_compatibility.py 
b/scripts/auto_gen_utils/tests/test_buildsvc_tc_compatibility.py deleted file mode 100644 index 066a4ba12b..0000000000 --- a/scripts/auto_gen_utils/tests/test_buildsvc_tc_compatibility.py +++ /dev/null @@ -1,577 +0,0 @@ -from shared.buildsvc_tc_compatibility import parse_build_id, parse_vcs_root, build_log_link, build_artifacts_link - - -def test_parse_build_id(): - is_build_service, build_id, buildsvc_branch, buildsvc_build_number = parse_build_id("1234") - assert is_build_service is False - assert build_id == "1234" - assert buildsvc_branch is None - assert buildsvc_build_number is None - - is_build_service, build_id, buildsvc_branch, buildsvc_build_number = parse_build_id("buildsvc-1234") - assert is_build_service is True - assert build_id == "buildsvc-1234" - assert buildsvc_branch is None - assert buildsvc_build_number == "1234" - - is_build_service, build_id, buildsvc_branch, buildsvc_build_number = parse_build_id("buildsvc-prototype-1234") - assert is_build_service is True - assert build_id == "buildsvc-prototype-1234" - assert buildsvc_branch == "prototype" - assert buildsvc_build_number == "1234" - - -def test_parse_vcs_root(): - project, repo = parse_vcs_root("xyz") - assert project is None - assert repo is None - - project, repo = parse_vcs_root("ssh://git@bitbucket.oci.oraclecorp.com:7999/sdk/dexreq-surfaces.git") - assert project == "SDK" - assert repo == "dexreq-surfaces" - - -def test_build_log_link_no_vcs_root_env_var(monkeypatch): - monkeypatch.delenv("BLD_VCS_ROOT", None) - monkeypatch.delenv("BLD_VSC_ROOT", None) - monkeypatch.delenv("BLD_STEP", None) - - assert build_log_link("1234") == "[here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234]" - assert build_log_link("1234", text="click here") == "[click here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234]" - - assert build_log_link("buildsvc-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/master/builds/1234]" - assert build_log_link("buildsvc-prototype-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234]" - - assert build_log_link("buildsvc-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI") == "[click 
here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/master/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234]" - - assert build_log_link("buildsvc-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/master/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/foo]" - - assert build_log_link("buildsvc-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI", build_step_name="") == "[click 
here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/master/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234]" - - monkeypatch.setenv("BLD_STEP", "xyz") - - assert build_log_link("1234") == "[here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234]" - assert build_log_link("1234", text="click here") == "[click here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234]" - - assert build_log_link("buildsvc-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/master/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/xyz]" - - assert build_log_link("buildsvc-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI") == "[click 
here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/master/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/xyz]" - - assert build_log_link("buildsvc-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/master/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/foo]" - - assert build_log_link("buildsvc-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI", build_step_name="") 
== "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/master/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234]" - - monkeypatch.delenv("BLD_STEP", None) - monkeypatch.delenv("BLD_VCS_ROOT", None) - monkeypatch.delenv("BLD_VSC_ROOT", None) - - -def test_build_log_link_with_vcs_root_env_var(monkeypatch): - monkeypatch.delenv("BLD_VCS_ROOT", None) - monkeypatch.delenv("BLD_VSC_ROOT", None) - monkeypatch.delenv("BLD_STEP", None) - monkeypatch.setenv("BLD_VCS_ROOT", "ssh://git@bitbucket.oci.oraclecorp.com:7999/otherproject/otherrepo.git") - - assert build_log_link("1234") == "[here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234]" - assert build_log_link("1234", text="click here") == "[click here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234]" - - assert build_log_link("buildsvc-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234]" - assert build_log_link("buildsvc-prototype-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234]" - - assert 
build_log_link("buildsvc-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234]" - - assert build_log_link("buildsvc-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/foo]" - - assert build_log_link("buildsvc-1234", text="click here", 
build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234]" - - monkeypatch.setenv("BLD_STEP", "xyz") - - assert build_log_link("1234") == "[here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234]" - assert build_log_link("1234", text="click here") == "[click here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234]" - - assert build_log_link("buildsvc-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/xyz]" - - assert build_log_link("buildsvc-1234", text="click here") == 
"[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/xyz]" - - assert build_log_link("buildsvc-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/foo]" - - assert build_log_link("buildsvc-1234", 
text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234]" - - monkeypatch.delenv("BLD_STEP", None) - monkeypatch.delenv("BLD_VCS_ROOT", None) - monkeypatch.delenv("BLD_VSC_ROOT", None) - - -def test_build_log_link_with_vsc_root_env_var(monkeypatch): - monkeypatch.delenv("BLD_VCS_ROOT", None) - monkeypatch.delenv("BLD_VSC_ROOT", None) - monkeypatch.delenv("BLD_STEP", None) - monkeypatch.setenv("BLD_VSC_ROOT", "ssh://git@bitbucket.oci.oraclecorp.com:7999/otherproject/otherrepo.git") - - assert build_log_link("1234") == "[here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234]" - assert build_log_link("1234", text="click here") == "[click here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234]" - - assert build_log_link("buildsvc-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234]" - assert build_log_link("buildsvc-prototype-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", repo="java-sdk") == 
"[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234]" - - assert build_log_link("buildsvc-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234]" - - assert build_log_link("buildsvc-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234/steps/foo]" - 
assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/foo]" - - assert build_log_link("buildsvc-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234]" - - monkeypatch.setenv("BLD_STEP", "xyz") - - assert build_log_link("1234") == "[here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234]" - assert build_log_link("1234", text="click here") == "[click here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234]" - - assert build_log_link("buildsvc-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", repo="java-sdk") == 
"[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/xyz]" - - assert build_log_link("buildsvc-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234/steps/xyz]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/xyz]" - - assert build_log_link("buildsvc-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234/steps/foo]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click 
here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234/steps/foo]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/foo]" - - assert build_log_link("buildsvc-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234]" - assert build_log_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234]" - assert build_log_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234]" - - monkeypatch.delenv("BLD_STEP", None) - monkeypatch.delenv("BLD_VCS_ROOT", None) - monkeypatch.delenv("BLD_VSC_ROOT", None) - - -# Artifacts - -def test_build_artifacts_link_no_vcs_root_env_var(monkeypatch): - monkeypatch.delenv("BLD_VCS_ROOT", None) - monkeypatch.delenv("BLD_VSC_ROOT", None) - monkeypatch.delenv("BLD_STEP", None) - - assert build_artifacts_link("1234") == "[here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234&tab=artifacts]" - assert build_artifacts_link("1234", text="click here") == "[click here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/master/builds/1234&tab=artifacts]" - assert 
build_artifacts_link("buildsvc-prototype-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/master/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/master/builds/1234/steps/foo&tab=artifacts]" - assert 
build_artifacts_link("buildsvc-prototype-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/master/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234&tab=artifacts]" - - monkeypatch.setenv("BLD_STEP", "xyz") - - assert build_artifacts_link("1234") == "[here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234&tab=artifacts]" - assert build_artifacts_link("1234", text="click here") == "[click here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", project="CLI") == 
"[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/master/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/master/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI", 
build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/master/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/master/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/dexreq-surfaces/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/java-sdk/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="") == "[click 
here|https://devops.oci.oraclecorp.com/build/teams/SDK/projects/dexreq-surfaces/branches/prototype/builds/1234&tab=artifacts]" - - monkeypatch.delenv("BLD_STEP", None) - monkeypatch.delenv("BLD_VCS_ROOT", None) - monkeypatch.delenv("BLD_VSC_ROOT", None) - - -def test_build_artifacts_link_with_vcs_root_env_var(monkeypatch): - monkeypatch.delenv("BLD_VCS_ROOT", None) - monkeypatch.delenv("BLD_VSC_ROOT", None) - monkeypatch.delenv("BLD_STEP", None) - monkeypatch.setenv("BLD_VCS_ROOT", "ssh://git@bitbucket.oci.oraclecorp.com:7999/otherproject/otherrepo.git") - - assert build_artifacts_link("1234") == "[here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234&tab=artifacts]" - assert build_artifacts_link("1234", text="click here") == "[click here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", 
text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="") == "[click 
here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - - monkeypatch.setenv("BLD_STEP", "xyz") - - assert build_artifacts_link("1234") == "[here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234&tab=artifacts]" - assert build_artifacts_link("1234", text="click here") == "[click here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert 
build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here", build_step_name="") == "[click 
here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - - monkeypatch.delenv("BLD_STEP", None) - monkeypatch.delenv("BLD_VCS_ROOT", None) - monkeypatch.delenv("BLD_VSC_ROOT", None) - - -def test_build_artifacts_link_with_vsc_root_env_var(monkeypatch): - monkeypatch.delenv("BLD_VCS_ROOT", None) - monkeypatch.delenv("BLD_VSC_ROOT", None) - monkeypatch.delenv("BLD_STEP", None) - monkeypatch.setenv("BLD_VSC_ROOT", "ssh://git@bitbucket.oci.oraclecorp.com:7999/otherproject/otherrepo.git") - - assert build_artifacts_link("1234") == "[here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234&tab=artifacts]" - assert build_artifacts_link("1234", text="click here") == "[click here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - assert 
build_artifacts_link("buildsvc-prototype-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", build_step_name="foo") == "[click 
here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - - monkeypatch.setenv("BLD_STEP", "xyz") - - assert build_artifacts_link("1234") == "[here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234&tab=artifacts]" - assert build_artifacts_link("1234", text="click here") == "[click here|https://teamcity.oci.oraclecorp.com/viewLog.html?buildId=1234&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", project="CLI") == 
"[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", project="CLI") == "[here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", repo="java-sdk") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", default_branch="master") == "[here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/xyz&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click 
here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="foo") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234/steps/foo&tab=artifacts]" - - assert build_artifacts_link("buildsvc-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/main/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-1234", text="click here", default_branch="master", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/master/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", project="CLI", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/CLI/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", repo="java-sdk", build_step_name="") == "[click here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/java-sdk/branches/prototype/builds/1234&tab=artifacts]" - assert build_artifacts_link("buildsvc-prototype-1234", text="click here", default_branch="master", build_step_name="") == "[click 
here|https://devops.oci.oraclecorp.com/build/teams/OTHERPROJECT/projects/otherrepo/branches/prototype/builds/1234&tab=artifacts]" - - monkeypatch.delenv("BLD_STEP", None) - monkeypatch.delenv("BLD_VCS_ROOT", None) - monkeypatch.delenv("BLD_VSC_ROOT", None) diff --git a/scripts/auto_gen_utils/tests/test_clean_auto_branches.py b/scripts/auto_gen_utils/tests/test_clean_auto_branches.py deleted file mode 100644 index 161a44ff2f..0000000000 --- a/scripts/auto_gen_utils/tests/test_clean_auto_branches.py +++ /dev/null @@ -1,101 +0,0 @@ -import os - -import clean_auto_branches - - -# Note: If you have the DEBUG_DEXREQ_BRANCH_PREFIX set, this test will exit out -def test_branch_names_no_init_branches(): - if os.environ.get('DEBUG_DEXREQ_BRANCH_PREFIX'): - return - - assert clean_auto_branches.GENERATED_AUTO_PREVIEW_BRANCH_PATTERN == 'refs/remotes/origin/generated-auto-v2-preview-*' - assert clean_auto_branches.AUTO_PREVIEW_BRANCH_PATTERN == 'refs/remotes/origin/auto-v2-preview-*' - assert clean_auto_branches.GENERATED_AUTO_PUBLIC_BRANCH_PATTERN == 'refs/remotes/origin/generated-auto-v2-public-*' - assert clean_auto_branches.AUTO_PUBLIC_BRANCH_PATTERN == 'refs/remotes/origin/auto-v2-public-*' - - assert clean_auto_branches.AUTO_PREVIEW_BRANCH_PREFIX == 'auto-v2-preview-' - assert clean_auto_branches.AUTO_PUBLIC_BRANCH_PREFIX == 'auto-v2-public-' - - -def test_branch_names(monkeypatch): - previous = clean_auto_branches.DEBUG_DEXREQ_BRANCH_PREFIX - try: - monkeypatch.delenv("DEBUG_DEXREQ_BRANCH_PREFIX", None) - - clean_auto_branches.init_branches() - - assert clean_auto_branches.DEBUG_DEXREQ_BRANCH_PREFIX == "" - assert clean_auto_branches.GENERATED_AUTO_PREVIEW_BRANCH_PATTERN == 'refs/remotes/origin/generated-auto-v2-preview-*' - assert clean_auto_branches.AUTO_PREVIEW_BRANCH_PATTERN == 'refs/remotes/origin/auto-v2-preview-*' - assert clean_auto_branches.GENERATED_AUTO_PUBLIC_BRANCH_PATTERN == 'refs/remotes/origin/generated-auto-v2-public-*' - assert clean_auto_branches.AUTO_PUBLIC_BRANCH_PATTERN == 'refs/remotes/origin/auto-v2-public-*' - - assert clean_auto_branches.AUTO_PREVIEW_BRANCH_PREFIX == 'auto-v2-preview-' - assert clean_auto_branches.AUTO_PUBLIC_BRANCH_PREFIX == 'auto-v2-public-' - finally: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", previous) - clean_auto_branches.init_branches() - monkeypatch.delenv("DEBUG_DEXREQ_BRANCH_PREFIX", None) - - -def test_branch_names_with_debug_prefix(monkeypatch): - previous = clean_auto_branches.DEBUG_DEXREQ_BRANCH_PREFIX - try: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", "xyz-") - - clean_auto_branches.init_branches() - - assert clean_auto_branches.DEBUG_DEXREQ_BRANCH_PREFIX == "xyz-" - assert clean_auto_branches.GENERATED_AUTO_PREVIEW_BRANCH_PATTERN == 'refs/remotes/origin/generated-xyz-auto-v2-preview-*' - assert clean_auto_branches.AUTO_PREVIEW_BRANCH_PATTERN == 'refs/remotes/origin/xyz-auto-v2-preview-*' - assert clean_auto_branches.GENERATED_AUTO_PUBLIC_BRANCH_PATTERN == 'refs/remotes/origin/generated-xyz-auto-v2-public-*' - assert clean_auto_branches.AUTO_PUBLIC_BRANCH_PATTERN == 'refs/remotes/origin/xyz-auto-v2-public-*' - - assert clean_auto_branches.AUTO_PREVIEW_BRANCH_PREFIX == 'xyz-auto-v2-preview-' - assert clean_auto_branches.AUTO_PUBLIC_BRANCH_PREFIX == 'xyz-auto-v2-public-' - finally: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", previous) - clean_auto_branches.init_branches() - monkeypatch.delenv("DEBUG_DEXREQ_BRANCH_PREFIX", None) - - -def test_branch_names_with_debug_prefix_empty_quotes(monkeypatch): - previous = 
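Taken together, the build_artifacts_link assertions above pin down the helper's whole contract: a bare numeric id renders a TeamCity viewLog link; a buildsvc-* id renders a Build Service link whose team and project default to SDK/dexreq-surfaces unless a Bitbucket VCS root is supplied via BLD_VCS_ROOT (or the misspelled legacy BLD_VSC_ROOT); the branch is parsed out of ids of the form buildsvc-<branch>-<number>; and a steps segment is taken from build_step_name or, when that is None, from the BLD_STEP environment variable, with an empty string suppressing it. A minimal sketch consistent with those tests; the helper itself lived in shared/buildsvc_tc_compatibility.py, and the body below is inferred from the assertions rather than copied from the original:

import os
import re


def build_artifacts_link(build_id, text="here", project=None, repo=None,
                         default_branch="main", build_step_name=None):
    # Renders a Jira-markup link ("[text|url]") to a build's artifacts tab.
    m = re.match(r"buildsvc-(?:(?P<branch>.+)-)?(?P<num>\d+)$", build_id)
    if not m:
        # Bare ids are TeamCity build ids.
        url = "https://teamcity.oci.oraclecorp.com/viewLog.html?buildId={}".format(build_id)
        return "[{}|{}&tab=artifacts]".format(text, url)

    # Build Service calls the two path segments "teams" and "projects"; they
    # default to SDK/dexreq-surfaces, a Bitbucket VCS root environment
    # variable overrides the default, and explicit keyword arguments
    # override both.
    team, slug = "SDK", "dexreq-surfaces"
    vcs_root = os.environ.get("BLD_VCS_ROOT") or os.environ.get("BLD_VSC_ROOT")
    if vcs_root:
        parts = vcs_root.rstrip("/").split("/")
        team = parts[-2].upper()
        slug = parts[-1][:-4] if parts[-1].endswith(".git") else parts[-1]
    team = project or team
    slug = repo or slug

    # "buildsvc-1234" is a build of the default branch;
    # "buildsvc-prototype-1234" names the branch explicitly.
    branch = m.group("branch") or default_branch

    # An explicit build_step_name wins (an empty string suppresses the
    # segment); otherwise fall back to the BLD_STEP environment variable.
    step = os.environ.get("BLD_STEP") if build_step_name is None else build_step_name

    url = "https://devops.oci.oraclecorp.com/build/teams/{}/projects/{}/branches/{}/builds/{}".format(
        team, slug, branch, m.group("num"))
    if step:
        url += "/steps/{}".format(step)
    return "[{}|{}&tab=artifacts]".format(text, url)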
clean_auto_branches.DEBUG_DEXREQ_BRANCH_PREFIX - try: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", '""') - - clean_auto_branches.init_branches() - - assert clean_auto_branches.DEBUG_DEXREQ_BRANCH_PREFIX == "" - assert clean_auto_branches.GENERATED_AUTO_PREVIEW_BRANCH_PATTERN == 'refs/remotes/origin/generated-auto-v2-preview-*' - assert clean_auto_branches.AUTO_PREVIEW_BRANCH_PATTERN == 'refs/remotes/origin/auto-v2-preview-*' - assert clean_auto_branches.GENERATED_AUTO_PUBLIC_BRANCH_PATTERN == 'refs/remotes/origin/generated-auto-v2-public-*' - assert clean_auto_branches.AUTO_PUBLIC_BRANCH_PATTERN == 'refs/remotes/origin/auto-v2-public-*' - - assert clean_auto_branches.AUTO_PREVIEW_BRANCH_PREFIX == 'auto-v2-preview-' - assert clean_auto_branches.AUTO_PUBLIC_BRANCH_PREFIX == 'auto-v2-public-' - finally: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", previous) - clean_auto_branches.init_branches() - monkeypatch.delenv("DEBUG_DEXREQ_BRANCH_PREFIX", None) - - -def test_branch_names_with_debug_prefix_with_quotes(monkeypatch): - previous = clean_auto_branches.DEBUG_DEXREQ_BRANCH_PREFIX - try: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", '"xyz-"') - - clean_auto_branches.init_branches() - - assert clean_auto_branches.DEBUG_DEXREQ_BRANCH_PREFIX == "xyz-" - assert clean_auto_branches.GENERATED_AUTO_PREVIEW_BRANCH_PATTERN == 'refs/remotes/origin/generated-xyz-auto-v2-preview-*' - assert clean_auto_branches.AUTO_PREVIEW_BRANCH_PATTERN == 'refs/remotes/origin/xyz-auto-v2-preview-*' - assert clean_auto_branches.GENERATED_AUTO_PUBLIC_BRANCH_PATTERN == 'refs/remotes/origin/generated-xyz-auto-v2-public-*' - assert clean_auto_branches.AUTO_PUBLIC_BRANCH_PATTERN == 'refs/remotes/origin/xyz-auto-v2-public-*' - - assert clean_auto_branches.AUTO_PREVIEW_BRANCH_PREFIX == 'xyz-auto-v2-preview-' - assert clean_auto_branches.AUTO_PUBLIC_BRANCH_PREFIX == 'xyz-auto-v2-public-' - finally: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", previous) - clean_auto_branches.init_branches() - monkeypatch.delenv("DEBUG_DEXREQ_BRANCH_PREFIX", None) diff --git a/scripts/auto_gen_utils/tests/test_cli_branch_text.py b/scripts/auto_gen_utils/tests/test_cli_branch_text.py deleted file mode 100644 index 64b849414f..0000000000 --- a/scripts/auto_gen_utils/tests/test_cli_branch_text.py +++ /dev/null @@ -1,32 +0,0 @@ -import config - - -def test_cli_branch_text(): - generated_branch = "generated-auto-v2-public-JavaSDK-DEXREQ-991-2020-01-21-20-16-02" - expected_branch = "generated-auto-v2-public-PythonCLI-DEXREQ-991-2020-01-21-20-16-02" - cli_branch = get_cli_branch_text(generated_branch) - assert expected_branch == cli_branch - - generated_branch = "generated-auto-v2-public-RubySDK-DEXREQ-991-2020-01-21-20-16-02" - expected_branch = "generated-auto-v2-public-PythonCLI-DEXREQ-991-2020-01-21-20-16-02" - cli_branch = get_cli_branch_text(generated_branch) - assert expected_branch == cli_branch - - -def test_cli_branch_text_with_debug_prefix(): - generated_branch = "debug-generated-auto-v2-public-JavaSDK-DEXREQ-991-2020-01-21-20-16-02" - expected_branch = "debug-generated-auto-v2-public-PythonCLI-DEXREQ-991-2020-01-21-20-16-02" - cli_branch = get_cli_branch_text(generated_branch) - assert expected_branch == cli_branch - - generated_branch = "debug-generated-auto-v2-public-RubySDK-DEXREQ-991-2020-01-21-20-16-02" - expected_branch = "debug-generated-auto-v2-public-PythonCLI-DEXREQ-991-2020-01-21-20-16-02" - cli_branch = get_cli_branch_text(generated_branch) - assert expected_branch == cli_branch - - -def 
get_cli_branch_text(gen_branch): - for tool in config.TOOL_NAMES: - if tool in gen_branch: - return gen_branch.replace(tool, config.CLI_NAME) - return gen_branch diff --git a/scripts/auto_gen_utils/tests/test_config.py b/scripts/auto_gen_utils/tests/test_config.py deleted file mode 100644 index 1fc205cb21..0000000000 --- a/scripts/auto_gen_utils/tests/test_config.py +++ /dev/null @@ -1,221 +0,0 @@ -import config -import os - - -# Note: If you have the DEBUG_DEXREQ_BRANCH_PREFIX set, this test will exit out -def test_branch_names_no_init_branches(): - if os.environ.get('DEBUG_DEXREQ_BRANCH_PREFIX'): - return - - assert config.DEBUG_DEXREQ_BRANCH_PREFIX == "" - assert config.BULK_PREVIEW_BRANCH_PREFIX == "auto-v2-preview-bulk" - assert config.BULK_PREVIEW_BRANCH_PREFIX == 'auto-v2-preview-bulk' - assert config.BULK_PUBLIC_BRANCH_PREFIX == 'auto-v2-public-bulk' - assert config.INDIVIDUAL_PREVIEW_BRANCH_PREFIX == 'auto-v2-preview' - assert config.INDIVIDUAL_PUBLIC_BRANCH_PREFIX == 'auto-v2-public' - - assert config.V1_BULK_PREVIEW_BRANCH_PREFIX == 'auto-preview-bulk' - assert config.V1_INDIVIDUAL_PREVIEW_BRANCH_PREFIX == 'auto-preview' - assert config.V1_INDIVIDUAL_PUBLIC_BRANCH_PREFIX == 'auto-public' - - -def test_branch_names(monkeypatch): - previous = config.DEBUG_DEXREQ_BRANCH_PREFIX - try: - monkeypatch.delenv("DEBUG_DEXREQ_BRANCH_PREFIX", None) - - config.init_branches() - - assert config.DEBUG_DEXREQ_BRANCH_PREFIX == "" - assert config.BULK_PREVIEW_BRANCH_PREFIX == "auto-v2-preview-bulk" - assert config.BULK_PREVIEW_BRANCH_PREFIX == 'auto-v2-preview-bulk' - assert config.BULK_PUBLIC_BRANCH_PREFIX == 'auto-v2-public-bulk' - assert config.INDIVIDUAL_PREVIEW_BRANCH_PREFIX == 'auto-v2-preview' - assert config.INDIVIDUAL_PUBLIC_BRANCH_PREFIX == 'auto-v2-public' - - assert config.V1_BULK_PREVIEW_BRANCH_PREFIX == 'auto-preview-bulk' - assert config.V1_INDIVIDUAL_PREVIEW_BRANCH_PREFIX == 'auto-preview' - assert config.V1_INDIVIDUAL_PUBLIC_BRANCH_PREFIX == 'auto-public' - finally: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", previous) - config.init_branches() - monkeypatch.delenv("DEBUG_DEXREQ_BRANCH_PREFIX", None) - - -def test_branch_names_with_debug_prefix(monkeypatch): - previous = config.DEBUG_DEXREQ_BRANCH_PREFIX - try: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", "xyz-") - - config.init_branches() - - assert config.DEBUG_DEXREQ_BRANCH_PREFIX == "xyz-" - assert config.BULK_PREVIEW_BRANCH_PREFIX == "xyz-auto-v2-preview-bulk" - assert config.BULK_PREVIEW_BRANCH_PREFIX == 'xyz-auto-v2-preview-bulk' - assert config.BULK_PUBLIC_BRANCH_PREFIX == 'xyz-auto-v2-public-bulk' - assert config.INDIVIDUAL_PREVIEW_BRANCH_PREFIX == 'xyz-auto-v2-preview' - assert config.INDIVIDUAL_PUBLIC_BRANCH_PREFIX == 'xyz-auto-v2-public' - - assert config.V1_BULK_PREVIEW_BRANCH_PREFIX == 'xyz-auto-preview-bulk' - assert config.V1_INDIVIDUAL_PREVIEW_BRANCH_PREFIX == 'xyz-auto-preview' - assert config.V1_INDIVIDUAL_PUBLIC_BRANCH_PREFIX == 'xyz-auto-public' - finally: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", previous) - config.init_branches() - monkeypatch.delenv("DEBUG_DEXREQ_BRANCH_PREFIX", None) - - -def test_branch_names_with_debug_prefix_empty_quotes(monkeypatch): - previous = config.DEBUG_DEXREQ_BRANCH_PREFIX - try: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", '""') - - config.init_branches() - - assert config.DEBUG_DEXREQ_BRANCH_PREFIX == "" - assert config.BULK_PREVIEW_BRANCH_PREFIX == "auto-v2-preview-bulk" - assert config.BULK_PREVIEW_BRANCH_PREFIX == 'auto-v2-preview-bulk' - assert 
config.BULK_PUBLIC_BRANCH_PREFIX == 'auto-v2-public-bulk' - assert config.INDIVIDUAL_PREVIEW_BRANCH_PREFIX == 'auto-v2-preview' - assert config.INDIVIDUAL_PUBLIC_BRANCH_PREFIX == 'auto-v2-public' - - assert config.V1_BULK_PREVIEW_BRANCH_PREFIX == 'auto-preview-bulk' - assert config.V1_INDIVIDUAL_PREVIEW_BRANCH_PREFIX == 'auto-preview' - assert config.V1_INDIVIDUAL_PUBLIC_BRANCH_PREFIX == 'auto-public' - finally: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", previous) - config.init_branches() - monkeypatch.delenv("DEBUG_DEXREQ_BRANCH_PREFIX", None) - - -def test_branch_names_with_debug_prefix_with_quotes(monkeypatch): - previous = config.DEBUG_DEXREQ_BRANCH_PREFIX - try: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", '"xyz-"') - - config.init_branches() - - assert config.DEBUG_DEXREQ_BRANCH_PREFIX == "xyz-" - assert config.BULK_PREVIEW_BRANCH_PREFIX == "xyz-auto-v2-preview-bulk" - assert config.BULK_PREVIEW_BRANCH_PREFIX == 'xyz-auto-v2-preview-bulk' - assert config.BULK_PUBLIC_BRANCH_PREFIX == 'xyz-auto-v2-public-bulk' - assert config.INDIVIDUAL_PREVIEW_BRANCH_PREFIX == 'xyz-auto-v2-preview' - assert config.INDIVIDUAL_PUBLIC_BRANCH_PREFIX == 'xyz-auto-v2-public' - - assert config.V1_BULK_PREVIEW_BRANCH_PREFIX == 'xyz-auto-preview-bulk' - assert config.V1_INDIVIDUAL_PREVIEW_BRANCH_PREFIX == 'xyz-auto-preview' - assert config.V1_INDIVIDUAL_PUBLIC_BRANCH_PREFIX == 'xyz-auto-public' - finally: - monkeypatch.setenv("DEBUG_DEXREQ_BRANCH_PREFIX", previous) - config.init_branches() - monkeypatch.delenv("DEBUG_DEXREQ_BRANCH_PREFIX", None) - - -# Note: If you have the DEXREQ_IGNORED_ISSUES set, this test will exit out -def test_should_ignore_issue_no_init(): - if os.environ.get('DEXREQ_IGNORED_ISSUES'): - return - - assert config.DEXREQ_IGNORED_ISSUES == [] - assert not config.should_ignore_issue("ABC-1234") - assert not config.should_ignore_issue("DEXREQ-4567") - - -def test_should_ignore_issue_names(monkeypatch): - previous = ",".join(config.DEXREQ_IGNORED_ISSUES) - try: - monkeypatch.delenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, None) - - config.init_dexreq_ignored_issues() - - assert config.DEXREQ_IGNORED_ISSUES == [] - assert not config.should_ignore_issue("ABC-1234") - assert not config.should_ignore_issue("DEXREQ-4567") - finally: - monkeypatch.setenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, previous) - config.init_dexreq_ignored_issues() - monkeypatch.delenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, None) - - -def test_should_ignore_issue_with_debug_prefix(monkeypatch): - previous = ",".join(config.DEXREQ_IGNORED_ISSUES) - try: - monkeypatch.setenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, " DEXREQ-1234 , DEXREQ-4567 ") - - config.init_dexreq_ignored_issues() - - assert config.DEXREQ_IGNORED_ISSUES == ["DEXREQ-1234", "DEXREQ-4567"] - assert not config.should_ignore_issue("ABC-1234") - assert config.should_ignore_issue("DEXREQ-1234") - assert config.should_ignore_issue("DEXREQ-4567") - finally: - monkeypatch.setenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, previous) - config.init_dexreq_ignored_issues() - monkeypatch.delenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, None) - - -def test_should_ignore_issue_with_debug_prefix_single(monkeypatch): - previous = ",".join(config.DEXREQ_IGNORED_ISSUES) - try: - monkeypatch.setenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, " DEXREQ-1234 ") - - config.init_dexreq_ignored_issues() - - assert config.DEXREQ_IGNORED_ISSUES == ["DEXREQ-1234"] - assert not config.should_ignore_issue("ABC-1234") - assert 
config.should_ignore_issue("DEXREQ-1234")
-        assert not config.should_ignore_issue("DEXREQ-4567")
-    finally:
-        monkeypatch.setenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, previous)
-        config.init_dexreq_ignored_issues()
-        monkeypatch.delenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, None)
-
-
-def test_should_ignore_issue_with_debug_prefix_empty_quotes(monkeypatch):
-    previous = ",".join(config.DEXREQ_IGNORED_ISSUES)
-    try:
-        monkeypatch.setenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, '""')
-
-        config.init_dexreq_ignored_issues()
-
-        assert config.DEXREQ_IGNORED_ISSUES == []
-        assert not config.should_ignore_issue("ABC-1234")
-        assert not config.should_ignore_issue("DEXREQ-4567")
-    finally:
-        monkeypatch.setenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, previous)
-        config.init_dexreq_ignored_issues()
-        monkeypatch.delenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, None)
-
-
-def test_should_ignore_issue_with_debug_prefix_with_quotes(monkeypatch):
-    previous = ",".join(config.DEXREQ_IGNORED_ISSUES)
-    try:
-        monkeypatch.setenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, '" DEXREQ-1234 , DEXREQ-4567 "')
-
-        config.init_dexreq_ignored_issues()
-
-        assert config.DEXREQ_IGNORED_ISSUES == ["DEXREQ-1234", "DEXREQ-4567"]
-        assert not config.should_ignore_issue("ABC-1234")
-        assert config.should_ignore_issue("DEXREQ-1234")
-        assert config.should_ignore_issue("DEXREQ-4567")
-    finally:
-        monkeypatch.setenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, previous)
-        config.init_dexreq_ignored_issues()
-        monkeypatch.delenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, None)
-
-
-def test_should_ignore_issue_with_debug_prefix_with_quotes_single(monkeypatch):
-    previous = ",".join(config.DEXREQ_IGNORED_ISSUES)
-    try:
-        monkeypatch.setenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, '" DEXREQ-1234 "')
-
-        config.init_dexreq_ignored_issues()
-
-        assert config.DEXREQ_IGNORED_ISSUES == ["DEXREQ-1234"]
-        assert not config.should_ignore_issue("ABC-1234")
-        assert config.should_ignore_issue("DEXREQ-1234")
-        assert not config.should_ignore_issue("DEXREQ-4567")
-    finally:
-        monkeypatch.setenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, previous)
-        config.init_dexreq_ignored_issues()
-        monkeypatch.delenv(config.DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, None)
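Every quoted-value case above funnels through the same normalization: the environment variable may arrive wrapped in literal double quotes (as some build configurations pass it), so config first unwraps the quotes and then, for the ignored-issues list, splits on commas and trims whitespace. A minimal sketch of that parsing, consistent with the assertions; the names mirror the tests, the _unquote helper is hypothetical, and the real config.py may differ:

import os

DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME = "DEXREQ_IGNORED_ISSUES"
DEBUG_DEXREQ_BRANCH_PREFIX = ""
DEXREQ_IGNORED_ISSUES = []


def _unquote(value):
    # CI passes values like '""' or '"xyz-"' with literal quotes; unwrap them.
    if len(value) >= 2 and value.startswith('"') and value.endswith('"'):
        return value[1:-1]
    return value


def init_branches():
    global DEBUG_DEXREQ_BRANCH_PREFIX
    DEBUG_DEXREQ_BRANCH_PREFIX = _unquote(os.environ.get("DEBUG_DEXREQ_BRANCH_PREFIX", ""))
    # The branch prefixes asserted above are then derived by prepending the
    # debug prefix, e.g. DEBUG_DEXREQ_BRANCH_PREFIX + "auto-v2-preview-bulk".


def init_dexreq_ignored_issues():
    global DEXREQ_IGNORED_ISSUES
    raw = _unquote(os.environ.get(DEXREQ_IGNORED_ISSUES_ENV_VAR_NAME, ""))
    DEXREQ_IGNORED_ISSUES = [key.strip() for key in raw.split(",") if key.strip()]


def should_ignore_issue(issue_key):
    return issue_key in DEXREQ_IGNORED_ISSUES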
diff --git a/scripts/auto_gen_utils/tests/test_module_pom_file_add_or_update_spec.py b/scripts/auto_gen_utils/tests/test_module_pom_file_add_or_update_spec.py
deleted file mode 100644
index c655f81be7..0000000000
--- a/scripts/auto_gen_utils/tests/test_module_pom_file_add_or_update_spec.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import xml.etree.ElementTree as ET
-from add_or_update_scripts.module_pom_file_add_or_update_spec import add_module_to_parent_pom
-from add_or_update_scripts.python_sdk_add_or_update_spec import MODULE_TEMPLATE
-
-
-pom_string = """
-<project xmlns="http://maven.apache.org/POM/4.0.0">
-    <modules>
-        <module>poms/budget/pom.xml</module>
-        <module>poms/core/pom.xml</module>
-    </modules>
-</project>
-"""
-
-ns = {"ns": "http://maven.apache.org/POM/4.0.0"}
-
-
-def test_add_module_to_parent_pom():
-    spec_name = "datasafe"
-    module = MODULE_TEMPLATE.format(spec_name)
-    xpath = ".//ns:modules"
-    pom = ET.fromstring(pom_string)
-    properties = pom.findall(xpath, ns)[0]
-    starting_length = len(properties)
-
-    add_module_to_parent_pom(pom, module)
-    properties = pom.findall(xpath, ns)[0]
-    ending_length = len(properties)
-
-    assert(ending_length > starting_length)
-    # The first module should be the one just added.
-    assert(spec_name in properties[0].text)
diff --git a/scripts/auto_gen_utils/tests/test_python_sdk_add_or_update_spec.py b/scripts/auto_gen_utils/tests/test_python_sdk_add_or_update_spec.py
deleted file mode 100644
index 12f36f425a..0000000000
--- a/scripts/auto_gen_utils/tests/test_python_sdk_add_or_update_spec.py
+++ /dev/null
@@ -1,52 +0,0 @@
-import xml.etree.ElementTree as ET
-import pytest
-from add_or_update_scripts.python_sdk_add_or_update_spec import GENERATE_EXECUTION_TEMPLATE
-
-
-def test_format_general_execution_template():
-
-    spec_name = "datasafe"
-    spec_generation_type = "PREVIEW"
-    artifact_id = "ads-control-plane-spec"
-    pom_var_code_gen_language = "${codegen-language}"
-    pom_var_preprocessed_temp_dir = "${preprocessed-temp-dir}"
-    pom_var_feature_id_file = "${feature-id-file}"
-    pom_var_feature_id_dir = "${feature-id-dir}"
-
-    result = GENERATE_EXECUTION_TEMPLATE.format(
-        artifact_id=artifact_id,
-        spec_name=spec_name,
-        spec_generation_type=spec_generation_type,
-        regional_non_regional_service_overrides=""
-    )
-
-    assert(artifact_id in result)
-    assert(spec_name in result)
-    assert(spec_generation_type in result)
-    assert(pom_var_code_gen_language in result)
-    assert(pom_var_preprocessed_temp_dir in result)
-    assert(pom_var_feature_id_file in result)
-    assert(pom_var_feature_id_dir in result)
-    assert("isRegionalClient" not in result)
-    print(result)
-    try:
-        ET.fromstring(result)
-    except Exception as e:
-        print(e)
-        pytest.fail("Unexpected error parsing XML string")
-
-    non_regional_service_override = '<isRegionalClient serviceName="{service_name}">false</isRegionalClient>\n'.format(service_name="datasafe")
-    result = GENERATE_EXECUTION_TEMPLATE.format(
-        artifact_id=artifact_id,
-        spec_name=spec_name,
-        spec_generation_type=spec_generation_type,
-        regional_non_regional_service_overrides=non_regional_service_override
-    )
-
-    assert("isRegionalClient" in result)
-    try:
-        ET.fromstring(result)
-    except Exception as e:
-        print(e)
-        pytest.fail("Unexpected error parsing XML string")
diff --git a/scripts/auto_gen_utils/tests/test_version_utils.py b/scripts/auto_gen_utils/tests/test_version_utils.py
deleted file mode 100644
index a21cda52c6..0000000000
--- a/scripts/auto_gen_utils/tests/test_version_utils.py
+++ /dev/null
@@ -1,70 +0,0 @@
-from shared.version_utils import is_version_not_acceptable, is_version_increasing
-
-
-def test_is_version_not_acceptable():
-    assert is_version_not_acceptable("1.2.3") is None
-    assert is_version_not_acceptable("1.2") is None
-    assert is_version_not_acceptable("1.2.3-PREVIEW") is None
-    assert is_version_not_acceptable("1.2.3-releasePreview") is None
-    assert is_version_not_acceptable("1.2.3-4") is None
-    assert is_version_not_acceptable("1.2.3-4-PREVIEW") is None
-
-    assert is_version_not_acceptable("1.2.3-20180409.232938-55") is not None
-    assert is_version_not_acceptable("1.2-20180409.232938-55") is not None
-    assert is_version_not_acceptable("1.2.3-PREVIEW-20180409.232938-55") is not None
-    assert is_version_not_acceptable("1.2.3-releasePreview-20180409.232938-55") is not None
-    assert is_version_not_acceptable("1.2.3-4-20180409.232938-55") is not None
-    assert is_version_not_acceptable("1.2.3-4-PREVIEW-20180409.232938-55") is not None
-
-    assert is_version_not_acceptable("1.2.3-SNAPSHOT") is not None
-    assert is_version_not_acceptable("1.2-SNAPSHOT") is not None
-    assert is_version_not_acceptable("1.2.3-PREVIEW-SNAPSHOT") is not None
-    assert is_version_not_acceptable("1.2.3-releasePreview-SNAPSHOT") is not None
-    assert is_version_not_acceptable("1.2.3-4-SNAPSHOT") is not None
-    assert is_version_not_acceptable("1.2.3-4-PREVIEW-SNAPSHOT") is not None
-
-    assert is_version_not_acceptable("1.a.3") is not None
-    assert is_version_not_acceptable("1.a") is not None
-    assert is_version_not_acceptable("1.2.3-foobar") is not None
-    assert is_version_not_acceptable("1.2.3-1-2") is not None
-    assert is_version_not_acceptable("1.2.3-x-PREVIEW") is not None
-
-    assert is_version_not_acceptable("1.a.3-20180409.232938-55") is not None
-    assert is_version_not_acceptable("1.a-20180409.232938-55") is not None
-    assert is_version_not_acceptable("1.2.3-foobar-20180409.232938-55") is not None
-    assert is_version_not_acceptable("1.2.3-1-2-20180409.232938-55") is not None
-    assert is_version_not_acceptable("1-4-20180409.232938-55") is not None
-    assert is_version_not_acceptable("1.2.3-x-PREVIEW-20180409.232938-55") is not None
-
-    assert is_version_not_acceptable("1.3-PREVIEW") is not None
-    assert is_version_not_acceptable("1.3-releasePreview") is not None
-    assert is_version_not_acceptable("1.3-4-PREVIEW") is not None
-    assert is_version_not_acceptable("1.2.2.3-PREVIEW") is not None
-    assert is_version_not_acceptable("1.2.2.3-releasePreview") is not None
-    assert is_version_not_acceptable("1.2.2.3-4-PREVIEW") is not None
-
-    assert is_version_not_acceptable("1") is None
-    assert is_version_not_acceptable("12") is None
-    assert is_version_not_acceptable("123") is None
-    assert is_version_not_acceptable("1-4") is None
-
-
-def test_is_version_increasing():
-    assert is_version_increasing("1", "2")
-    assert not is_version_increasing("2", "1")
-
-    assert is_version_increasing("1.0", "1.1")
-    assert is_version_increasing("1.0", "2.0")
-    assert is_version_increasing("1.1", "2.0")
-    assert not is_version_increasing("1.1", "1.0")
-    assert not is_version_increasing("2.0", "1.0")
-    assert not is_version_increasing("2.0", "1.1")
-
-    assert is_version_increasing("1-PREVIEW", "1")
-    assert not is_version_increasing("1", "1-PREVIEW")
-    assert not is_version_increasing("1", "1")
-    assert not is_version_increasing("1-PREVIEW", "1-PREVIEW")
-
-    assert is_version_increasing("1.0", "1.0.1")
-    assert not is_version_increasing("1.0.1", "1.0")
-    assert not is_version_increasing("1.0.0", "1.0")
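The two helpers exercised above encode a small version grammar: up to three dot-separated numeric components with at most one numeric qualifier are acceptable; a PREVIEW or releasePreview suffix is acceptable only on a full three-component version; SNAPSHOT and timestamped builds are always rejected; and, for ordering, the -PREVIEW of a version precedes its release. A regex-based sketch that satisfies the assertions; the real shared/version_utils.py may implement it differently, and the exact error wording is not pinned down by the tests:

import re

_PLAIN = re.compile(r"^\d+(\.\d+){0,2}(-\d+)?$")
_SUFFIXED = re.compile(r"^\d+\.\d+\.\d+(-\d+)?-(PREVIEW|releasePreview)$")


def is_version_not_acceptable(version):
    # Returns None when the version is acceptable, otherwise an error string.
    if _PLAIN.match(version) or _SUFFIXED.match(version):
        return None
    return "Version '{}' is not an acceptable release version".format(version)


def _numeric_parts(version):
    # "1.2.3-4-PREVIEW" -> (1, 2, 3); qualifiers do not affect the ordering here.
    return tuple(int(p) for p in version.split("-")[0].split("."))


def is_version_increasing(old, new):
    old_parts, new_parts = _numeric_parts(old), _numeric_parts(new)
    if old_parts != new_parts:
        return new_parts > old_parts
    # Same numeric version: dropping a -PREVIEW qualifier counts as an increase.
    return "-" in old and "-" not in new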
diff --git a/scripts/auto_gen_utils/update-codegen-version.sh b/scripts/auto_gen_utils/update-codegen-version.sh
deleted file mode 100755
index fd99ca9fe1..0000000000
--- a/scripts/auto_gen_utils/update-codegen-version.sh
+++ /dev/null
@@ -1,259 +0,0 @@
-#!/bin/bash
-
-set -e
-set -u
-
-# Syntax: update-codegen-version.sh 1.49-SNAPSHOT path/to/sdks branch-prefix description
-#
-# Underneath the path/to/sdks, we should see the following:
-# .
-# ├── java-sdk
-# ├── python-cli
-# ├── python-sdk
-# ├── ruby-sdk
-# ├── oci-typescript-sdk
-# └── src
-#    └── github.com
-#       └── oracle
-#          └── oci-go-sdk
-
-if [ $# -ne 4 ]; then
-    echo "Illegal number of parameters: need 4 parameters"
-    echo "Syntax: $0 <version> <path/to/sdks> <branch-prefix> <description>"
-    exit 1
-fi
-
-version="$1"
-rootpath="$2"
-prefix="$3"
-desc="$4"
-
-cd "$rootpath"
-
-if [ ! -f java-sdk/bmc-codegen/pom.xml ]; then
-    echo "java-sdk/bmc-codegen/pom.xml not found"
-fi
-if [ ! -f ruby-sdk/pom.xml ]; then
-    echo "ruby-sdk/pom.xml not found"
-fi
-if [ ! -f src/github.com/oracle/oci-go-sdk/pom.xml ]; then
-    echo "src/github.com/oracle/oci-go-sdk/pom.xml not found"
-fi
-if [ ! -f python-sdk/pom.xml ]; then
-    echo "python-sdk/pom.xml not found"
-fi
-if [ ! -f python-cli/pom.xml ]; then
-    echo "python-cli/pom.xml not found"
-fi
-if [ ! -f oci-typescript-sdk/codegen/pom.xml ]; then
-    echo "oci-typescript-sdk/codegen/pom.xml not found"
-fi
-if [ ! -f legacy-java-sdk/bmc-codegen/pom.xml ]; then
-    echo "legacy-java-sdk/bmc-codegen/pom.xml not found"
-fi
-
-checkout_and_update () {
-    source_branch="$1"
-
-    git checkout "${source_branch}"
-    git reset --hard
-    git pull origin "${source_branch}"
-
-    sed -i .bak -e "/bmc-sdk-swagger-maven-plugin/ {" -e "n; s/>[^<]*</>${version}</" -e "}" pom.xml
-}
-
-echo "========================"
-echo "=== JAVA ==="
-echo "========================"
-
-preview_branch="${prefix}-java-preview"
-master_branch="${prefix}-java-master"
-
-cd java-sdk/bmc-codegen
-
-git fetch
-
-echo "========================"
-echo "=== JAVA PREVIEW ==="
-echo "========================"
-
-checkout_and_update preview
-mvn clean process-sources -Pcodegen
-commit "${preview_branch}" "${desc} (Java Preview)"
-java_preview_pr=`git push origin +"${preview_branch}" 2>&1 | grep -A 1 "remote: Create pull request for" | tail -n 1`
-
-echo "========================"
-echo "=== JAVA MASTER ==="
-echo "========================"
-
-checkout_and_update master
-mvn clean process-sources -Pcodegen
-commit "${master_branch}" "${desc} (Java Master)"
-java_master_pr=`git push origin +"${master_branch}" 2>&1 | grep -A 1 "remote: Create pull request for" | tail -n 1`
-
-cd ../..
-
-echo "========================"
-echo "=== RUBY ==="
-echo "========================"
-
-preview_branch="${prefix}-ruby-preview"
-master_branch="${prefix}-ruby-master"
-
-cd ruby-sdk
-
-git fetch
-
-echo "========================"
-echo "=== RUBY PREVIEW ==="
-echo "========================"
-
-checkout_and_update preview
-mvn clean install
-commit "${preview_branch}" "${desc} (Ruby Preview)"
-ruby_preview_pr=`git push origin +"${preview_branch}" 2>&1 | grep -A 1 "remote: Create pull request for" | tail -n 1`
-
-echo "========================"
-echo "=== RUBY MASTER ==="
-echo "========================"
-
-checkout_and_update master
-mvn clean install
-commit "${master_branch}" "${desc} (Ruby Master)"
-ruby_master_pr=`git push origin +"${master_branch}" 2>&1 | grep -A 1 "remote: Create pull request for" | tail -n 1`
-
-cd ..
-
-echo "========================"
-echo "=== GO ==="
-echo "========================"
-
-preview_branch="${prefix}-go-preview"
-master_branch="${prefix}-go-master"
-
-export GOPATH="${PWD}"
-cd src/github.com/oracle/oci-go-sdk
-
-git fetch
-
-echo "========================"
-echo "=== GO PREVIEW ==="
-echo "========================"
-
-checkout_and_update preview
-make -f MakefileDevelopment.mk release
-commit "${preview_branch}" "${desc} (Go Preview)"
-go_preview_pr=`git push origin +"${preview_branch}" 2>&1 | grep -A 1 "remote: Create pull request for" | tail -n 1`
-
-
-echo "========================"
-echo "=== GO MASTER ==="
-echo "========================"
-
-checkout_and_update master
-make -f MakefileDevelopment.mk release
-commit "${master_branch}" "${desc} (Go Master)"
-go_master_pr=`git push origin +"${master_branch}" 2>&1 | grep -A 1 "remote: Create pull request for" | tail -n 1`
-
-cd ../../../..
-
-echo "========================"
-echo "=== PYTHON ==="
-echo "========================"
-
-preview_branch="${prefix}-python-preview"
-master_branch="${prefix}-python-master"
-
-cd python-sdk
-
-git fetch
-
-echo "========================"
-echo "=== PYTHON PREVIEW ==="
-echo "========================"
-
-checkout_and_update preview
-mvn clean install
-commit "${preview_branch}" "${desc} (Python Preview)"
-python_preview_pr=`git push origin +"${preview_branch}" 2>&1 | grep -A 1 "remote: Create pull request for" | tail -n 1`
-
-
-echo "========================"
-echo "=== PYTHON MASTER ==="
-echo "========================"
-
-checkout_and_update master
-mvn clean install
-commit "${master_branch}" "${desc} (Python Master)"
-python_master_pr=`git push origin +"${master_branch}" 2>&1 | grep -A 1 "remote: Create pull request for" | tail -n 1`
-
-cd ..
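# A hypothetical helper, not in the original script: every stanza in this
# file repeats the same four steps (checkout_and_update, build, commit, then
# push and capture the pull-request URL from git's output). Factored out,
# assuming the same checkout_and_update/commit helpers and the $prefix and
# $desc globals defined above:
run_stanza () {
    tool_branch="$1"; source_branch="$2"; build_cmd="$3"; label="$4"
    checkout_and_update "${source_branch}"
    eval "${build_cmd}"
    commit "${tool_branch}" "${desc} (${label})"
    git push origin +"${tool_branch}" 2>&1 | grep -A 1 "remote: Create pull request for" | tail -n 1
}
# Example, equivalent to the CLI PREVIEW stanza below:
# cli_preview_pr=`run_stanza "${prefix}-cli-preview" preview "mvn clean install" "CLI Preview"`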
- -echo "========================" -echo "=== CLI ===" -echo "========================" - -preview_branch="${prefix}-cli-preview" -master_branch="${prefix}-cli-master" - -cd python-cli - -git fetch - -echo "========================" -echo "=== CLI PREVIEW ===" -echo "========================" - -checkout_and_update preview -mvn clean install -commit "${preview_branch}" "${desc} (CLI Preview)" -cli_preview_pr=`git push origin +"${preview_branch}" 2>&1 | grep -A 1 "remote: Create pull request for" | tail -n 1` - - -echo "========================" -echo "=== CLI MASTER ===" -echo "========================" - -checkout_and_update master -mvn clean install -commit "${master_branch}" "${desc} (CLI Master)" -cli_master_pr=`git push origin +"${master_branch}" 2>&1 | grep -A 1 "remote: Create pull request for" | tail -n 1` - -cd .. - -echo "========================" -echo "=== PULL REQUESTS ===" -echo "========================" - -echo "Java:" -echo "${java_preview_pr}" -echo "${java_master_pr}" -echo "Ruby:" -echo "${ruby_preview_pr}" -echo "${ruby_master_pr}" -echo "Go:" -echo "${go_preview_pr}" -echo "${go_master_pr}" -echo "Python:" -echo "${python_preview_pr}" -echo "${python_master_pr}" -echo "CLI:" -echo "${cli_preview_pr}" -echo "${cli_master_pr}" diff --git a/scripts/auto_gen_utils/update_region.py b/scripts/auto_gen_utils/update_region.py deleted file mode 100644 index 13ad6a8de6..0000000000 --- a/scripts/auto_gen_utils/update_region.py +++ /dev/null @@ -1,156 +0,0 @@ -import argparse -import config -import datetime -import os -import urllib3 -import util -from sdk_regions_updater.region_updater_utils import get_new_regions_info_from_issues, get_issues_with_special_regions_to_be_ignored -from sdk_regions_updater.sdk_regions_updater import SdkRegionsUpdater -from git import Repo -from shared.bitbucket_utils import setup_bitbucket, get_pullrequest # noqa: ignore=F402 - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) - - -TOOL_ARGUMENT_ALL = 'All' -BRANCH_TIMESTAMP = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S') -PR_COMMENT_PREFIX = 'PR posted for' -# Region type information in Storekeeper is stored as free form texts. 
-# The ONSR region type contains "(ONSR)" substring -# The DRCC region type contains "DedicatedRegion" substring -# The Lab Testing region type contains "Development" substring -REGION_TYPES_TO_IGNORE = ['(ONSR)', 'Development'] -DEFAULT_REGION_AUTOMATION_OWNER = os.environ.get('DEFAULT_REGION_AUTOMATION_OWNER') or 'anurggar' - - -def checkout_branch(repo_path, base_branch, new_branch): - # Checkout branch and create commit - repo = Repo.init(repo_path) - repo.git.checkout(base_branch) - repo.git.checkout(B=new_branch) - - -def push_change_to_remote(repo_path, tool_name, new_region_ids): - repo = Repo.init(repo_path) - repo.git.add(A=True) - message = 'Region update for {} with new region(s): {}'.format(tool_name, new_region_ids) - repo.git.commit("-m", message, "--allow-empty") - repo.git.push('-u','origin','HEAD') - - -def create_pull_request(base_branch, new_branch, tool_name, new_region_ids, issues): - # create PR - repo_name = config.REPO_NAMES_FOR_TOOL[tool_name][0] - repo_id = util.get_repo_id(repo_name) - print('repo id for {} is {}'.format(config.REPO_NAMES_FOR_TOOL[tool_name], repo_id)) - # TODO: Need to update title and description - title = 'New region support for {} - {}'.format(tool_name, new_region_ids) - issue_keys = (issue.key for issue in issues) - description = 'This PR includes new region(s) from {}.'.format(', '.join(issue_keys)) - pr_url = util.create_pull_request(repo_name, new_branch, title, description, repo_id, repo_name, base_branch) - print("Automatically generated pull request: {}".format(pr_url)) - return pr_url - - -def check_in_region_update_for_sdk(base_branch, tool_name, new_regions, issues): - region_ids = [] - for region in new_regions: - region_ids.append(region['regionIdentifier']) - region_ids_str = ', '.join(region_ids) - new_branch = '{}-region-update-{}'.format(tool_name.lower(), BRANCH_TIMESTAMP) - repo_path = os.environ.get('{}_path'.format(tool_name)) - if repo_path is None: - repo_path = config.REPO_RELATIVE_LOCATION_FOR_TOOL[tool_name] - checkout_branch(repo_path, base_branch, new_branch) - print('Branch {} checked out.'.format(new_branch)) - sdk_updater = SdkRegionsUpdater(tool_name) - sdk_updater.update(new_regions) - print('region information updated.') - push_change_to_remote(repo_path, tool_name, region_ids_str) - print('changes pushed to remote branch.') - pr_url = create_pull_request(base_branch, new_branch, tool_name, region_ids_str, issues) - print('PR created.') - - for issue in issues: - comment = '{} {}: {}'.format(PR_COMMENT_PREFIX, tool_name, pr_url) - util.add_jira_comment(issue.key, comment) - - -def close_tickets_if_all_prs_are_merged(): - print('Checking if there are new region support tickets to be closed.') - in_progress_issues = util.get_in_progress_region_support_tickets() - for issue in in_progress_issues: - print('Checking if {} can be closed.'.format(issue.key)) - pr_urls = util.get_all_pr_urls_from_comment(issue.key, PR_COMMENT_PREFIX) - if (len(pr_urls) > 0): - if util.are_all_prs_merged(pr_urls): - comment = 'All PRs have been merged. Closing the ticket.' 
- util.add_jira_comment(issue.key, comment) - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_DONE) - else: - print('Not all PRs have been merged.') - else: - print('No PRs found for the ticket.') - - -def process_new_tickets_with_special_regions(base_branch): - issues = util.get_unprocessed_region_suppport_tickets(base_branch) - issues_to_ignore, issues_with_invalid_regions = get_issues_with_special_regions_to_be_ignored(issues, REGION_TYPES_TO_IGNORE) - for issue in issues_to_ignore: - region_id = issue.raw['fields']['summary'].split()[-1] - print('Region {} in issue {} should not be included in SDK. Closing the issue directly.'.format(region_id, issue.key)) - comment = 'Region {} should not be included in SDK. Closing the issue directly.'.format(region_id) - util.add_jira_comment(issue.key, comment) - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_CLOSED) - for issue in issues_with_invalid_regions: - region_id = issue.raw['fields']['summary'].split()[-1] - contact = issue.fields.assignee.name if issue.fields.assignee else DEFAULT_REGION_AUTOMATION_OWNER - comment = '[~{}],\nRegion {} in issue {} is invalid. Please check with the Region build team to verify if this region can be added or not'.format(contact, region_id, issue.key) - print(comment) - util.add_jira_comment(issue.key, comment) - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_MORE_INFORMATION_NEEDED) - - -def process_new_tickets(tool_name, base_branch): - print('Checking if there are new region support tickets to be processed.') - issues = util.get_region_support_tickets_to_process(base_branch) - new_regions = get_new_regions_info_from_issues(issues) - if len(new_regions) == 0: - print('No new regions to update.') - else: - if (tool_name == TOOL_ARGUMENT_ALL): - for tool in config.SDKS_SUPPORTING_REGION_UPDATE: - check_in_region_update_for_sdk(base_branch, tool, new_regions, issues) - else: - check_in_region_update_for_sdk(base_branch, tool_name, new_regions, issues) - for issue in issues: - comment = 'Transitioning ticket status to In Progress.' - util.add_jira_comment(issue.key, comment) - util.transition_issue_overall_status(util.JIRA_CLIENT(), issue, config.STATUS_IN_PROGRESS) - issue_keys = (issue.key for issue in issues) - print('{} new regions updated for tickets {}.'.format(len(new_regions), ', '.join(issue_keys))) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument('--tool', - required=True, - choices=config.SDKS_SUPPORTING_REGION_UPDATE + [TOOL_ARGUMENT_ALL], - help='The tool for which to generate the preview. 
Accepted values: {}.'.format(', '.join(config.SDKS_SUPPORTING_REGION_UPDATE + [TOOL_ARGUMENT_ALL]))) - parser.add_argument('--base-branch', - default='preview', - help='The base branch to start from (default = preview).') - args = parser.parse_args() - tool_name = args.tool - base_branch = args.base_branch - - setup_bitbucket(None) - - # Close tickets in progress if all PRs have been merged - close_tickets_if_all_prs_are_merged() - - # Close tickets for regions that are not supposed to be added to SDK - process_new_tickets_with_special_regions(base_branch) - - # Find new tickets to process and update regions - process_new_tickets(tool_name, base_branch) diff --git a/scripts/auto_gen_utils/update_testing_service_dependency.py b/scripts/auto_gen_utils/update_testing_service_dependency.py deleted file mode 100644 index 4c6ed2bb98..0000000000 --- a/scripts/auto_gen_utils/update_testing_service_dependency.py +++ /dev/null @@ -1,46 +0,0 @@ -import util -import argparse -import config -from git import Repo -from shared.bitbucket_utils import setup_bitbucket, get_pullrequest # noqa: ignore=F402 - - -def push_change_to_remote(repo_path): - repo = Repo.init(repo_path) - repo.git.add(A=True) - message = 'Pom Dependencies Update' - repo.git.commit("-m", message, "--allow-empty") - repo.git.push('-u','origin','HEAD') - - -def create_pull_request(base_branch, new_branch): - # create PR - repo_name = config.REPO_NAMES_FOR_TOOL["TestingService"][0] - repo_id = util.get_repo_id(repo_name) - print('repo id for {} is {}'.format(config.REPO_NAMES_FOR_TOOL["TestingService"], repo_id)) - # TODO: Need to update title and description - title = 'OCI Testing Service Update Pom dependencies' - description = 'This PR includes new dependencies from java sdk' - pr_url = util.create_pull_request(repo_name, new_branch, title, description, repo_id, repo_name, base_branch) - print("Automatically generated pull request: {}".format(pr_url)) - return pr_url - - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument('--base-branch', - default='preview', - help='The base branch to start from (default = preview).') - parser.add_argument('--branch-name', - help='The branch name') - parser.add_argument('--repo-path', - help='The repo location') - args = parser.parse_args() - base_branch = args.base_branch - branch_name = args.branch_name - repo_path = args.repo_path - - setup_bitbucket(None) - push_change_to_remote(repo_path) - pr_link = create_pull_request(base_branch, branch_name) - print(pr_link) diff --git a/scripts/auto_gen_utils/util.py b/scripts/auto_gen_utils/util.py deleted file mode 100644 index ac8cf13e04..0000000000 --- a/scripts/auto_gen_utils/util.py +++ /dev/null @@ -1,1064 +0,0 @@ -import config -from jira import JIRA -import sys -import requests -import os -import textwrap -import re -import json -import six -import datetime -import pytz -import shared.bitbucket_utils - -from functools import reduce -from recordclass import recordclass -from jira import JIRAError - -IS_VERBOSE = False -_JIRA_CLIENT = None -PACIFIC_TIME_ZONE = pytz.timezone("America/Los_Angeles") - -JIRA_ISSUE_FIELDS_REQUIRED_BY_UTIL = ["issuetype", "project", config.CUSTOM_FIELD_ID_PREVIEW_ISSUE] - -PUBLIC_TICKET_CONFLUENCE_URL = "https://confluence.oci.oraclecorp.com/pages/viewpage.action?pageId=43683000" -PUBLIC_TICKET_CONFLUENCE_URL_PREVIEW_ISSUE_FIELD_ANCHOR = PUBLIC_TICKET_CONFLUENCE_URL + "#RequestingapublicSDK/CLI-PreviewIssueField" -PUBLIC_TICKET_CONFLUENCE_URL_LOADING_FIELDS_FROM_PREVIEW_ISSUE_ANCHOR = 
PUBLIC_TICKET_CONFLUENCE_URL + "#RequestingapublicSDK/CLI-LoadingFieldsFromPreviewIssue" - -# These were already in flight. -PUBLIC_DEXREQ_TICKETS_NOT_LOADED_FROM_PREVIEW = ['DEXREQ-663', 'DEXREQ-668'] - -# CLI sprint name -CLI_SPRINT_NAME_PREFIX = 'DEX CLI' - -REGION_SUPPORT_TICKET_SUMMARY_PREFIX = 'Next set of region support in' -BUG_BASH_TICKET_SUMMARY_PREFIX = 'SDK ACTION REQUIRED' - -# Release day is the day of week value, starting from Monday = 0 -RELEASE_DAY = 1 - - -def JIRA_CLIENT(): - global _JIRA_CLIENT - - if _JIRA_CLIENT: - return _JIRA_CLIENT - - # attempt to log in using user name and password if present, if not use config.JSESSIONID - if config.USERNAME and config.PASSWORD: - print('Building JIRA client with username / password auth') - _JIRA_CLIENT = JIRA(config.JIRA_OPTIONS, basic_auth=config.JIRA_BASIC_AUTH) - elif config.JSESSIONID: - print('Building JIRA client with cookie based auth') - cookie_options = dict(config.JIRA_OPTIONS) - cookie_options['cookies'] = { - 'JSESSIONID': config.JSESSIONID - } - - _JIRA_CLIENT = JIRA(cookie_options) - else: - sys.exit('Could not authenticate with JIRA server. Must specify environment variables for either config.JSESSIONID or JIRA_USERNAME and JIRA_PASSWORD.') - - return _JIRA_CLIENT - - -def printv(s, flush=False): - if IS_VERBOSE: - print(s) - if flush: - # Flush, so we make sure the output of the issue key is already visible - # NOTE: This is to help debug for DEX-6382 - sys.stdout.flush() - - -def get_jira_issue_keys(s, jira_project_key): - if not s: - return [] - - parts = [x.strip() for x in s.split(',')] - issue_keys = [] - for part in parts: - issue_keys.extend(re.findall("({}-[0-9]+)".format(jira_project_key), part)) - - return list(set(issue_keys)) - - -def get_dexreq_issue_keys(s): - return get_jira_issue_keys(s, "DEXREQ") - - -def add_error(errors, message, kind="ERROR"): - printv("{}: {}".format(kind, message)) - if errors is not None: - errors.append(message) - - return errors - - -def get_preview_dexreq_key(public_issue, errors=None, warnings=None): - preview_issues_value = getattr(public_issue.fields, config.CUSTOM_FIELD_ID_PREVIEW_ISSUE).strip().upper() - issue_keys = get_jira_issue_keys(preview_issues_value,"") - preview_issue_keys = get_dexreq_issue_keys(preview_issues_value) - printv("NOTE: Public ticket '{}' refers to preview ticket(s): '{}'".format(public_issue.key, ", ".join(str(k) for k in preview_issue_keys))) - - if "DEXREQ" not in preview_issues_value: - if len(issue_keys) > 1: - add_error(errors, "A public DEXREQ ticket must refer a preview DEXREQ ticket. {} are not DEXREQ tickets.".format(preview_issues_value)) - else: - add_error(errors, "A public DEXREQ ticket must refer a preview DEXREQ ticket. {} is not a DEXREQ ticket.".format(preview_issues_value)) - - if len(preview_issue_keys) == 0: - add_error(errors, "A public DEXREQ ticket must refer to exactly one preview DEXREQ ticket. Add the preview ticket in the '{}' field. See {} .".format( - config.CUSTOM_FIELD_NAME_FOR_ID[config.CUSTOM_FIELD_ID_PREVIEW_ISSUE], - PUBLIC_TICKET_CONFLUENCE_URL_PREVIEW_ISSUE_FIELD_ANCHOR)) - - return None - - if len(preview_issue_keys) > 1: - add_error(errors, "A public DEXREQ ticket must refer to exactly one preview DEXREQ ticket. This public ticket referred to {} preview tickets in the '{}' field ('{}'). 
See {} .".format( - len(preview_issue_keys), config.CUSTOM_FIELD_NAME_FOR_ID[config.CUSTOM_FIELD_ID_PREVIEW_ISSUE], ", ".join(preview_issue_keys), - PUBLIC_TICKET_CONFLUENCE_URL_PREVIEW_ISSUE_FIELD_ANCHOR)) - - return None - - return preview_issue_keys[0] - - -def handle_public_ticket(issue, fields=None, expand=None, errors=None, warnings=None): - ticket_type_id = issue.fields.issuetype.id - if ticket_type_id != config.PUBLIC_ISSUE_TYPE_ID: - return issue - - if issue.key in PUBLIC_DEXREQ_TICKETS_NOT_LOADED_FROM_PREVIEW: - print("NOTE: Public ticket {} is not being loaded from the linked preview ticket because it was already in flight".format(issue.key)) - return issue - - # This is a public ticket, get the preview ticket - preview_issue_key = get_preview_dexreq_key(issue, errors=errors, warnings=warnings) - if not preview_issue_key: - return issue - - try: - printv("Loading preview issue '{}' from JIRA".format(preview_issue_key)) - preview_issue = JIRA_CLIENT().issue(preview_issue_key, fields=fields, expand=expand) - printv("Done loading preview issue '{}' from JIRA".format(preview_issue_key)) - if not preview_issue.fields.project.name == config.JIRA_PROJECT: - add_error(errors, 'The preview ticket provided was not in the DEXREQ project: {}.'.format(preview_issue)) - - return issue - - if not preview_issue.fields.issuetype.id == config.PREVIEW_ISSUE_TYPE_ID: - add_error(errors, 'The DEXREQ ticket provided was not a preview ticket: {}.'.format(preview_issue)) - - return issue - - any_public_fields_ignored = False - # preview_issue is a DEXREQ preview ticket - for custom_field_id in config.CUSTOM_FIELD_IDS_READ_FROM_PREVIEW_TICKET: - public_value = None - public_value_str = "" - if hasattr(issue.fields, custom_field_id): - public_value = getattr(issue.fields, custom_field_id) - if public_value: - public_value_str = "'{}'".format(public_value.encode('utf8')) - else: - public_value_str = "" - - preview_value = None - preview_value_str = "" - if hasattr(preview_issue.fields, custom_field_id): - preview_value = getattr(preview_issue.fields, custom_field_id) - if preview_value: - preview_value_str = "'{}'".format(preview_value.encode('utf8')) - else: - preview_value_str = "" - - public_value_differs = (public_value != preview_value) - public_value_missing_preview_exists = (public_value_str == "" and preview_value_str != "") - - if public_value == preview_value: - printv("NOTE: The value {} for '{}' in public ticket was the same as in the preview ticket {}.".format( - public_value_str, config.CUSTOM_FIELD_NAME_FOR_ID[custom_field_id], preview_issue_key)) - else: - if public_value_missing_preview_exists or (public_value_differs and public_value is None): - # Just put this in the log: it's expected that public ticket has fields missing or empty now - printv("NOTE: Using value {} for '{}' from preview ticket {} instead, ignoring value {} from public ticket.".format( - preview_value_str, config.CUSTOM_FIELD_NAME_FOR_ID[custom_field_id], preview_issue_key, public_value_str)) - elif public_value_differs: - add_error(warnings, "Using value {} for '{}' from preview ticket {} instead, ignoring value {} from public ticket.".format( - preview_value_str, config.CUSTOM_FIELD_NAME_FOR_ID[custom_field_id], preview_issue_key, public_value_str), kind="WARN") - any_public_fields_ignored = True - - if preview_value_str == "": - # make sure it's removed in the public ticket too - if hasattr(issue.fields, custom_field_id): - delattr(issue.fields, custom_field_id) - issue.raw['fields'].pop(custom_field_id, None) - 
else: - setattr(issue.fields, custom_field_id, preview_value) - issue.raw['fields'][custom_field_id] = preview_value - - if any_public_fields_ignored: - add_error(warnings, "For more information about the 'using value from preview ticket instead' warnings above, see {} .".format( - PUBLIC_TICKET_CONFLUENCE_URL_LOADING_FIELDS_FROM_PREVIEW_ISSUE_ANCHOR), kind="WARN") - except JIRAError as e: - if e.status_code == 404: - add_error(errors, 'The preview ticket provided could not be resolved. Please ensure {} is a valid JIRA issue.'.format(preview_issue)) - else: - add_error(errors, 'The preview ticket provided: {} could not be resolved. Error: {}.'.format(preview_issue, str(e))) - - return issue - - -# Wrapper function for jira_client.issue(), which takes care of reading values from the Preview ticket, if it's a Public ticket -def get_dexreq_issue(issue_key, fields=None, expand=None, errors=None, warnings=None): - if fields and not set(JIRA_ISSUE_FIELDS_REQUIRED_BY_UTIL).issubset(set(fields)): - # Need issue type to determine if we need special processing for Public tickets - fields = fields + JIRA_ISSUE_FIELDS_REQUIRED_BY_UTIL - issue = JIRA_CLIENT().issue(issue_key, fields=', '.join(fields) if fields else None, expand=', '.join(expand) if expand else None) - return handle_public_ticket(issue, fields, expand, errors, warnings) - - -def jira_search_issues(query, fields=None, expand=None): - block_size = 50 - block_num = 1 - - issues = [] - - while True: - print("Querying block {}".format(block_num)) - more_issues = JIRA_CLIENT().search_issues(query, startAt=len(issues), maxResults=block_size, fields=fields, expand=expand) - print("Block {} returned {} issues".format(block_num, len(more_issues))) - if more_issues: - issues.extend(more_issues) - if len(more_issues) != block_size: - break - else: - break - block_num += 1 - - print("Returned {} issues".format(len(issues))) - return issues - - -# Wrapper function for jira_client.search_issues(), which takes care of reading values from the Preview ticket, if it's a Public ticket -# errors and warnings should be dictionaries from issue key to list of strings, so you can look up errors and warnings for each issue separately -def search_dexreq_issues(query, fields=None, expand=None, errors=None, warnings=None): - if fields and not set(JIRA_ISSUE_FIELDS_REQUIRED_BY_UTIL).issubset(set(fields)): - # Need issue type to determine if we need special processing for Public tickets - fields = fields + JIRA_ISSUE_FIELDS_REQUIRED_BY_UTIL - - issues = jira_search_issues(query, fields=', '.join(fields) if fields else None, expand=', '.join(expand) if expand else None) - - for issue in issues: - if errors is not None: - errors_list = [] - else: - errors_list = None - - if warnings is not None: - warnings_list = [] - else: - warnings_list = None - - handle_public_ticket(issue, fields, expand, errors_list, warnings_list) - - if errors is not None: - errors[issue.key] = errors_list - if warnings is not None: - warnings[issue.key] = warnings_list - - return issues - - -# updates the SDK / CLI 'Status' field of an issue to the specified value -def transition_issue_per_tool_status(jira_client, issue, status, tool_name): - if not is_tool_jira_reportable(tool_name): - print("Not transitioning issue {issue_key} for tool status {status}: " - "Field for tool {tool_name} does not exist and is not reportable".format(issue_key=issue.key, - status=status, - tool_name=tool_name)) - else: - custom_field_id_for_tool = config.CUSTOM_FIELD_ID_FOR_TOOL[tool_name] - 
custom_field_name_for_tool = config.CUSTOM_FIELD_NAME_FOR_TOOL[tool_name] - if config.IS_DRY_RUN: - print('DRY-RUN: not transitioning issue {issue_key} to state {status_field_name}: {status}'.format( - issue_key=issue.key, - status_field_name=custom_field_name_for_tool, - status=status - )) - else: - print('Transitioning issue {issue_key} to state {status_field_name}: {status}'.format( - issue_key=issue.key, - status_field_name=custom_field_name_for_tool, - status=status - )) - - kwargs = {} - kwargs[custom_field_id_for_tool] = {'value': status} - issue.update(**kwargs) - - -def transition_issue_overall_status(jira_client, issue, status): - transitions = JIRA_CLIENT().transitions(issue) - transition_to_apply = None - for transition in transitions: - if transition['name'] == status: - transition_to_apply = transition - break - - if issue and issue.fields and issue.fields.status and issue.fields.status.name == status: - print('Not transitioning issue {} to status "{}" because it already has that status'.format(issue.key, status)) - - if transition_to_apply: - if config.IS_DRY_RUN: - print("DRY-RUN: not transitioning {} to {} using transition {}".format(issue.key, transition_to_apply['name'], transition_to_apply['id'])) - else: - print("Transitioning {} to {} using transition {}".format(issue.key, transition_to_apply['name'], transition_to_apply['id'])) - JIRA_CLIENT().transition_issue(issue, transition_to_apply['id']) - else: - print("\tDon't know how to transition this issue to '{}'".format(status)) - - -def transition_issue_overall_status_if_not_in_status(jira_client, issue, desired_status, blacklisted_status): - if issue and issue.fields and issue.fields.status and issue.fields.status.name == blacklisted_status: - print('Not transitioning issue {} to status "{}" because this transition was explicitly disallowed'.format(issue.key, desired_status)) - return - - transition_issue_overall_status(jira_client, issue, desired_status) - - -def update_issue_fields_helper(jira_client, issue, update_list, issue_key, preview_note=""): - if not update_list: - # Nothing to do - return None - - if config.IS_DRY_RUN: - for update in update_list: - print('DRY-RUN: not changing issue {issue_key}{preview_note} field {field_name} (id {field_id}) to "{new_value}"'.format( - issue_key=issue_key, - preview_note=preview_note, - field_name=update['field_name'], - field_id=update['field_id'], - new_value=update['new_value'] - )) - else: - fields = {} - for update in update_list: - print('Changing issue {issue_key}{preview_note} field {field_name} (id {field_id}) to "{new_value}"'.format( - issue_key=issue_key, - preview_note=preview_note, - field_name=update['field_name'], - field_id=update['field_id'], - new_value=update['new_value'] - )) - fields[update['field_id']] = update['new_value'] - - issue.update(fields=fields) - - return None - - -# updates a fields of an issue to specified values -# update_list is a list of {field_name: "field_name", field_id: "field_id", new_value: "new_value"} triples -def update_issue_fields(jira_client, issue, update_list): - ticket_type_id = issue.fields.issuetype.id - - preview_updates = [] - public_updates = [] - - if ticket_type_id == config.PUBLIC_ISSUE_TYPE_ID: - for update in update_list: - if update['field_id'] in config.CUSTOM_FIELD_IDS_READ_FROM_PREVIEW_TICKET: - # This field is actually in the preview ticket - preview_updates.append(update) - else: - public_updates.append(update) - - update_list = public_updates - - if preview_updates: - # This is a public ticket, and 
some fields are actually in the preview ticket - errors = [] - preview_issue_key = get_preview_dexreq_key(issue, errors=errors) - if preview_issue_key: - fields = [u['field_id'] for u in preview_updates] - preview_issue = jira_client.issue(preview_issue_key, fields=fields) - return update_issue_fields_helper(jira_client, preview_issue, preview_updates, issue.key, preview_note=" (in preview ticket {})".format(preview_issue.key)) - else: - return errors[0] - - return update_issue_fields_helper(jira_client, issue, update_list, issue.key) - - -# updates a field of an issue to the specified value -def update_issue_field(jira_client, issue, field_name, field_id, new_value): - return update_issue_fields(jira_client, issue, [{'field_name': field_name, 'field_id': field_id, 'new_value': new_value}]) - - -def apply_issue_transition(issue, transition): - if config.IS_DRY_RUN: - print('DRY-RUN: not applying transition: {transition_name} to issue {issue_key}'.format( - issue_key=issue.key, - transition_name=transition['name'] - )) - else: - print('Applying transition: {transition_name} to issue {issue_key}'.format( - issue_key=issue.key, - transition_name=transition['name'] - )) - - JIRA_CLIENT().transition_issue(issue, transition['id']) - - -def get_udx_issue_keys(udx_issue_keys): - return get_jira_issue_keys(udx_issue_keys, "UDX") - - -def parse_issue_keys_from_commit_message(commit_message): - return parse_issue_keys_from_specific_commit_message(commit_message, '[[') - - -# bracket_prefix allows you to narrow this down to a certain kind of message, -# e.g. "Running generation for: [[" -def parse_issue_keys_from_specific_commit_message(commit_message, bracket_prefix): - issue_keys = [] - while bracket_prefix in commit_message: - commit_message = commit_message[commit_message.index(bracket_prefix) + len(bracket_prefix):] - - if "]]" not in commit_message: - break - - substring = commit_message[:commit_message.index("]]")] - parts = [part.strip() for part in substring.split(',')] - issue_keys.extend(parts) - - commit_message = commit_message[commit_message.index("]]") + 2:] - - # filter out duplicates - seen = set() - seen_add = seen.add # speed up method resolution - return [x for x in issue_keys if not (x in seen or seen_add(x))] - - -def add_jira_comment(issue_key, comment, comment_type=None): - color = config.COMMENT_TYPE_TO_COLOR.get(comment_type, None) - if color: - comment = '{{color:{color}}}{comment}{{color}}'.format(color=color, comment=comment) - - if config.IS_DRY_RUN: - print("DRY-RUN: not making the following comment for {issue_key}".format(issue_key=issue_key)) - print(comment) - else: - print("Making the following comment for {issue_key}".format(issue_key=issue_key)) - print(comment) - JIRA_CLIENT().add_comment(issue_key, comment) - - -def safe_delete_branch(repo, branch): - for prefix in config.BRANCH_PREFIXES_SAFE_FOR_DELETION: - if branch.startswith(prefix): - if config.IS_DRY_RUN: - print('DRY RUN: Not deleting branch: {}'.format(branch)) - else: - print('Deleting branch: {}'.format(branch)) - repo.git.push('--delete', 'origin', branch) - return - - print('Refusing to delete branch: {}'.format(branch)) - - -def join(l): - if len(l) < 1: - return "" - if len(l) == 1: - return l[0] - - all_but_last = ', '.join(l[:-1]) - last = str(l[-1]) - - return ' and '.join([all_but_last, last]) - - -def get_last_commit_message(tool_name): - current_branch = [branch.strip()[2:] for branch in config.REPOS_FOR_TOOL[tool_name][-1].git.branch().split('\n') if branch.startswith('* ')][0] - - 
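# NOTE: the comprehension above scrapes porcelain `git branch` output for the
# line starting with "* " to find the current branch. A sketch of a sturdier
# equivalent using the same GitPython repo object (here `repo` stands for
# config.REPOS_FOR_TOOL[tool_name][-1]; note rev-parse returns the literal
# string "HEAD" on a detached HEAD, so behavior differs in that case):
#
#   current_branch = repo.git.rev_parse('--abbrev-ref', 'HEAD')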
last_commit_messages = {} - for name, repo in zip(config.REPO_NAMES_FOR_TOOL[tool_name], config.REPOS_FOR_TOOL[tool_name]): - # check out equivalent branch everywhere - repo.git.checkout(current_branch) - - # parse DEXREQ issue out of last commit message (so we can post in the issue which build is running) - # this build is distinct from the first build id we post in the ticket which just updates the pom.xml - repo_last_commit_message = repo.git.log(n=1, format='%s%n%b') - last_commit_messages[name] = repo_last_commit_message - - return last_commit_messages[config.REPO_NAMES_FOR_TOOL[tool_name][-1]] - - -def get_repo_id(repo_slug): - url = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/SDK/repos/{repo}'.format(repo=repo_slug) - r = requests.get(url, auth=config.JIRA_BASIC_AUTH) - return r.json()['id'] - - -def get_default_reviewers(repo_slug, source_repo_id, source_branch, target_repo_id, target_branch): - url = 'https://bitbucket.oci.oraclecorp.com/rest/default-reviewers/1.0/projects/SDK/repos/{repo}/reviewers?sourceRepoId={source_repo_id}&targetRepoId={target_repo_id}&sourceRefId=refs/heads/{source_branch}&targetRefId=refs/heads/{target_branch}'.format( - repo=repo_slug, - source_repo_id=source_repo_id, - target_repo_id=target_repo_id, - source_branch=source_branch, - target_branch=target_branch) - r = requests.get(url, auth=config.JIRA_BASIC_AUTH) - - if r.status_code >= 300: - print(r.json()) - - raise ValueError("Failed to get default reviewers: {}".format(r.json())) - - return r.json() - - -# CLI PR is required for the following cases: -# * Manual changes suggested in Design review ticket -# * Test failures found during CLI Generation. -def is_cli_pr_required(issue): - for label in config.CLI_PR_REQUIRED_LABELS: - if label in issue.fields.labels: - return True - - return False - - -def create_pull_request(repo, branch, title, description, target_repo_id, target_repo, target_branch, additional_reviewers=[]): - repo_id = get_repo_id(repo) - - reviewers = [{"user": r} for r in additional_reviewers] - if config.IS_DRY_RUN: - print('DRY-RUN: not getting default reviewers, because the branch has not been pushed') - else: - reviewers.extend([{"user": r} for r in get_default_reviewers(repo, repo_id, branch, target_repo_id or repo_id, target_branch)]) - - url = 'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/SDK/repos/{target_repo}/pull-requests'.format( - target_repo=target_repo) - headers = {'content-type': 'application/json'} - - json_data = { - "title":title, - "description":description, - "state":"OPEN", - "open":True, - "closed":False, - "fromRef":{ - "id":"refs/heads/{}".format(branch), - "repository":{ - "slug":repo, - "name":None, - "project":{ - "key":"SDK" - } - } - }, - "toRef":{ - "id":"refs/heads/{}".format(target_branch), - "repository":{ - "slug":target_repo, - "name":None, - "project":{ - "key":"SDK" - } - } - }, - "locked":False, - "reviewers": reviewers - } - - if config.IS_DRY_RUN: - print('DRY-RUN: not issueing POST to {url} to with headers {headers} and body:\n{body}'.format( - url=url, - headers=headers, - body=json_data - )) - - return None - else: - r = requests.post(url, headers=headers, json=json_data, auth=config.JIRA_BASIC_AUTH) - if r.status_code >= 300: - print(r) - print(r.json()) - - raise ValueError("Failed to post pull request: {}".format(r.json())) - - return r.json()['links']['self'][0]['href'] - - -def add_reviewer_to_pull_request(pr_id, target_repo, additional_reviewers=[]): - reviewers = [] - - url = 
'https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/SDK/repos/{target_repo}/pull-requests/{pr_id}'.format( - target_repo=target_repo, pr_id=pr_id) - headers = {'content-type': 'application/json'} - - r = requests.get(url, headers=headers, auth=config.JIRA_BASIC_AUTH) - - if 'reviewers' in r.json(): - for reviewer in r.json()['reviewers']: - reviewers.append(reviewer) - - for reviewer_name in additional_reviewers: - reviewers.append({"user": {"name": reviewer_name}}) - - json_data = { - "reviewers": reviewers, - "version": r.json()['version'] - } - - if config.IS_DRY_RUN: - print('DRY-RUN: not issueing PUT to {url} to with headers {headers} and body:\n{body}'.format( - url=url, - headers=headers, - body=json_data - )) - - return None - else: - r = requests.put(url, headers=headers, json=json_data, auth=config.JIRA_BASIC_AUTH) - if r.status_code >= 300: - print(r) - print(r.json()) - - raise ValueError("Failed to post pull request: {}".format(r.json())) - - return r.json()['links']['self'][0]['href'] - - -def were_steps_successful(tool_name): - # this script runs after both generation and build have completed - # it will run even 'If previous steps have failed' to ensure we can report failures - generation_success = {} - for name, success_generation_file in zip(config.REPO_NAMES_FOR_TOOL[tool_name], config.SUCCESS_GENERATION_FILES_FOR_TOOL[tool_name]): - generation_success[name] = os.path.exists(success_generation_file) - - build_success = {} - for name, success_build_file in zip(config.REPO_NAMES_FOR_TOOL[tool_name], config.SUCCESS_BUILD_FILES_FOR_TOOL[tool_name]): - build_success[name] = os.path.exists(success_build_file) - - # report to JIRA tasks whether or not jobs succeeded - # if either failed, just give link to build log and say that generation failed for some reason - # this will be hard for external users to investigate so we want to cover easy errors earlier in the process with explicit errors: - # - spec artifact / group / version doesnt exist in artifactory - # - invalid param set - # - relative spec path doesn't point at a spec (yaml file) - print("Generation successful? {}".format(generation_success)) - print("Build successful? 
{}".format(build_success)) - - generation_pass = all(x is True for x in generation_success.values()) - build_pass = all(x is True for x in build_success.values()) - - return generation_pass, build_pass - - -def update_file_with_feature_ids(file, feature_ids, line_regex, added_line_pattern, new_template): - if not os.path.exists(file): - with open(file, 'w') as f: - f.write(new_template) - - with open(file, 'r') as f: - content = f.read() - - content_array = content.lower().split('\n') - - need_new_line = not content.endswith('\n') - with open(file, 'a') as f: - if feature_ids: - for feature_id in feature_ids: - already_exists = False - for line in content_array: - # Remove comments - if '#' in line: - line = line[line.index('#') + 1:] - line = line.strip() - if re.match(line_regex.format(feature_id.lower()), line): - already_exists = True - break - if not already_exists: - if need_new_line: - f.write('\n') - need_new_line = False - - f.write(added_line_pattern.format(feature_id.lower())) - else: - print('Feature ID: {} was already contained in: {}'.format(feature_id, file)) - - -# TODO: add comment to file with JIRA title or link to preview ticket -def update_feature_id_file(dir, feature_ids, issue_key): - update_file_with_feature_ids(os.path.join(dir, "{}.yaml".format(issue_key)), feature_ids, r'^-\s{{1,}}{}$', ' - {}\n', - textwrap.dedent("""\ - # Add whitelisted feature ids here. Please include a comment describing what the feature is. - # Example: - # whitelisted: - # # comment - # - udx-123 - # # comment - # - udx-456 - whitelisted: - """)) - - -def update_pre_processor_file(dir, feature_ids, issue_key): - update_file_with_feature_ids(os.path.join(dir, "{}.txt".format(issue_key)), feature_ids, '^{}$', '{}\n', - textwrap.dedent("""\ - # This configuration file determines which conditional groups are enabled. - # Use --group_file enabled.txt or -f enabled.txt - # - # The hash '#' character starts a comment - # - # Group names can contain the characters A-Z, a-z, 0-9 and the underscore '_' and the hyphen '-'. - # Whitespace before or after the group name is ignored. 
- # - # GROUP1 # comment - # GROUP2 # comment - """)) - - -# TODO: this is currently broken -- https://jira.oci.oraclecorp.com/browse/DEX-7328 -def get_dev_status_info_for_issue(jira_client, issue): - jira_internal_session = JIRA_CLIENT()._session - - issue_dev_status_url = config.JIRA_DEV_STATUS_REST_API_URL_FORMAT.format(issue.id) - print("issue_dev_status_url: {}".format(issue_dev_status_url)) - return json.loads(jira_internal_session.get(issue_dev_status_url).content) - - -# TODO: this is currently very inefficient -- https://jira.oci.oraclecorp.com/browse/DEX-7328 -def get_pull_requests_for_issue(jira_client, issue): - created_date = getattr(issue.fields, 'created') - - prs = [] - repos = get_jira_reportable_repo_names_for_tool() - all_repo_names = [] - for tool_name, repo_names in repos.items(): - all_repo_names.extend(repo_names) - - all_repo_names.append(config.DEXREQ_REPO_NAME) - all_repo_names = list(set(all_repo_names)) - - # The spec diff PR can't be older than the DEXREQ ticket, so only search that far - printv("To get all spec diff PR, listing all PRs in {} newer than {}".format(all_repo_names, created_date)) - - for repo in all_repo_names: - result = shared.bitbucket_utils.get_all_pullrequest_with_string_after('SDK', repo, issue.key, created_date) - printv("Found {} PRs for repo {}".format(len(result), repo)) - prs.extend(result) - - printv("Found {} PRs total".format(len(prs))) - - return prs - - -def timestamp_to_utc_string(timestamp): - # 'dateAdded' is a timestamp in milliseconds - # Turn it into a UTC datetime - build_added_datetime = datetime.datetime.utcfromtimestamp(timestamp / 1000.0) - # Then turn it into a string, like the other dates - build_added = "{}.{:03.0f}".format( - build_added_datetime.strftime('%Y-%m-%dT%H:%M:%S'), - build_added_datetime.microsecond / 1000.0 - ) - return build_added - - -def find_dex_tools_active_sprint_id(): - boards = JIRA_CLIENT().boards() - dex_board = next((board for board in boards if board.name.startswith('DEX Tools')), None) - if dex_board is None: - return None - sprints = JIRA_CLIENT().sprints(dex_board.id) - active_sprint = next((sprint for sprint in sprints if (sprint.state == 'ACTIVE' and sprint.name.upper().startswith(CLI_SPRINT_NAME_PREFIX.upper()))), None) - - if not active_sprint: - # get the first future sprint! - future_sprints = [sprint for sprint in sprints if (sprint.state == 'FUTURE' and sprint.name.upper().startswith(CLI_SPRINT_NAME_PREFIX.upper()))] - if len(future_sprints) > 0: - future_sprints.sort(key=lambda sprint: sprint.sequence) - return future_sprints[0].id - return None - - return active_sprint.id - - -PrStatusForTools = recordclass('PrStatusForTools', 'tools prs_per_tool all_prs_initiated all_prs_approved all_prs_merged last_pr_update last_build_update last_update') -PrsPerTool = recordclass('PrsPerTool', 'merged approved open') -PrAndUrl = recordclass('PrAndUrl', 'pr url tool_name') - - -# TODO: this is currently not very efficient -- https://jira.oci.oraclecorp.com/browse/DEX-7328 -def get_pr_status_for_tools(jira_client, issue, tool_names, target_branch_filter=None, - filter=None): - pr_statuses_for_tool = PrStatusForTools( - tools={}, - prs_per_tool={}, - all_prs_initiated=True, - all_prs_approved=True, - all_prs_merged=True, - last_pr_update=None, - last_build_update=None, - last_update=None - ) - - for tool_name in tool_names: - empty = PrsPerTool( - merged=[], - approved=[], # Open and has at least one sign-off -- this is since service teams should sign off first. 
- open=[] # Open, but no sign-off. - ) - pr_statuses_for_tool.prs_per_tool[tool_name] = empty - - pull_requests = get_pull_requests_for_issue(jira_client, issue) - - # if a filter is supplied, only consider PRs to that branch - if target_branch_filter: - pull_requests = [pr for pr in pull_requests if deep_get(pr, 'toRef.id') == 'refs/heads/{}'.format(target_branch_filter)] - - if filter: - pull_requests = [pr for pr in pull_requests if filter(pr)] - - printv("pull_requests after filtering: {}".format(len(pull_requests))) - - if pull_requests: - for pr in pull_requests: - pr_url = None - pr_and_url = None - hrefs = deep_get(pr, 'links.self') - if hrefs: - pr_url = deep_get(hrefs[0], 'href') - - printv(pr_url) - # Take the timestamps the validation builds were created into account - # and store the latest one. - pr_build_status = shared.bitbucket_utils.get_bitbucket_build_status_for_pr(pr) - for build in pr_build_status['values']: - build_added = timestamp_to_utc_string(build['dateAdded']) - if not pr_statuses_for_tool.last_build_update or pr_statuses_for_tool.last_build_update < build_added: - pr_statuses_for_tool.last_build_update = build_added - - # Keep track of the last time the PR was changed (code commits, not comments) - # and store the latest one. - pr_last_update = timestamp_to_utc_string(pr['updatedDate']) - if not pr_statuses_for_tool.last_pr_update or pr_statuses_for_tool.last_pr_update < pr_last_update: - pr_statuses_for_tool.last_pr_update = pr_last_update - - pr_status = pr['state'] - target_repo_name = deep_get(pr, 'toRef.repository.name') - corresponding_tool_name = config.REPO_NAME_TO_PRIMARY_TOOL.get(target_repo_name) - if not corresponding_tool_name: - print('Ignored pull request: {} for unrecognized repo'.format(target_repo_name)) - continue - - if corresponding_tool_name not in tool_names: - continue - - if pr_url: - pr_and_url = PrAndUrl(pr=pr, url=pr_url, tool_name=corresponding_tool_name) - - prs_for_this_tool = pr_statuses_for_tool.prs_per_tool[corresponding_tool_name] - - # if there are ANY open PRs, we override the status to 'OPEN' so we don't count it as done - if pr_status == config.PULL_REQUEST_STATUS_OPEN: - pr_statuses_for_tool.tools[corresponding_tool_name] = pr_status - # Check reviewers[i].approved - approved = False - for reviewer in pr['reviewers']: - if reviewer['approved']: - approved = True - break - - if approved and pr_and_url: - prs_for_this_tool.approved.append(pr_and_url) - else: - prs_for_this_tool.open.append(pr_and_url) - pr_statuses_for_tool.all_prs_approved = False - - if pr_status == config.PULL_REQUEST_STATUS_MERGED: - prs_for_this_tool.merged.append(pr_and_url) - - # it may be that there are some PRs merged, and some PRs still open - # we only want to return status = MERGED if ALL PRs are merged, so if status is already set to 'OPEN' leave it that way - if not (pr_statuses_for_tool.tools.get(corresponding_tool_name) == config.PULL_REQUEST_STATUS_OPEN) and pr_status == config.PULL_REQUEST_STATUS_MERGED: - pr_statuses_for_tool.tools[corresponding_tool_name] = pr_status - - for tool, status in six.iteritems(pr_statuses_for_tool.tools): - if status != config.PULL_REQUEST_STATUS_MERGED: - pr_statuses_for_tool.all_prs_merged = False - - # it's possible that some PRs are merged, and others are still open, in this case we still - # want to report all_prs_initiated = True - if status != config.PULL_REQUEST_STATUS_OPEN and status != config.PULL_REQUEST_STATUS_MERGED: - pr_statuses_for_tool.all_prs_initiated = False - - # If there's a tool that we 
didn't find, we can't possibly have all PRs initiated, approved or merged - for tool in tool_names: - if tool not in pr_statuses_for_tool.tools: - pr_statuses_for_tool.all_prs_initiated = False - pr_statuses_for_tool.all_prs_approved = False - pr_statuses_for_tool.all_prs_merged = False - break - - if pr_statuses_for_tool.last_pr_update > pr_statuses_for_tool.last_build_update: - pr_statuses_for_tool.last_update = pr_statuses_for_tool.last_pr_update - else: - pr_statuses_for_tool.last_update = pr_statuses_for_tool.last_build_update - - printv("pr_statuses_for_tool: {}".format(pr_statuses_for_tool)) - - return pr_statuses_for_tool - - -def get_branch_prefix_for_spec_diff(build_type): - prefix = config.SPEC_BRANCH_PREFIX + "-" - if build_type == config.BUILD_TYPE_INDIVIDUAL_PREVIEW: - return prefix + config.INDIVIDUAL_PREVIEW_BRANCH_PREFIX - elif build_type == config.BUILD_TYPE_BULK_PENDING_MERGE_PREVIEW: - return prefix + config.BULK_PREVIEW_BRANCH_PREFIX - elif build_type == config.BUILD_TYPE_BULK_PENDING_MERGE_PUBLIC: - return prefix + config.BULK_PUBLIC_BRANCH_PREFIX - elif build_type == config.BUILD_TYPE_INDIVIDUAL_PUBLIC: - return prefix + config.INDIVIDUAL_PUBLIC_BRANCH_PREFIX - else: - raise ValueError("Unknown build type: ".format(build_type)) - - -def is_tool_jira_reportable(tool_name): - return tool_name in config.CUSTOM_FIELD_NAME_FOR_TOOL - - -def get_jira_reportable_tool_names(): - tool_names = list(config.TOOL_NAMES) - return list(filter(is_tool_jira_reportable, tool_names)) - - -def get_jira_reportable_repo_names_for_tool(): - tool_names = get_jira_reportable_tool_names() - filtered_repo_names = dict() - for tool_name, repo_names in config.REPO_NAMES_FOR_TOOL.items(): - if tool_name in tool_names: - filtered_repo_names[tool_name] = repo_names - - return filtered_repo_names - - -def get_jira_custom_field_ids_for_tool(): - tool_names = get_jira_reportable_tool_names() - filtered_field_ids = dict() - for tool_name, repo_names in config.CUSTOM_FIELD_ID_FOR_TOOL.items(): - if tool_name in tool_names: - filtered_field_ids[tool_name] = repo_names - - return filtered_field_ids - - -def deep_get(dictionary, keys, default=None): - return reduce(lambda d, key: d.get(key, default) if isinstance(d, dict) else default, keys.split("."), dictionary) - - -def get_all_pr_urls_from_comment(issue, pr_comment_prefix=None): - pr_urls = [] - comments = JIRA_CLIENT().comments(issue) - for comment in reversed(comments): - if pr_comment_prefix: - comment_body = comment.body - if comment_body.startswith(pr_comment_prefix): - pr_url = comment_body.split()[-1].strip('[]') - print(pr_url) - if pr_url.startswith(config.BITBUCKET_PR_URL_PREFIX): - pr_urls.append(pr_url) - else: - urls = re.findall(r'({}[^\s\]]+)'.format(config.BITBUCKET_PR_URL_PREFIX), comment.body) - pr_urls.extend(urls) - print('Found {} PRs from comments.'.format(len(pr_urls))) - return pr_urls - - -def are_all_prs_merged(pr_urls): - for pr_url in pr_urls: - pr = shared.bitbucket_utils.get_pullrequest_from_url(pr_url) - if pr.json() and 'state' in pr.json() and pr.json()['state'].lower() == 'merged': - print('PR merged: {}'.format(pr_url)) - else: - print('PR not merged: {}'.format(pr_url)) - return False - return True - - -def get_next_release_day(release_day): - d = datetime.datetime.now() - while d.weekday() != 1: - d += datetime.timedelta(days=release_day) - d = d.replace(hour=0, minute=0, second=0, microsecond=0) - print('Next Tuesday is {}'.format(d)) - return d - - -def get_in_progress_region_support_tickets(): - query = 'project 
= "DEX" AND summary ~ "{}" AND status in ("{}")'.format(REGION_SUPPORT_TICKET_SUMMARY_PREFIX, config.STATUS_IN_PROGRESS) - return jira_search_issues(query) - - -# Assumption: Bug Bash ticket is in the format of -# "[SDK] ACTION REQUIRED: DAL_TEST Customer Validation BugBash us-gov-fortworth-3" -# Ticket status should not be Done, Closed, or In Progress. -def get_unprocessed_bug_bash_tickets(): - query = ('project = "Bug Bash" AND summary ~ "{}" AND status not in ("{}", "{}", "{}", "{}")' - .format(BUG_BASH_TICKET_SUMMARY_PREFIX, config.STATUS_CLOSED, config.STATUS_DONE, config.STATUS_IN_PROGRESS, config.STATUS_IN_REVIEW)) - return jira_search_issues(query) - - -# Assumption: DEX ticket is in the format of -# "Next set of region support in Public SDKs - mx-queretaro-1" for master branch or -# "Next set of region support in preview SDKs - mx-queretaro-1" for preview branch -# Ticket status should not be Done, Closed, or In Progress. -def get_unprocessed_region_suppport_tickets(branch): - if branch.lower() == 'preview': - branch_in_query = 'preview' - elif branch.lower() == 'master': - branch_in_query = 'Public' - else: - raise ValueError('Branch must be either preview or master') - query = 'project = "DEX" AND summary ~ "{} {} SDKs" AND status not in ("{}", "{}", "{}", "{}")'.format( - REGION_SUPPORT_TICKET_SUMMARY_PREFIX, branch_in_query, config.STATUS_CLOSED, config.STATUS_DONE, config.STATUS_IN_PROGRESS, config.STATUS_MORE_INFORMATION_NEEDED) - return jira_search_issues(query) - - -# Assumption: DEX ticket uses "due date" field to specify the expected release date of the new region -# If the due date is before the coming Tuesday, it is ready be processed. -def get_region_support_tickets_to_process(branch): - issues = get_unprocessed_region_suppport_tickets(branch) - # For public SDK tickets, check due date and only return tickets that are due less than a week before the next SDK release. - # Otherwise, for preview SDK tickets, return all unprocessed ones. 
- if branch == 'master': - ready_issues = [] - for issue in issues: - for key in issue.raw['fields'].keys(): - if str(key).startswith('duedate'): - date_str = issue.raw['fields'][key] - date = datetime.datetime.strptime(date_str, '%Y-%m-%d') - today = datetime.datetime.now() - if date >= today: - next_tuesday = get_next_release_day(RELEASE_DAY) - if date <= next_tuesday: - print('Ticket {} with due date {} should be included in the next release.'.format(issue, date_str)) - ready_issues.append(issue) - print('Found {} issues ready to be added.'.format(len(ready_issues))) - return ready_issues - else: - return issues - - -# Assumption: DEX ticket is in the format of "Next set of region support in Public SDKs - mx-queretaro-1" -def get_region_id_from_dex_tickets(issues): - region_ids = [] - for issue in issues: - region_id = issue.raw['fields']['summary'].split()[-1] - region_ids.append(region_id) - return region_ids diff --git a/scripts/auto_gen_utils/verify.sh b/scripts/auto_gen_utils/verify.sh deleted file mode 100755 index 347939377a..0000000000 --- a/scripts/auto_gen_utils/verify.sh +++ /dev/null @@ -1,12 +0,0 @@ -set -e - -# TODO: fix flake8 problems in sdk_regions_updater, python_cli, and team_city_scripts -# flake8Excludes: "./venv,./temp,./input_ocibuild,./output_ocibuild*" -flake8Excludes="./venv,./temp,./input_ocibuild,./output_ocibuild*,./sdk_regions_updater,./python_cli,./team_city_scripts" - -# TODO: fix these problems so we don't have to ignore the errors -flake8IgnoredErrors="N806,N802,N803,N817,E501,E128,E241,E231,W291,W293" - -python -m flake8 --exclude=${flake8Excludes} --ignore=${flake8IgnoredErrors} - -pytest tests/ \ No newline at end of file diff --git a/scripts/clone_key_repo.sh b/scripts/clone_key_repo.sh deleted file mode 100755 index 76bef97294..0000000000 --- a/scripts/clone_key_repo.sh +++ /dev/null @@ -1,7 +0,0 @@ -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) - -echo "===== Setting up git secrets =====" -source ${SCRIPT_DIR}/../shared-build-service-scripts/setup-git-secrets.sh -echo "===== Done setting up git secrets =====" - -git clone ssh://git@bitbucket.oci.oraclecorp.com:7999/sdk/oci-sdk-cli-keys.git -b master --single-branch --depth 1 \ No newline at end of file diff --git a/scripts/generate_docs.sh b/scripts/generate_docs.sh deleted file mode 100755 index 3ec290bc07..0000000000 --- a/scripts/generate_docs.sh +++ /dev/null @@ -1,126 +0,0 @@ -#!/bin/bash - -set -e -set -x - -# Check if tag was set in the environment variable -if [ -z ${OCI_GO_SDK_VERSION_TAG+x} ]; then - # No, point it at the master branch - export OCI_GO_SDK_VERSION_TAG="master" -else - # Check the format of the tag. - if [[ ${OCI_GO_SDK_VERSION_TAG} =~ ^v[0-9][0-9]*\.[0-9][0-9]*\.[0-9][0-9]*$ ]]; then - echo "Version tag: ${OCI_GO_SDK_VERSION_TAG}" - else - echo "Version tag: ${OCI_GO_SDK_VERSION_TAG} should look like 'v1.2.3'" - exit 1 - fi -fi - -# Kill godoc servers that might be running -set +e -killall godoc -set -e - -version=$(echo ${OCI_GO_SDK_VERSION_TAG}|cut -d 'v' -f2) -find ../.. -name \*.go |xargs sed -i "s#https://docs\.cloud\.oracle\.com/en-us/iaas/tools/go-sdk-examples/latest/#https://docs\.cloud\.oracle\.com/en-us/iaas/tools/go-sdk-examples/${version}/#g" - - -# Check if port was set in the environment variable -if [ -z ${GODOC_PORT+x} ]; then - # No, set it to port 6060 - export GODOC_PORT=6060 -fi - -echo "Running godoc on port ${GODOC_PORT}..." - -godoc -http=:${GODOC_PORT} & -GODOC_PID=$! 
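# $! expands to the PID of the most recent background job, i.e. the godoc
# server started on the previous line; the polling loop below waits for it to
# respond once indexing finishes, and it is killed at the end of this script.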
-echo "Web server PID is ${GODOC_PID}" - -ATTEMPTS=24 # 240 seconds = 4 minutes max wait time for godoc server to have finished indexing -WAIT_TIME_SECONDS=10 - -n=0 - -# Temporarily turn "fail on error" off, because wget will report some 404s, but that's okay -set +e - -until [ "$n" -ge $ATTEMPTS ] -do - n=$((n+1)) - - echo "Attempt $n of ${ATTEMPTS}:" - echo "Waiting ${WAIT_TIME_SECONDS} seconds..." - sleep ${WAIT_TIME_SECONDS} - - # Mirror the website, starting at /pkg/github.com/oracle/oci-go-sdk/ - # But only allow the /pkg/github.com/oracle/oci-go-sdk/ and the lib directories. - if wget http://localhost:${GODOC_PORT}/pkg/github.com/oracle/oci-go-sdk/ ; then - # This was successful - n=0 - break - fi -done - -if [ "$n" -gt 0 ]; then - echo "Failed to contact godoc webserver. Aborting..." - exit 1 -fi - -echo "Successfully contacted godoc webserver. Mirroring..." - -# Mirror the website, starting at /pkg/github.com/oracle/oci-go-sdk/ -# But only allow the /pkg/github.com/oracle/oci-go-sdk/ and the lib directories. -wget -nv -m -k -erobots=off --no-host-directories --no-use-server-timestamps \ - --include-directories pkg/github.com/oracle/oci-go-sdk,lib \ - http://localhost:${GODOC_PORT}/pkg/github.com/oracle/oci-go-sdk/ \ - 2>&1 | grep -v "Last-modified header missing" - -# Turn "fail on error" back on -set -e - - -# Since we only have the directory for the oci-go-sdk, move the lib directory with -# the stylesheets and scripts into it. We replace the references below. -mv lib pkg/github.com/oracle/oci-go-sdk/ - -find . -type f -name \*.html -or -name \*.css - -echo "Replacing links..." - -function xargs_inplace_sed() { - if [[ "$OSTYPE" == "darwin"* ]]; then - xargs -n 10 sed -i '' "$@" - else - xargs -n 10 sed -i "$@" - fi -} - -# There are some links that point to localhost. -# Change them so they point to the public internet. -# (May not work on ONSR, but we can't mirror everything -- the entire Go documentation). -# We also remove the jquery stylesheets and scripts. They have 404s in them. -find . -type f -name \*.html -or -name \*.css \ - | xargs_inplace_sed \ - -e "s_http://localhost:${GODOC_PORT}/pkg/builtin_https://godoc.org/builtin_g" \ - -e "s_http://localhost:${GODOC_PORT}/pkg/_https://godoc.org/_g" \ - -e "s_http://localhost:${GODOC_PORT}/doc/_https://golang.org/_g" \ - -e "s_http://localhost:${GODOC_PORT}/search_https://golang.org/search_g" \ - -e "s_http://localhost:${GODOC_PORT}/blog/_https://blog.golang.org/_g" \ - -e "s_http://localhost:${GODOC_PORT}/help/_https://golang.org/help/_g" \ - -e "s_http://localhost:${GODOC_PORT}/project/_https://golang.org/project/_g" \ - -e "s_http://localhost:${GODOC_PORT}/LICENSE_https://golang.org/LICENSE_g" \ - -e "s_http://localhost:${GODOC_PORT}/src/github.com/oracle/oci-go-sdk/_https://github.com/oracle/oci-go-sdk/blob/${OCI_GO_SDK_VERSION_TAG}/_g" \ - -e 's_^$__' \ - -e 's_^$__' \ - -e "s_http://localhost:${GODOC_PORT}/_https://golang.org/_g" \ - -e "s_../../../../lib/godoc_lib/godoc_g" - -# Change into the directory with the docs and zip them up at top level. -cd pkg/github.com/oracle/oci-go-sdk/ -zip -r ../../../../oci-go-sdk-godoc.zip * -cd - - -# Kill the godoc server. -kill ${GODOC_PID} diff --git a/scripts/setup_test_docker.sh b/scripts/setup_test_docker.sh deleted file mode 100644 index cf95189385..0000000000 --- a/scripts/setup_test_docker.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash - -## This script recieves -## 1. A setting for the private key -## 2. A setting for the make file name -## 3. 
A setting for make file target -## 4. A setting to run the make targer w or without debug - -set -e ; set -x ; -echo $GOPATH -type -a go -type -a golint -type -a gofmt - -# Inject the key into the key file -set +x -cp ./go_sdk_test_user_key.pem $TF_VAR_private_key_path -set -x - -#Run the test -# cd $GOPATH/src/github.com/oracle/oci-go-sdk/ -cd ${SOURCE_DIR} -go mod tidy \ No newline at end of file diff --git a/scripts/test_pom.original.xml b/scripts/test_pom.original.xml deleted file mode 100644 index 7920a077de..0000000000 --- a/scripts/test_pom.original.xml +++ /dev/null @@ -1,1269 +0,0 @@ - - - - 4.0.0 - - - com.oracle.pic.commons - pic-pom - 0.4.3 - - - com.oci.sdk - go-sdk - 0.0.1 - - Public Go SDK - - - PREVIEW - ${env.PROJECT_NAME} - ${project.build.directory} - ${project.build.directory}/preferred - ${project.build.directory}/preprocessed - - 0.1.64 - coreservices-api-spec-preview - coreservices-api-spec-20160918-external.yaml - base.proto.yaml - blockstorage.proto.yaml - compute.proto.yaml - vcn.proto.yaml - - 0.2.9-PREVIEW - identity-control-plane-api-spec-preview - identity-control-plane-api-spec-20160918.yaml - - 1.0.16 - oralb-api-spec-preview - spec-20170115.yaml - - 1.1.17-PREVIEW - casper-api-spec - casper-api.yaml - - 0.0.8-releasePreview - dbaas-api-spec - dbaas-api-spec-20160918.yaml - - 0.1.8 - hemlock-spec - hemlock-api-20160918.yaml - - 0.0.11 - fss-api-spec - fss-api-spec-20171215.yaml - - 1.0.0-1-PREVIEW - public-dns-api-spec - public-dns-api-spec.yaml - - 1.0.0 - email-api-spec - email-api-spec.yaml - - 0.0.2 - maestro-spec - api.yaml - - 0.0.40 - kms-api-spec - kms-api-spec-20180201.yaml - - 0.0.4 - resource-query-service-spec - rqs.yaml - - UTF-8 - - ${project.basedir}/release-sdk.txt - - - - - - - org.apache.maven.plugins - maven-dependency-plugin - 2.10 - - - unpack-coreservices - initialize - - unpack - - - - - com.oracle.pic.commons - ${core.artifact.id} - jar - **/* - ${spec.temp.dir}/core - - - - - - unpack-identity - initialize - - unpack - - - - - com.oracle.pic.identity - ${identity.artifact.id} - jar - **/* - ${spec.temp.dir}/identity - - - - - - unpack-oralb - initialize - - unpack - - - - - com.oracle.pic.lb - ${lb.artifact.id} - jar - **/* - ${spec.temp.dir}/lb - - - - - - unpack-objectstorage - initialize - - unpack - - - - - com.oracle.pic.casper - ${objectstorage.artifact.id} - jar - **/* - ${spec.temp.dir}/objectstorage - - - - - - unpack-database - initialize - - unpack - - - - - com.oracle.pic.dbaas - ${database.artifact.id} - jar - **/* - ${spec.temp.dir}/database - - - - - - unpack-audit - initialize - - unpack - - - - - com.oracle.pic.sherlock - ${audit.artifact.id} - jar - **/* - ${spec.temp.dir}/audit - - - - - - unpack-filestorage - initialize - - unpack - - - - - com.oracle.pic.ffsw - ${filestorage.artifact.id} - jar - **/* - ${spec.temp.dir}/filestorage - - - - - - unpack-dns - initialize - - unpack - - - - - com.oracle.pic.dns.pub - ${dns.artifact.id} - jar - **/* - ${spec.temp.dir}/dns - - - - - - unpack-email - initialize - - unpack - - - - - com.oracle.pic.email - ${email.artifact.id} - jar - **/* - ${spec.temp.dir}/email - - - - - - unpack-kms - initialize - - unpack - - - - - com.oracle.pic.kms - ${kms.artifact.id} - jar - **/* - ${spec.temp.dir}/kms - - - - - - unpack-resourcemanager - initialize - - unpack - - - - - com.oracle.pic.orchestration.orm - ${resourcemanager.artifact.id} - jar - **/* - ${spec.temp.dir}/resourcemanager - - - - - - unpack-resourcequery - initialize - - unpack - - - - - com.oracle.pic.query - 
${resourcequery.artifact.id} - jar - **/* - ${spec.temp.dir}/resourcequery - - - - - - - - - - com.oracle.oci.sdk.utilities - spec-conditionals-preprocessor-plugin - 0.0.1-SNAPSHOT - - - - spec-conditionals-prefer-core - initialize - - prefer - - - - ${spec.temp.dir}/core/${core.spec.name} - - ${preferred.temp.dir}/${core.spec.name} - - - - spec-conditionals-preprocess-core - initialize - - preprocess - - - ${preferred.temp.dir}/${core.spec.name} - ${preprocessed.temp.dir}/${core.spec.name} - ${enabled.groups.file} - - - - - - spec-conditionals-prefer-identity - initialize - - prefer - - - - - ${spec.temp.dir}/identity/source/${identity.spec.name} - - ${spec.temp.dir}/identity/${identity.spec.name} - - ${preferred.temp.dir}/${identity.spec.name} - - - - spec-conditionals-preprocess-identity - initialize - - preprocess - - - ${preferred.temp.dir}/${identity.spec.name} - ${preprocessed.temp.dir}/${identity.spec.name} - ${enabled.groups.file} - - - - - - spec-conditionals-prefer-objectstorage - initialize - - prefer - - - - - ${spec.temp.dir}/objectstorage/source/${objectstorage.spec.name} - - ${spec.temp.dir}/objectstorage/${objectstorage.spec.name} - - ${preferred.temp.dir}/${objectstorage.spec.name} - - - - spec-conditionals-preprocess-objectstorage - initialize - - preprocess - - - ${preferred.temp.dir}/${objectstorage.spec.name} - ${preprocessed.temp.dir}/${objectstorage.spec.name} - ${enabled.groups.file} - - - - - - spec-conditionals-prefer-lb - initialize - - prefer - - - - - ${spec.temp.dir}/lb/source/${lb.spec.name} - - ${spec.temp.dir}/lb/${lb.spec.name} - - ${preferred.temp.dir}/${lb.spec.name} - - - - spec-conditionals-preprocess-lb - initialize - - preprocess - - - ${preferred.temp.dir}/${lb.spec.name} - ${preprocessed.temp.dir}/${lb.spec.name} - ${enabled.groups.file} - - - - - - spec-conditionals-prefer-database - initialize - - prefer - - - - - ${spec.temp.dir}/database/source/${database.spec.name} - - ${spec.temp.dir}/database/${database.spec.name} - - ${preferred.temp.dir}/${database.spec.name} - - - - spec-conditionals-preprocess-database - initialize - - preprocess - - - ${preferred.temp.dir}/${database.spec.name} - ${preprocessed.temp.dir}/${database.spec.name} - ${enabled.groups.file} - - - - - - spec-conditionals-prefer-audit - initialize - - prefer - - - - - ${spec.temp.dir}/audit/source/${audit.spec.name} - - ${spec.temp.dir}/audit/${audit.spec.name} - - ${preferred.temp.dir}/${audit.spec.name} - - - - spec-conditionals-preprocess-audit - initialize - - preprocess - - - ${preferred.temp.dir}/${audit.spec.name} - ${preprocessed.temp.dir}/${audit.spec.name} - ${enabled.groups.file} - - - - - - spec-conditionals-prefer-filestorage - initialize - - prefer - - - - - ${spec.temp.dir}/filestorage/source/${filestorage.spec.name} - - ${spec.temp.dir}/filestorage/${filestorage.spec.name} - - ${preferred.temp.dir}/${filestorage.spec.name} - - - - spec-conditionals-preprocess-filestorage - initialize - - preprocess - - - ${preferred.temp.dir}/${filestorage.spec.name} - ${preprocessed.temp.dir}/${filestorage.spec.name} - ${enabled.groups.file} - - - - - - spec-conditionals-prefer-dns - initialize - - prefer - - - - - ${spec.temp.dir}/dns/source/${dns.spec.name} - - ${spec.temp.dir}/dns/${dns.spec.name} - - ${preferred.temp.dir}/${dns.spec.name} - - - - spec-conditionals-preprocess-dns - initialize - - preprocess - - - ${preferred.temp.dir}/${dns.spec.name} - ${preprocessed.temp.dir}/${dns.spec.name} - ${enabled.groups.file} - - - - - - spec-conditionals-prefer-email - initialize 
- - prefer - - - - - ${spec.temp.dir}/email/source/${email.spec.name} - - ${spec.temp.dir}/email/${email.spec.name} - - ${preferred.temp.dir}/${email.spec.name} - - - - spec-conditionals-preprocess-email - initialize - - preprocess - - - ${preferred.temp.dir}/${email.spec.name} - ${preprocessed.temp.dir}/${email.spec.name} - ${enabled.groups.file} - - - - - - spec-conditionals-prefer-kms - initialize - - prefer - - - - - ${spec.temp.dir}/kms/source/${kms.spec.name} - - ${spec.temp.dir}/kms/${kms.spec.name} - - ${preferred.temp.dir}/${kms.spec.name} - - - - spec-conditionals-preprocess-kms - initialize - - preprocess - - - ${preferred.temp.dir}/${kms.spec.name} - ${preprocessed.temp.dir}/${kms.spec.name} - ${enabled.groups.file} - - - - - - spec-conditionals-prefer-resourcemanager - initialize - - prefer - - - - - ${spec.temp.dir}/resourcemanager/source/${resourcemanager.spec.name} - - ${spec.temp.dir}/resourcemanager/${resourcemanager.spec.name} - - ${preferred.temp.dir}/${resourcemanager.spec.name} - - - - spec-conditionals-preprocess-resourcemanager - initialize - - preprocess - - - ${preferred.temp.dir}/${resourcemanager.spec.name} - ${preprocessed.temp.dir}/${resourcemanager.spec.name} - ${enabled.groups.file} - - - - - spec-conditionals-prefer-resourcequery - initialize - - prefer - - - - - ${spec.temp.dir}/resourcequery/source/${resourcequery.spec.name} - - ${spec.temp.dir}/resourcequery/${resourcequery.spec.name} - - ${preferred.temp.dir}/${resourcequery.spec.name} - - - - spec-conditionals-preprocess-resourcequery - initialize - - preprocess - - - ${preferred.temp.dir}/${resourcequery.spec.name} - ${preprocessed.temp.dir}/${resourcequery.spec.name} - ${enabled.groups.file} - - - - - - - - com.oracle.bmc.sdk - bmc-sdk-swagger-maven-plugin - 1.25-SNAPSHOT - - - go-public-sdk-core - compile - - generate - - - oracle-go-sdk - ${preprocessed.temp.dir}/${core.spec.name} - ${env.GOPATH}/src/${fullyQualifiedProjectName} - ${generationType} - core - - core - ${fullyQualifiedProjectName} - - ${project.basedir}/featureId.yaml - - - - go-public-sdk-identity - compile - - generate - - - oracle-go-sdk - ${preprocessed.temp.dir}/${identity.spec.name} - ${env.GOPATH}/src/${fullyQualifiedProjectName} - ${generationType} - identity - - identity - ${fullyQualifiedProjectName} - - ${project.basedir}/featureId.yaml - - - - go-public-sdk-coreservices - compile - - generate - - - oracle-go-sdk - ${preprocessed.temp.dir}/${core.spec.name} - ${env.GOPATH}/src/${fullyQualifiedProjectName} - ${generationType} - core - - core - ${fullyQualifiedProjectName} - iaas - - ${project.basedir}/featureId.yaml - - - - go-public-sdk-oralb - compile - - generate - - - oracle-go-sdk - ${preprocessed.temp.dir}/${lb.spec.name} - ${env.GOPATH}/src/${fullyQualifiedProjectName} - ${generationType} - loadbalancer - - loadbalancer - ${fullyQualifiedProjectName} - iaas - - ${project.basedir}/featureId.yaml - - - - go-public-sdk-objectstorage - compile - - generate - - - oracle-go-sdk - ${preprocessed.temp.dir}/${objectstorage.spec.name} - ${env.GOPATH}/src/${fullyQualifiedProjectName} - ${generationType} - objectstorage - - objectstorage - ${fullyQualifiedProjectName} - - ${project.basedir}/featureId.yaml - - - - go-public-sdk-database - compile - - generate - - - oracle-go-sdk - ${preprocessed.temp.dir}/${database.spec.name} - ${env.GOPATH}/src/${fullyQualifiedProjectName} - ${generationType} - database - - database - ${fullyQualifiedProjectName} - - ${project.basedir}/featureId.yaml - - - - go-public-sdk-audit - compile - - 
generate - - - oracle-go-sdk - ${preprocessed.temp.dir}/${audit.spec.name} - ${env.GOPATH}/src/${fullyQualifiedProjectName} - ${generationType} - audit - - audit - ${fullyQualifiedProjectName} - - ${project.basedir}/featureId.yaml - - - - go-public-sdk-filestorage - compile - - generate - - - oracle-go-sdk - ${preprocessed.temp.dir}/${filestorage.spec.name} - ${env.GOPATH}/src/${fullyQualifiedProjectName} - ${generationType} - filestorage - - filestorage - ${fullyQualifiedProjectName} - - ${project.basedir}/featureId.yaml - - - - go-public-sdk-dns.pub - compile - - generate - - - oracle-go-sdk - ${preprocessed.temp.dir}/${dns.spec.name} - ${env.GOPATH}/src/${fullyQualifiedProjectName} - ${generationType} - dns - - dns - ${fullyQualifiedProjectName} - - ${project.basedir}/featureId.yaml - - - - go-public-sdk-email - compile - - generate - - - oracle-go-sdk - ${preprocessed.temp.dir}/${email.spec.name} - ${env.GOPATH}/src/${fullyQualifiedProjectName} - ${generationType} - email - - email - ${fullyQualifiedProjectName} - - ${project.basedir}/featureId.yaml - - - - go-public-sdk-resourcemanager - compile - - generate - - - oracle-go-sdk - ${preprocessed.temp.dir}/${resourcemanager.spec.name} - ${env.GOPATH}/src/${fullyQualifiedProjectName} - ${generationType} - resourcemanager - - resourcemanager - ${fullyQualifiedProjectName} - - ${project.basedir}/featureId.yaml - - - - go-public-sdk-kms - compile - - generate - - - oracle-go-sdk - ${preprocessed.temp.dir}/${kms.spec.name} - ${env.GOPATH}/src/${fullyQualifiedProjectName} - ${generationType} - kms - - kms - ${fullyQualifiedProjectName} - false - false - - - - - go-public-sdk-resourcequery - compile - - generate - - - oracle-go-sdk - ${preprocessed.temp.dir}/${resourcequery.spec.name} - ${env.GOPATH}/src/${fullyQualifiedProjectName} - ${generationType} - resourcequery - - resourcequery - ${fullyQualifiedProjectName} - query - - - - - - - maven-clean-plugin - 3.0.0 - - - - lib/oci/core - - **/* - - - util.rb - - - - lib/oci/identity - - **/* - - - util.rb - - - - lib/oci/load_balancer - - util.rb - - - **/* - - - - lib/oci/database - - util.rb - - - **/* - - - - lib/oci/object_storage - - util.rb - - - **/* - - - - lib/oci/audit - - util.rb - - - **/* - - - - lib/oci/file_storage - - util.rb - - - **/* - - - - lib/oci/dns - - util.rb - - - **/* - - - - lib/oci/email - - util.rb - - - **/* - - - - lib/oci/resourcemanager - - util.rb - - - **/* - - - - lib/oci/resourcequery - - util.rb - - - **/* - - - - .yardoc - - **/* - - - - doc - - **/* - - - - variants - - **/* - - - - - - - - - - - - com.oracle.pic.commons - ${core.artifact.id} - ${core.artifact.version} - - - com.oracle.pic.identity - ${identity.artifact.id} - ${identity.artifact.version} - - - com.oracle.pic.casper - ${objectstorage.artifact.id} - ${objectstorage.artifact.version} - - - com.oracle.pic.dbaas - ${database.artifact.id} - ${database.artifact.version} - - - com.oracle.pic.sherlock - ${audit.artifact.id} - ${audit.artifact.version} - - - com.oracle.pic.ffsw - ${filestorage.artifact.id} - ${filestorage.artifact.version} - - - com.oracle.pic.dns.pub - ${dns.artifact.id} - ${dns.artifact.version} - - - com.oracle.pic.email - ${email.artifact.id} - ${email.artifact.version} - - - com.oracle.pic.kms - ${kms.artifact.id} - ${kms.artifact.version} - - - com.oracle.pic.lb - ${lb.artifact.id} - ${lb.artifact.version} - - - com.oracle.pic.orchestration.orm - ${resourcemanager.artifact.id} - ${resourcemanager.artifact.version} - - - com.oracle.pic.query - ${resourcequery.artifact.id} - 
${resourcequery.artifact.version} - - - com.fasterxml.jackson.core - jackson-annotations - 2.5.4 - - - com.fasterxml.jackson.core - jackson-core - 2.5.4 - - - com.fasterxml.jackson.core - jackson-databind - 2.5.4 - - - com.google.collections - google-collections - 1.0 - - - com.google.guava - guava - 18.0 - - - commons-codec - commons-codec - 1.9 - - - commons-io - commons-io - 2.3 - - - io.swagger - swagger-codegen - 2.1.2 - - - io.swagger - swagger-models - 1.5.0 - - - io.swagger - swagger-parser - 1.0.8 - - - joda-time - joda-time - 2.8.2 - - - junit - junit - 4.12 - - - org.apache.commons - commons-lang3 - 3.2.1 - - - org.apache.maven - maven-artifact - 2.0.7 - - - org.apache.maven - maven-artifact-manager - 2.0.7 - - - org.apache.maven - maven-core - 3.3.3 - - - org.apache.maven - maven-model - 3.3.3 - - - org.apache.maven - maven-plugin-api - 3.3.3 - - - org.apache.maven - maven-project - 2.0.11 - - - org.apache.maven - maven-repository-metadata - 3.3.3 - - - org.apache.maven - maven-settings - 3.3.3 - - - org.apache.maven.plugin-tools - maven-plugin-annotations - 3.3 - - - org.codehaus.plexus - plexus-classworlds - 2.5.2 - - - org.codehaus.plexus - plexus-container-default - 1.6 - - - org.codehaus.plexus - plexus-interpolation - 1.22 - - - org.codehaus.plexus - plexus-utils - 3.0.20 - - - org.projectlombok - lombok - 1.16.6 - - - org.slf4j - slf4j-api - 1.7.12 - - - - diff --git a/shared-build-service-scripts/.gitignore b/shared-build-service-scripts/.gitignore deleted file mode 100644 index 6f5cf8a928..0000000000 --- a/shared-build-service-scripts/.gitignore +++ /dev/null @@ -1,20 +0,0 @@ -target/ -.idea/ -.vscode/ -*.iml -.classpath -.project -.settings -.factorypath -.ruby-version -.DS_Store -/bin/ -*.pyc -*.egg-info -py*_sdk/ -py*_cli/ -.DS_Store -**/*.versionsBackup -input_ocibuild/ -output_ocibuild*/ -clones/ \ No newline at end of file diff --git a/shared-build-service-scripts/README.md b/shared-build-service-scripts/README.md deleted file mode 100644 index e2a9f5922b..0000000000 --- a/shared-build-service-scripts/README.md +++ /dev/null @@ -1,18 +0,0 @@ -# Shared Build Service Scripts - -These scripts are meant to be used as a Git submodule inside another repository. - -``` -git submodule add -b main ssh://git@bitbucket.oci.oraclecorp.com:7999/sdk/shared-build-service-scripts.git -``` - -Make sure that in your ocibuild.conf file, you don't set cloneSubmodules: false (the [default is true](https://confluence.oci.oraclecorp.com/display/BLD/Build+Service+ocibuild.conf+Reference+Guide#BuildServiceocibuild.confReferenceGuide-GitStepProperties)). - - -Also, the Git submodule does not automatically update to the latest version. If we make changes to the shared-build-service-scripts repo or add new scripts, you have to update the Git submodule in your repository and commit the change: - -``` -git submodule update --remote -git commit -a -m "Submodule update." 
-git push origin HEAD -``` diff --git a/shared-build-service-scripts/check-secrets.sh b/shared-build-service-scripts/check-secrets.sh deleted file mode 100755 index 4b7cd41239..0000000000 --- a/shared-build-service-scripts/check-secrets.sh +++ /dev/null @@ -1,64 +0,0 @@ -#!/bin/bash - -set -e - -SHARED_SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) - -if [ -z ${BITBUCKET_READONLY_PRIVATEKEY+x} ]; then - echo "BITBUCKET_READONLY_PRIVATEKEY is unset" -else - echo "BITBUCKET_READONLY_PRIVATEKEY is set, md5:" - echo ${BITBUCKET_READONLY_PRIVATEKEY} | md5sum -fi - -if [ -z ${BITBUCKET_READONLY_PASSPHRASE+x} ]; then - echo "BITBUCKET_READONLY_PASSPHRASE is unset" -else - echo "BITBUCKET_READONLY_PASSPHRASE is set, md5:" - echo ${BITBUCKET_READONLY_PASSPHRASE} | md5sum -fi - -if [ -z ${BITBUCKET_READWRITE_PRIVATEKEY+x} ]; then - echo "BITBUCKET_READWRITE_PRIVATEKEY is unset" -else - echo "BITBUCKET_READWRITE_PRIVATEKEY is set, md5:" - echo ${BITBUCKET_READWRITE_PRIVATEKEY} | md5sum -fi - -if [ -z ${BITBUCKET_READWRITE_PASSPHRASE+x} ]; then - echo "BITBUCKET_READWRITE_PASSPHRASE is unset" -else - echo "BITBUCKET_READWRITE_PASSPHRASE is set, md5:" - echo ${BITBUCKET_READWRITE_PASSPHRASE} | md5sum -fi - -if [ -z ${BITBUCKET_API_USERNAME+x} ]; then - echo "BITBUCKET_API_USERNAME is unset" -else - echo "BITBUCKET_API_USERNAME is set, md5:" - echo ${BITBUCKET_API_USERNAME} | md5sum -fi - -if [ -z ${BITBUCKET_API_PASSWORD+x} ]; then - echo "BITBUCKET_API_PASSWORD is unset" -else - echo "BITBUCKET_API_PASSWORD is set, md5:" - echo ${BITBUCKET_API_PASSWORD} | md5sum -fi - -if [ -z ${JIRA_USERNAME+x} ]; then - echo "JIRA_USERNAME is unset" -else - echo "JIRA_USERNAME is set, md5:" - echo ${JIRA_USERNAME} | md5sum -fi - -if [ -z ${JIRA_PASSWORD+x} ]; then - echo "JIRA_PASSWORD is unset" -else - echo "JIRA_PASSWORD is set, md5:" - echo ${JIRA_PASSWORD} | md5sum -fi - -git version -type -a git diff --git a/shared-build-service-scripts/git-submodule-helpers.sh b/shared-build-service-scripts/git-submodule-helpers.sh deleted file mode 100755 index ef7d599b8b..0000000000 --- a/shared-build-service-scripts/git-submodule-helpers.sh +++ /dev/null @@ -1,111 +0,0 @@ -#!/bin/bash - -# Usage: -# -# source git-submodule-helpers.sh -# -# or -# -# . git-submodule-helpers.sh -# - -SHARED_SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) - -# Usage: -# -# git_submodule_update_all -# -# To update all submodules, if all the submodules have a tracking branch set. -# Must have this "branch = ..." setting: -# cat .gitmodules -# [submodule "bmc-sdk-swagger-validator/test/python-sdk-preview-submodule"] -# path = bmc-sdk-swagger-validator/test/python-sdk -# url = ssh://git@bitbucket.oci.oraclecorp.com:7999/sdk/python-sdk.git -# branch = preview -function git_submodule_update_all() { - # Check that - if ! git submodule foreach 'git config -f ${toplevel}/.gitmodules --get submodule."$name".branch' > /dev/null 2>&1; then - echo "Not all submodules had tracking branches" - return 1 - fi - git submodule foreach 'git pull origin `git config -f ${toplevel}/.gitmodules --get submodule."$name".branch`' -} - -# Usage: -# -# git_submodule_update 'path/to/submodule' -# -# To update a single submodule, if it has a tracking branch set. -# Must have this "branch = ..." 
setting: -# cat .gitmodules -# [submodule "bmc-sdk-swagger-validator/test/python-sdk-preview-submodule"] -# path = bmc-sdk-swagger-validator/test/python-sdk -# url = ssh://git@bitbucket.oci.oraclecorp.com:7999/sdk/python-sdk.git -# branch = preview -function git_submodule_update() { - path_to_submodule="$1" - - if [ -z ${BLD_INPUT_DIR+x} ]; then - echo "BLD_INPUT_DIR is unset" - return 1 - fi - - if [ ! -d ${path_to_submodule} ]; then - echo "Directory ${path_to_submodule} does not exist" - return 1 - fi - - # Determine the path from the toplevel Git directory to the submodule - cd ${path_to_submodule} - abs_path_to_submodule=`pwd` - cd - > /dev/null - rel_path_to_submodule=`${SHARED_SCRIPT_DIR}/relpath.sh ${BLD_INPUT_DIR} ${abs_path_to_submodule}` - # echo "rel_path_to_submodule: $rel_path_to_submodule" - - # Determine the name of the submodule, based on the rel_path_to_submodule - cd ${BLD_INPUT_DIR} - submodule_name=`git config -f .gitmodules --get-regexp "submodule\..*\.path" "^${rel_path_to_submodule}$" | sed 's_^submodule\.\(.*\)\.path .*$_\1_'` - cd - > /dev/null - # echo "submodule_name: $submodule_name" - if [ "" == "${submodule_name}" ]; then - echo "Could not find a submodule in directory ${path_to_submodule}" - return 1 - fi - - # Determine the tracking branch of the submodule, based on submodule_name - cd ${BLD_INPUT_DIR} - submodule_tracking_branch=`git config -f .gitmodules --get submodule."${submodule_name}".branch` - cd - > /dev/null - # echo "submodule_tracking_branch: $submodule_tracking_branch" - if [ "" == "${submodule_tracking_branch}" ]; then - echo "Could not find a tracking branch for the submodule in directory ${path_to_submodule}" - return 1 - fi - - # Change into the directory and pull the tracking branch - cd ${path_to_submodule} - git pull --ff-only origin ${submodule_tracking_branch} - cd - > /dev/null -} - -# Usage: -# -# git_submodule_update_to_branch 'path/to/submodule' 'preview' -# -# To update a single submodule to a certain remote branch. -function git_submodule_update_to_branch() { - path_to_submodule="$1" - target_branch="$2" - - if [ ! -d ${path_to_submodule} ]; then - echo "Directory ${path_to_submodule} does not exist" - return 1 - fi - - cd ${path_to_submodule} - git pull --ff-only origin ${target_branch} - ret=$? - cd - > /dev/null - - return $ret -} diff --git a/shared-build-service-scripts/make-pr-comment.sh b/shared-build-service-scripts/make-pr-comment.sh deleted file mode 100755 index 04ec489f4a..0000000000 --- a/shared-build-service-scripts/make-pr-comment.sh +++ /dev/null @@ -1,136 +0,0 @@ -#!/bin/bash - -# Usage: -# -# Read from stdin: -# -# echo "Hello world!" | buildServiceScripts/make-pr-comment.sh -# -# -# Read from file: -# -# buildServiceScripts/make-pr-comment.sh myFile.txt -# -# The script JSON-escapes the input. -# -# Uses username and password from environment variables: -# - BITBUCKET_API_USERNAME -# - BITBUCKET_API_PASSWORD -# -# Uses Bitbucket project and repo from environment variables: -# - BITBUCKET_PROJECT -# - BITBUCKET_REPO -# -# If those are not set, then the script will attempt to extract -# them from BLD_VCS_ROOT or BLD_VSC_ROOT. -# -# Only runs if in the context of a pull-request build. -# Extracts the pull request id from BLD_BRANCH. 
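For reference, the flow this script implements (JSON-escape the comment text, then POST it to the Bitbucket pull-request comments endpoint) can be sketched in Go, the repository's own language. This is an illustrative sketch only: the endpoint shape, headers, and environment variables are the ones the script itself uses, while the helper name and error handling are hypothetical, and encoding/json takes over the escaping that the hand-rolled escape_json_string function below performs character by character.

```
// Illustrative Go equivalent of make-pr-comment.sh (hypothetical helper,
// not part of this repository). encoding/json performs the escaping that
// the script's escape_json_string function implements by hand.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
	"os"
	"strings"
)

func postPRComment(text string) error {
	// The script derives the pull request id from BLD_BRANCH ("PR-123" -> "123").
	prID := strings.TrimPrefix(os.Getenv("BLD_BRANCH"), "PR-")

	// json.Marshal escapes quotes, backslashes, and control characters.
	body, err := json.Marshal(map[string]string{"text": text})
	if err != nil {
		return err
	}

	url := fmt.Sprintf(
		"https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/%s/repos/%s/pull-requests/%s/comments",
		os.Getenv("BITBUCKET_PROJECT"), os.Getenv("BITBUCKET_REPO"), prID)

	req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(body))
	if err != nil {
		return err
	}
	req.SetBasicAuth(os.Getenv("BITBUCKET_API_USERNAME"), os.Getenv("BITBUCKET_API_PASSWORD"))
	req.Header.Set("X-Atlassian-Token", "no-check")
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 300 {
		return fmt.Errorf("posting comment failed: %s", resp.Status)
	}
	return nil
}

func main() {
	if err := postPRComment("Hello world!"); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```

Note that the script's curl invocation passes -k (skip TLS verification); the sketch deliberately leaves TLS settings at their Go defaults.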
- -set -e - -# from https://stackoverflow.com/questions/10053678/escaping-characters-in-bash-for-json/77930217#77930217 -function escape_json_string() { - local input=$1 - for ((i = 0; i < ${#input}; i++)); do - local char="${input:i:1}" - local escaped="${char}" - case "${char}" in - $'"' ) escaped="\\\"";; - $'\\') escaped="\\\\";; - *) - if [[ "${char}" < $'\x20' ]]; then - case "${char}" in - $'\b') escaped="\\b";; - $'\f') escaped="\\f";; - $'\n') escaped="\\n";; - $'\r') escaped="\\r";; - $'\t') escaped="\\t";; - *) escaped=$(printf "\u%04X" "'${char}") - esac - fi;; - esac - echo -n "${escaped}" - done -} - -SHARED_SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) - -if [ $# -lt 1 ]; then - # read from STDIN - INPUT=$(> $tempfile - done < "$1" - TEXT=$(<$tempfile) - rm $tempfile -fi - -if ! echo "${BLD_BRANCH}" | grep "^PR-[1-9][0-9]*$" > /dev/null; then - echo "Not running as a pull-request build." - exit 0 -fi - -PR_ID=`echo ${BLD_BRANCH} | sed "s/^PR-//"` -echo "Running as pull-request build for pull-request ${PR_ID}." - -if [ -z ${BITBUCKET_API_USERNAME+x} ]; then - echo "BITBUCKET_API_USERNAME is unset" - exit 1 -fi - -if [ -z ${BITBUCKET_API_PASSWORD+x} ]; then - echo "BITBUCKET_API_PASSWORD is unset" - exit 1 -fi - -if [ ! -z ${BLD_VCS_ROOT+x} ]; then - vcsroot="${BLD_VCS_ROOT}" -elif [ ! -z ${BLD_VSC_ROOT+x} ]; then - # Build service team has misspelled the variable - vcsroot="${BLD_VSC_ROOT}" -fi - -echo "VCS root: ${vcsroot}" - -if [ -z ${BITBUCKET_PROJECT+x} ]; then - echo "BITBUCKET_PROJECT is unset, extracting from vcsroot ${vcsroot}" - - # ssh://git@bitbucket.oci.oraclecorp.com:7999/sdk/bmc-sdk-swagger-validator.git - # should become "SDK" - - if ! echo "${vcsroot}" | grep "^ssh://git@bitbucket.oci.oraclecorp.com:7999/" > /dev/null; then - echo "vcsroot does not start with 'ssh://git@bitbucket.oci.oraclecorp.com:7999/'" - exit 1 - fi - BITBUCKET_PROJECT=`echo ${vcsroot} | sed 's_^ssh://[^:]*:7999/\([^/]*\).*$_\1_' | tr '[a-z]' '[A-Z]'` - echo "Using BITBUCKET_PROJECT=${BITBUCKET_PROJECT}" -fi -if [ -z ${BITBUCKET_REPO+x} ]; then - echo "BITBUCKET_REPO is unset, extracting from vcsroot ${vcsroot}" - - # ssh://git@bitbucket.oci.oraclecorp.com:7999/sdk/bmc-sdk-swagger-validator.git - # should become "bmc-sdk-swagger-validator" - - if ! 
echo "${vcsroot}" | grep "^ssh://git@bitbucket.oci.oraclecorp.com:7999/" > /dev/null; then - echo "vcsroot does not start with 'ssh://git@bitbucket.oci.oraclecorp.com:7999/'" - exit 1 - fi - BITBUCKET_REPO=`echo ${vcsroot} | sed 's_^ssh://[^:]*:7999/[^/]*/\(.*\)\.git$_\1_'` - echo "Using BITBUCKET_REPO=${BITBUCKET_REPO}" -fi - -url="https://bitbucket.oci.oraclecorp.com/rest/api/1.0/projects/${BITBUCKET_PROJECT}/repos/${BITBUCKET_REPO}/pull-requests/${PR_ID}/comments" - -curl -k -u ${BITBUCKET_API_USERNAME}:${BITBUCKET_API_PASSWORD} \ - ${url} \ - -X POST \ - -H "X-Atlassian-Token: no-check" \ - -H "Content-Type: application/json" \ - -d "{\"text\":\"${TEXT}\"}" diff --git a/shared-build-service-scripts/relpath.sh b/shared-build-service-scripts/relpath.sh deleted file mode 100755 index 206f4fb27f..0000000000 --- a/shared-build-service-scripts/relpath.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -s=$(cd ${1%%/};pwd); d=$(cd $2;pwd); b=; while [ "${d#$s/}" == "${d}" ] -do s=$(dirname $s);b="../${b}"; done; echo ${b}${d#$s/} diff --git a/shared-build-service-scripts/setup-git-secrets-readwrite.sh b/shared-build-service-scripts/setup-git-secrets-readwrite.sh deleted file mode 100755 index 30f659a1c8..0000000000 --- a/shared-build-service-scripts/setup-git-secrets-readwrite.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash - -# Usage: -# -# source setup-git-secrets-readwrite.sh -# -# or -# -# . setup-git-secrets-readwrite.sh -# -# Since the ssh-agent must run in order to use private keys -# with passphrase, it is not enough to just run this script. -# It has to be "sourced" (see https://linuxize.com/post/bash-source-command/). -# Otherwise, the ssh-agent stops running at the end of this script, -# and later ssh or git commands will fail. - -set -e -set -x - -SHARED_SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) - -if [ -z ${BITBUCKET_READWRITE_PRIVATEKEY+x} ]; then - echo "BITBUCKET_READWRITE_PRIVATEKEY is unset" - exit 1 -else - echo "BITBUCKET_READWRITE_PRIVATEKEY is set" -fi -if [ -z ${BITBUCKET_READWRITE_PASSPHRASE+x} ]; then - echo "BITBUCKET_READWRITE_PASSPHRASE is unset" - exit 1 -else - echo "BITBUCKET_READWRITE_PASSPHRASE is set" -fi - -mkdir -p ~/.ssh - -# Disable 'set -x', otherwise we log the contents of the private key -old_set_x=${-//[^x]/} -set +x -printf '%s' "${BITBUCKET_READWRITE_PRIVATEKEY}" > ~/.ssh/id_rsa_rw -if [[ -n "$old_set_x" ]]; then set -x; fi - -chmod 600 ~/.ssh/* - -ssh-keyscan -p 7999 -t rsa bitbucket.oci.oraclecorp.com >> ~/.ssh/known_hosts - -if [ ! -z ${SSH_AGENT_PID+x} ] && ps -p ${SSH_AGENT_PID} > /dev/null -then - echo "ssh-agent is already running, pid ${SSH_AGENT_PID}" -else - eval `ssh-agent -s` - echo "Started ssh-agent, pid ${SSH_AGENT_PID}" -fi - -echo "Adding ~/.ssh/id_rsa_rw with passphrase coming from BITBUCKET_READWRITE_PASSPHRASE" -SSH_ASKPASS=${SHARED_SCRIPT_DIR}/ssh_give_pass.sh ssh-add ~/.ssh/id_rsa_rw <<< "${BITBUCKET_READWRITE_PASSPHRASE}" - -echo "Added ~/.ssh/id_rsa_rw" - -ssh-add -L diff --git a/shared-build-service-scripts/setup-git-secrets.sh b/shared-build-service-scripts/setup-git-secrets.sh deleted file mode 100755 index 0981cbfe08..0000000000 --- a/shared-build-service-scripts/setup-git-secrets.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash - -# Usage: -# -# source setup-git-secrets.sh -# -# or -# -# . setup-git-secrets.sh -# -# Since the ssh-agent must run in order to use private keys -# with passphrase, it is not enough to just run this script. 
-# It has to be "sourced" (see https://linuxize.com/post/bash-source-command/). -# Otherwise, the ssh-agent stops running at the end of this script, -# and later ssh or git commands will fail. - -set -e -set -x - -SHARED_SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) - -if [ -z ${BITBUCKET_READONLY_PRIVATEKEY+x} ]; then - echo "BITBUCKET_READONLY_PRIVATEKEY is unset" - exit 1 -else - echo "BITBUCKET_READONLY_PRIVATEKEY is set" -fi -if [ -z ${BITBUCKET_READONLY_PASSPHRASE+x} ]; then - echo "BITBUCKET_READONLY_PASSPHRASE is unset" - exit 1 -else - echo "BITBUCKET_READONLY_PASSPHRASE is set" -fi - -mkdir -p ~/.ssh - -# Disable 'set -x', otherwise we log the contents of the private key -old_set_x=${-//[^x]/} -set +x -printf '%s' "${BITBUCKET_READONLY_PRIVATEKEY}" > ~/.ssh/id_rsa -if [[ -n "$old_set_x" ]]; then set -x; fi - -chmod 600 ~/.ssh/* - -ssh-keyscan -p 7999 -t rsa bitbucket.oci.oraclecorp.com >> ~/.ssh/known_hosts - -if [ ! -z ${SSH_AGENT_PID+x} ] && ps -p ${SSH_AGENT_PID} > /dev/null -then - echo "ssh-agent is already running, pid ${SSH_AGENT_PID}" -else - eval `ssh-agent -s` - echo "Started ssh-agent, pid ${SSH_AGENT_PID}" -fi - -echo "Adding ~/.ssh/id_rsa with passphrase coming from BITBUCKET_READONLY_PASSPHRASE" -SSH_ASKPASS=${SHARED_SCRIPT_DIR}/ssh_give_pass.sh ssh-add ~/.ssh/id_rsa <<< "${BITBUCKET_READONLY_PASSPHRASE}" - -echo "Added ~/.ssh/id_rsa" - -ssh-add -L diff --git a/shared-build-service-scripts/ssh_give_pass.sh b/shared-build-service-scripts/ssh_give_pass.sh deleted file mode 100755 index a78e77635a..0000000000 --- a/shared-build-service-scripts/ssh_give_pass.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash -# from https://stackoverflow.com/a/52671286 -# Parameter $1 passed to the script is the prompt text -# READ Secret from STDIN and echo it -read SECRET -echo $SECRET \ No newline at end of file From a7e62d19f5a53622f6e099767609a69163c69f94 Mon Sep 17 00:00:00 2001 From: DEXREQ Automation Date: Mon, 10 Jun 2024 14:40:27 +0000 Subject: [PATCH 2/2] Pull request #2780: Releasing Version 65.67.1 Merge in SDK/oci-go-sdk from merge_to_github2024-06-10 to github Squashed commit of the following: commit fed614011402a9e0c10fd27816b00235cc7da31a Author: oci-dex-release-bot Date: Mon Jun 10 14:18:12 2024 +0000 Releasing version 65 67 1 --- CHANGELOG.md | 10 + README.md | 2 +- apmconfig/apmconfig_config_client.go | 59 +++++ apmconfig/filter_text_or_id.go | 45 ++++ apmconfig/test_details.go | 78 +++++++ apmconfig/test_output.go | 78 +++++++ apmconfig/test_request_response.go | 97 ++++++++ apmconfig/test_span_enrichment_details.go | 63 +++++ apmconfig/test_span_enrichment_output.go | 58 +++++ apmconfig/test_types.go | 53 +++++ cloudguard/resource_profile.go | 10 +- .../resource_profile_endpoint_summary.go | 11 +- ...ource_profile_impacted_resource_summary.go | 12 + cloudguard/resource_profile_summary.go | 10 +- cloudguard/security_policy_collection.go | 15 ++ cloudguard/sighting.go | 10 +- cloudguard/sighting_endpoint_summary.go | 11 +- .../sighting_impacted_resource_summary.go | 12 + cloudguard/sighting_summary.go | 10 +- common/version.go | 2 +- fusionapps/extract_details_collection.go | 39 ++++ fusionapps/extract_details_summary.go | 45 ++++ fusionapps/fusion_environment.go | 6 + .../fusionapps_fusionapplications_client.go | 116 ++++++++++ ...nerate_extract_details_request_response.go | 90 ++++++++ .../initiate_extract_request_response.go | 90 ++++++++ fusionapps/subscription.go | 216 ++++++++++++++++++ fusionapps/work_request.go | 24 ++ 
.../create_dedicated_sharded_database.go | 12 + .../dedicated_sharded_database.go | 55 +++++ .../dedicated_sharded_database_summary.go | 12 + ...ficate_signing_request_request_response.go | 3 + .../get_private_endpoint_request_response.go | 7 + .../get_sharded_database_request_response.go | 7 + ...ddatabase_shardeddatabaseservice_client.go | 63 +++++ globallydistributeddatabase/operation_type.go | 48 ++++ .../private_endpoint.go | 3 + ...instate_proxy_instance_request_response.go | 104 +++++++++ monitoring/alarm.go | 38 ++- monitoring/alarm_dimension_states_entry.go | 11 +- monitoring/alarm_history_entry.go | 8 + monitoring/alarm_override.go | 15 +- monitoring/alarm_status_summary.go | 11 +- monitoring/alarm_summary.go | 28 ++- monitoring/create_alarm_details.go | 38 ++- monitoring/update_alarm_details.go | 38 ++- queue/get_message.go | 4 + queue/queue.go | 4 + queue/queue_client.go | 4 +- queue/queue_stats.go | 2 +- redis/redis_rediscluster_client.go | 22 +- 51 files changed, 1763 insertions(+), 46 deletions(-) create mode 100644 apmconfig/filter_text_or_id.go create mode 100644 apmconfig/test_details.go create mode 100644 apmconfig/test_output.go create mode 100644 apmconfig/test_request_response.go create mode 100644 apmconfig/test_span_enrichment_details.go create mode 100644 apmconfig/test_span_enrichment_output.go create mode 100644 apmconfig/test_types.go create mode 100644 fusionapps/extract_details_collection.go create mode 100644 fusionapps/extract_details_summary.go create mode 100644 fusionapps/generate_extract_details_request_response.go create mode 100644 fusionapps/initiate_extract_request_response.go create mode 100644 globallydistributeddatabase/reinstate_proxy_instance_request_response.go diff --git a/CHANGELOG.md b/CHANGELOG.md index aeafa31900..e8b0e3459b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,16 @@ All notable changes to this project will be documented in this file. 
The format is based on [Keep a Changelog](http://keepachangelog.com/) +## 65.67.1 - 2024-06-11 +### Added +- Support for 23ai based databases in Globally Distributed Database service +- Support for testing span enrichment groups in Application Performance Monitoring service +- Support for subscription suspension and termination orders in Fusion Apps as a Service +- Support for time first occurred and time last occurred for resource sightings in Cloud Guard service +- Support for alarm summary, notification title, and slack duration on create and update operations in Monitoring service +- Support for message creation timestamp in Queue service + + ## 65.67.0 - 2024-06-04 ### Added - Support for creating cross-region autonomous data guards in the Database service diff --git a/README.md b/README.md index 1efc4f015c..419f6a7fc7 100644 --- a/README.md +++ b/README.md @@ -209,7 +209,7 @@ go install github.com/golang/lint/golint ``` - Install [staticcheck](https://github.com/dominikh/go-tools) with the command: ``` -go install honnef.co/go/tools/cmd/staticcheck@2023.1.3 +go install honnef.co/go/tools/cmd/staticcheck@2023.1.7 ``` ### Linting and Staticcheck diff --git a/apmconfig/apmconfig_config_client.go b/apmconfig/apmconfig_config_client.go index f7f29aaadf..e9cb21850c 100644 --- a/apmconfig/apmconfig_config_client.go +++ b/apmconfig/apmconfig_config_client.go @@ -445,6 +445,65 @@ func (client ConfigClient) retrieveNamespaces(ctx context.Context, request commo return response, err } +// Test Tests a data processing operation on the provided input, returning the potentially modified +// input as output. Returns 200 on success, 422 when the input can not be processed. +// +// # See also +// +// Click https://docs.cloud.oracle.com/en-us/iaas/tools/go-sdk-examples/latest/apmconfig/Test.go.html to see an example of how to use Test API. 
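As a quick orientation before the generated client code, a call to the new Test operation with a span enrichment payload can be sketched as follows. The sketch assumes the standard generated constructor NewConfigClientWithConfigurationProvider and a configured OCI profile; the APM domain OCID, span content, options, and filter text are placeholders, not values from this patch.

```
// Hypothetical usage sketch for ConfigClient.Test with a span enrichment
// payload; identifiers marked "example" are placeholders.
package main

import (
	"context"
	"fmt"

	"github.com/oracle/oci-go-sdk/v65/apmconfig"
	"github.com/oracle/oci-go-sdk/v65/common"
)

func main() {
	client, err := apmconfig.NewConfigClientWithConfigurationProvider(common.DefaultConfigProvider())
	if err != nil {
		panic(err)
	}

	// Options and Span are free-form JSON objects (*interface{} in the model).
	var options interface{} = map[string]interface{}{} // enrichment rules, in Options-resource format
	var span interface{} = map[string]interface{}{"displayName": "GET /example"}

	resp, err := client.Test(context.Background(), apmconfig.TestRequest{
		ApmDomainId: common.String("ocid1.apmdomain.oc1..example"),
		TestDetails: apmconfig.TestSpanEnrichmentDetails{
			Options: &options,
			Span:    &span,
			Filters: []apmconfig.FilterTextOrId{
				{FilterText: common.String("kind = SERVER")}, // filter expression; syntax is service-defined
			},
		},
	})
	if err != nil {
		panic(err)
	}

	// On success the polymorphic TestOutput is a TestSpanEnrichmentOutput.
	if out, ok := resp.TestOutput.(apmconfig.TestSpanEnrichmentOutput); ok {
		fmt.Println(out.Span, out.Filters)
	}
}
```

A 422 (input could not be processed) surfaces as a service error from the call rather than as a TestOutput value.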
+// A default retry strategy applies to this operation Test() +func (client ConfigClient) Test(ctx context.Context, request TestRequest) (response TestResponse, err error) { + var ociResponse common.OCIResponse + policy := common.DefaultRetryPolicy() + if client.RetryPolicy() != nil { + policy = *client.RetryPolicy() + } + if request.RetryPolicy() != nil { + policy = *request.RetryPolicy() + } + ociResponse, err = common.Retry(ctx, request, client.test, policy) + if err != nil { + if ociResponse != nil { + if httpResponse := ociResponse.HTTPResponse(); httpResponse != nil { + opcRequestId := httpResponse.Header.Get("opc-request-id") + response = TestResponse{RawResponse: httpResponse, OpcRequestId: &opcRequestId} + } else { + response = TestResponse{} + } + } + return + } + if convertedResponse, ok := ociResponse.(TestResponse); ok { + response = convertedResponse + } else { + err = fmt.Errorf("failed to convert OCIResponse into TestResponse") + } + return +} + +// test implements the OCIOperation interface (enables retrying operations) +func (client ConfigClient) test(ctx context.Context, request common.OCIRequest, binaryReqBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (common.OCIResponse, error) { + + httpRequest, err := request.HTTPRequest(http.MethodPost, "/actions/test", binaryReqBody, extraHeaders) + if err != nil { + return nil, err + } + + var response TestResponse + var httpResponse *http.Response + httpResponse, err = client.Call(ctx, &httpRequest) + defer common.CloseBodyIfValid(httpResponse) + response.RawResponse = httpResponse + if err != nil { + apiReferenceLink := "https://docs.oracle.com/iaas/api/#/en/apm-config/20210201/TestOutput/Test" + err = common.PostProcessServiceError(err, "Config", "Test", apiReferenceLink) + return response, err + } + + err = common.UnmarshalResponseWithPolymorphicBody(httpResponse, &response, &testoutput{}) + return response, err +} + // UpdateConfig Updates the details of the configuration item identified by the OCID. // // # See also diff --git a/apmconfig/filter_text_or_id.go b/apmconfig/filter_text_or_id.go new file mode 100644 index 0000000000..b1006e1d0f --- /dev/null +++ b/apmconfig/filter_text_or_id.go @@ -0,0 +1,45 @@ +// Copyright (c) 2016, 2018, 2024, Oracle and/or its affiliates. All rights reserved. +// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. +// Code generated. DO NOT EDIT. + +// Application Performance Monitoring Configuration API +// +// Use the Application Performance Monitoring Configuration API to query and set Application Performance Monitoring +// configuration. For more information, see Application Performance Monitoring (https://docs.oracle.com/iaas/application-performance-monitoring/index.html). +// + +package apmconfig + +import ( + "fmt" + "github.com/oracle/oci-go-sdk/v65/common" + "strings" +) + +// FilterTextOrId A span filter written in text, or as the OCID (https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of a +// SpanFilter resource. If both are given, the filterText is used. +type FilterTextOrId struct { + + // The OCID (https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of a Span Filter. The filterId is mandatory for the creation + // of MetricGroups. A filterId is generated when a Span Filter is created. 
+ FilterId *string `mandatory:"false" json:"filterId"` + + // The string that defines the Span Filter expression. + FilterText *string `mandatory:"false" json:"filterText"` +} + +func (m FilterTextOrId) String() string { + return common.PointerString(m) +} + +// ValidateEnumValue returns an error when providing an unsupported enum value +// This function is being called during constructing API request process +// Not recommended for calling this function directly +func (m FilterTextOrId) ValidateEnumValue() (bool, error) { + errMessage := []string{} + + if len(errMessage) > 0 { + return true, fmt.Errorf(strings.Join(errMessage, "\n")) + } + return false, nil +} diff --git a/apmconfig/test_details.go b/apmconfig/test_details.go new file mode 100644 index 0000000000..0488a5c35b --- /dev/null +++ b/apmconfig/test_details.go @@ -0,0 +1,78 @@ +// Copyright (c) 2016, 2018, 2024, Oracle and/or its affiliates. All rights reserved. +// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. +// Code generated. DO NOT EDIT. + +// Application Performance Monitoring Configuration API +// +// Use the Application Performance Monitoring Configuration API to query and set Application Performance Monitoring +// configuration. For more information, see Application Performance Monitoring (https://docs.oracle.com/iaas/application-performance-monitoring/index.html). +// + +package apmconfig + +import ( + "encoding/json" + "fmt" + "github.com/oracle/oci-go-sdk/v65/common" + "strings" +) + +// TestDetails The request body used to execute a test. +type TestDetails interface { +} + +type testdetails struct { + JsonData []byte + TestType string `json:"testType"` +} + +// UnmarshalJSON unmarshals json +func (m *testdetails) UnmarshalJSON(data []byte) error { + m.JsonData = data + type Unmarshalertestdetails testdetails + s := struct { + Model Unmarshalertestdetails + }{} + err := json.Unmarshal(data, &s.Model) + if err != nil { + return err + } + m.TestType = s.Model.TestType + + return err +} + +// UnmarshalPolymorphicJSON unmarshals polymorphic json +func (m *testdetails) UnmarshalPolymorphicJSON(data []byte) (interface{}, error) { + + if data == nil || string(data) == "null" { + return nil, nil + } + + var err error + switch m.TestType { + case "SPAN_ENRICHMENT": + mm := TestSpanEnrichmentDetails{} + err = json.Unmarshal(data, &mm) + return mm, err + default: + common.Logf("Recieved unsupported enum value for TestDetails: %s.", m.TestType) + return *m, nil + } +} + +func (m testdetails) String() string { + return common.PointerString(m) +} + +// ValidateEnumValue returns an error when providing an unsupported enum value +// This function is being called during constructing API request process +// Not recommended for calling this function directly +func (m testdetails) ValidateEnumValue() (bool, error) { + errMessage := []string{} + + if len(errMessage) > 0 { + return true, fmt.Errorf(strings.Join(errMessage, "\n")) + } + return false, nil +} diff --git a/apmconfig/test_output.go b/apmconfig/test_output.go new file mode 100644 index 0000000000..f4aaae96b9 --- /dev/null +++ b/apmconfig/test_output.go @@ -0,0 +1,78 @@ +// Copyright (c) 2016, 2018, 2024, Oracle and/or its affiliates. All rights reserved. 
+// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. +// Code generated. DO NOT EDIT. + +// Application Performance Monitoring Configuration API +// +// Use the Application Performance Monitoring Configuration API to query and set Application Performance Monitoring +// configuration. For more information, see Application Performance Monitoring (https://docs.oracle.com/iaas/application-performance-monitoring/index.html). +// + +package apmconfig + +import ( + "encoding/json" + "fmt" + "github.com/oracle/oci-go-sdk/v65/common" + "strings" +) + +// TestOutput The result of running a test. +type TestOutput interface { +} + +type testoutput struct { + JsonData []byte + TestType string `json:"testType"` +} + +// UnmarshalJSON unmarshals json +func (m *testoutput) UnmarshalJSON(data []byte) error { + m.JsonData = data + type Unmarshalertestoutput testoutput + s := struct { + Model Unmarshalertestoutput + }{} + err := json.Unmarshal(data, &s.Model) + if err != nil { + return err + } + m.TestType = s.Model.TestType + + return err +} + +// UnmarshalPolymorphicJSON unmarshals polymorphic json +func (m *testoutput) UnmarshalPolymorphicJSON(data []byte) (interface{}, error) { + + if data == nil || string(data) == "null" { + return nil, nil + } + + var err error + switch m.TestType { + case "SPAN_ENRICHMENT": + mm := TestSpanEnrichmentOutput{} + err = json.Unmarshal(data, &mm) + return mm, err + default: + common.Logf("Recieved unsupported enum value for TestOutput: %s.", m.TestType) + return *m, nil + } +} + +func (m testoutput) String() string { + return common.PointerString(m) +} + +// ValidateEnumValue returns an error when providing an unsupported enum value +// This function is being called during constructing API request process +// Not recommended for calling this function directly +func (m testoutput) ValidateEnumValue() (bool, error) { + errMessage := []string{} + + if len(errMessage) > 0 { + return true, fmt.Errorf(strings.Join(errMessage, "\n")) + } + return false, nil +} diff --git a/apmconfig/test_request_response.go b/apmconfig/test_request_response.go new file mode 100644 index 0000000000..cf3283ec2f --- /dev/null +++ b/apmconfig/test_request_response.go @@ -0,0 +1,97 @@ +// Copyright (c) 2016, 2018, 2024, Oracle and/or its affiliates. All rights reserved. +// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. +// Code generated. DO NOT EDIT. + +package apmconfig + +import ( + "fmt" + "github.com/oracle/oci-go-sdk/v65/common" + "net/http" + "strings" +) + +// TestRequest wrapper for the Test operation +// +// # See also +// +// Click https://docs.cloud.oracle.com/en-us/iaas/tools/go-sdk-examples/latest/apmconfig/Test.go.html to see an example of how to use TestRequest. +type TestRequest struct { + + // The APM Domain ID the request is intended for. + ApmDomainId *string `mandatory:"true" contributesTo:"query" name:"apmDomainId"` + + // The test input. + TestDetails `contributesTo:"body"` + + // Unique identifier for the request. + // If you need to contact Oracle about a particular request, please provide the request ID. 
+ OpcRequestId *string `mandatory:"false" contributesTo:"header" name:"opc-request-id"` + + // Metadata about the request. This information will not be transmitted to the service, but + // represents information that the SDK will consume to drive retry behavior. + RequestMetadata common.RequestMetadata +} + +func (request TestRequest) String() string { + return common.PointerString(request) +} + +// HTTPRequest implements the OCIRequest interface +func (request TestRequest) HTTPRequest(method, path string, binaryRequestBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (http.Request, error) { + + _, err := request.ValidateEnumValue() + if err != nil { + return http.Request{}, err + } + return common.MakeDefaultHTTPRequestWithTaggedStructAndExtraHeaders(method, path, request, extraHeaders) +} + +// BinaryRequestBody implements the OCIRequest interface +func (request TestRequest) BinaryRequestBody() (*common.OCIReadSeekCloser, bool) { + + return nil, false + +} + +// RetryPolicy implements the OCIRetryableRequest interface. This retrieves the specified retry policy. +func (request TestRequest) RetryPolicy() *common.RetryPolicy { + return request.RequestMetadata.RetryPolicy +} + +// ValidateEnumValue returns an error when providing an unsupported enum value +// This function is being called during constructing API request process +// Not recommended for calling this function directly +func (request TestRequest) ValidateEnumValue() (bool, error) { + errMessage := []string{} + if len(errMessage) > 0 { + return true, fmt.Errorf(strings.Join(errMessage, "\n")) + } + return false, nil +} + +// TestResponse wrapper for the Test operation +type TestResponse struct { + + // The underlying http response + RawResponse *http.Response + + // The TestOutput instance + TestOutput `presentIn:"body"` + + // Unique Oracle-assigned identifier for the request. If you need to contact + // Oracle about a particular request, please provide the request ID. + OpcRequestId *string `presentIn:"header" name:"opc-request-id"` + + // For optimistic concurrency control. See `if-match`. + Etag *string `presentIn:"header" name:"etag"` +} + +func (response TestResponse) String() string { + return common.PointerString(response) +} + +// HTTPResponse implements the OCIResponse interface +func (response TestResponse) HTTPResponse() *http.Response { + return response.RawResponse +} diff --git a/apmconfig/test_span_enrichment_details.go b/apmconfig/test_span_enrichment_details.go new file mode 100644 index 0000000000..d11f51fb11 --- /dev/null +++ b/apmconfig/test_span_enrichment_details.go @@ -0,0 +1,63 @@ +// Copyright (c) 2016, 2018, 2024, Oracle and/or its affiliates. All rights reserved. +// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. +// Code generated. DO NOT EDIT. + +// Application Performance Monitoring Configuration API +// +// Use the Application Performance Monitoring Configuration API to query and set Application Performance Monitoring +// configuration. For more information, see Application Performance Monitoring (https://docs.oracle.com/iaas/application-performance-monitoring/index.html). 
+// + +package apmconfig + +import ( + "encoding/json" + "fmt" + "github.com/oracle/oci-go-sdk/v65/common" + "strings" +) + +// TestSpanEnrichmentDetails Run a set of span enrichment rules on a given span to see the result. +type TestSpanEnrichmentDetails struct { + + // The span enrichment rules to test in the format of an Options resource. + Options *interface{} `mandatory:"true" json:"options"` + + // The span to test the rules on. This should be a valid JSON object that follows one + // of the formats used by distributed tracing frameworks, such as OpenTelemetry, Zipkin, or + // Oracle Application Performance Monitoring. + Span *interface{} `mandatory:"true" json:"span"` + + // A list of filters to try against the given span. + Filters []FilterTextOrId `mandatory:"false" json:"filters"` +} + +func (m TestSpanEnrichmentDetails) String() string { + return common.PointerString(m) +} + +// ValidateEnumValue returns an error when providing an unsupported enum value +// This function is being called during constructing API request process +// Not recommended for calling this function directly +func (m TestSpanEnrichmentDetails) ValidateEnumValue() (bool, error) { + errMessage := []string{} + + if len(errMessage) > 0 { + return true, fmt.Errorf(strings.Join(errMessage, "\n")) + } + return false, nil +} + +// MarshalJSON marshals to json representation +func (m TestSpanEnrichmentDetails) MarshalJSON() (buff []byte, e error) { + type MarshalTypeTestSpanEnrichmentDetails TestSpanEnrichmentDetails + s := struct { + DiscriminatorParam string `json:"testType"` + MarshalTypeTestSpanEnrichmentDetails + }{ + "SPAN_ENRICHMENT", + (MarshalTypeTestSpanEnrichmentDetails)(m), + } + + return json.Marshal(&s) +} diff --git a/apmconfig/test_span_enrichment_output.go b/apmconfig/test_span_enrichment_output.go new file mode 100644 index 0000000000..32e17176ca --- /dev/null +++ b/apmconfig/test_span_enrichment_output.go @@ -0,0 +1,58 @@ +// Copyright (c) 2016, 2018, 2024, Oracle and/or its affiliates. All rights reserved. +// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. +// Code generated. DO NOT EDIT. + +// Application Performance Monitoring Configuration API +// +// Use the Application Performance Monitoring Configuration API to query and set Application Performance Monitoring +// configuration. For more information, see Application Performance Monitoring (https://docs.oracle.com/iaas/application-performance-monitoring/index.html). +// + +package apmconfig + +import ( + "encoding/json" + "fmt" + "github.com/oracle/oci-go-sdk/v65/common" + "strings" +) + +// TestSpanEnrichmentOutput Output of running a set of span enrichment rules against a span. +type TestSpanEnrichmentOutput struct { + + // The span after applying enrichment rules. + Span *interface{} `mandatory:"false" json:"span"` + + // A list of booleans indicating whether the corresponding filter in the input matched the input span. 
+ Filters []bool `mandatory:"false" json:"filters"` +} + +func (m TestSpanEnrichmentOutput) String() string { + return common.PointerString(m) +} + +// ValidateEnumValue returns an error when providing an unsupported enum value +// This function is being called during constructing API request process +// Not recommended for calling this function directly +func (m TestSpanEnrichmentOutput) ValidateEnumValue() (bool, error) { + errMessage := []string{} + + if len(errMessage) > 0 { + return true, fmt.Errorf(strings.Join(errMessage, "\n")) + } + return false, nil +} + +// MarshalJSON marshals to json representation +func (m TestSpanEnrichmentOutput) MarshalJSON() (buff []byte, e error) { + type MarshalTypeTestSpanEnrichmentOutput TestSpanEnrichmentOutput + s := struct { + DiscriminatorParam string `json:"testType"` + MarshalTypeTestSpanEnrichmentOutput + }{ + "SPAN_ENRICHMENT", + (MarshalTypeTestSpanEnrichmentOutput)(m), + } + + return json.Marshal(&s) +} diff --git a/apmconfig/test_types.go b/apmconfig/test_types.go new file mode 100644 index 0000000000..02c1bfc736 --- /dev/null +++ b/apmconfig/test_types.go @@ -0,0 +1,53 @@ +// Copyright (c) 2016, 2018, 2024, Oracle and/or its affiliates. All rights reserved. +// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. +// Code generated. DO NOT EDIT. + +// Application Performance Monitoring Configuration API +// +// Use the Application Performance Monitoring Configuration API to query and set Application Performance Monitoring +// configuration. For more information, see Application Performance Monitoring (https://docs.oracle.com/iaas/application-performance-monitoring/index.html). +// + +package apmconfig + +import ( + "strings" +) + +// TestTypesEnum Enum with underlying type: string +type TestTypesEnum string + +// Set of constants representing the allowable values for TestTypesEnum +const ( + TestTypesSpanEnrichment TestTypesEnum = "SPAN_ENRICHMENT" +) + +var mappingTestTypesEnum = map[string]TestTypesEnum{ + "SPAN_ENRICHMENT": TestTypesSpanEnrichment, +} + +var mappingTestTypesEnumLowerCase = map[string]TestTypesEnum{ + "span_enrichment": TestTypesSpanEnrichment, +} + +// GetTestTypesEnumValues Enumerates the set of values for TestTypesEnum +func GetTestTypesEnumValues() []TestTypesEnum { + values := make([]TestTypesEnum, 0) + for _, v := range mappingTestTypesEnum { + values = append(values, v) + } + return values +} + +// GetTestTypesEnumStringValues Enumerates the set of values in String for TestTypesEnum +func GetTestTypesEnumStringValues() []string { + return []string{ + "SPAN_ENRICHMENT", + } +} + +// GetMappingTestTypesEnum performs case Insensitive comparison on enum value and return the desired enum +func GetMappingTestTypesEnum(val string) (TestTypesEnum, bool) { + enum, ok := mappingTestTypesEnumLowerCase[strings.ToLower(val)] + return enum, ok +} diff --git a/cloudguard/resource_profile.go b/cloudguard/resource_profile.go index 557ce9af74..28a3d17053 100644 --- a/cloudguard/resource_profile.go +++ b/cloudguard/resource_profile.go @@ -37,10 +37,10 @@ type ResourceProfile struct { // Risk score for the resource profile RiskScore *float64 `mandatory:"true" json:"riskScore"` - // The date and time the resource profile was first detected. Format defined by RFC3339. + // Time the activities were first detected. Format defined by RFC3339. 
TimeFirstDetected *common.SDKTime `mandatory:"true" json:"timeFirstDetected"` - // The date and time the resource profile was last detected. Format defined by RFC3339. + // Time the activities were last detected. Format defined by RFC3339. TimeLastDetected *common.SDKTime `mandatory:"true" json:"timeLastDetected"` // List of tactic summaries associated with the resource profile @@ -64,6 +64,12 @@ type ResourceProfile struct { // The date and time for the peak risk score. Format defined by RFC3339. TimePeakScore *common.SDKTime `mandatory:"false" json:"timePeakScore"` + // Time the activities were first performed. Format defined by RFC3339. + TimeFirstOccurred *common.SDKTime `mandatory:"false" json:"timeFirstOccurred"` + + // Time the activities were last performed. Format defined by RFC3339. + TimeLastOccurred *common.SDKTime `mandatory:"false" json:"timeLastOccurred"` + // Locks associated with this resource. Locks []ResourceLock `mandatory:"false" json:"locks"` } diff --git a/cloudguard/resource_profile_endpoint_summary.go b/cloudguard/resource_profile_endpoint_summary.go index 03e8eb3188..8f2b0ba324 100644 --- a/cloudguard/resource_profile_endpoint_summary.go +++ b/cloudguard/resource_profile_endpoint_summary.go @@ -37,7 +37,7 @@ type ResourceProfileEndpointSummary struct { // Type of IP address for sighting IpAddressType *string `mandatory:"true" json:"ipAddressType"` - // Date and time when activities were created + // Time the activities were last detected. TimeLastDetected *common.SDKTime `mandatory:"true" json:"timeLastDetected"` // Problem ID for sighting endpoints @@ -63,6 +63,15 @@ type ResourceProfileEndpointSummary struct { // List of services where activities were performed from this IP address Services []string `mandatory:"false" json:"services"` + + // Time the activities were first detected. + TimeFirstDetected *common.SDKTime `mandatory:"false" json:"timeFirstDetected"` + + // Time the activities were first performed. + TimeFirstOccurred *common.SDKTime `mandatory:"false" json:"timeFirstOccurred"` + + // Time the activities were last performed. + TimeLastOccurred *common.SDKTime `mandatory:"false" json:"timeLastOccurred"` } func (m ResourceProfileEndpointSummary) String() string { diff --git a/cloudguard/resource_profile_impacted_resource_summary.go b/cloudguard/resource_profile_impacted_resource_summary.go index e7a70e1554..9c543694bc 100644 --- a/cloudguard/resource_profile_impacted_resource_summary.go +++ b/cloudguard/resource_profile_impacted_resource_summary.go @@ -51,6 +51,18 @@ type ResourceProfileImpactedResourceSummary struct { // Problem ID associated with the impacted resource ProblemId *string `mandatory:"false" json:"problemId"` + + // Time the activities were first detected. + TimeFirstDetected *common.SDKTime `mandatory:"false" json:"timeFirstDetected"` + + // Time the activities were last detected. Same as timeIdentified. + TimeLastDetected *common.SDKTime `mandatory:"false" json:"timeLastDetected"` + + // Time the activities were first performed. + TimeFirstOccurred *common.SDKTime `mandatory:"false" json:"timeFirstOccurred"` + + // Time the activities were last performed. 
+ TimeLastOccurred *common.SDKTime `mandatory:"false" json:"timeLastOccurred"` } func (m ResourceProfileImpactedResourceSummary) String() string { diff --git a/cloudguard/resource_profile_summary.go b/cloudguard/resource_profile_summary.go index c8685ea9d1..55a4d69c9b 100644 --- a/cloudguard/resource_profile_summary.go +++ b/cloudguard/resource_profile_summary.go @@ -37,15 +37,21 @@ type ResourceProfileSummary struct { // List of tactic summaries associated with the resource profile Tactics []TacticSummary `mandatory:"true" json:"tactics"` - // The date and time the resource profile was first detected. Format defined by RFC3339. + // Time the activities were first detected. Format defined by RFC3339. TimeFirstDetected *common.SDKTime `mandatory:"true" json:"timeFirstDetected"` - // The date and time the resource profile was last detected. Format defined by RFC3339. + // Time the activities were last detected. Format defined by RFC3339. TimeLastDetected *common.SDKTime `mandatory:"true" json:"timeLastDetected"` // Number of sightings associated with the resource profile SightingsCount *int `mandatory:"false" json:"sightingsCount"` + // Time the activities were first performed. Format defined by RFC3339. + TimeFirstOccurred *common.SDKTime `mandatory:"false" json:"timeFirstOccurred"` + + // Time the activities were last performed. Format defined by RFC3339. + TimeLastOccurred *common.SDKTime `mandatory:"false" json:"timeLastOccurred"` + // Number of problems associated with this resource profile ProblemsCount *int `mandatory:"false" json:"problemsCount"` } diff --git a/cloudguard/security_policy_collection.go b/cloudguard/security_policy_collection.go index be3b2a79e6..4d5c4e8926 100644 --- a/cloudguard/security_policy_collection.go +++ b/cloudguard/security_policy_collection.go @@ -24,6 +24,21 @@ type SecurityPolicyCollection struct { // Locks associated with this resource. Locks []ResourceLock `mandatory:"false" json:"locks"` + + // Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. + // Example: `{"bar-key": "value"}` + // Avoid entering confidential information. + FreeformTags map[string]string `mandatory:"false" json:"freeformTags"` + + // Defined tags for this resource. Each key is predefined and scoped to a namespace. + // Example: `{"foo-namespace": {"bar-key": "value"}}` + DefinedTags map[string]map[string]interface{} `mandatory:"false" json:"definedTags"` + + // System tags for this resource. Each key is predefined and scoped to a namespace. + // For more information, see Resource Tags (https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm). + // System tags can be viewed by users, but can only be created by the system. + // Example: `{"orcl-cloud": {"free-tier-retained": "true"}}` + SystemTags map[string]map[string]interface{} `mandatory:"false" json:"systemTags"` } func (m SecurityPolicyCollection) String() string { diff --git a/cloudguard/sighting.go b/cloudguard/sighting.go index c3e05fb875..fca1a49761 100644 --- a/cloudguard/sighting.go +++ b/cloudguard/sighting.go @@ -52,10 +52,10 @@ type Sighting struct { // Level of confidence that the sighting is not a false positive Confidence ConfidenceEnum `mandatory:"true" json:"confidence"` - // The date and time the sighting was first detected. Format defined by RFC3339. + // Time the activities were first detected. Format defined by RFC3339. 
TimeFirstDetected *common.SDKTime `mandatory:"true" json:"timeFirstDetected"` - // The date and time the sighting was last detected. Format defined by RFC3339. + // Time the activities were last detected. Format defined by RFC3339. TimeLastDetected *common.SDKTime `mandatory:"true" json:"timeLastDetected"` // List of regions involved in the sighting @@ -73,6 +73,12 @@ type Sighting struct { // Type of the principal actor ActorPrincipalType *string `mandatory:"false" json:"actorPrincipalType"` + // Time the activities were first performed. Format defined by RFC3339. + TimeFirstOccurred *common.SDKTime `mandatory:"false" json:"timeFirstOccurred"` + + // Time the activities were last performed. Format defined by RFC3339. + TimeLastOccurred *common.SDKTime `mandatory:"false" json:"timeLastOccurred"` + // The additional details for the sighting AdditionalDetails map[string]string `mandatory:"false" json:"additionalDetails"` diff --git a/cloudguard/sighting_endpoint_summary.go b/cloudguard/sighting_endpoint_summary.go index a4ea51d556..376de15cfa 100644 --- a/cloudguard/sighting_endpoint_summary.go +++ b/cloudguard/sighting_endpoint_summary.go @@ -31,7 +31,7 @@ type SightingEndpointSummary struct { // Type of IP address involved in sighting IpAddressType *string `mandatory:"true" json:"ipAddressType"` - // Date and time when activities were created + // Time the activities were last detected. TimeLastDetected *common.SDKTime `mandatory:"true" json:"timeLastDetected"` // Problem ID for sighting endpoints @@ -57,6 +57,15 @@ type SightingEndpointSummary struct { // List of services where activities were performed from this IP address Services []string `mandatory:"false" json:"services"` + + // Time the activities were first detected. + TimeFirstDetected *common.SDKTime `mandatory:"false" json:"timeFirstDetected"` + + // Time the activities were first performed. + TimeFirstOccurred *common.SDKTime `mandatory:"false" json:"timeFirstOccurred"` + + // Time the activities were last performed. + TimeLastOccurred *common.SDKTime `mandatory:"false" json:"timeLastOccurred"` } func (m SightingEndpointSummary) String() string { diff --git a/cloudguard/sighting_impacted_resource_summary.go b/cloudguard/sighting_impacted_resource_summary.go index e0b127ded8..ef66b5dde5 100644 --- a/cloudguard/sighting_impacted_resource_summary.go +++ b/cloudguard/sighting_impacted_resource_summary.go @@ -45,6 +45,18 @@ type SightingImpactedResourceSummary struct { // Problem ID for impacted resource ProblemId *string `mandatory:"false" json:"problemId"` + + // Time the activities were first detected. + TimeFirstDetected *common.SDKTime `mandatory:"false" json:"timeFirstDetected"` + + // Time the activities were last detected. Same as timeIdentified. + TimeLastDetected *common.SDKTime `mandatory:"false" json:"timeLastDetected"` + + // Time the activities were first performed. + TimeFirstOccurred *common.SDKTime `mandatory:"false" json:"timeFirstOccurred"` + + // Time the activities were last performed. 
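// Illustrative usage (not part of this patch): the new TimeFirstOccurred and
// TimeLastOccurred fields are optional (mandatory:"false"), so callers should
// nil-check them before dereferencing; the detection timestamps on Sighting
// remain mandatory. printSightingWindow is hypothetical, and s is assumed to
// come from a Cloud Guard get or list call. Assumes imports "fmt", "time",
// and the cloudguard package; common.SDKTime embeds time.Time, so Format is
// available directly.
func printSightingWindow(s cloudguard.Sighting) {
	// Mandatory fields are always populated by the service.
	fmt.Println("first detected:", s.TimeFirstDetected.Format(time.RFC3339))
	// Occurrence times may be absent on records created before this change.
	if s.TimeFirstOccurred != nil {
		fmt.Println("first occurred:", s.TimeFirstOccurred.Format(time.RFC3339))
	}
	if s.TimeLastOccurred != nil {
		fmt.Println("last occurred:", s.TimeLastOccurred.Format(time.RFC3339))
	}
}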
+ TimeLastOccurred *common.SDKTime `mandatory:"false" json:"timeLastOccurred"` } func (m SightingImpactedResourceSummary) String() string { diff --git a/cloudguard/sighting_summary.go b/cloudguard/sighting_summary.go index 9811477567..4b7cb27b3c 100644 --- a/cloudguard/sighting_summary.go +++ b/cloudguard/sighting_summary.go @@ -52,10 +52,10 @@ type SightingSummary struct { // Confidence level that the sighting is not a false positive Confidence ConfidenceEnum `mandatory:"true" json:"confidence"` - // The date and time the sighting was first detected. Format defined by RFC3339. + // Time the activities were first detected. Format defined by RFC3339. TimeFirstDetected *common.SDKTime `mandatory:"true" json:"timeFirstDetected"` - // The date and time the sighting was last detected. Format defined by RFC3339. + // Time the activities were last detected. Format defined by RFC3339. TimeLastDetected *common.SDKTime `mandatory:"true" json:"timeLastDetected"` // List of regions involved in the sighting @@ -72,6 +72,12 @@ type SightingSummary struct { // Type of principal actor ActorPrincipalType *string `mandatory:"false" json:"actorPrincipalType"` + + // Time the activities were first performed. Format defined by RFC3339. + TimeFirstOccurred *common.SDKTime `mandatory:"false" json:"timeFirstOccurred"` + + // Time the activities were last performed. Format defined by RFC3339. + TimeLastOccurred *common.SDKTime `mandatory:"false" json:"timeLastOccurred"` } func (m SightingSummary) String() string { diff --git a/common/version.go b/common/version.go index 5c2e2d0a96..93816d7329 100644 --- a/common/version.go +++ b/common/version.go @@ -13,7 +13,7 @@ import ( const ( major = "65" minor = "67" - patch = "0" + patch = "1" tag = "" ) diff --git a/fusionapps/extract_details_collection.go b/fusionapps/extract_details_collection.go new file mode 100644 index 0000000000..5d112793ed --- /dev/null +++ b/fusionapps/extract_details_collection.go @@ -0,0 +1,39 @@ +// Copyright (c) 2016, 2018, 2024, Oracle and/or its affiliates. All rights reserved. +// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. +// Code generated. DO NOT EDIT. + +// Fusion Applications Environment Management API +// +// Use the Fusion Applications Environment Management API to manage the environments where your Fusion Applications run. For more information, see the Fusion Applications Environment Management documentation (https://docs.cloud.oracle.com/iaas/Content/fusion-applications/home.htm). +// + +package fusionapps + +import ( + "fmt" + "github.com/oracle/oci-go-sdk/v65/common" + "strings" +) + +// ExtractDetailsCollection Results of GenerateExtractDetails for a pod. +type ExtractDetailsCollection struct { + + // A page of ExtractDetailsSummary objects. 
+	Items []ExtractDetailsSummary `mandatory:"true" json:"items"`
+}
+
+func (m ExtractDetailsCollection) String() string {
+	return common.PointerString(m)
+}
+
+// ValidateEnumValue returns an error when providing an unsupported enum value
+// This function is being called during constructing API request process
+// Not recommended for calling this function directly
+func (m ExtractDetailsCollection) ValidateEnumValue() (bool, error) {
+	errMessage := []string{}
+
+	if len(errMessage) > 0 {
+		return true, fmt.Errorf(strings.Join(errMessage, "\n"))
+	}
+	return false, nil
+}
diff --git a/fusionapps/extract_details_summary.go b/fusionapps/extract_details_summary.go
new file mode 100644
index 0000000000..36418ab9d8
--- /dev/null
+++ b/fusionapps/extract_details_summary.go
@@ -0,0 +1,45 @@
+// Copyright (c) 2016, 2018, 2024, Oracle and/or its affiliates. All rights reserved.
+// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
+// Code generated. DO NOT EDIT.
+
+// Fusion Applications Environment Management API
+//
+// Use the Fusion Applications Environment Management API to manage the environments where your Fusion Applications run. For more information, see the Fusion Applications Environment Management documentation (https://docs.cloud.oracle.com/iaas/Content/fusion-applications/home.htm).
+//
+
+package fusionapps
+
+import (
+	"fmt"
+	"github.com/oracle/oci-go-sdk/v65/common"
+	"strings"
+)
+
+// ExtractDetailsSummary Details about where the data extract can be retrieved from, and the password used to decrypt it.
+type ExtractDetailsSummary struct {
+
+	// The pre-authenticated request (PAR) URL used to retrieve the data extract from the Object Storage bucket.
+	ParUrl *string `mandatory:"true" json:"parUrl"`
+
+	// The password used to decrypt the data extract.
+	Password *string `mandatory:"true" json:"password"`
+
+	// The time when the data extract was requested. An RFC3339 formatted datetime string
+	TimeCreated *common.SDKTime `mandatory:"true" json:"timeCreated"`
+}
+
+func (m ExtractDetailsSummary) String() string {
+	return common.PointerString(m)
+}
+
+// ValidateEnumValue returns an error when providing an unsupported enum value
+// This function is being called during constructing API request process
+// Not recommended for calling this function directly
+func (m ExtractDetailsSummary) ValidateEnumValue() (bool, error) {
+	errMessage := []string{}
+
+	if len(errMessage) > 0 {
+		return true, fmt.Errorf(strings.Join(errMessage, "\n"))
+	}
+	return false, nil
+}
diff --git a/fusionapps/fusion_environment.go b/fusionapps/fusion_environment.go
index 2fc77e2dd6..42b18263ef 100644
--- a/fusionapps/fusion_environment.go
+++ b/fusionapps/fusion_environment.go
@@ -104,6 +104,9 @@ type FusionEnvironment struct {
 	// Defined tags for this resource. Each key is predefined and scoped to a namespace.
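// Illustrative usage (not part of this patch): a sketch of consuming the
// ExtractDetailsSummary type defined above. A PAR URL is a pre-authenticated
// Object Storage endpoint, so a plain HTTP GET can fetch it; what to do with
// the decryption password depends on the extract format and is only noted in
// a comment. downloadExtract and the local filename are hypothetical; assumes
// imports "io", "net/http", "os", and the fusionapps package.
func downloadExtract(d fusionapps.ExtractDetailsSummary) error {
	resp, err := http.Get(*d.ParUrl) // no extra auth is needed for a PAR URL
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	out, err := os.Create("fa-extract.zip") // hypothetical local target
	if err != nil {
		return err
	}
	defer out.Close()
	if _, err := io.Copy(out, resp.Body); err != nil {
		return err
	}
	// *d.Password would then be used to decrypt the downloaded archive.
	return nil
}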
// Example: `{"foo-namespace": {"bar-key": "value"}}` DefinedTags map[string]map[string]interface{} `mandatory:"false" json:"definedTags"` + + // Environment suspended status + IsSuspended *bool `mandatory:"false" json:"isSuspended"` } func (m FusionEnvironment) String() string { @@ -155,6 +158,7 @@ func (m *FusionEnvironment) UnmarshalJSON(data []byte) (e error) { EnvironmentRole *EnvironmentRole `json:"environmentRole"` FreeformTags map[string]string `json:"freeformTags"` DefinedTags map[string]map[string]interface{} `json:"definedTags"` + IsSuspended *bool `json:"isSuspended"` Id *string `json:"id"` DisplayName *string `json:"displayName"` CompartmentId *string `json:"compartmentId"` @@ -225,6 +229,8 @@ func (m *FusionEnvironment) UnmarshalJSON(data []byte) (e error) { m.DefinedTags = model.DefinedTags + m.IsSuspended = model.IsSuspended + m.Id = model.Id m.DisplayName = model.DisplayName diff --git a/fusionapps/fusionapps_fusionapplications_client.go b/fusionapps/fusionapps_fusionapplications_client.go index 5677f35b64..bae3dcf9b7 100644 --- a/fusionapps/fusionapps_fusionapplications_client.go +++ b/fusionapps/fusionapps_fusionapplications_client.go @@ -887,6 +887,64 @@ func (client FusionApplicationsClient) deleteServiceAttachment(ctx context.Conte return response, err } +// GenerateExtractDetails Begin the process of showing the details about where to retrieve data extract for a Fusion environment. +// +// # See also +// +// Click https://docs.cloud.oracle.com/en-us/iaas/tools/go-sdk-examples/latest/fusionapps/GenerateExtractDetails.go.html to see an example of how to use GenerateExtractDetails API. +// A default retry strategy applies to this operation GenerateExtractDetails() +func (client FusionApplicationsClient) GenerateExtractDetails(ctx context.Context, request GenerateExtractDetailsRequest) (response GenerateExtractDetailsResponse, err error) { + var ociResponse common.OCIResponse + policy := common.DefaultRetryPolicy() + if client.RetryPolicy() != nil { + policy = *client.RetryPolicy() + } + if request.RetryPolicy() != nil { + policy = *request.RetryPolicy() + } + ociResponse, err = common.Retry(ctx, request, client.generateExtractDetails, policy) + if err != nil { + if ociResponse != nil { + if httpResponse := ociResponse.HTTPResponse(); httpResponse != nil { + opcRequestId := httpResponse.Header.Get("opc-request-id") + response = GenerateExtractDetailsResponse{RawResponse: httpResponse, OpcRequestId: &opcRequestId} + } else { + response = GenerateExtractDetailsResponse{} + } + } + return + } + if convertedResponse, ok := ociResponse.(GenerateExtractDetailsResponse); ok { + response = convertedResponse + } else { + err = fmt.Errorf("failed to convert OCIResponse into GenerateExtractDetailsResponse") + } + return +} + +// generateExtractDetails implements the OCIOperation interface (enables retrying operations) +func (client FusionApplicationsClient) generateExtractDetails(ctx context.Context, request common.OCIRequest, binaryReqBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (common.OCIResponse, error) { + + httpRequest, err := request.HTTPRequest(http.MethodPost, "/fusionEnvironments/{fusionEnvironmentId}/actions/generateExtractDetails", binaryReqBody, extraHeaders) + if err != nil { + return nil, err + } + + var response GenerateExtractDetailsResponse + var httpResponse *http.Response + httpResponse, err = client.Call(ctx, &httpRequest) + defer common.CloseBodyIfValid(httpResponse) + response.RawResponse = httpResponse + if err != nil { + apiReferenceLink 
:= "https://docs.oracle.com/iaas/api/#/en/fusion-applications/20211201/FusionEnvironment/GenerateExtractDetails" + err = common.PostProcessServiceError(err, "FusionApplications", "GenerateExtractDetails", apiReferenceLink) + return response, err + } + + err = common.UnmarshalResponse(httpResponse, &response) + return response, err +} + // GetDataMaskingActivity Gets a DataMaskingActivity by identifier // // # See also @@ -1467,6 +1525,64 @@ func (client FusionApplicationsClient) getWorkRequest(ctx context.Context, reque return response, err } +// InitiateExtract Begin the process of generating the data extract for a Fusion environment. +// +// # See also +// +// Click https://docs.cloud.oracle.com/en-us/iaas/tools/go-sdk-examples/latest/fusionapps/InitiateExtract.go.html to see an example of how to use InitiateExtract API. +// A default retry strategy applies to this operation InitiateExtract() +func (client FusionApplicationsClient) InitiateExtract(ctx context.Context, request InitiateExtractRequest) (response InitiateExtractResponse, err error) { + var ociResponse common.OCIResponse + policy := common.DefaultRetryPolicy() + if client.RetryPolicy() != nil { + policy = *client.RetryPolicy() + } + if request.RetryPolicy() != nil { + policy = *request.RetryPolicy() + } + ociResponse, err = common.Retry(ctx, request, client.initiateExtract, policy) + if err != nil { + if ociResponse != nil { + if httpResponse := ociResponse.HTTPResponse(); httpResponse != nil { + opcRequestId := httpResponse.Header.Get("opc-request-id") + response = InitiateExtractResponse{RawResponse: httpResponse, OpcRequestId: &opcRequestId} + } else { + response = InitiateExtractResponse{} + } + } + return + } + if convertedResponse, ok := ociResponse.(InitiateExtractResponse); ok { + response = convertedResponse + } else { + err = fmt.Errorf("failed to convert OCIResponse into InitiateExtractResponse") + } + return +} + +// initiateExtract implements the OCIOperation interface (enables retrying operations) +func (client FusionApplicationsClient) initiateExtract(ctx context.Context, request common.OCIRequest, binaryReqBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (common.OCIResponse, error) { + + httpRequest, err := request.HTTPRequest(http.MethodPost, "/fusionEnvironments/{fusionEnvironmentId}/actions/initiateExtract", binaryReqBody, extraHeaders) + if err != nil { + return nil, err + } + + var response InitiateExtractResponse + var httpResponse *http.Response + httpResponse, err = client.Call(ctx, &httpRequest) + defer common.CloseBodyIfValid(httpResponse) + response.RawResponse = httpResponse + if err != nil { + apiReferenceLink := "https://docs.oracle.com/iaas/api/#/en/fusion-applications/20211201/FusionEnvironment/InitiateExtract" + err = common.PostProcessServiceError(err, "FusionApplications", "InitiateExtract", apiReferenceLink) + return response, err + } + + err = common.UnmarshalResponse(httpResponse, &response) + return response, err +} + // ListAdminUsers List all FusionEnvironment admin users // // # See also diff --git a/fusionapps/generate_extract_details_request_response.go b/fusionapps/generate_extract_details_request_response.go new file mode 100644 index 0000000000..7404678605 --- /dev/null +++ b/fusionapps/generate_extract_details_request_response.go @@ -0,0 +1,90 @@ +// Copyright (c) 2016, 2018, 2024, Oracle and/or its affiliates. All rights reserved. 
+// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. +// Code generated. DO NOT EDIT. + +package fusionapps + +import ( + "fmt" + "github.com/oracle/oci-go-sdk/v65/common" + "net/http" + "strings" +) + +// GenerateExtractDetailsRequest wrapper for the GenerateExtractDetails operation +// +// # See also +// +// Click https://docs.cloud.oracle.com/en-us/iaas/tools/go-sdk-examples/latest/fusionapps/GenerateExtractDetails.go.html to see an example of how to use GenerateExtractDetailsRequest. +type GenerateExtractDetailsRequest struct { + + // unique FusionEnvironment identifier + FusionEnvironmentId *string `mandatory:"true" contributesTo:"path" name:"fusionEnvironmentId"` + + // The client request ID for tracing. + OpcRequestId *string `mandatory:"false" contributesTo:"header" name:"opc-request-id"` + + // Metadata about the request. This information will not be transmitted to the service, but + // represents information that the SDK will consume to drive retry behavior. + RequestMetadata common.RequestMetadata +} + +func (request GenerateExtractDetailsRequest) String() string { + return common.PointerString(request) +} + +// HTTPRequest implements the OCIRequest interface +func (request GenerateExtractDetailsRequest) HTTPRequest(method, path string, binaryRequestBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (http.Request, error) { + + _, err := request.ValidateEnumValue() + if err != nil { + return http.Request{}, err + } + return common.MakeDefaultHTTPRequestWithTaggedStructAndExtraHeaders(method, path, request, extraHeaders) +} + +// BinaryRequestBody implements the OCIRequest interface +func (request GenerateExtractDetailsRequest) BinaryRequestBody() (*common.OCIReadSeekCloser, bool) { + + return nil, false + +} + +// RetryPolicy implements the OCIRetryableRequest interface. This retrieves the specified retry policy. +func (request GenerateExtractDetailsRequest) RetryPolicy() *common.RetryPolicy { + return request.RequestMetadata.RetryPolicy +} + +// ValidateEnumValue returns an error when providing an unsupported enum value +// This function is being called during constructing API request process +// Not recommended for calling this function directly +func (request GenerateExtractDetailsRequest) ValidateEnumValue() (bool, error) { + errMessage := []string{} + if len(errMessage) > 0 { + return true, fmt.Errorf(strings.Join(errMessage, "\n")) + } + return false, nil +} + +// GenerateExtractDetailsResponse wrapper for the GenerateExtractDetails operation +type GenerateExtractDetailsResponse struct { + + // The underlying http response + RawResponse *http.Response + + // The ExtractDetailsCollection instance + ExtractDetailsCollection `presentIn:"body"` + + // Unique Oracle-assigned identifier for the request. If you need to contact + // Oracle about a particular request, please provide the request ID. 
+ OpcRequestId *string `presentIn:"header" name:"opc-request-id"` +} + +func (response GenerateExtractDetailsResponse) String() string { + return common.PointerString(response) +} + +// HTTPResponse implements the OCIResponse interface +func (response GenerateExtractDetailsResponse) HTTPResponse() *http.Response { + return response.RawResponse +} diff --git a/fusionapps/initiate_extract_request_response.go b/fusionapps/initiate_extract_request_response.go new file mode 100644 index 0000000000..e8d9e14255 --- /dev/null +++ b/fusionapps/initiate_extract_request_response.go @@ -0,0 +1,90 @@ +// Copyright (c) 2016, 2018, 2024, Oracle and/or its affiliates. All rights reserved. +// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. +// Code generated. DO NOT EDIT. + +package fusionapps + +import ( + "fmt" + "github.com/oracle/oci-go-sdk/v65/common" + "net/http" + "strings" +) + +// InitiateExtractRequest wrapper for the InitiateExtract operation +// +// # See also +// +// Click https://docs.cloud.oracle.com/en-us/iaas/tools/go-sdk-examples/latest/fusionapps/InitiateExtract.go.html to see an example of how to use InitiateExtractRequest. +type InitiateExtractRequest struct { + + // unique FusionEnvironment identifier + FusionEnvironmentId *string `mandatory:"true" contributesTo:"path" name:"fusionEnvironmentId"` + + // The client request ID for tracing. + OpcRequestId *string `mandatory:"false" contributesTo:"header" name:"opc-request-id"` + + // Metadata about the request. This information will not be transmitted to the service, but + // represents information that the SDK will consume to drive retry behavior. + RequestMetadata common.RequestMetadata +} + +func (request InitiateExtractRequest) String() string { + return common.PointerString(request) +} + +// HTTPRequest implements the OCIRequest interface +func (request InitiateExtractRequest) HTTPRequest(method, path string, binaryRequestBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (http.Request, error) { + + _, err := request.ValidateEnumValue() + if err != nil { + return http.Request{}, err + } + return common.MakeDefaultHTTPRequestWithTaggedStructAndExtraHeaders(method, path, request, extraHeaders) +} + +// BinaryRequestBody implements the OCIRequest interface +func (request InitiateExtractRequest) BinaryRequestBody() (*common.OCIReadSeekCloser, bool) { + + return nil, false + +} + +// RetryPolicy implements the OCIRetryableRequest interface. This retrieves the specified retry policy. +func (request InitiateExtractRequest) RetryPolicy() *common.RetryPolicy { + return request.RequestMetadata.RetryPolicy +} + +// ValidateEnumValue returns an error when providing an unsupported enum value +// This function is being called during constructing API request process +// Not recommended for calling this function directly +func (request InitiateExtractRequest) ValidateEnumValue() (bool, error) { + errMessage := []string{} + if len(errMessage) > 0 { + return true, fmt.Errorf(strings.Join(errMessage, "\n")) + } + return false, nil +} + +// InitiateExtractResponse wrapper for the InitiateExtract operation +type InitiateExtractResponse struct { + + // The underlying http response + RawResponse *http.Response + + // Unique Oracle-assigned identifier for the asynchronous request. You can use this to query status of the asynchronous operation. 
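// Illustrative usage (not part of this patch): a minimal sketch of the
// two-step extract flow added above. InitiateExtract starts the asynchronous
// generation (track it via the returned work request ID); once that work
// request completes, GenerateExtractDetails returns where to download from.
// runExtract and envID are hypothetical; client is assumed to be a
// fusionapps.FusionApplicationsClient built from a configuration provider in
// the SDK's usual way. Assumes imports "context", "fmt", the fusionapps
// package, and github.com/oracle/oci-go-sdk/v65/common.
func runExtract(ctx context.Context, client fusionapps.FusionApplicationsClient, envID string) error {
	initResp, err := client.InitiateExtract(ctx, fusionapps.InitiateExtractRequest{
		FusionEnvironmentId: common.String(envID),
	})
	if err != nil {
		return err
	}
	fmt.Println("extract work request:", *initResp.OpcWorkRequestId)

	// After the work request succeeds, fetch the download details.
	detResp, err := client.GenerateExtractDetails(ctx, fusionapps.GenerateExtractDetailsRequest{
		FusionEnvironmentId: common.String(envID),
	})
	if err != nil {
		return err
	}
	for _, item := range detResp.Items {
		fmt.Println("PAR URL:", *item.ParUrl) // decrypt with *item.Password
	}
	return nil
}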
+ OpcWorkRequestId *string `presentIn:"header" name:"opc-work-request-id"` + + // Unique Oracle-assigned identifier for the request. If you need to contact + // Oracle about a particular request, please provide the request ID. + OpcRequestId *string `presentIn:"header" name:"opc-request-id"` +} + +func (response InitiateExtractResponse) String() string { + return common.PointerString(response) +} + +// HTTPResponse implements the OCIResponse interface +func (response InitiateExtractResponse) HTTPResponse() *http.Response { + return response.RawResponse +} diff --git a/fusionapps/subscription.go b/fusionapps/subscription.go index f2010c1167..f63f9a0e4e 100644 --- a/fusionapps/subscription.go +++ b/fusionapps/subscription.go @@ -29,6 +29,12 @@ type Subscription struct { // Stock keeping unit. Skus []SubscriptionSku `mandatory:"true" json:"skus"` + + // Lifecycle state of the subscription. + LifecycleState SubscriptionLifecycleStateEnum `mandatory:"false" json:"lifecycleState,omitempty"` + + // Subscription resource intermediate states. + LifecycleDetails SubscriptionLifecycleDetailsEnum `mandatory:"false" json:"lifecycleDetails,omitempty"` } func (m Subscription) String() string { @@ -41,8 +47,218 @@ func (m Subscription) String() string { func (m Subscription) ValidateEnumValue() (bool, error) { errMessage := []string{} + if _, ok := GetMappingSubscriptionLifecycleStateEnum(string(m.LifecycleState)); !ok && m.LifecycleState != "" { + errMessage = append(errMessage, fmt.Sprintf("unsupported enum value for LifecycleState: %s. Supported values are: %s.", m.LifecycleState, strings.Join(GetSubscriptionLifecycleStateEnumStringValues(), ","))) + } + if _, ok := GetMappingSubscriptionLifecycleDetailsEnum(string(m.LifecycleDetails)); !ok && m.LifecycleDetails != "" { + errMessage = append(errMessage, fmt.Sprintf("unsupported enum value for LifecycleDetails: %s. 
Supported values are: %s.", m.LifecycleDetails, strings.Join(GetSubscriptionLifecycleDetailsEnumStringValues(), ","))) + } if len(errMessage) > 0 { return true, fmt.Errorf(strings.Join(errMessage, "\n")) } return false, nil } + +// SubscriptionLifecycleStateEnum Enum with underlying type: string +type SubscriptionLifecycleStateEnum string + +// Set of constants representing the allowable values for SubscriptionLifecycleStateEnum +const ( + SubscriptionLifecycleStateCreating SubscriptionLifecycleStateEnum = "CREATING" + SubscriptionLifecycleStateActive SubscriptionLifecycleStateEnum = "ACTIVE" + SubscriptionLifecycleStateInactive SubscriptionLifecycleStateEnum = "INACTIVE" + SubscriptionLifecycleStateUpdating SubscriptionLifecycleStateEnum = "UPDATING" + SubscriptionLifecycleStateDeleting SubscriptionLifecycleStateEnum = "DELETING" + SubscriptionLifecycleStateDeleted SubscriptionLifecycleStateEnum = "DELETED" + SubscriptionLifecycleStateFailed SubscriptionLifecycleStateEnum = "FAILED" + SubscriptionLifecycleStateNeedsAttention SubscriptionLifecycleStateEnum = "NEEDS_ATTENTION" +) + +var mappingSubscriptionLifecycleStateEnum = map[string]SubscriptionLifecycleStateEnum{ + "CREATING": SubscriptionLifecycleStateCreating, + "ACTIVE": SubscriptionLifecycleStateActive, + "INACTIVE": SubscriptionLifecycleStateInactive, + "UPDATING": SubscriptionLifecycleStateUpdating, + "DELETING": SubscriptionLifecycleStateDeleting, + "DELETED": SubscriptionLifecycleStateDeleted, + "FAILED": SubscriptionLifecycleStateFailed, + "NEEDS_ATTENTION": SubscriptionLifecycleStateNeedsAttention, +} + +var mappingSubscriptionLifecycleStateEnumLowerCase = map[string]SubscriptionLifecycleStateEnum{ + "creating": SubscriptionLifecycleStateCreating, + "active": SubscriptionLifecycleStateActive, + "inactive": SubscriptionLifecycleStateInactive, + "updating": SubscriptionLifecycleStateUpdating, + "deleting": SubscriptionLifecycleStateDeleting, + "deleted": SubscriptionLifecycleStateDeleted, + "failed": SubscriptionLifecycleStateFailed, + "needs_attention": SubscriptionLifecycleStateNeedsAttention, +} + +// GetSubscriptionLifecycleStateEnumValues Enumerates the set of values for SubscriptionLifecycleStateEnum +func GetSubscriptionLifecycleStateEnumValues() []SubscriptionLifecycleStateEnum { + values := make([]SubscriptionLifecycleStateEnum, 0) + for _, v := range mappingSubscriptionLifecycleStateEnum { + values = append(values, v) + } + return values +} + +// GetSubscriptionLifecycleStateEnumStringValues Enumerates the set of values in String for SubscriptionLifecycleStateEnum +func GetSubscriptionLifecycleStateEnumStringValues() []string { + return []string{ + "CREATING", + "ACTIVE", + "INACTIVE", + "UPDATING", + "DELETING", + "DELETED", + "FAILED", + "NEEDS_ATTENTION", + } +} + +// GetMappingSubscriptionLifecycleStateEnum performs case Insensitive comparison on enum value and return the desired enum +func GetMappingSubscriptionLifecycleStateEnum(val string) (SubscriptionLifecycleStateEnum, bool) { + enum, ok := mappingSubscriptionLifecycleStateEnumLowerCase[strings.ToLower(val)] + return enum, ok +} + +// SubscriptionLifecycleDetailsEnum Enum with underlying type: string +type SubscriptionLifecycleDetailsEnum string + +// Set of constants representing the allowable values for SubscriptionLifecycleDetailsEnum +const ( + SubscriptionLifecycleDetailsActive SubscriptionLifecycleDetailsEnum = "ACTIVE" + SubscriptionLifecycleDetailsPurged SubscriptionLifecycleDetailsEnum = "PURGED" + SubscriptionLifecycleDetailsCanceled 
SubscriptionLifecycleDetailsEnum = "CANCELED" + SubscriptionLifecycleDetailsPaidPurged SubscriptionLifecycleDetailsEnum = "PAID_PURGED" + SubscriptionLifecycleDetailsInitialized SubscriptionLifecycleDetailsEnum = "INITIALIZED" + SubscriptionLifecycleDetailsSoftTerminated SubscriptionLifecycleDetailsEnum = "SOFT_TERMINATED" + SubscriptionLifecycleDetailsDisabled SubscriptionLifecycleDetailsEnum = "DISABLED" + SubscriptionLifecycleDetailsBeginTermination SubscriptionLifecycleDetailsEnum = "BEGIN_TERMINATION" + SubscriptionLifecycleDetailsMigrated SubscriptionLifecycleDetailsEnum = "MIGRATED" + SubscriptionLifecycleDetailsPendingCancelation SubscriptionLifecycleDetailsEnum = "PENDING_CANCELATION" + SubscriptionLifecycleDetailsArchived SubscriptionLifecycleDetailsEnum = "ARCHIVED" + SubscriptionLifecycleDetailsNonRecoverable SubscriptionLifecycleDetailsEnum = "NON_RECOVERABLE" + SubscriptionLifecycleDetailsBeginSoftTermination SubscriptionLifecycleDetailsEnum = "BEGIN_SOFT_TERMINATION" + SubscriptionLifecycleDetailsActivated SubscriptionLifecycleDetailsEnum = "ACTIVATED" + SubscriptionLifecycleDetailsAccessDisabled SubscriptionLifecycleDetailsEnum = "ACCESS_DISABLED" + SubscriptionLifecycleDetailsPendingRegistration SubscriptionLifecycleDetailsEnum = "PENDING_REGISTRATION" + SubscriptionLifecycleDetailsTerminated SubscriptionLifecycleDetailsEnum = "TERMINATED" + SubscriptionLifecycleDetailsRelocating SubscriptionLifecycleDetailsEnum = "RELOCATING" + SubscriptionLifecycleDetailsDeprovisioned SubscriptionLifecycleDetailsEnum = "DEPROVISIONED" + SubscriptionLifecycleDetailsProvisioned SubscriptionLifecycleDetailsEnum = "PROVISIONED" + SubscriptionLifecycleDetailsBeginTerminationPassive SubscriptionLifecycleDetailsEnum = "BEGIN_TERMINATION_PASSIVE" + SubscriptionLifecycleDetailsLocked SubscriptionLifecycleDetailsEnum = "LOCKED" + SubscriptionLifecycleDetailsPendingDeprovisioning SubscriptionLifecycleDetailsEnum = "PENDING_DEPROVISIONING" + SubscriptionLifecycleDetailsRegistered SubscriptionLifecycleDetailsEnum = "REGISTERED" + SubscriptionLifecycleDetailsCancelled SubscriptionLifecycleDetailsEnum = "CANCELLED" + SubscriptionLifecycleDetailsExpired SubscriptionLifecycleDetailsEnum = "EXPIRED" +) + +var mappingSubscriptionLifecycleDetailsEnum = map[string]SubscriptionLifecycleDetailsEnum{ + "ACTIVE": SubscriptionLifecycleDetailsActive, + "PURGED": SubscriptionLifecycleDetailsPurged, + "CANCELED": SubscriptionLifecycleDetailsCanceled, + "PAID_PURGED": SubscriptionLifecycleDetailsPaidPurged, + "INITIALIZED": SubscriptionLifecycleDetailsInitialized, + "SOFT_TERMINATED": SubscriptionLifecycleDetailsSoftTerminated, + "DISABLED": SubscriptionLifecycleDetailsDisabled, + "BEGIN_TERMINATION": SubscriptionLifecycleDetailsBeginTermination, + "MIGRATED": SubscriptionLifecycleDetailsMigrated, + "PENDING_CANCELATION": SubscriptionLifecycleDetailsPendingCancelation, + "ARCHIVED": SubscriptionLifecycleDetailsArchived, + "NON_RECOVERABLE": SubscriptionLifecycleDetailsNonRecoverable, + "BEGIN_SOFT_TERMINATION": SubscriptionLifecycleDetailsBeginSoftTermination, + "ACTIVATED": SubscriptionLifecycleDetailsActivated, + "ACCESS_DISABLED": SubscriptionLifecycleDetailsAccessDisabled, + "PENDING_REGISTRATION": SubscriptionLifecycleDetailsPendingRegistration, + "TERMINATED": SubscriptionLifecycleDetailsTerminated, + "RELOCATING": SubscriptionLifecycleDetailsRelocating, + "DEPROVISIONED": SubscriptionLifecycleDetailsDeprovisioned, + "PROVISIONED": SubscriptionLifecycleDetailsProvisioned, + "BEGIN_TERMINATION_PASSIVE": 
SubscriptionLifecycleDetailsBeginTerminationPassive, + "LOCKED": SubscriptionLifecycleDetailsLocked, + "PENDING_DEPROVISIONING": SubscriptionLifecycleDetailsPendingDeprovisioning, + "REGISTERED": SubscriptionLifecycleDetailsRegistered, + "CANCELLED": SubscriptionLifecycleDetailsCancelled, + "EXPIRED": SubscriptionLifecycleDetailsExpired, +} + +var mappingSubscriptionLifecycleDetailsEnumLowerCase = map[string]SubscriptionLifecycleDetailsEnum{ + "active": SubscriptionLifecycleDetailsActive, + "purged": SubscriptionLifecycleDetailsPurged, + "canceled": SubscriptionLifecycleDetailsCanceled, + "paid_purged": SubscriptionLifecycleDetailsPaidPurged, + "initialized": SubscriptionLifecycleDetailsInitialized, + "soft_terminated": SubscriptionLifecycleDetailsSoftTerminated, + "disabled": SubscriptionLifecycleDetailsDisabled, + "begin_termination": SubscriptionLifecycleDetailsBeginTermination, + "migrated": SubscriptionLifecycleDetailsMigrated, + "pending_cancelation": SubscriptionLifecycleDetailsPendingCancelation, + "archived": SubscriptionLifecycleDetailsArchived, + "non_recoverable": SubscriptionLifecycleDetailsNonRecoverable, + "begin_soft_termination": SubscriptionLifecycleDetailsBeginSoftTermination, + "activated": SubscriptionLifecycleDetailsActivated, + "access_disabled": SubscriptionLifecycleDetailsAccessDisabled, + "pending_registration": SubscriptionLifecycleDetailsPendingRegistration, + "terminated": SubscriptionLifecycleDetailsTerminated, + "relocating": SubscriptionLifecycleDetailsRelocating, + "deprovisioned": SubscriptionLifecycleDetailsDeprovisioned, + "provisioned": SubscriptionLifecycleDetailsProvisioned, + "begin_termination_passive": SubscriptionLifecycleDetailsBeginTerminationPassive, + "locked": SubscriptionLifecycleDetailsLocked, + "pending_deprovisioning": SubscriptionLifecycleDetailsPendingDeprovisioning, + "registered": SubscriptionLifecycleDetailsRegistered, + "cancelled": SubscriptionLifecycleDetailsCancelled, + "expired": SubscriptionLifecycleDetailsExpired, +} + +// GetSubscriptionLifecycleDetailsEnumValues Enumerates the set of values for SubscriptionLifecycleDetailsEnum +func GetSubscriptionLifecycleDetailsEnumValues() []SubscriptionLifecycleDetailsEnum { + values := make([]SubscriptionLifecycleDetailsEnum, 0) + for _, v := range mappingSubscriptionLifecycleDetailsEnum { + values = append(values, v) + } + return values +} + +// GetSubscriptionLifecycleDetailsEnumStringValues Enumerates the set of values in String for SubscriptionLifecycleDetailsEnum +func GetSubscriptionLifecycleDetailsEnumStringValues() []string { + return []string{ + "ACTIVE", + "PURGED", + "CANCELED", + "PAID_PURGED", + "INITIALIZED", + "SOFT_TERMINATED", + "DISABLED", + "BEGIN_TERMINATION", + "MIGRATED", + "PENDING_CANCELATION", + "ARCHIVED", + "NON_RECOVERABLE", + "BEGIN_SOFT_TERMINATION", + "ACTIVATED", + "ACCESS_DISABLED", + "PENDING_REGISTRATION", + "TERMINATED", + "RELOCATING", + "DEPROVISIONED", + "PROVISIONED", + "BEGIN_TERMINATION_PASSIVE", + "LOCKED", + "PENDING_DEPROVISIONING", + "REGISTERED", + "CANCELLED", + "EXPIRED", + } +} + +// GetMappingSubscriptionLifecycleDetailsEnum performs case Insensitive comparison on enum value and return the desired enum +func GetMappingSubscriptionLifecycleDetailsEnum(val string) (SubscriptionLifecycleDetailsEnum, bool) { + enum, ok := mappingSubscriptionLifecycleDetailsEnumLowerCase[strings.ToLower(val)] + return enum, ok +} diff --git a/fusionapps/work_request.go b/fusionapps/work_request.go index 7e15bc26ff..21be9c4622 100644 --- 
a/fusionapps/work_request.go +++ b/fusionapps/work_request.go @@ -99,6 +99,12 @@ const ( WorkRequestOperationTypeRefreshFusionEnvironment WorkRequestOperationTypeEnum = "REFRESH_FUSION_ENVIRONMENT" WorkRequestOperationTypeExecuteColdPatch WorkRequestOperationTypeEnum = "EXECUTE_COLD_PATCH" WorkRequestOperationTypeDataMaskFusionEnvironment WorkRequestOperationTypeEnum = "DATA_MASK_FUSION_ENVIRONMENT" + WorkRequestOperationTypeInitiateExtract WorkRequestOperationTypeEnum = "INITIATE_EXTRACT" + WorkRequestOperationTypeSubscriptionSuspend WorkRequestOperationTypeEnum = "SUBSCRIPTION_SUSPEND" + WorkRequestOperationTypeSubscriptionExpire WorkRequestOperationTypeEnum = "SUBSCRIPTION_EXPIRE" + WorkRequestOperationTypeSubscriptionUpdate WorkRequestOperationTypeEnum = "SUBSCRIPTION_UPDATE" + WorkRequestOperationTypeSubscriptionResume WorkRequestOperationTypeEnum = "SUBSCRIPTION_RESUME" + WorkRequestOperationTypeSubscriptionTerminate WorkRequestOperationTypeEnum = "SUBSCRIPTION_TERMINATE" ) var mappingWorkRequestOperationTypeEnum = map[string]WorkRequestOperationTypeEnum{ @@ -123,6 +129,12 @@ var mappingWorkRequestOperationTypeEnum = map[string]WorkRequestOperationTypeEnu "REFRESH_FUSION_ENVIRONMENT": WorkRequestOperationTypeRefreshFusionEnvironment, "EXECUTE_COLD_PATCH": WorkRequestOperationTypeExecuteColdPatch, "DATA_MASK_FUSION_ENVIRONMENT": WorkRequestOperationTypeDataMaskFusionEnvironment, + "INITIATE_EXTRACT": WorkRequestOperationTypeInitiateExtract, + "SUBSCRIPTION_SUSPEND": WorkRequestOperationTypeSubscriptionSuspend, + "SUBSCRIPTION_EXPIRE": WorkRequestOperationTypeSubscriptionExpire, + "SUBSCRIPTION_UPDATE": WorkRequestOperationTypeSubscriptionUpdate, + "SUBSCRIPTION_RESUME": WorkRequestOperationTypeSubscriptionResume, + "SUBSCRIPTION_TERMINATE": WorkRequestOperationTypeSubscriptionTerminate, } var mappingWorkRequestOperationTypeEnumLowerCase = map[string]WorkRequestOperationTypeEnum{ @@ -147,6 +159,12 @@ var mappingWorkRequestOperationTypeEnumLowerCase = map[string]WorkRequestOperati "refresh_fusion_environment": WorkRequestOperationTypeRefreshFusionEnvironment, "execute_cold_patch": WorkRequestOperationTypeExecuteColdPatch, "data_mask_fusion_environment": WorkRequestOperationTypeDataMaskFusionEnvironment, + "initiate_extract": WorkRequestOperationTypeInitiateExtract, + "subscription_suspend": WorkRequestOperationTypeSubscriptionSuspend, + "subscription_expire": WorkRequestOperationTypeSubscriptionExpire, + "subscription_update": WorkRequestOperationTypeSubscriptionUpdate, + "subscription_resume": WorkRequestOperationTypeSubscriptionResume, + "subscription_terminate": WorkRequestOperationTypeSubscriptionTerminate, } // GetWorkRequestOperationTypeEnumValues Enumerates the set of values for WorkRequestOperationTypeEnum @@ -182,6 +200,12 @@ func GetWorkRequestOperationTypeEnumStringValues() []string { "REFRESH_FUSION_ENVIRONMENT", "EXECUTE_COLD_PATCH", "DATA_MASK_FUSION_ENVIRONMENT", + "INITIATE_EXTRACT", + "SUBSCRIPTION_SUSPEND", + "SUBSCRIPTION_EXPIRE", + "SUBSCRIPTION_UPDATE", + "SUBSCRIPTION_RESUME", + "SUBSCRIPTION_TERMINATE", } } diff --git a/globallydistributeddatabase/create_dedicated_sharded_database.go b/globallydistributeddatabase/create_dedicated_sharded_database.go index 55312f9447..4bb63b5220 100644 --- a/globallydistributeddatabase/create_dedicated_sharded_database.go +++ b/globallydistributeddatabase/create_dedicated_sharded_database.go @@ -68,6 +68,12 @@ type CreateDedicatedShardedDatabase struct { // Example: `{"foo-namespace": {"bar-key": "value"}}` DefinedTags 
map[string]map[string]interface{} `mandatory:"false" json:"definedTags"`

+	// The Replication factor for RAFT replication based sharded database. Currently supported values are 3, 5 and 7.
+	ReplicationFactor *int `mandatory:"false" json:"replicationFactor"`
+
+	// For RAFT replication based sharded database, the value should be at least twice the number of shards.
+	ReplicationUnit *int `mandatory:"false" json:"replicationUnit"`
+
 	// The certificate common name used in all cloudAutonomousVmClusters for the sharded database topology. Eg. Production.
 	// All the clusters used in one sharded database topology shall have same CABundle setup. Valid characterset for
 	// clusterCertificateCommonName include uppercase or lowercase letters, numbers, hyphens, underscores, and period.
@@ -82,6 +88,9 @@ type CreateDedicatedShardedDatabase struct {

 	// Sharding Method.
 	ShardingMethod CreateDedicatedShardedDatabaseShardingMethodEnum `mandatory:"true" json:"shardingMethod"`
+
+	// The Replication method for sharded database.
+	ReplicationMethod DedicatedShardedDatabaseReplicationMethodEnum `mandatory:"false" json:"replicationMethod,omitempty"`
 }

 // GetCompartmentId returns CompartmentId
@@ -120,6 +129,9 @@ func (m CreateDedicatedShardedDatabase) ValidateEnumValue() (bool, error) {
 		errMessage = append(errMessage, fmt.Sprintf("unsupported enum value for ShardingMethod: %s. Supported values are: %s.", m.ShardingMethod, strings.Join(GetCreateDedicatedShardedDatabaseShardingMethodEnumStringValues(), ",")))
 	}
+	if _, ok := GetMappingDedicatedShardedDatabaseReplicationMethodEnum(string(m.ReplicationMethod)); !ok && m.ReplicationMethod != "" {
+		errMessage = append(errMessage, fmt.Sprintf("unsupported enum value for ReplicationMethod: %s. Supported values are: %s.", m.ReplicationMethod, strings.Join(GetDedicatedShardedDatabaseReplicationMethodEnumStringValues(), ",")))
+	}
 	if len(errMessage) > 0 {
 		return true, fmt.Errorf(strings.Join(errMessage, "\n"))
 	}
diff --git a/globallydistributeddatabase/dedicated_sharded_database.go b/globallydistributeddatabase/dedicated_sharded_database.go
index 149b880d52..79e09dcded 100644
--- a/globallydistributeddatabase/dedicated_sharded_database.go
+++ b/globallydistributeddatabase/dedicated_sharded_database.go
@@ -61,6 +61,12 @@ type DedicatedShardedDatabase struct {
 	// Example: `{"orcl-cloud": {"free-tier-retained": "true"}}`
 	SystemTags map[string]map[string]interface{} `mandatory:"false" json:"systemTags"`

+	// The Replication factor for RAFT replication based sharded database. Currently supported values are 3, 5 and 7.
+	ReplicationFactor *int `mandatory:"false" json:"replicationFactor"`
+
+	// For RAFT replication based sharded database, the value should be at least twice the number of shards.
+	ReplicationUnit *int `mandatory:"false" json:"replicationUnit"`
+
 	// The certificate common name used in all cloudAutonomousVmClusters for the sharded database topology. Eg. Production.
 	// All the clusters used in one sharded database topology shall have same CABundle setup. Valid characterset for
 	// clusterCertificateCommonName include uppercase or lowercase letters, numbers, hyphens, underscores, and period.
@@ -99,6 +105,10 @@ type DedicatedShardedDatabase struct {
 	// Details of ATP-D based catalogs.
 	CatalogDetails []DedicatedCatalogDetails `mandatory:"false" json:"catalogDetails"`

+	// The Replication method for sharded database. Use RAFT for Raft replication, and DG for
+	// DataGuard. If replicationMethod is not provided, it defaults to DG.
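// Illustrative usage (not part of this patch): a sketch of the new RAFT
// replication knobs on CreateDedicatedShardedDatabase, following the doc
// comments above (replication factor in {3, 5, 7}; replication unit at least
// twice the shard count). All other required fields are elided; this only
// shows the replication settings. Assumes the globallydistributeddatabase
// package and github.com/oracle/oci-go-sdk/v65/common are imported.
details := globallydistributeddatabase.CreateDedicatedShardedDatabase{
	// ...compartment, display name, shard/catalog details, sharding method...
	ReplicationMethod: globallydistributeddatabase.DedicatedShardedDatabaseReplicationMethodRaft,
	ReplicationFactor: common.Int(3),
	ReplicationUnit:   common.Int(6), // e.g. three shards => at least six
}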
+ ReplicationMethod DedicatedShardedDatabaseReplicationMethodEnum `mandatory:"false" json:"replicationMethod,omitempty"` + // Possible workload types. DbWorkload DedicatedShardedDatabaseDbWorkloadEnum `mandatory:"false" json:"dbWorkload,omitempty"` @@ -168,6 +178,9 @@ func (m DedicatedShardedDatabase) String() string { // Not recommended for calling this function directly func (m DedicatedShardedDatabase) ValidateEnumValue() (bool, error) { errMessage := []string{} + if _, ok := GetMappingDedicatedShardedDatabaseReplicationMethodEnum(string(m.ReplicationMethod)); !ok && m.ReplicationMethod != "" { + errMessage = append(errMessage, fmt.Sprintf("unsupported enum value for ReplicationMethod: %s. Supported values are: %s.", m.ReplicationMethod, strings.Join(GetDedicatedShardedDatabaseReplicationMethodEnumStringValues(), ","))) + } if _, ok := GetMappingDedicatedShardedDatabaseDbWorkloadEnum(string(m.DbWorkload)); !ok && m.DbWorkload != "" { errMessage = append(errMessage, fmt.Sprintf("unsupported enum value for DbWorkload: %s. Supported values are: %s.", m.DbWorkload, strings.Join(GetDedicatedShardedDatabaseDbWorkloadEnumStringValues(), ","))) } @@ -198,6 +211,48 @@ func (m DedicatedShardedDatabase) MarshalJSON() (buff []byte, e error) { return json.Marshal(&s) } +// DedicatedShardedDatabaseReplicationMethodEnum Enum with underlying type: string +type DedicatedShardedDatabaseReplicationMethodEnum string + +// Set of constants representing the allowable values for DedicatedShardedDatabaseReplicationMethodEnum +const ( + DedicatedShardedDatabaseReplicationMethodRaft DedicatedShardedDatabaseReplicationMethodEnum = "RAFT" + DedicatedShardedDatabaseReplicationMethodDg DedicatedShardedDatabaseReplicationMethodEnum = "DG" +) + +var mappingDedicatedShardedDatabaseReplicationMethodEnum = map[string]DedicatedShardedDatabaseReplicationMethodEnum{ + "RAFT": DedicatedShardedDatabaseReplicationMethodRaft, + "DG": DedicatedShardedDatabaseReplicationMethodDg, +} + +var mappingDedicatedShardedDatabaseReplicationMethodEnumLowerCase = map[string]DedicatedShardedDatabaseReplicationMethodEnum{ + "raft": DedicatedShardedDatabaseReplicationMethodRaft, + "dg": DedicatedShardedDatabaseReplicationMethodDg, +} + +// GetDedicatedShardedDatabaseReplicationMethodEnumValues Enumerates the set of values for DedicatedShardedDatabaseReplicationMethodEnum +func GetDedicatedShardedDatabaseReplicationMethodEnumValues() []DedicatedShardedDatabaseReplicationMethodEnum { + values := make([]DedicatedShardedDatabaseReplicationMethodEnum, 0) + for _, v := range mappingDedicatedShardedDatabaseReplicationMethodEnum { + values = append(values, v) + } + return values +} + +// GetDedicatedShardedDatabaseReplicationMethodEnumStringValues Enumerates the set of values in String for DedicatedShardedDatabaseReplicationMethodEnum +func GetDedicatedShardedDatabaseReplicationMethodEnumStringValues() []string { + return []string{ + "RAFT", + "DG", + } +} + +// GetMappingDedicatedShardedDatabaseReplicationMethodEnum performs case Insensitive comparison on enum value and return the desired enum +func GetMappingDedicatedShardedDatabaseReplicationMethodEnum(val string) (DedicatedShardedDatabaseReplicationMethodEnum, bool) { + enum, ok := mappingDedicatedShardedDatabaseReplicationMethodEnumLowerCase[strings.ToLower(val)] + return enum, ok +} + // DedicatedShardedDatabaseDbWorkloadEnum Enum with underlying type: string type DedicatedShardedDatabaseDbWorkloadEnum string diff --git a/globallydistributeddatabase/dedicated_sharded_database_summary.go 
b/globallydistributeddatabase/dedicated_sharded_database_summary.go
index dce2ec4616..6c7bd19760 100644
--- a/globallydistributeddatabase/dedicated_sharded_database_summary.go
+++ b/globallydistributeddatabase/dedicated_sharded_database_summary.go
@@ -55,6 +55,12 @@ type DedicatedShardedDatabaseSummary struct {
 	// Example: `{"orcl-cloud": {"free-tier-retained": "true"}}`
 	SystemTags map[string]map[string]interface{} `mandatory:"false" json:"systemTags"`

+	// The Replication factor for RAFT replication based sharded database. Currently supported values are 3, 5 and 7.
+	ReplicationFactor *int `mandatory:"false" json:"replicationFactor"`
+
+	// For RAFT replication based sharded database, the value should be at least twice the number of shards.
+	ReplicationUnit *int `mandatory:"false" json:"replicationUnit"`
+
 	// The certificate common name used in all cloudAutonomousVmClusters for the sharded database topology. Eg. Production.
 	// All the clusters used in one sharded database topology shall have same CABundle setup. Valid characterset for
 	// clusterCertificateCommonName include uppercase or lowercase letters, numbers, hyphens, underscores, and period.
@@ -96,6 +102,9 @@ type DedicatedShardedDatabaseSummary struct {

 	// Lifecycle state of sharded database.
 	LifecycleState ShardedDatabaseLifecycleStateEnum `mandatory:"true" json:"lifecycleState"`
+
+	// The Replication method for sharded database.
+	ReplicationMethod DedicatedShardedDatabaseReplicationMethodEnum `mandatory:"false" json:"replicationMethod,omitempty"`
 }

 // GetId returns Id
@@ -167,6 +176,9 @@ func (m DedicatedShardedDatabaseSummary) ValidateEnumValue() (bool, error) {
 	if _, ok := GetMappingShardedDatabaseLifecycleStateEnum(string(m.LifecycleState)); !ok && m.LifecycleState != "" {
 		errMessage = append(errMessage, fmt.Sprintf("unsupported enum value for LifecycleState: %s. Supported values are: %s.", m.LifecycleState, strings.Join(GetShardedDatabaseLifecycleStateEnumStringValues(), ",")))
 	}
+	if _, ok := GetMappingDedicatedShardedDatabaseReplicationMethodEnum(string(m.ReplicationMethod)); !ok && m.ReplicationMethod != "" {
+		errMessage = append(errMessage, fmt.Sprintf("unsupported enum value for ReplicationMethod: %s. Supported values are: %s.", m.ReplicationMethod, strings.Join(GetDedicatedShardedDatabaseReplicationMethodEnumStringValues(), ",")))
+	}
 	if len(errMessage) > 0 {
 		return true, fmt.Errorf(strings.Join(errMessage, "\n"))
 	}
diff --git a/globallydistributeddatabase/generate_gsm_certificate_signing_request_request_response.go b/globallydistributeddatabase/generate_gsm_certificate_signing_request_request_response.go
index e5f11eac8d..579d2d55b1 100644
--- a/globallydistributeddatabase/generate_gsm_certificate_signing_request_request_response.go
+++ b/globallydistributeddatabase/generate_gsm_certificate_signing_request_request_response.go
@@ -38,6 +38,9 @@ type GenerateGsmCertificateSigningRequestRequest struct {
 	// might be rejected.
 	OpcRetryToken *string `mandatory:"false" contributesTo:"header" name:"opc-retry-token"`

+	// The ID of the CA bundle.
+	CaBundleId *string `mandatory:"false" contributesTo:"query" name:"caBundleId"`
+
 	// Metadata about the request. This information will not be transmitted to the service, but
 	// represents information that the SDK will consume to drive retry behavior.
RequestMetadata common.RequestMetadata diff --git a/globallydistributeddatabase/get_private_endpoint_request_response.go b/globallydistributeddatabase/get_private_endpoint_request_response.go index 7424987947..2eeab1bb3e 100644 --- a/globallydistributeddatabase/get_private_endpoint_request_response.go +++ b/globallydistributeddatabase/get_private_endpoint_request_response.go @@ -21,6 +21,13 @@ type GetPrivateEndpointRequest struct { // Oracle Sharded Database PrivateEndpoint identifier PrivateEndpointId *string `mandatory:"true" contributesTo:"path" name:"privateEndpointId"` + // For conditional requests. In the GET call for a resource, set the + // `If-None-Match` header to the value of the ETag from a previous GET (or + // POST or PUT) response for that resource. The server will return with + // either a 304 Not Modified response if the resource has not changed, or a + // 200 OK response with the updated representation. + IfNoneMatch *string `mandatory:"false" contributesTo:"header" name:"if-none-match"` + // The client request ID for tracing. OpcRequestId *string `mandatory:"false" contributesTo:"header" name:"opc-request-id"` diff --git a/globallydistributeddatabase/get_sharded_database_request_response.go b/globallydistributeddatabase/get_sharded_database_request_response.go index 11ea0feec0..a2b0095450 100644 --- a/globallydistributeddatabase/get_sharded_database_request_response.go +++ b/globallydistributeddatabase/get_sharded_database_request_response.go @@ -25,6 +25,13 @@ type GetShardedDatabaseRequest struct { // An example is metadata=VM_CLUSTER_INFO,ADDITIONAL_RESOURCE_INFO. Metadata *string `mandatory:"false" contributesTo:"query" name:"metadata"` + // For conditional requests. In the GET call for a resource, set the + // `If-None-Match` header to the value of the ETag from a previous GET (or + // POST or PUT) response for that resource. The server will return with + // either a 304 Not Modified response if the resource has not changed, or a + // 200 OK response with the updated representation. + IfNoneMatch *string `mandatory:"false" contributesTo:"header" name:"if-none-match"` + // The client request ID for tracing. OpcRequestId *string `mandatory:"false" contributesTo:"header" name:"opc-request-id"` diff --git a/globallydistributeddatabase/globallydistributeddatabase_shardeddatabaseservice_client.go b/globallydistributeddatabase/globallydistributeddatabase_shardeddatabaseservice_client.go index 90178b479f..9b38510472 100644 --- a/globallydistributeddatabase/globallydistributeddatabase_shardeddatabaseservice_client.go +++ b/globallydistributeddatabase/globallydistributeddatabase_shardeddatabaseservice_client.go @@ -1507,6 +1507,69 @@ func (client ShardedDatabaseServiceClient) prevalidateShardedDatabase(ctx contex return response, err } +// ReinstateProxyInstance API to reinstate the proxy instances associated with the private endpoint. +// +// # See also +// +// Click https://docs.cloud.oracle.com/en-us/iaas/tools/go-sdk-examples/latest/globallydistributeddatabase/ReinstateProxyInstance.go.html to see an example of how to use ReinstateProxyInstance API. 
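// Illustrative usage (not part of this patch): a conditional GET using the
// new IfNoneMatch field. Passing the ETag saved from an earlier response lets
// the service reply 304 Not Modified when the resource has not changed.
// sdbID and savedETag are placeholders; ShardedDatabaseId is the request's
// existing path-parameter field, assumed unchanged by this patch, and client
// is assumed to be a ShardedDatabaseServiceClient.
resp, err := client.GetShardedDatabase(ctx, globallydistributeddatabase.GetShardedDatabaseRequest{
	ShardedDatabaseId: common.String(sdbID),
	IfNoneMatch:       common.String(savedETag),
})
// On a 304 the body is empty; inspect resp.RawResponse.StatusCode (or the
// returned error) rather than assuming a populated ShardedDatabase.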
+// A default retry strategy applies to this operation ReinstateProxyInstance() +func (client ShardedDatabaseServiceClient) ReinstateProxyInstance(ctx context.Context, request ReinstateProxyInstanceRequest) (response ReinstateProxyInstanceResponse, err error) { + var ociResponse common.OCIResponse + policy := common.DefaultRetryPolicy() + if client.RetryPolicy() != nil { + policy = *client.RetryPolicy() + } + if request.RetryPolicy() != nil { + policy = *request.RetryPolicy() + } + + if !(request.OpcRetryToken != nil && *request.OpcRetryToken != "") { + request.OpcRetryToken = common.String(common.RetryToken()) + } + + ociResponse, err = common.Retry(ctx, request, client.reinstateProxyInstance, policy) + if err != nil { + if ociResponse != nil { + if httpResponse := ociResponse.HTTPResponse(); httpResponse != nil { + opcRequestId := httpResponse.Header.Get("opc-request-id") + response = ReinstateProxyInstanceResponse{RawResponse: httpResponse, OpcRequestId: &opcRequestId} + } else { + response = ReinstateProxyInstanceResponse{} + } + } + return + } + if convertedResponse, ok := ociResponse.(ReinstateProxyInstanceResponse); ok { + response = convertedResponse + } else { + err = fmt.Errorf("failed to convert OCIResponse into ReinstateProxyInstanceResponse") + } + return +} + +// reinstateProxyInstance implements the OCIOperation interface (enables retrying operations) +func (client ShardedDatabaseServiceClient) reinstateProxyInstance(ctx context.Context, request common.OCIRequest, binaryReqBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (common.OCIResponse, error) { + + httpRequest, err := request.HTTPRequest(http.MethodPost, "/privateEndpoints/{privateEndpointId}/actions/reinstateProxyInstance", binaryReqBody, extraHeaders) + if err != nil { + return nil, err + } + + var response ReinstateProxyInstanceResponse + var httpResponse *http.Response + httpResponse, err = client.Call(ctx, &httpRequest) + defer common.CloseBodyIfValid(httpResponse) + response.RawResponse = httpResponse + if err != nil { + apiReferenceLink := "https://docs.oracle.com/iaas/api/#/en/globally-distributed-autonomous-database/20230301/PrivateEndpoint/ReinstateProxyInstance" + err = common.PostProcessServiceError(err, "ShardedDatabaseService", "ReinstateProxyInstance", apiReferenceLink) + return response, err + } + + err = common.UnmarshalResponse(httpResponse, &response) + return response, err +} + // StartShardedDatabase Start the shards, catalog and GSMs of Sharded Database. 
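// Illustrative usage (not part of this patch): calling the new
// ReinstateProxyInstance operation. A retry token is generated automatically
// when none is supplied (see the client code above). peID is a placeholder
// private endpoint OCID, and the OpcWorkRequestId field on the response is
// assumed to follow the SDK's usual pattern for asynchronous operations.
resp, err := client.ReinstateProxyInstance(ctx, globallydistributeddatabase.ReinstateProxyInstanceRequest{
	PrivateEndpointId: common.String(peID),
})
if err != nil {
	return err
}
// The reinstate runs asynchronously; poll the work request for completion.
fmt.Println("work request:", *resp.OpcWorkRequestId)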
// // # See also diff --git a/globallydistributeddatabase/operation_type.go b/globallydistributeddatabase/operation_type.go index 6fe22d4fc5..0f287ab65a 100644 --- a/globallydistributeddatabase/operation_type.go +++ b/globallydistributeddatabase/operation_type.go @@ -36,6 +36,18 @@ const ( OperationTypeUploadSignedCertAndGenerateWallet OperationTypeEnum = "UPLOAD_SIGNED_CERT_AND_GENERATE_WALLET" OperationTypeGenerateGsmCertSigningReq OperationTypeEnum = "GENERATE_GSM_CERT_SIGNING_REQ" OperationTypeConfigureSharding OperationTypeEnum = "CONFIGURE_SHARDING" + OperationTypeExecuteValidateNetworkTests OperationTypeEnum = "EXECUTE_VALIDATE_NETWORK_TESTS" + OperationTypeUpdateShard OperationTypeEnum = "UPDATE_SHARD" + OperationTypePrivateDeleteSdb OperationTypeEnum = "PRIVATE_DELETE_SDB" + OperationTypeProcessShardedDatabase OperationTypeEnum = "PROCESS_SHARDED_DATABASE" + OperationTypeCreateCatalogs OperationTypeEnum = "CREATE_CATALOGS" + OperationTypeCreateShards OperationTypeEnum = "CREATE_SHARDS" + OperationTypeCreateGsmNodes OperationTypeEnum = "CREATE_GSM_NODES" + OperationTypeAddGsmNodes OperationTypeEnum = "ADD_GSM_NODES" + OperationTypePrivateDeleteAtpdCatalog OperationTypeEnum = "PRIVATE_DELETE_ATPD_CATALOG" + OperationTypePrivateDeleteAtpdShard OperationTypeEnum = "PRIVATE_DELETE_ATPD_SHARD" + OperationTypePrivateDeleteGsm OperationTypeEnum = "PRIVATE_DELETE_GSM" + OperationTypeReinstateProxyInstance OperationTypeEnum = "REINSTATE_PROXY_INSTANCE" ) var mappingOperationTypeEnum = map[string]OperationTypeEnum{ @@ -57,6 +69,18 @@ var mappingOperationTypeEnum = map[string]OperationTypeEnum{ "UPLOAD_SIGNED_CERT_AND_GENERATE_WALLET": OperationTypeUploadSignedCertAndGenerateWallet, "GENERATE_GSM_CERT_SIGNING_REQ": OperationTypeGenerateGsmCertSigningReq, "CONFIGURE_SHARDING": OperationTypeConfigureSharding, + "EXECUTE_VALIDATE_NETWORK_TESTS": OperationTypeExecuteValidateNetworkTests, + "UPDATE_SHARD": OperationTypeUpdateShard, + "PRIVATE_DELETE_SDB": OperationTypePrivateDeleteSdb, + "PROCESS_SHARDED_DATABASE": OperationTypeProcessShardedDatabase, + "CREATE_CATALOGS": OperationTypeCreateCatalogs, + "CREATE_SHARDS": OperationTypeCreateShards, + "CREATE_GSM_NODES": OperationTypeCreateGsmNodes, + "ADD_GSM_NODES": OperationTypeAddGsmNodes, + "PRIVATE_DELETE_ATPD_CATALOG": OperationTypePrivateDeleteAtpdCatalog, + "PRIVATE_DELETE_ATPD_SHARD": OperationTypePrivateDeleteAtpdShard, + "PRIVATE_DELETE_GSM": OperationTypePrivateDeleteGsm, + "REINSTATE_PROXY_INSTANCE": OperationTypeReinstateProxyInstance, } var mappingOperationTypeEnumLowerCase = map[string]OperationTypeEnum{ @@ -78,6 +102,18 @@ var mappingOperationTypeEnumLowerCase = map[string]OperationTypeEnum{ "upload_signed_cert_and_generate_wallet": OperationTypeUploadSignedCertAndGenerateWallet, "generate_gsm_cert_signing_req": OperationTypeGenerateGsmCertSigningReq, "configure_sharding": OperationTypeConfigureSharding, + "execute_validate_network_tests": OperationTypeExecuteValidateNetworkTests, + "update_shard": OperationTypeUpdateShard, + "private_delete_sdb": OperationTypePrivateDeleteSdb, + "process_sharded_database": OperationTypeProcessShardedDatabase, + "create_catalogs": OperationTypeCreateCatalogs, + "create_shards": OperationTypeCreateShards, + "create_gsm_nodes": OperationTypeCreateGsmNodes, + "add_gsm_nodes": OperationTypeAddGsmNodes, + "private_delete_atpd_catalog": OperationTypePrivateDeleteAtpdCatalog, + "private_delete_atpd_shard": OperationTypePrivateDeleteAtpdShard, + "private_delete_gsm": OperationTypePrivateDeleteGsm, + 
"reinstate_proxy_instance": OperationTypeReinstateProxyInstance, } // GetOperationTypeEnumValues Enumerates the set of values for OperationTypeEnum @@ -110,6 +146,18 @@ func GetOperationTypeEnumStringValues() []string { "UPLOAD_SIGNED_CERT_AND_GENERATE_WALLET", "GENERATE_GSM_CERT_SIGNING_REQ", "CONFIGURE_SHARDING", + "EXECUTE_VALIDATE_NETWORK_TESTS", + "UPDATE_SHARD", + "PRIVATE_DELETE_SDB", + "PROCESS_SHARDED_DATABASE", + "CREATE_CATALOGS", + "CREATE_SHARDS", + "CREATE_GSM_NODES", + "ADD_GSM_NODES", + "PRIVATE_DELETE_ATPD_CATALOG", + "PRIVATE_DELETE_ATPD_SHARD", + "PRIVATE_DELETE_GSM", + "REINSTATE_PROXY_INSTANCE", } } diff --git a/globallydistributeddatabase/private_endpoint.go b/globallydistributeddatabase/private_endpoint.go index 4315b709b7..8b55f3b0f2 100644 --- a/globallydistributeddatabase/private_endpoint.go +++ b/globallydistributeddatabase/private_endpoint.go @@ -57,6 +57,9 @@ type PrivateEndpoint struct { // Detailed message for the lifecycle state. LifecycleStateDetails *string `mandatory:"false" json:"lifecycleStateDetails"` + // The identifier of the proxy compute instance. + ProxyComputeInstanceId *string `mandatory:"false" json:"proxyComputeInstanceId"` + // Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. // Example: `{"bar-key": "value"}` FreeformTags map[string]string `mandatory:"false" json:"freeformTags"` diff --git a/globallydistributeddatabase/reinstate_proxy_instance_request_response.go b/globallydistributeddatabase/reinstate_proxy_instance_request_response.go new file mode 100644 index 0000000000..1f76d44103 --- /dev/null +++ b/globallydistributeddatabase/reinstate_proxy_instance_request_response.go @@ -0,0 +1,104 @@ +// Copyright (c) 2016, 2018, 2024, Oracle and/or its affiliates. All rights reserved. +// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. +// Code generated. DO NOT EDIT. + +package globallydistributeddatabase + +import ( + "fmt" + "github.com/oracle/oci-go-sdk/v65/common" + "net/http" + "strings" +) + +// ReinstateProxyInstanceRequest wrapper for the ReinstateProxyInstance operation +// +// # See also +// +// Click https://docs.cloud.oracle.com/en-us/iaas/tools/go-sdk-examples/latest/globallydistributeddatabase/ReinstateProxyInstance.go.html to see an example of how to use ReinstateProxyInstanceRequest. +type ReinstateProxyInstanceRequest struct { + + // Oracle Sharded Database PrivateEndpoint identifier + PrivateEndpointId *string `mandatory:"true" contributesTo:"path" name:"privateEndpointId"` + + // The client request ID for tracing. + OpcRequestId *string `mandatory:"false" contributesTo:"header" name:"opc-request-id"` + + // For optimistic concurrency control. In the PUT or DELETE call + // for a resource, set the `if-match` parameter to the value of the + // etag from a previous GET or POST response for that resource. + // The resource will be updated or deleted only if the etag you + // provide matches the resource's current etag value. + IfMatch *string `mandatory:"false" contributesTo:"header" name:"if-match"` + + // A token that uniquely identifies a request so it can be retried in case of a timeout or + // server error without risk of executing that same action again. Retry tokens expire after 24 + // hours, but can be invalidated before then due to conflicting operations. 
For example, if a resource + // has been deleted and purged from the system, then a retry of the original creation request + // might be rejected. + OpcRetryToken *string `mandatory:"false" contributesTo:"header" name:"opc-retry-token"` + + // Metadata about the request. This information will not be transmitted to the service, but + // represents information that the SDK will consume to drive retry behavior. + RequestMetadata common.RequestMetadata +} + +func (request ReinstateProxyInstanceRequest) String() string { + return common.PointerString(request) +} + +// HTTPRequest implements the OCIRequest interface +func (request ReinstateProxyInstanceRequest) HTTPRequest(method, path string, binaryRequestBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (http.Request, error) { + + _, err := request.ValidateEnumValue() + if err != nil { + return http.Request{}, err + } + return common.MakeDefaultHTTPRequestWithTaggedStructAndExtraHeaders(method, path, request, extraHeaders) +} + +// BinaryRequestBody implements the OCIRequest interface +func (request ReinstateProxyInstanceRequest) BinaryRequestBody() (*common.OCIReadSeekCloser, bool) { + + return nil, false + +} + +// RetryPolicy implements the OCIRetryableRequest interface. This retrieves the specified retry policy. +func (request ReinstateProxyInstanceRequest) RetryPolicy() *common.RetryPolicy { + return request.RequestMetadata.RetryPolicy +} + +// ValidateEnumValue returns an error when providing an unsupported enum value +// This function is being called during constructing API request process +// Not recommended for calling this function directly +func (request ReinstateProxyInstanceRequest) ValidateEnumValue() (bool, error) { + errMessage := []string{} + if len(errMessage) > 0 { + return true, fmt.Errorf(strings.Join(errMessage, "\n")) + } + return false, nil +} + +// ReinstateProxyInstanceResponse wrapper for the ReinstateProxyInstance operation +type ReinstateProxyInstanceResponse struct { + + // The underlying http response + RawResponse *http.Response + + // Unique Oracle-assigned identifier for the asynchronous work. You can use this to query its status. + OpcWorkRequestId *string `presentIn:"header" name:"opc-work-request-id"` + + // Unique Oracle-assigned identifier for the request. If you need to contact + // Oracle about a particular request, please provide the request ID. + OpcRequestId *string `presentIn:"header" name:"opc-request-id"` +} + +func (response ReinstateProxyInstanceResponse) String() string { + return common.PointerString(response) +} + +// HTTPResponse implements the OCIResponse interface +func (response ReinstateProxyInstanceResponse) HTTPResponse() *http.Response { + return response.RawResponse +} diff --git a/monitoring/alarm.go b/monitoring/alarm.go index 7a0981b7d4..61ec35b3f5 100644 --- a/monitoring/alarm.go +++ b/monitoring/alarm.go @@ -55,7 +55,10 @@ type Alarm struct { // rule condition has been met. The query must specify a metric, statistic, interval, and trigger // rule (threshold or absence). Supported values for interval depend on the specified time range. More // interval values are supported for smaller time ranges. You can optionally - // specify dimensions and grouping functions. Supported grouping functions: `grouping()`, `groupBy()`. + // specify dimensions and grouping functions. + // Also, you can customize the + // absence detection period (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/create-edit-alarm-query-absence-detection-period.htm). 
+ // Supported grouping functions: `grouping()`, `groupBy()`. // For information about writing MQL expressions, see // Editing the MQL Expression for a Query (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/query-metric-mql.htm). // For details about MQL, see @@ -70,6 +73,12 @@ type Alarm struct { // ----- // CpuUtilization[1m]{availabilityDomain="cumS:PHX-AD-1"}.absent() // ----- + // Example of absence alarm with custom absence detection period of 20 hours: + // ----- + // + // CpuUtilization[1m]{availabilityDomain="cumS:PHX-AD-1"}.absent(20h) + // + // ----- Query *string `mandatory:"true" json:"query"` // The perceived type of response required when the alarm is in the "FIRING" state. @@ -128,7 +137,9 @@ type Alarm struct { // Example: `PT5M` PendingDuration *string `mandatory:"false" json:"pendingDuration"` - // The human-readable content of the delivered alarm notification. Oracle recommends providing guidance + // The human-readable content of the delivered alarm notification. + // Optionally include dynamic variables (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/update-alarm-dynamic-variables.htm). + // Oracle recommends providing guidance // to operators for resolving the alarm condition. Consider adding links to standard runbook // practices. Avoid entering confidential information. // Example: `High CPU usage alert. Follow runbook instructions for resolution.` @@ -169,13 +180,32 @@ type Alarm struct { Overrides []AlarmOverride `mandatory:"false" json:"overrides"` // Identifier of the alarm's base values for alarm evaluation, for use when the alarm contains overrides. - // A valid ruleName value starts with an alphabetic character and includes only alphanumeric characters, underscores and square brackets. - // Minimum number of characters: 3. Default value is `BASE`. For information about alarm overrides, see AlarmOverride. + // Default value is `BASE`. For information about alarm overrides, see AlarmOverride. RuleName *string `mandatory:"false" json:"ruleName"` // The version of the alarm notification to be delivered. Allowed value: `1.X` // The value must start with a number (up to four digits), followed by a period and an uppercase X. NotificationVersion *string `mandatory:"false" json:"notificationVersion"` + + // Customizable notification title (`title` alarm message parameter (https://docs.cloud.oracle.com/iaas/Content/Monitoring/alarm-message-format.htm)). + // Optionally include dynamic variables (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/update-alarm-dynamic-variables.htm). + // The notification title appears as the subject line in a formatted email message and as the title in a Slack message. + NotificationTitle *string `mandatory:"false" json:"notificationTitle"` + + // Customizable slack period to wait for metric ingestion before evaluating the alarm. + // Specify a string in ISO 8601 format (`PT10M` for ten minutes or `PT1H` + // for one hour). Minimum: PT3M. Maximum: PT2H. Default: PT3M. + // For more information about the slack period, see + // About the Internal Reset Period (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Concepts/monitoringoverview.htm#reset). + EvaluationSlackDuration *string `mandatory:"false" json:"evaluationSlackDuration"` + + // Customizable alarm summary (`alarmSummary` alarm message parameter (https://docs.cloud.oracle.com/iaas/Content/Monitoring/alarm-message-format.htm)). 
+ // Optionally include dynamic variables (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/update-alarm-dynamic-variables.htm). + // The alarm summary appears within the body of the alarm message and in responses to + // ListAlarmsStatus, + // GetAlarmHistory, and + // RetrieveDimensionStates. + AlarmSummary *string `mandatory:"false" json:"alarmSummary"` } func (m Alarm) String() string { diff --git a/monitoring/alarm_dimension_states_entry.go b/monitoring/alarm_dimension_states_entry.go index 47bb628d67..55cbaffeca 100644 --- a/monitoring/alarm_dimension_states_entry.go +++ b/monitoring/alarm_dimension_states_entry.go @@ -21,6 +21,14 @@ import ( // AlarmDimensionStatesEntry A timestamped alarm state entry for a metric stream. type AlarmDimensionStatesEntry struct { + // Customizable alarm summary (`alarmSummary` alarm message parameter (https://docs.cloud.oracle.com/iaas/Content/Monitoring/alarm-message-format.htm)). + // Optionally include dynamic variables (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/update-alarm-dynamic-variables.htm). + // The alarm summary appears within the body of the alarm message and in responses to + // ListAlarmsStatus, + // GetAlarmHistory, and + // RetrieveDimensionStates. + AlarmSummary *string `mandatory:"true" json:"alarmSummary"` + // Indicator of the metric stream associated with the alarm state entry. Includes one or more dimension key-value pairs. Dimensions map[string]string `mandatory:"true" json:"dimensions"` @@ -29,8 +37,7 @@ type AlarmDimensionStatesEntry struct { Status AlarmDimensionStatesEntryStatusEnum `mandatory:"true" json:"status"` // Identifier of the alarm's base values for alarm evaluation, for use when the alarm contains overrides. - // A valid ruleName value starts with an alphabetic character and includes only alphanumeric characters, underscores and square brackets. - // Minimum number of characters: 3. Default value is `BASE`. For information about alarm overrides, see AlarmOverride. + // Default value is `BASE`. For information about alarm overrides, see AlarmOverride. RuleName *string `mandatory:"true" json:"ruleName"` // Transition time associated with the alarm state entry. Format defined by RFC3339. diff --git a/monitoring/alarm_history_entry.go b/monitoring/alarm_history_entry.go index 95cd7179af..3dd2bfa198 100644 --- a/monitoring/alarm_history_entry.go +++ b/monitoring/alarm_history_entry.go @@ -22,6 +22,14 @@ import ( // If the entry corresponds to a state transition, such as OK to Firing, then the entry also includes a transition timestamp. type AlarmHistoryEntry struct { + // Customizable alarm summary (`alarmSummary` alarm message parameter (https://docs.cloud.oracle.com/iaas/Content/Monitoring/alarm-message-format.htm)). + // Optionally include dynamic variables (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/update-alarm-dynamic-variables.htm). + // The alarm summary appears within the body of the alarm message and in responses to + // ListAlarmsStatus, + // GetAlarmHistory, and + // RetrieveDimensionStates. + AlarmSummary *string `mandatory:"true" json:"alarmSummary"` + // Description for this alarm history entry.
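Since alarmSummary is now surfaced on status, history, and dimension-state entries alike, a caller can show the customized summary wherever it renders alarm state. A sketch using the standard monitoring client and its ListAlarmsStatus operation; the client constructor, the Items response field, and the compartment OCID are assumptions not shown in this diff:

client, err := monitoring.NewMonitoringClientWithConfigurationProvider(common.DefaultConfigProvider())
if err != nil {
    panic(err)
}
resp, err := client.ListAlarmsStatus(context.Background(), monitoring.ListAlarmsStatusRequest{
    CompartmentId: common.String("ocid1.compartment.oc1..example"), // hypothetical OCID
})
if err != nil {
    panic(err)
}
for _, s := range resp.Items {
    // RuleName identifies the base or override values that fired;
    // AlarmSummary is the new customizable summary text.
    fmt.Printf("%s [%s]: %s\n", *s.RuleName, s.Status, *s.AlarmSummary)
}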
// Example 1 - alarm state history entry: `The alarm state is FIRING` // Example 2 - alarm state transition history entry: `State transitioned from OK to Firing` diff --git a/monitoring/alarm_override.go b/monitoring/alarm_override.go index 7763681a0f..ce49d96f67 100644 --- a/monitoring/alarm_override.go +++ b/monitoring/alarm_override.go @@ -42,7 +42,9 @@ type AlarmOverride struct { // Example: `CRITICAL` Severity AlarmSeverityEnum `mandatory:"false" json:"severity,omitempty"` - // The human-readable content of the delivered alarm notification. Oracle recommends providing guidance + // The human-readable content of the delivered alarm notification. + // Optionally include dynamic variables (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/update-alarm-dynamic-variables.htm). + // Oracle recommends providing guidance // to operators for resolving the alarm condition. Consider adding links to standard runbook // practices. Avoid entering confidential information. // Example: `High CPU usage alert. Follow runbook instructions for resolution.` @@ -57,7 +59,10 @@ type AlarmOverride struct { // rule condition has been met. The query must specify a metric, statistic, interval, and trigger // rule (threshold or absence). Supported values for interval depend on the specified time range. More // interval values are supported for smaller time ranges. You can optionally - // specify dimensions and grouping functions. Supported grouping functions: `grouping()`, `groupBy()`. + // specify dimensions and grouping functions. + // Also, you can customize the + // absence detection period (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/create-edit-alarm-query-absence-detection-period.htm). + // Supported grouping functions: `grouping()`, `groupBy()`. // For information about writing MQL expressions, see // Editing the MQL Expression for a Query (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/query-metric-mql.htm). // For details about MQL, see @@ -72,6 +77,12 @@ type AlarmOverride struct { // ----- // CpuUtilization[1m]{availabilityDomain="cumS:PHX-AD-1"}.absent() // ----- + // Example of absence alarm with custom absence detection period of 20 hours: + // ----- + // + // CpuUtilization[1m]{availabilityDomain="cumS:PHX-AD-1"}.absent(20h) + // + // ----- Query *string `mandatory:"false" json:"query"` } diff --git a/monitoring/alarm_status_summary.go b/monitoring/alarm_status_summary.go index c102765b15..0a889cff79 100644 --- a/monitoring/alarm_status_summary.go +++ b/monitoring/alarm_status_summary.go @@ -42,8 +42,7 @@ type AlarmStatusSummary struct { Severity AlarmStatusSummarySeverityEnum `mandatory:"true" json:"severity"` // Identifier of the alarm's base values for alarm evaluation, for use when the alarm contains overrides. - // A valid ruleName value starts with an alphabetic character and includes only alphanumeric characters, underscores and square brackets. - // Minimum number of characters: 3. Default value is `BASE`. For information about alarm overrides, see AlarmOverride. + // Default value is `BASE`. For information about alarm overrides, see AlarmOverride. RuleName *string `mandatory:"true" json:"ruleName"` // Timestamp for the transition of the alarm state. For example, the time when the alarm transitioned from OK to Firing. 
@@ -51,6 +50,14 @@ type AlarmStatusSummary struct { // Example: `2023-02-01T01:02:29.600Z` TimestampTriggered *common.SDKTime `mandatory:"true" json:"timestampTriggered"` + // Customizable alarm summary (`alarmSummary` alarm message parameter (https://docs.cloud.oracle.com/iaas/Content/Monitoring/alarm-message-format.htm)). + // Optionally include dynamic variables (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/update-alarm-dynamic-variables.htm). + // The alarm summary appears within the body of the alarm message and in responses to + // ListAlarmsStatus, + // GetAlarmHistory, and + // RetrieveDimensionStates. + AlarmSummary *string `mandatory:"true" json:"alarmSummary"` + // The status of this alarm. // Status is collective, across all metric streams in the alarm. // To list alarm status for each metric stream, use RetrieveDimensionStates. diff --git a/monitoring/alarm_summary.go b/monitoring/alarm_summary.go index 95c3e69ae7..72cfb01626 100644 --- a/monitoring/alarm_summary.go +++ b/monitoring/alarm_summary.go @@ -113,13 +113,37 @@ type AlarmSummary struct { Overrides []AlarmOverride `mandatory:"false" json:"overrides"` // Identifier of the alarm's base values for alarm evaluation, for use when the alarm contains overrides. - // A valid ruleName value starts with an alphabetic character and includes only alphanumeric characters, underscores and square brackets. - // Minimum number of characters: 3. Default value is `BASE`. For information about alarm overrides, see AlarmOverride. + // Default value is `BASE`. For information about alarm overrides, see AlarmOverride. RuleName *string `mandatory:"false" json:"ruleName"` // The version of the alarm notification to be delivered. Allowed value: `1.X` // The value must start with a number (up to four digits), followed by a period and an uppercase X. NotificationVersion *string `mandatory:"false" json:"notificationVersion"` + + // Customizable notification title (`title` alarm message parameter (https://docs.cloud.oracle.com/iaas/Content/Monitoring/alarm-message-format.htm)). + // Optionally include dynamic variables (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/update-alarm-dynamic-variables.htm). + // The notification title appears as the subject line in a formatted email message and as the title in a Slack message. + NotificationTitle *string `mandatory:"false" json:"notificationTitle"` + + // Customizable slack period to wait for metric ingestion before evaluating the alarm. + // Specify a string in ISO 8601 format (`PT10M` for ten minutes or `PT1H` + // for one hour). Minimum: PT3M. Maximum: PT2H. Default: PT3M. + // For more information about the slack period, see + // About the Internal Reset Period (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Concepts/monitoringoverview.htm#reset). + EvaluationSlackDuration *string `mandatory:"false" json:"evaluationSlackDuration"` + + // Customizable alarm summary (`alarmSummary` alarm message parameter (https://docs.cloud.oracle.com/iaas/Content/Monitoring/alarm-message-format.htm)). + // Optionally include dynamic variables (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/update-alarm-dynamic-variables.htm). + // The alarm summary appears within the body of the alarm message and in responses to + // ListAlarmsStatus, + // GetAlarmHistory, and + // RetrieveDimensionStates. + AlarmSummary *string `mandatory:"false" json:"alarmSummary"` + + // Resource group that you want to match. A null value returns only metric data that has no resource groups.
The specified resource group must exist in the definition of the posted metric. Only one resource group can be applied per metric. + // A valid resourceGroup value starts with an alphabetical character and includes only alphanumeric characters, periods (.), underscores (_), hyphens (-), and dollar signs ($). + // Example: `frontend-fleet` + ResourceGroup *string `mandatory:"false" json:"resourceGroup"` } func (m AlarmSummary) String() string { diff --git a/monitoring/create_alarm_details.go b/monitoring/create_alarm_details.go index 546b47b7a5..b17f2a1fda 100644 --- a/monitoring/create_alarm_details.go +++ b/monitoring/create_alarm_details.go @@ -44,7 +44,10 @@ type CreateAlarmDetails struct { // rule condition has been met. The query must specify a metric, statistic, interval, and trigger // rule (threshold or absence). Supported values for interval depend on the specified time range. More // interval values are supported for smaller time ranges. You can optionally - // specify dimensions and grouping functions. Supported grouping functions: `grouping()`, `groupBy()`. + // specify dimensions and grouping functions. + // Also, you can customize the + // absence detection period (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/create-edit-alarm-query-absence-detection-period.htm). + // Supported grouping functions: `grouping()`, `groupBy()`. // For information about writing MQL expressions, see // Editing the MQL Expression for a Query (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/query-metric-mql.htm). // For details about MQL, see @@ -59,6 +62,12 @@ type CreateAlarmDetails struct { // ----- // CpuUtilization[1m]{availabilityDomain="cumS:PHX-AD-1"}.absent() // ----- + // Example of absence alarm with custom absence detection period of 20 hours: + // ----- + // + // CpuUtilization[1m]{availabilityDomain="cumS:PHX-AD-1"}.absent(20h) + // + // ----- Query *string `mandatory:"true" json:"query"` // The perceived type of response required when the alarm is in the "FIRING" state. @@ -106,7 +115,9 @@ type CreateAlarmDetails struct { // Example: `PT5M` PendingDuration *string `mandatory:"false" json:"pendingDuration"` - // The human-readable content of the delivered alarm notification. Oracle recommends providing guidance + // The human-readable content of the delivered alarm notification. + // Optionally include dynamic variables (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/update-alarm-dynamic-variables.htm). + // Oracle recommends providing guidance // to operators for resolving the alarm condition. Consider adding links to standard runbook // practices. Avoid entering confidential information. // Example: `High CPU usage alert. Follow runbook instructions for resolution.` @@ -148,13 +159,32 @@ type CreateAlarmDetails struct { Overrides []AlarmOverride `mandatory:"false" json:"overrides"` // Identifier of the alarm's base values for alarm evaluation, for use when the alarm contains overrides. - // A valid ruleName value starts with an alphabetic character and includes only alphanumeric characters, underscores and square brackets. - // Minimum number of characters: 3. Default value is `BASE`. For information about alarm overrides, see AlarmOverride. + // Default value is `BASE`. For information about alarm overrides, see AlarmOverride. RuleName *string `mandatory:"false" json:"ruleName"` // The version of the alarm notification to be delivered. 
Allowed value: `1.X` // The value must start with a number (up to four digits), followed by a period and an uppercase X. NotificationVersion *string `mandatory:"false" json:"notificationVersion"` + + // Customizable notification title (`title` alarm message parameter (https://docs.cloud.oracle.com/iaas/Content/Monitoring/alarm-message-format.htm)). + // Optionally include dynamic variables (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/update-alarm-dynamic-variables.htm). + // The notification title appears as the subject line in a formatted email message and as the title in a Slack message. + NotificationTitle *string `mandatory:"false" json:"notificationTitle"` + + // Customizable slack period to wait for metric ingestion before evaluating the alarm. + // Specify a string in ISO 8601 format (`PT10M` for ten minutes or `PT1H` + // for one hour). Minimum: PT3M. Maximum: PT2H. Default: PT3M. + // For more information about the slack period, see + // About the Internal Reset Period (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Concepts/monitoringoverview.htm#reset). + EvaluationSlackDuration *string `mandatory:"false" json:"evaluationSlackDuration"` + + // Customizable alarm summary (`alarmSummary` alarm message parameter (https://docs.cloud.oracle.com/iaas/Content/Monitoring/alarm-message-format.htm)). + // Optionally include dynamic variables (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/update-alarm-dynamic-variables.htm). + // The alarm summary appears within the body of the alarm message and in responses to + // ListAlarmsStatus, + // GetAlarmHistory, and + // RetrieveDimensionStates. + AlarmSummary *string `mandatory:"false" json:"alarmSummary"` } func (m CreateAlarmDetails) String() string { diff --git a/monitoring/update_alarm_details.go b/monitoring/update_alarm_details.go index c8e5d1fd94..f390c3cfc3 100644 --- a/monitoring/update_alarm_details.go +++ b/monitoring/update_alarm_details.go @@ -58,7 +58,10 @@ type UpdateAlarmDetails struct { // rule condition has been met. The query must specify a metric, statistic, interval, and trigger // rule (threshold or absence). Supported values for interval depend on the specified time range. More // interval values are supported for smaller time ranges. You can optionally - // specify dimensions and grouping functions. Supported grouping functions: `grouping()`, `groupBy()`. + // specify dimensions and grouping functions. + // Also, you can customize the + // absence detection period (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/create-edit-alarm-query-absence-detection-period.htm). + // Supported grouping functions: `grouping()`, `groupBy()`. // For information about writing MQL expressions, see // Editing the MQL Expression for a Query (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/query-metric-mql.htm). // For details about MQL, see @@ -73,6 +76,12 @@ type UpdateAlarmDetails struct { // ----- // CpuUtilization[1m]{availabilityDomain="cumS:PHX-AD-1"}.absent() // ----- + // Example of absence alarm with custom absence detection period of 20 hours: + // ----- + // + // CpuUtilization[1m]{availabilityDomain="cumS:PHX-AD-1"}.absent(20h) + // + // ----- Query *string `mandatory:"false" json:"query"` // The time between calculated aggregation windows for the alarm.
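Putting the new alarm fields together: a sketch of a CreateAlarmDetails payload pairing a custom 20-hour absence detection period with the customizable title, summary, and evaluation slack. The required fields other than Query come from the existing struct rather than this diff, the OCIDs are placeholders, and the surrounding CreateAlarm call and client setup are assumed from the standard monitoring client:

details := monitoring.CreateAlarmDetails{
    DisplayName:         common.String("host-heartbeat-absent"),
    CompartmentId:       common.String("ocid1.compartment.oc1..example"), // hypothetical
    MetricCompartmentId: common.String("ocid1.compartment.oc1..example"), // hypothetical
    Namespace:           common.String("oci_computeagent"),
    Query:               common.String(`CpuUtilization[1m]{availabilityDomain="cumS:PHX-AD-1"}.absent(20h)`),
    Severity:            monitoring.AlarmSeverityCritical,
    Destinations:        []string{"ocid1.onstopic.oc1..example"}, // hypothetical topic OCID
    IsEnabled:           common.Bool(true),

    // New in this change:
    NotificationTitle:       common.String("Heartbeat absent for {{dimensions.resourceId}}"), // dynamic variable
    EvaluationSlackDuration: common.String("PT10M"),                                          // ISO 8601, PT3M to PT2H
    AlarmSummary:            common.String("No CpuUtilization datapoints for 20 hours."),
}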
Supported value: `1m` @@ -95,7 +104,9 @@ type UpdateAlarmDetails struct { // Example: `CRITICAL` Severity AlarmSeverityEnum `mandatory:"false" json:"severity,omitempty"` - // The human-readable content of the delivered alarm notification. Oracle recommends providing guidance + // The human-readable content of the delivered alarm notification. + // Optionally include dynamic variables (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/update-alarm-dynamic-variables.htm). + // Oracle recommends providing guidance // to operators for resolving the alarm condition. Consider adding links to standard runbook // practices. Avoid entering confidential information. // Example: `High CPU usage alert. Follow runbook instructions for resolution.` @@ -147,13 +158,32 @@ type UpdateAlarmDetails struct { Overrides []AlarmOverride `mandatory:"false" json:"overrides"` // Identifier of the alarm's base values for alarm evaluation, for use when the alarm contains overrides. - // A valid ruleName value starts with an alphabetic character and includes only alphanumeric characters, underscores and square brackets. - // Minimum number of characters: 3. Default value is `BASE`. For information about alarm overrides, see AlarmOverride. + // Default value is `BASE`. For information about alarm overrides, see AlarmOverride. RuleName *string `mandatory:"false" json:"ruleName"` // The version of the alarm notification to be delivered. Allowed value: `1.X` // The value must start with a number (up to four digits), followed by a period and an uppercase X. NotificationVersion *string `mandatory:"false" json:"notificationVersion"` + + // Customizable notification title (`title` alarm message parameter (https://docs.cloud.oracle.com/iaas/Content/Monitoring/alarm-message-format.htm)). + // Optionally include dynamic variables (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/update-alarm-dynamic-variables.htm). + // The notification title appears as the subject line in a formatted email message and as the title in a Slack message. + NotificationTitle *string `mandatory:"false" json:"notificationTitle"` + + // Customizable slack period to wait for metric ingestion before evaluating the alarm. + // Specify a string in ISO 8601 format (`PT10M` for ten minutes or `PT1H` + // for one hour). Minimum: PT3M. Maximum: PT2H. Default: PT3M. + // For more information about the slack period, see + // About the Internal Reset Period (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Concepts/monitoringoverview.htm#reset). + EvaluationSlackDuration *string `mandatory:"false" json:"evaluationSlackDuration"` + + // Customizable alarm summary (`alarmSummary` alarm message parameter (https://docs.cloud.oracle.com/iaas/Content/Monitoring/alarm-message-format.htm)). + // Optionally include dynamic variables (https://docs.cloud.oracle.com/iaas/Content/Monitoring/Tasks/update-alarm-dynamic-variables.htm). + // The alarm summary appears within the body of the alarm message and in responses to + // ListAlarmsStatus, + // GetAlarmHistory, and + // RetrieveDimensionStates. + AlarmSummary *string `mandatory:"false" json:"alarmSummary"` } func (m UpdateAlarmDetails) String() string { diff --git a/queue/get_message.go b/queue/get_message.go index 966e8118af..71f57dbed4 100644 --- a/queue/get_message.go +++ b/queue/get_message.go @@ -39,6 +39,10 @@ type GetMessage struct { // Example: `2018-04-20T00:00:07.405Z` ExpireAfter *common.SDKTime `mandatory:"true" json:"expireAfter"` + // The time when the message was created in the queue.
+ // Example: `2018-04-20T00:00:07.405Z` + CreatedAt *common.SDKTime `mandatory:"true" json:"createdAt"` + Metadata *MessageMetadata `mandatory:"false" json:"metadata"` } diff --git a/queue/queue.go b/queue/queue.go index 0ea9d0bcf7..a009f45be2 100644 --- a/queue/queue.go +++ b/queue/queue.go @@ -105,6 +105,7 @@ const ( QueueLifecycleStateDeleting QueueLifecycleStateEnum = "DELETING" QueueLifecycleStateDeleted QueueLifecycleStateEnum = "DELETED" QueueLifecycleStateFailed QueueLifecycleStateEnum = "FAILED" + QueueLifecycleStateInactive QueueLifecycleStateEnum = "INACTIVE" ) var mappingQueueLifecycleStateEnum = map[string]QueueLifecycleStateEnum{ @@ -114,6 +115,7 @@ var mappingQueueLifecycleStateEnum = map[string]QueueLifecycleStateEnum{ "DELETING": QueueLifecycleStateDeleting, "DELETED": QueueLifecycleStateDeleted, "FAILED": QueueLifecycleStateFailed, + "INACTIVE": QueueLifecycleStateInactive, } var mappingQueueLifecycleStateEnumLowerCase = map[string]QueueLifecycleStateEnum{ @@ -123,6 +125,7 @@ var mappingQueueLifecycleStateEnumLowerCase = map[string]QueueLifecycleStateEnum "deleting": QueueLifecycleStateDeleting, "deleted": QueueLifecycleStateDeleted, "failed": QueueLifecycleStateFailed, + "inactive": QueueLifecycleStateInactive, } // GetQueueLifecycleStateEnumValues Enumerates the set of values for QueueLifecycleStateEnum @@ -143,6 +146,7 @@ func GetQueueLifecycleStateEnumStringValues() []string { "DELETING", "DELETED", "FAILED", + "INACTIVE", } } diff --git a/queue/queue_client.go b/queue/queue_client.go index 40ac6202d9..cb02fa2edb 100644 --- a/queue/queue_client.go +++ b/queue/queue_client.go @@ -151,7 +151,7 @@ func (client QueueClient) deleteMessage(ctx context.Context, request common.OCIR return response, err } -// DeleteMessages Deletes multiple messages from the queue. +// DeleteMessages Deletes multiple messages from the queue or the consumer group. Only messages from the same queue/consumer group can be deleted at once. // You must use the messages endpoint (https://docs.cloud.oracle.com/iaas/Content/queue/messages.htm#messages__messages-endpoint) to delete messages. // The messages endpoint may be different for different queues. Use GetQueue to find the queue's `messagesEndpoint`. // @@ -513,7 +513,7 @@ func (client QueueClient) updateMessage(ctx context.Context, request common.OCIR return response, err } -// UpdateMessages Updates multiple messages in the queue. +// UpdateMessages Updates multiple messages in the queue or the consumer group. Only messages from the same queue/consumer group can be updated at once. // You must use the messages endpoint (https://docs.cloud.oracle.com/iaas/Content/queue/messages.htm#messages__messages-endpoint) to update messages. // The messages endpoint may be different for different queues. Use GetQueue to find the queue's `messagesEndpoint`. // diff --git a/queue/queue_stats.go b/queue/queue_stats.go index 8719e3254f..124a7a9dfc 100644 --- a/queue/queue_stats.go +++ b/queue/queue_stats.go @@ -21,7 +21,7 @@ type QueueStats struct { Dlq *Stats `mandatory:"true" json:"dlq"` - // If channelId is presented in GetStats call, the channel id will be returned in the GetStats response. + // If channelId is present in the GetStats call, the channel ID will be returned in the GetStats response.
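With INACTIVE added to the lifecycle states, callers that gate message operations on queue state should account for the new value alongside the existing ones. A minimal sketch; the queue.Queue value is assumed to come from a GetQueue call, which this diff does not show, and the non-terminal states listed in the default branch follow the generated enum:

// queueAcceptsTraffic reports whether message operations should be attempted.
func queueAcceptsTraffic(q queue.Queue) bool {
    switch q.LifecycleState {
    case queue.QueueLifecycleStateActive:
        return true
    case queue.QueueLifecycleStateInactive:
        // New state in this change: the queue exists but is not serving traffic.
        return false
    default:
        // Remaining states (creating, deleting, deleted, failed, ...) are unavailable.
        return false
    }
}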
ChannelId *string `mandatory:"false" json:"channelId"` } diff --git a/redis/redis_rediscluster_client.go b/redis/redis_rediscluster_client.go index 28184a46b4..747821af12 100644 --- a/redis/redis_rediscluster_client.go +++ b/redis/redis_rediscluster_client.go @@ -140,7 +140,7 @@ func (client RedisClusterClient) cancelWorkRequest(ctx context.Context, request defer common.CloseBodyIfValid(httpResponse) response.RawResponse = httpResponse if err != nil { - apiReferenceLink := "https://docs.oracle.com/iaas/api/#/en/redis/20220315/WorkRequest/CancelWorkRequest" + apiReferenceLink := "" err = common.PostProcessServiceError(err, "RedisCluster", "CancelWorkRequest", apiReferenceLink) return response, err } @@ -203,7 +203,7 @@ func (client RedisClusterClient) changeRedisClusterCompartment(ctx context.Conte defer common.CloseBodyIfValid(httpResponse) response.RawResponse = httpResponse if err != nil { - apiReferenceLink := "https://docs.oracle.com/iaas/api/#/en/redis/20220315/RedisCluster/ChangeRedisClusterCompartment" + apiReferenceLink := "" err = common.PostProcessServiceError(err, "RedisCluster", "ChangeRedisClusterCompartment", apiReferenceLink) return response, err } @@ -266,7 +266,7 @@ func (client RedisClusterClient) createRedisCluster(ctx context.Context, request defer common.CloseBodyIfValid(httpResponse) response.RawResponse = httpResponse if err != nil { - apiReferenceLink := "https://docs.oracle.com/iaas/api/#/en/redis/20220315/RedisCluster/CreateRedisCluster" + apiReferenceLink := "" err = common.PostProcessServiceError(err, "RedisCluster", "CreateRedisCluster", apiReferenceLink) return response, err } @@ -324,7 +324,7 @@ func (client RedisClusterClient) deleteRedisCluster(ctx context.Context, request defer common.CloseBodyIfValid(httpResponse) response.RawResponse = httpResponse if err != nil { - apiReferenceLink := "https://docs.oracle.com/iaas/api/#/en/redis/20220315/RedisCluster/DeleteRedisCluster" + apiReferenceLink := "" err = common.PostProcessServiceError(err, "RedisCluster", "DeleteRedisCluster", apiReferenceLink) return response, err } @@ -382,7 +382,7 @@ func (client RedisClusterClient) getRedisCluster(ctx context.Context, request co defer common.CloseBodyIfValid(httpResponse) response.RawResponse = httpResponse if err != nil { - apiReferenceLink := "https://docs.oracle.com/iaas/api/#/en/redis/20220315/RedisCluster/GetRedisCluster" + apiReferenceLink := "" err = common.PostProcessServiceError(err, "RedisCluster", "GetRedisCluster", apiReferenceLink) return response, err } @@ -440,7 +440,7 @@ func (client RedisClusterClient) getWorkRequest(ctx context.Context, request com defer common.CloseBodyIfValid(httpResponse) response.RawResponse = httpResponse if err != nil { - apiReferenceLink := "https://docs.oracle.com/iaas/api/#/en/redis/20220315/WorkRequest/GetWorkRequest" + apiReferenceLink := "" err = common.PostProcessServiceError(err, "RedisCluster", "GetWorkRequest", apiReferenceLink) return response, err } @@ -498,7 +498,7 @@ func (client RedisClusterClient) listRedisClusters(ctx context.Context, request defer common.CloseBodyIfValid(httpResponse) response.RawResponse = httpResponse if err != nil { - apiReferenceLink := "https://docs.oracle.com/iaas/api/#/en/redis/20220315/RedisClusterSummary/ListRedisClusters" + apiReferenceLink := "" err = common.PostProcessServiceError(err, "RedisCluster", "ListRedisClusters", apiReferenceLink) return response, err } @@ -556,7 +556,7 @@ func (client RedisClusterClient) listWorkRequestErrors(ctx context.Context, requ defer 
common.CloseBodyIfValid(httpResponse) response.RawResponse = httpResponse if err != nil { - apiReferenceLink := "https://docs.oracle.com/iaas/api/#/en/redis/20220315/WorkRequestError/ListWorkRequestErrors" + apiReferenceLink := "" err = common.PostProcessServiceError(err, "RedisCluster", "ListWorkRequestErrors", apiReferenceLink) return response, err } @@ -614,7 +614,7 @@ func (client RedisClusterClient) listWorkRequestLogs(ctx context.Context, reques defer common.CloseBodyIfValid(httpResponse) response.RawResponse = httpResponse if err != nil { - apiReferenceLink := "https://docs.oracle.com/iaas/api/#/en/redis/20220315/WorkRequestLogEntry/ListWorkRequestLogs" + apiReferenceLink := "" err = common.PostProcessServiceError(err, "RedisCluster", "ListWorkRequestLogs", apiReferenceLink) return response, err } @@ -672,7 +672,7 @@ func (client RedisClusterClient) listWorkRequests(ctx context.Context, request c defer common.CloseBodyIfValid(httpResponse) response.RawResponse = httpResponse if err != nil { - apiReferenceLink := "https://docs.oracle.com/iaas/api/#/en/redis/20220315/WorkRequest/ListWorkRequests" + apiReferenceLink := "" err = common.PostProcessServiceError(err, "RedisCluster", "ListWorkRequests", apiReferenceLink) return response, err } @@ -730,7 +730,7 @@ func (client RedisClusterClient) updateRedisCluster(ctx context.Context, request defer common.CloseBodyIfValid(httpResponse) response.RawResponse = httpResponse if err != nil { - apiReferenceLink := "https://docs.oracle.com/iaas/api/#/en/redis/20220315/RedisCluster/UpdateRedisCluster" + apiReferenceLink := "" err = common.PostProcessServiceError(err, "RedisCluster", "UpdateRedisCluster", apiReferenceLink) return response, err }
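Because the apiReferenceLink values above are now empty, error messages from these Redis operations no longer append a documentation link; the structured fields on the service error are unaffected. A sketch of pulling those fields out of a failed call, where err is assumed to come from any RedisClusterClient operation:

if err != nil {
    if failure, ok := common.IsServiceError(err); ok {
        // Status code, service error code, and opc-request-id survive this change;
        // only the trailing documentation link is gone from the message.
        fmt.Printf("status=%d code=%s opc-request-id=%s\nmessage: %s\n",
            failure.GetHTTPStatusCode(), failure.GetCode(), failure.GetOpcRequestID(), failure.GetMessage())
    }
}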