├── .git-blame-ignore-revs ├── .github └── workflows │ ├── black.yaml │ ├── parser-config.yaml │ ├── test-changes.yaml │ └── test-process-pr.yaml ├── .gitignore ├── DMWM ├── AggregatePylint.py ├── AnalyzePy27.py ├── AnalyzePyFuture.py ├── AnalyzePylint.py ├── CompareTests.py ├── IdentifyPythonFiles.py ├── IssueMessage.py ├── TestWatchdog.py ├── activate-start-agent.sh ├── deploy-wmagent.sh ├── deploy-wmagentpy3.sh ├── install-crabdev.sh ├── install-wmcore.sh ├── install-wmcorepy3.sh ├── kill-databases.sh ├── latest-dmwm-versions.sh ├── setup-cmsbot.sh ├── setup-rucio.sh ├── setup-secrets.sh ├── temporary-patches.sh ├── test-wmcore.sh ├── test-wmcorepy3.sh ├── update-deployment.sh ├── update-gh-pr-tags.sh ├── update-gh-repo.sh ├── update-wmcore.sh └── xunitparser.py ├── FAQ.md ├── IBPageHead.txt ├── IBPageTail.txt ├── README.md ├── RelValArgs.py ├── _py2with3compatibility.py ├── add-externals-gh-labels.py ├── archive ├── checkLogFile.py ├── cmssw-change2branch.sh ├── create_json.py ├── das-utils │ └── CMSWeb.py ├── deprecate-releases.py ├── deprecate_releases.py ├── docker │ └── check-repositories.py ├── es-cleanup-indexes.py ├── es_hypernews.py ├── es_hypernews_log.py ├── find-changed-workflows.py ├── generate-json-performance-charts.py ├── generate-performance-summary.py ├── get-builds-stats.py ├── get-git-tags.py ├── get-local-build-stats.py ├── getWorkflowStatsFromES.py ├── get_Github_API_rate.py ├── get_repo_authors.py ├── gh_create_branches.py ├── gh_update_pr_milestone.py ├── github_get_file_changes.py ├── gitmergesgraph.py ├── ib-pr-workflow-changed.py ├── ib-upload-logs.py ├── merge-pull-request.py ├── parse_workflow_time.py ├── port-pull-request.py ├── report-cmsdist-pull-request-results.py ├── report_size.py ├── scram-package-monitor-sender.py ├── scram-package-monitor-timestamps.py └── updateVOTags.py ├── auto-update-git-branches ├── autoInstallLogs.sh ├── backport-pr.py ├── build-cmssw-ib-with-patch ├── build-fwlite-ib ├── build-node-stats ├── 
build-release ├── build-release-watchers.yaml ├── buildLogAnalyzer.py ├── build_hosts.txt ├── cache-pull-request.py ├── categories.py ├── categories_map.py ├── category-watchers.yaml ├── check-future-commits-prs.py ├── checkDirSizes.py ├── checkLibDeps.py ├── checkPyConfigs.py ├── checkTestLog.py ├── chk-invalid-headers.py ├── cleanup-auto-build ├── cleanup-cmssdt ├── cleanup-tags ├── cms-docker-gh-issues.py ├── cms-filename-checks.py ├── cms-jenkins-api.py ├── cms_static.py ├── cmsdist-comp-pr-process.py ├── cmsdist-tool-update.sh ├── cmsdist_merge_permissions.py ├── cmspkg-clone ├── cmsrep.sh ├── cmssdt.sh ├── cmssw-pr-test-config ├── cmssw-subprojcts ├── Stitched.filter ├── git_filter.cfg └── setup.sh ├── cmssw-test-lto.sh ├── cmssw_known_errors.py ├── cmssw_l2 ├── check.sh ├── commit.txt ├── l2.json └── update.py ├── cmsutils.py ├── comment-gh-pr.py ├── common ├── README.md ├── get_config_map_line.sh ├── get_cpu_number.sh └── github_reports.sh ├── comp └── create_wm_archs.py ├── compare-material-budget ├── compareTriggerResults.py ├── compareTriggerResultsSummary.py ├── comparisons ├── analyzeFWComparison.py ├── compare-maxmem-summary.py ├── compare-maxmem.py ├── compareProducts.awk ├── compareProducts.sh ├── compareValHists.C ├── makeDiff.sh ├── resources-diff.py ├── validate.C └── validateJR.py ├── condor ├── autoload.sh ├── connect-job.sh ├── connect.sh ├── connect.sub ├── jenkins │ ├── connect.sh │ └── node.xml ├── shutdown.sh ├── submit.sh ├── submit.sub ├── tests │ ├── node-check.py │ └── node-check.sh └── webhook ├── config-disabled.map ├── config.map ├── crab ├── CMSSW_5_3_X │ └── pset.py ├── CMSSW_8_0_X ├── CMSSW_9_4_X ├── FrameworkJobReport.xml ├── crab-test │ ├── pset.py │ ├── run.sh │ ├── setup.sh │ └── task.py ├── fake_pset.py ├── ib-monitor-crab.sh ├── ib-run-crab.sh ├── ib-run-pr-crab.sh ├── multiarch │ └── setup.sh ├── pset.py ├── python26 │ └── functools.py ├── run.sh ├── scram-build │ ├── pset.py │ ├── run.sh │ ├── setup.sh │ └── task.py ├── 
script │ ├── pset.py │ ├── run.sh │ ├── setup.sh │ └── task.py ├── short-matrix │ ├── pset.py │ ├── run.sh │ ├── setup.sh │ └── task.py ├── short-matrix1 │ ├── pset.py │ ├── run.sh │ ├── setup.sh │ └── task.py └── task.py ├── create-gh-issue.py ├── create-gh-pr.py ├── create-gh-release.py ├── create-github-hooks.py ├── create-new-data-pr.py ├── cuda └── install-cuda.py ├── cvmfs ├── cms-ci.cern.ch │ ├── cvmfsdirtab.sh │ └── cvmfsdirtab.txt ├── cms-ib-test.cern.ch ├── cms-ib.cern.ch │ ├── cvmfsdirtab.sh │ └── cvmfsdirtab.txt ├── cms.cern.ch │ └── .cvmfsdirtab └── cvmfsdirtab.sh ├── cvmfsInstall.sh ├── cvmfs_deployment ├── abort_transaction.sh ├── bootstrap_dir_for_arch.sh ├── cvmfs-cms-install-package.sh ├── git-reference.sh ├── has_lease.py ├── ib-install-siteconf.sh ├── install-qemu.sh ├── publish_transaction.sh ├── reseed_arch.sh ├── root-WebGui-fix.sh ├── start_transaction.sh ├── trasnaction.sh ├── utils.sh └── write_bookeeping_record.sh ├── das-utils ├── cleanup-unused-ibeos.py ├── copy-ib-lfn-to-eos.sh ├── das_cache.py ├── das_client ├── dasgoclient ├── ib-datasets.py ├── ib-eos-files.py ├── ibeos-lfn-sort ├── order-das-files.py ├── update-das-queries └── use-ibeos-sort ├── docker_launcher.sh ├── dockerrun.sh ├── es-kibana-dashboards ├── CMSREP.json ├── Hypernews.json ├── IB-Unit-Tests.json ├── IBs.json ├── IBsDataSets.json ├── IWYU.json ├── Jenkins.json ├── JobReports.json ├── Main.json └── RelVals.json ├── es-reindex.py ├── es ├── README.md ├── es_close_index.py ├── es_close_indexes.py ├── es_delete_indexes.py ├── es_get_templates.py ├── es_git_repo_size.py ├── es_open_index.py ├── es_open_indexes.py ├── es_send_templates.py ├── es_show_indexes.py └── templates │ ├── cmssdt-apache-logs.json │ ├── cmssdt-iwyu-logs.json │ ├── cmssdt-jenkins-builds.json │ ├── cmssdt-runthematrix-data.json │ ├── cmssdt-timestamp.json │ └── cmssdt-unittests.json ├── es_cmsdoxygen_apache.py ├── es_cmsrep_apache.py ├── es_cmssdt_apache.py ├── es_doxygen_apache.py ├── 
es_externals_stats.py ├── es_ib_build_stats.py ├── es_ibs_log.py ├── es_iwyu_logs.py ├── es_reindex_indexes_with_pattern.py ├── es_relval_log.py ├── es_relval_stats.py ├── es_utils.py ├── fix-backport-labels.py ├── fix-igprof-sql.py ├── forward-pull-requests.py ├── forward_ports_map.py ├── gen-relval-jobs.py ├── generate-categories-json.py ├── generate-class-version.sh ├── get-pr-branch.py ├── get-pr-changed-files ├── get-production-arch ├── get-relval-failures.py ├── gh-teams.py ├── github-rate-limits.py ├── github_backup.py ├── github_hooks_config.py ├── github_modified_files.py ├── github_scripts └── simultaneous_files_modifications_by_PRs.py ├── github_utils.py ├── githublabels.py ├── gpu_flavors.txt ├── groups.yaml ├── ib-create-tag.py ├── ib-profiling-data.py ├── ib-profiling-data.sh ├── ib-rotate-weeks ├── ib-weeks ├── ib2buildlog.py ├── ignore-releases-for-tests ├── ignore-webhooks ├── import-stitched.sh ├── init.sh ├── is-weekly-ib-available.sh ├── jenkins-artifacts ├── jenkins-jobs ├── check-unused-cmsdist-packages ├── cmd-with-retry.sh ├── cmssw-afs-eos-comparison ├── es-cmssw-afs-eos.py ├── git │ ├── git-mirror-repository.py │ └── git-notify-ib-updates.py ├── process-relval-logs ├── run-code-format.sh └── test-bootstrap.sh ├── jenkins ├── add-cpu-labels.groovy ├── auto-nodes.txt ├── auto-nodes │ └── grid-create-gpu-node ├── blacklist-lxplus.txt ├── connect.sh ├── delete-build.groovy ├── find-jenkins-job.groovy ├── jenkins-check-nodes.sh ├── jenkins-cli-test ├── jenkins-kill-placeholder-job.py ├── jenkins-project-report-to-markdown.py ├── jenkins-projects-full-report-json.groovy ├── jenkins-projects-report.groovy ├── kill-build-release.groovy ├── kill-build.groovy ├── kill-jenkins-job.groovy ├── milkv-connect.sh ├── nodes-sanity-check.sh ├── nodes-status-summary.sh ├── parser │ ├── actions.py │ ├── helpers.py │ ├── jenkins-parser-job.py │ ├── jenkins-parser-monitor-job.py │ ├── jenkins-retry-job.py │ ├── jobs-config.json │ └── paser-config-unittest.py 
├── report-jenkins-jobs.py ├── restart-slaves.groovy ├── retry-build.groovy ├── set-slave-labels.groovy ├── start-slave.sh ├── system-info.sh └── test-jenkins-webhook ├── jenkins_callback.py ├── jenkins_monitor_queue.py ├── jobs ├── create-relval-jobs.py ├── jobscheduler.py ├── run.sh ├── stats.py └── workflow_final.py ├── keytab.sh ├── kill-build-release ├── kill-stuck-pr-test.sh ├── lizard-processing ├── src │ └── lizard_to_html.py ├── test-data │ └── lizard-test-output.txt └── test │ └── test_lizard_to_html.py ├── logRootQA.py ├── logUpdater.py ├── logreaderUtils.py ├── logwatch.py ├── lumi ├── connect.sh ├── get_slot.sh ├── jenkins_java.sh └── update_image.sh ├── lxr ├── checkout-version.py ├── delete-index.sh ├── generate-index.sh └── version_utils.sh ├── mark_commit_status.py ├── mass-update-cache.py ├── material_budget_ref.py ├── merge-git-branch ├── milestones.py ├── mirror-root ├── modify_comment.py ├── monitor_command.sh ├── monitor_workflow.py ├── new-release-cycle.py ├── openstack ├── hg │ ├── env.sh │ ├── os.sh │ └── vocmssdt │ │ ├── cmssdt.sh │ │ ├── doxygen.sh │ │ ├── dxr.sh │ │ ├── hypernews.sh │ │ ├── jenkins │ │ ├── cms.sh │ │ ├── dev.sh │ │ ├── dmwm.sh │ │ ├── prod.sh │ │ └── user.sh │ │ ├── lxr.sh │ │ ├── puppet_test.sh │ │ └── sdt │ │ ├── builder.sh │ │ ├── builder │ │ └── arm.sh │ │ ├── cmsdocker.sh │ │ ├── cmsrep.sh │ │ ├── cmsrep9.sh │ │ ├── cmssdt.sh │ │ ├── cmsuser.sh │ │ ├── dev7.sh │ │ ├── dev8.sh │ │ ├── dmwm.sh │ │ ├── dmwm9.sh │ │ ├── docker.sh │ │ ├── docker8.sh │ │ └── docker9.sh └── scripts │ ├── aiadm-cmd.sh │ ├── cmds │ ├── ai_bs.sh │ ├── ai_kill.sh │ ├── setup-env.sh │ └── showhost.sh │ └── process.sh ├── package2category.py ├── parse_iwyu_logs.py ├── parse_jenkins_builds.json ├── parse_jenkins_builds.py ├── pr-checks └── check-pr-files.py ├── pr-schedule-tests ├── pr_testing ├── _helper_functions.sh ├── cmssw-pr-package.spec ├── gen-pr-install-cmssw.sh ├── get-merged-prs.py ├── get_external_name.sh ├── 
get_source_flag_for_cmsbuild.sh ├── merge_cms-bot_pr.sh ├── retry-command.sh ├── run-das-query.py ├── run-pr-addon.sh ├── run-pr-comparisons ├── run-pr-external_checks.sh ├── run-pr-hlt-p2-integration.sh ├── run-pr-hlt-p2-timing.sh ├── run-pr-profiling.sh ├── run-pr-qa-unit.sh ├── run-pr-relvals.sh ├── run-pr-unittests.sh ├── setup-pr-test-env.sh ├── test-cmssw-provides.sh └── test_multiple_prs.sh ├── prepare-repo-clone-for-port.sh ├── process-build-release-request.py ├── process-create-data-repo-request.py ├── process-error-reports.py ├── process-partial-logs-relval.py ├── process-pull-request.py ├── process_pr.py ├── push-pkg-info.py ├── pyproject.toml ├── python └── archived_argparse.py ├── query-and-process-prs.py ├── query-new-pull-requests.py ├── reco_profiling └── profileRunner.py ├── release-deploy-afs ├── release-notes.py ├── release_notes_collection.py ├── releases.map ├── releases.py ├── repo_config.py ├── report-build-release-status ├── report-build-release-status.py ├── report-pull-request-results ├── report-pull-request-results.py ├── report-summary-merged-prs.py ├── repos ├── EcalLaserValidation │ ├── HLT_EcalLaserValidation │ │ ├── __init__.py │ │ ├── categories.py │ │ ├── category-watchers.yaml │ │ ├── groups.yaml │ │ ├── releases.py │ │ ├── repo_config.py │ │ ├── run-pr-tests │ │ ├── super-users.yaml │ │ └── watchers.yaml │ ├── L1T_EcalLaserValidation │ │ ├── __init__.py │ │ ├── categories.py │ │ ├── category-watchers.yaml │ │ ├── groups.yaml │ │ ├── releases.py │ │ ├── repo_config.py │ │ ├── run-pr-tests │ │ ├── super-users.yaml │ │ └── watchers.yaml │ ├── RECO_EcalPulseShapeValidation │ │ ├── __init__.py │ │ ├── categories.py │ │ ├── category-watchers.yaml │ │ ├── groups.yaml │ │ ├── releases.py │ │ ├── repo_config.py │ │ ├── run-pr-tests │ │ ├── super-users.yaml │ │ └── watchers.yaml │ ├── TPG_EcalLaserValidation │ │ ├── __init__.py │ │ ├── categories.py │ │ ├── category-watchers.yaml │ │ ├── check-for-valid-push │ │ ├── groups.yaml │ │ ├── 
releases.py │ │ ├── repo_config.py │ │ ├── run-pr-tests │ │ ├── super-users.yaml │ │ └── watchers.yaml │ └── __init__.py ├── Eric100911 │ └── collatz │ │ └── repo_config.py ├── HcalConditionsAutomatization │ ├── ConditionsValidation │ │ ├── __init__.py │ │ ├── categories.py │ │ ├── category-watchers.yaml │ │ ├── groups.yaml │ │ ├── releases.py │ │ ├── repo_config.py │ │ ├── run-pr-tests │ │ ├── super-users.yaml │ │ └── watchers.yaml │ └── __init__.py ├── README.md ├── __init__.py ├── cms_patatrack │ ├── __init__.py │ └── cmssw │ │ ├── __init__.py │ │ ├── categories.py │ │ ├── category-watchers.yaml │ │ ├── groups.yaml │ │ ├── releases.py │ │ ├── repo_config.py │ │ ├── super-users.yaml │ │ └── watchers.yaml ├── cms_sw │ └── cms_docker │ │ ├── categories.py │ │ ├── githublabels.py │ │ └── repo_config.py ├── dmwm │ └── CRABServer │ │ └── repo_config.py ├── iarspider_cmssw │ ├── __init__.py │ ├── cmsdist │ │ ├── __init__.py │ │ ├── categories.py │ │ ├── category-watchers.yaml │ │ ├── groups.yaml │ │ ├── l2.json │ │ ├── milestones.py │ │ ├── releases.py │ │ ├── repo_config.py │ │ └── watchers.yaml │ └── cmssw │ │ ├── __init__.py │ │ ├── categories.py │ │ ├── category-watchers.yaml │ │ ├── groups.yaml │ │ ├── l2.json │ │ ├── milestones.py │ │ ├── releases.py │ │ ├── repo_config.py │ │ └── watchers.yaml └── smuzaffar │ ├── SCRAM │ ├── __init__.py │ └── repo_config.py │ ├── __init__.py │ ├── cmssw │ ├── __init__.py │ ├── categories.py │ ├── category-watchers.yaml │ ├── groups.yaml │ ├── releases.py │ ├── repo_config.py │ ├── super-users.yaml │ └── watchers.yaml │ └── int_build │ ├── __init__.py │ ├── categories.py │ ├── category-watchers.yaml │ ├── groups.yaml │ ├── releases.py │ ├── repo_config.py │ ├── run-pr-tests │ ├── super-users.yaml │ └── watchers.yaml ├── rucio ├── deploy │ ├── oidc │ │ └── etc │ │ │ └── rucio.cfg │ └── rucio.cfg ├── install.sh ├── premixFile-rucio.csh └── setup.sh ├── run-hlt-validation ├── run-ib-addon.py ├── run-ib-classversion ├── run-ib-dxr 
├── run-ib-geometry ├── run-ib-igprof ├── run-ib-iwyu ├── run-ib-material-budget ├── run-ib-pr-matrix.sh ├── run-ib-python3 ├── run-ib-relval.py ├── run-ib-testbase.sh ├── run-material-budget ├── run-pr-code-checks ├── run-tests-release ├── run-user-pr-test ├── runPyRelValThread.py ├── runTests.py ├── run_clang_static_analysis ├── run_class_dumper.sh ├── s3 ├── s3-upload-policy.sh └── s3utils.sh ├── schedule-additional-tests ├── shift ├── libib.py ├── report.py └── uniq-errors.py ├── show-ibs-schedule.py ├── simple-cms-bot.py ├── spack ├── backport.sh ├── bootstrap.sh ├── build.sh ├── cvmfsInstall.sh ├── install.sh └── singBuild.sh ├── splitDepViolationLog.py ├── splitUnitTestLog.py ├── stageout_verify.sh ├── statics-filter1.txt ├── system-overrides.sh ├── system-tools └── nproc │ └── nproc ├── tag-and-schedule-ibs ├── tag-ib.py ├── templates ├── PullRequestSummary.html ├── js │ ├── render.js │ ├── render.min.js │ └── renderPRTests.js ├── performance-summary-plots-list ├── performanceSummaryOut.html └── showIB.html ├── test-prs.sh ├── tests ├── Framework.patch ├── PRActionData │ ├── TestProcessPr.test_abort.json │ ├── TestProcessPr.test_ack_many_files.json │ ├── TestProcessPr.test_assign.json │ ├── TestProcessPr.test_assign_from.json │ ├── TestProcessPr.test_assign_from_invalid.json │ ├── TestProcessPr.test_assign_from_with_label.json │ ├── TestProcessPr.test_backport.json │ ├── TestProcessPr.test_backport_already_seen.json │ ├── TestProcessPr.test_backport_ok.json │ ├── TestProcessPr.test_cache_add_missing_items.json │ ├── TestProcessPr.test_clean_squash.json │ ├── TestProcessPr.test_close.json │ ├── TestProcessPr.test_cmsdist_start_tests.json │ ├── TestProcessPr.test_code_check_approved.json │ ├── TestProcessPr.test_code_checks_with.json │ ├── TestProcessPr.test_convert_cache.json │ ├── TestProcessPr.test_create_compressed_cache.json │ ├── TestProcessPr.test_create_repo.json │ ├── TestProcessPr.test_create_split_cache.json │ ├── 
TestProcessPr.test_dirty_squash.json │ ├── TestProcessPr.test_draft_pr_ask_ready.json │ ├── TestProcessPr.test_draft_pr_assign.json │ ├── TestProcessPr.test_draft_pr_fully_signed.json │ ├── TestProcessPr.test_draft_pr_opened.json │ ├── TestProcessPr.test_draft_pr_ready.json │ ├── TestProcessPr.test_draft_pr_start_test.json │ ├── TestProcessPr.test_draft_pr_updated.json │ ├── TestProcessPr.test_empty_pr.json │ ├── TestProcessPr.test_enable_none.json │ ├── TestProcessPr.test_future_commit.json │ ├── TestProcessPr.test_get_backported_pr.json │ ├── TestProcessPr.test_grant.json │ ├── TestProcessPr.test_hold.json │ ├── TestProcessPr.test_ignore_rejected_invalid.json │ ├── TestProcessPr.test_ignore_rejected_valid.json │ ├── TestProcessPr.test_ignore_sign.json │ ├── TestProcessPr.test_ignore_smth.json │ ├── TestProcessPr.test_invalid_test_params.json │ ├── TestProcessPr.test_invalid_type.json │ ├── TestProcessPr.test_many_commits_ok.json │ ├── TestProcessPr.test_many_commits_warn.json │ ├── TestProcessPr.test_merge_pr.json │ ├── TestProcessPr.test_new_cmsdist_pr.json │ ├── TestProcessPr.test_new_issue.json │ ├── TestProcessPr.test_new_pr.json │ ├── TestProcessPr.test_orp_issue.json │ ├── TestProcessPr.test_partial_reset.json │ ├── TestProcessPr.test_partial_reset_dirty_squash.json │ ├── TestProcessPr.test_pr_develop_branch.json │ ├── TestProcessPr.test_read_compressed_cache.json │ ├── TestProcessPr.test_read_split_cache.json │ ├── TestProcessPr.test_remove_type.json │ ├── TestProcessPr.test_reopen.json │ ├── TestProcessPr.test_reset_signature.json │ ├── TestProcessPr.test_revert.json │ ├── TestProcessPr.test_run_test_params.json │ ├── TestProcessPr.test_sign_core.json │ ├── TestProcessPr.test_sign_reject.json │ ├── TestProcessPr.test_start_tests.json │ ├── TestProcessPr.test_test_all_params.json │ ├── TestProcessPr.test_test_for_arch.json │ ├── TestProcessPr.test_test_for_quearch.json │ ├── TestProcessPr.test_test_for_queue.json │ ├── TestProcessPr.test_test_params.json │ 
├── TestProcessPr.test_test_using_addpkg.json │ ├── TestProcessPr.test_test_using_full.json │ ├── TestProcessPr.test_test_with_pr.json │ ├── TestProcessPr.test_test_workflow.json │ ├── TestProcessPr.test_testparams_all_params.json │ ├── TestProcessPr.test_tests_passed.json │ ├── TestProcessPr.test_tests_rejected.json │ ├── TestProcessPr.test_too_many_commits.json │ ├── TestProcessPr.test_too_many_files.json │ ├── TestProcessPr.test_type_invalid.json │ ├── TestProcessPr.test_type_valid.json │ ├── TestProcessPr.test_unassign.json │ ├── TestProcessPr.test_unhold.json │ ├── TestProcessPr.test_urgent.json │ ├── TestProcessPr.test_valid_type.json │ └── TestProcessPr.test_warn_many_files.json ├── README.md ├── ReplayData │ ├── TestProcessPr.test_abort.txt │ ├── TestProcessPr.test_ack_many_files.txt │ ├── TestProcessPr.test_assign.txt │ ├── TestProcessPr.test_assign_from.txt │ ├── TestProcessPr.test_assign_from_invalid.txt │ ├── TestProcessPr.test_assign_from_with_label.txt │ ├── TestProcessPr.test_backport.txt │ ├── TestProcessPr.test_backport_already_seen.txt │ ├── TestProcessPr.test_backport_ok.txt │ ├── TestProcessPr.test_cache_add_missing_items.txt │ ├── TestProcessPr.test_clean_squash.txt │ ├── TestProcessPr.test_close.txt │ ├── TestProcessPr.test_cmsdist_start_tests.txt │ ├── TestProcessPr.test_code_check_approved.txt │ ├── TestProcessPr.test_code_checks_with.txt │ ├── TestProcessPr.test_convert_cache.txt │ ├── TestProcessPr.test_create_compressed_cache.txt │ ├── TestProcessPr.test_create_repo.txt │ ├── TestProcessPr.test_create_split_cache.txt │ ├── TestProcessPr.test_dirty_squash.txt │ ├── TestProcessPr.test_draft_pr_ask_ready.txt │ ├── TestProcessPr.test_draft_pr_assign.txt │ ├── TestProcessPr.test_draft_pr_fully_signed.txt │ ├── TestProcessPr.test_draft_pr_opened.txt │ ├── TestProcessPr.test_draft_pr_ready.txt │ ├── TestProcessPr.test_draft_pr_start_test.txt │ ├── TestProcessPr.test_draft_pr_updated.txt │ ├── TestProcessPr.test_empty_pr.txt │ ├── 
TestProcessPr.test_enable_none.txt │ ├── TestProcessPr.test_future_commit.txt │ ├── TestProcessPr.test_get_backported_pr.txt │ ├── TestProcessPr.test_grant.txt │ ├── TestProcessPr.test_hold.txt │ ├── TestProcessPr.test_ignore_rejected_invalid.txt │ ├── TestProcessPr.test_ignore_rejected_valid.txt │ ├── TestProcessPr.test_ignore_sign.txt │ ├── TestProcessPr.test_ignore_smth.txt │ ├── TestProcessPr.test_invalid_test_params.txt │ ├── TestProcessPr.test_invalid_type.txt │ ├── TestProcessPr.test_many_commits.txt │ ├── TestProcessPr.test_many_commits_ok.txt │ ├── TestProcessPr.test_many_commits_warn.txt │ ├── TestProcessPr.test_merge_pr.txt │ ├── TestProcessPr.test_new_cmsdist_pr.txt │ ├── TestProcessPr.test_new_issue.txt │ ├── TestProcessPr.test_new_pr.txt │ ├── TestProcessPr.test_orp_issue.txt │ ├── TestProcessPr.test_partial_reset.txt │ ├── TestProcessPr.test_partial_reset_dirty_squash.txt │ ├── TestProcessPr.test_pr_develop_branch.txt │ ├── TestProcessPr.test_read_compressed_cache.txt │ ├── TestProcessPr.test_read_split_cache.txt │ ├── TestProcessPr.test_remove_type.txt │ ├── TestProcessPr.test_reopen.txt │ ├── TestProcessPr.test_reset_signature.txt │ ├── TestProcessPr.test_revert.txt │ ├── TestProcessPr.test_run_test_params.txt │ ├── TestProcessPr.test_sign_core.txt │ ├── TestProcessPr.test_sign_reject.txt │ ├── TestProcessPr.test_start_tests.txt │ ├── TestProcessPr.test_test_all_params.txt │ ├── TestProcessPr.test_test_for_arch.txt │ ├── TestProcessPr.test_test_for_quearch.txt │ ├── TestProcessPr.test_test_for_queue.txt │ ├── TestProcessPr.test_test_params.txt │ ├── TestProcessPr.test_test_using_addpkg.txt │ ├── TestProcessPr.test_test_using_full.txt │ ├── TestProcessPr.test_test_with_pr.txt │ ├── TestProcessPr.test_test_workflow.txt │ ├── TestProcessPr.test_testparams_all_params.txt │ ├── TestProcessPr.test_tests_passed.txt │ ├── TestProcessPr.test_tests_rejected.txt │ ├── TestProcessPr.test_too_many_commits.txt │ ├── TestProcessPr.test_too_many_files.txt │ ├── 
TestProcessPr.test_type_invalid.txt │ ├── TestProcessPr.test_type_valid.txt │ ├── TestProcessPr.test_unassign.txt │ ├── TestProcessPr.test_unhold.txt │ ├── TestProcessPr.test_urgent.txt │ ├── TestProcessPr.test_valid_type.txt │ ├── TestProcessPr.test_warn_many_files.txt │ ├── test_ack_many_files.json │ ├── test_create_compressed_cache.json │ ├── test_create_split_cache.json │ ├── test_read_compressed_cache.json │ ├── test_too_many_files.json │ └── test_warn_many_files.json ├── __init__.py ├── conftest.py ├── coverage_process_pr.sh ├── record_test.sh ├── run_pr_tests.sh ├── test-requirements.txt ├── test_config-map.py ├── test_logreaderUtils.py ├── test_process_pr.py ├── test_watchers.py └── verify-load-cache.py ├── trigger_jenkins_job.py ├── update-commit-statues-matching.py ├── update-github-hooks-ip.py ├── update-release-map ├── upload-build-log ├── upload-release ├── utils └── cmsdist_pip_pkgs_update.py ├── watchers.yaml └── weekly-cmsset_default.sh /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # Reformat everything with Black, part 1 2 | d64ce0527da177ffaf4a9efcb1fc9fd31a9f416e 3 | # Reformat everything with Black, part 2 4 | 64e3a6a4d9a8de478b780ea090fe33f2a8e8646f 5 | -------------------------------------------------------------------------------- /.github/workflows/black.yaml: -------------------------------------------------------------------------------- 1 | name: Black Style Check 2 | 3 | on: 4 | pull_request: 5 | paths: 6 | - '**/*.py' 7 | 8 | jobs: 9 | black_check: 10 | name: Check Python Code Style with Black 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Check out the code 15 | uses: actions/checkout@v4 16 | 17 | - uses: psf/black@stable 18 | with: 19 | options: "--check --diff --verbose -l 99 -t py36 -t py37 -t py38 -t py39 -t py310 -t py311" 20 | -------------------------------------------------------------------------------- /.github/workflows/parser-config.yaml: 
-------------------------------------------------------------------------------- 1 | name: test-changes 2 | on: 3 | push: 4 | paths: 5 | - jenkins/parser/jobs-config.json 6 | pull_request: 7 | paths: 8 | - jenkins/parser/jobs-config.json 9 | jobs: 10 | configfile-sanity-checks: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Clone github repo 14 | uses: actions/checkout@v2 15 | - name: Setup python3 16 | uses: actions/setup-python@v2 17 | with: 18 | python-version: '3.x' 19 | architecture: x64 20 | - name: Checking job's configuration in parser job 21 | run: PYTHONPATH=. python jenkins/parser/paser-config-unittest.py 22 | -------------------------------------------------------------------------------- /.github/workflows/test-changes.yaml: -------------------------------------------------------------------------------- 1 | name: test-changes 2 | on: 3 | push: 4 | branches: 5 | - master 6 | pull_request: 7 | branches: 8 | - master 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | strategy: 13 | matrix: 14 | python-version: [ '3.x' ] 15 | name: Python ${{ matrix.python-version }} 16 | steps: 17 | - uses: actions/checkout@v2 18 | - name: Set up Python 19 | uses: actions/setup-python@v2 20 | with: 21 | python-version: ${{ matrix.python-version }} 22 | architecture: x64 23 | - name: Install dependencies 24 | run: | 25 | python -m pip install --upgrade pip 26 | pip install PyYaml 27 | - name: Checking python version 28 | run: python -c "import sys; print(sys.version)" 29 | - name: Compiling all python files 30 | run: python -m compileall $(ls | grep -v cuda | grep -v v_scheduler | grep -v venv) 31 | - name: Test importing CMSSW_L2/L1 32 | run: python -c 'from categories import CMSSW_L2,CMSSW_ORP' 33 | - name: Checking watchers 34 | run: PYTHONPATH=. python tests/test_watchers.py 35 | - name: Checking config map 36 | run: PYTHONPATH=. 
python tests/test_config-map.py 37 | -------------------------------------------------------------------------------- /.github/workflows/test-process-pr.yaml: -------------------------------------------------------------------------------- 1 | name: Test changes to process_pr.py 2 | 3 | on: 4 | push: 5 | paths: 6 | - process_pr.py 7 | - .github/workflows/test-process-pr.yaml 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | strategy: 13 | matrix: 14 | python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] 15 | 16 | steps: 17 | - uses: MathRobin/timezone-action@v1.1 18 | with: 19 | timezoneLinux: 'Europe/Paris' 20 | - uses: actions/checkout@v4 21 | - uses: actions/checkout@v4 22 | with: 23 | repository: PyGithub/PyGithub 24 | ref: v1.56 25 | path: tmp 26 | sparse-checkout: tests/Framework.py 27 | sparse-checkout-cone-mode: false 28 | - run: mv tmp/tests/Framework.py tests/ 29 | - run: | 30 | cd tests 31 | patch -p0 < Framework.patch 32 | - name: Set up Python ${{ matrix.python-version }} 33 | uses: actions/setup-python@v4 34 | with: 35 | python-version: ${{ matrix.python-version }} 36 | - name: Install dependencies 37 | run: | 38 | python -m pip install --upgrade pip 39 | pip install -r tests/test-requirements.txt 40 | - name: Test with pytest 41 | run: | 42 | pytest -v -s tests/test_process_pr.py --auth_with_token 43 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | .idea 3 | *.properties 4 | tests/Framework.py 5 | venv* 6 | GithubCredentials.py 7 | .coverage 8 | htmlcov 9 | /tests/ReplayData/TestProcessPr.test_mark_rejected.txt 10 | /tests/ReplayData/TestProcessPr.test_mark_passed.txt 11 | /tests/*.log 12 | -------------------------------------------------------------------------------- /DMWM/AnalyzePyFuture.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | 3 | from __future__ import print_function, division 4 | 5 | with open("addedFiles.txt", "r") as addedFiles: 6 | for fileName in addedFiles: 7 | fileName = fileName.strip() 8 | if fileName.endswith("__init__.py"): 9 | continue 10 | with open(fileName, "r") as pyFile: 11 | pyLines = pyFile.readlines() 12 | if fileName.endswith(".py") or "python" in pyLines[0]: 13 | foundDivision = False 14 | for line in pyLines: 15 | if "__future__" in line and "division" in line: 16 | foundDivision = True 17 | if not foundDivision: 18 | print( 19 | "* New file %s does not use python 3 division. Please add `from __future__ import division`.\n" 20 | % fileName 21 | ) 22 | -------------------------------------------------------------------------------- /DMWM/IdentifyPythonFiles.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | from __future__ import print_function, division 4 | 5 | import os 6 | from optparse import OptionParser 7 | 8 | usage = "usage: %prog [options] list_of_files.txt" 9 | parser = OptionParser(usage) 10 | (options, args) = parser.parse_args() 11 | if len(args) != 1: 12 | parser.error("You must supply a file with a list of files to check") 13 | 14 | list_of_files = args[0] 15 | 16 | with open(list_of_files, "r") as changedFiles: 17 | for fileName in changedFiles: 18 | fileName = fileName.strip() 19 | if not fileName: 20 | continue 21 | if fileName.endswith(".py"): 22 | print(fileName) 23 | continue 24 | try: 25 | with open(fileName, "r") as pyFile: 26 | pyLines = pyFile.readlines() 27 | if "python" in pyLines[0]: 28 | print(fileName) 29 | continue 30 | except IOError: 31 | pass 32 | -------------------------------------------------------------------------------- /DMWM/IssueMessage.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | 3 | import os 4 | 5 | from github import Github 6 | from optparse import OptionParser 7 | 8 | usage = "usage: %prog [options] message" 9 | parser = OptionParser(usage) 10 | (options, args) = parser.parse_args() 11 | if len(args) != 1: 12 | parser.error("You must supply a message.") 13 | 14 | message = args[0] 15 | issueID = None 16 | url = "" 17 | 18 | if "ghprbPullId" in os.environ: 19 | issueID = os.environ["ghprbPullId"] 20 | if "BUILD_URL" in os.environ: 21 | url = os.environ["BUILD_URL"] 22 | message += "\nSee %s for details" % url 23 | 24 | gh = Github(os.environ["DMWMBOT_TOKEN"]) 25 | 26 | codeRepo = os.environ.get("CODE_REPO", "WMCore") 27 | repoName = "%s/%s" % (os.environ["WMCORE_REPO"], codeRepo) 28 | 29 | issue = gh.get_repo(repoName).get_issue(int(issueID)) 30 | 31 | issue.create_comment(message) 32 | -------------------------------------------------------------------------------- /DMWM/activate-start-agent.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash -e 2 | 3 | rm -rf $DBSOCK || true 4 | echo "Attempting to activate agent" 5 | if [[ -d $PWD/deploy/current/config/wmagentpy3 ]] 6 | then 7 | $PWD/deploy/current/config/wmagentpy3/manage activate-agent 8 | else 9 | $PWD/deploy/current/config/wmagent/manage activate-agent 10 | fi 11 | unlink deploy/current/sw*/var || /bin/true 12 | 13 | echo "Starting services" 14 | if [[ -d $PWD/deploy/current/config/wmagentpy3 ]] 15 | then 16 | $PWD/deploy/current/config/wmagentpy3/manage start-services 17 | else 18 | $PWD/deploy/current/config/wmagent/manage start-services 19 | fi 20 | -------------------------------------------------------------------------------- /DMWM/deploy-wmagent.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash -e 2 | start=`date +%s` 3 | 4 | export DBSOCK=/tmp/`uuidgen`-mysql.sock 5 | 6 | echo "Deploying wmagent@$WMAGENT_VERSION from $COMP_REPO" 7 | $PWD/deployment/Deploy -R wmagent-dev@${WMAGENT_VERSION} -r comp=$COMP_REPO -t $WMAGENT_VERSION -A $DMWM_ARCH -s 'prep sw post' $PWD/deploy admin/devtools wmagent 8 | 9 | perl -p -i -e 's/set-variable = innodb_buffer_pool_size=2G/set-variable = innodb_buffer_pool_size=50M/' deploy/current/config/mysql/my.cnf 10 | perl -p -i -e 's/set-variable = innodb_log_file_size=512M/set-variable = innodb_log_file_size=20M/' deploy/current/config/mysql/my.cnf 11 | perl -p -i -e 's/key_buffer=4000M/key_buffer=100M/' deploy/current/config/mysql/my.cnf 12 | perl -p -i -e 's/max_heap_table_size=2048M/max_heap_table_size=100M/' deploy/current/config/mysql/my.cnf 13 | perl -p -i -e 's/tmp_table_size=2048M/tmp_table_size=100M/' deploy/current/config/mysql/my.cnf 14 | 15 | end=`date +%s` 16 | runtime=$((end-start)) 17 | 18 | echo "Total time to deploy WMAgent: $runtime" 19 | -------------------------------------------------------------------------------- /DMWM/deploy-wmagentpy3.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | start=`date +%s` 3 | 4 | export DBSOCK=/tmp/`uuidgen`-mysql.sock 5 | 6 | echo "Deploying wmagentpy3-dev@$WMAGENT_VERSION from $COMP_REPO" 7 | $PWD/deployment/Deploy -R wmagentpy3-dev@${WMAGENT_VERSION} -r comp=${COMP_REPO} -t $WMAGENT_VERSION -A $DMWM_ARCH -s 'prep sw post' $PWD/deploy wmagentpy3/devtools 8 | 9 | perl -p -i -e 's/set-variable = innodb_buffer_pool_size=2G/set-variable = innodb_buffer_pool_size=50M/' deploy/current/config/mysql/my.cnf 10 | perl -p -i -e 's/set-variable = innodb_log_file_size=512M/set-variable = innodb_log_file_size=20M/' deploy/current/config/mysql/my.cnf 11 | perl -p -i -e 's/key_buffer=4000M/key_buffer=100M/' deploy/current/config/mysql/my.cnf 12 | perl -p -i -e 's/max_heap_table_size=2048M/max_heap_table_size=100M/' 
deploy/current/config/mysql/my.cnf 13 | perl -p -i -e 's/tmp_table_size=2048M/tmp_table_size=100M/' deploy/current/config/mysql/my.cnf 14 | 15 | end=`date +%s` 16 | runtime=$((end-start)) 17 | 18 | echo "Total time to deploy WMAgentPy3: $runtime" 19 | -------------------------------------------------------------------------------- /DMWM/install-crabdev.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash -e 2 | 3 | # Install crab-dev RPM 4 | 5 | mkdir crabdev 6 | pushd crabdev/ 7 | 8 | export SCRAM_ARCH=$DMWM_ARCH 9 | wget -O bootstrap.sh http://cmsrep.cern.ch/cmssw/$COMP_REPO/bootstrap.sh 10 | sh bootstrap.sh -architecture $DMWM_ARCH -path `pwd` -repository $COMP_REPO setup 11 | source $DMWM_ARCH/external/apt/*/etc/profile.d/init.sh 12 | apt-get update 13 | apt-get -y install cms+crab-devtools+${CRABDEV_LATEST} 14 | source $DMWM_ARCH/cms/crab-devtools/${CRABDEV_LATEST}/etc/profile.d/init.sh 15 | popd 16 | 17 | -------------------------------------------------------------------------------- /DMWM/install-wmcore.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash -e 2 | 3 | # Install from git code, set paths appropriately for testing 4 | 5 | set +x 6 | . deploy/current/apps/wmagent/etc/profile.d/init.sh 7 | set -x 8 | rm -rf install 9 | 10 | (cd code && python setup.py install --prefix=../install) 11 | 12 | set +x 13 | . deploy/current/apps/wmagent/etc/profile.d/init.sh 14 | set -x 15 | . deploy/current/config/admin/init.sh 16 | 17 | export WMCORE_ROOT=$PWD/install 18 | export PATH=$WMCORE_ROOT/install/bin:$PATH 19 | export PYTHONPATH=$WMCORE_ROOT/lib/python2.7/site-packages:$PYTHONPATH 20 | export PYTHONPATH=$WMCORE_ROOT/test/python:$PYTHONPATH 21 | 22 | echo "Sourcing secrets and setting DB connectors" 23 | set +x # don't echo secrets 24 | . 
$WMAGENT_SECRETS_LOCATION 25 | export DATABASE=mysql://${MYSQL_USER}@localhost/wmcore_unittest 26 | export COUCHURL="http://${COUCH_USER}:${COUCH_PASS}@${COUCH_HOST}:${COUCH_PORT}" 27 | set -x 28 | 29 | export RUCIO_HOST=$RUCIO_HOST 30 | export RUCIO_AUTH=$RUCIO_AUTH 31 | -------------------------------------------------------------------------------- /DMWM/install-wmcorepy3.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash -e 2 | 3 | # Install from git code, set paths appropriately for testing 4 | set +x 5 | . deploy/current/apps/wmagentpy3/etc/profile.d/init.sh 6 | set -x 7 | 8 | rm -rf install 9 | python3 --version 10 | (cd code && python3 setup.py install --prefix=../install) 11 | 12 | echo "Sourcing wmagentpy3 and wmcorepy3-devtools init.sh scripts" 13 | set +x 14 | . deploy/current/apps/wmagentpy3/etc/profile.d/init.sh 15 | # Instead of sourcing deploy/current/config/admin/init.sh, let's try wmcorepy3-devtools 16 | . deploy/current/apps/wmcorepy3-devtools/etc/profile.d/init.sh 17 | set -x 18 | 19 | # these export are too verbose 20 | set +x 21 | export WMCORE_ROOT=$PWD/install 22 | export PATH=$WMCORE_ROOT/install/bin:$PATH 23 | export PYTHONPATH=$WMCORE_ROOT/lib/python3.8/site-packages:$PYTHONPATH 24 | set -x 25 | export PYTHONPATH=$WMCORE_ROOT/test/python:$PYTHONPATH 26 | 27 | echo "Sourcing secrets and setting DB connectors" 28 | set +x # don't echo secrets 29 | . $WMAGENT_SECRETS_LOCATION 30 | export DATABASE=mysql://${MYSQL_USER}@localhost/wmcore_unittest 31 | export COUCHURL="http://${COUCH_USER}:${COUCH_PASS}@${COUCH_HOST}:${COUCH_PORT}" 32 | set -x 33 | 34 | export RUCIO_HOST=$RUCIO_HOST 35 | export RUCIO_AUTH=$RUCIO_AUTH 36 | -------------------------------------------------------------------------------- /DMWM/kill-databases.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash -e 2 | set -x 3 | 4 | start=`date +%s` 5 | 6 | echo "Trying to kill earlier processes" 7 | pkill -9 -f $PWD/deploy || true 8 | pkill -9 -f external/mariadb || true # Kill off any other slices too 9 | pkill -9 -f external/couchdb || true # Kill off any other slices too 10 | pkill -9 -f external/erlang || true # Kill off any other slices too 11 | pkill -9 -f code/setup.py || true # Kill off any other slices too 12 | 13 | end=`date +%s` 14 | runtime=$((end-start)) 15 | echo "Total time to kill databases: $runtime" 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /DMWM/latest-dmwm-versions.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash -e 2 | 3 | export WMAGENT_LATEST=$(curl -s "http://cmsrep.cern.ch/cgi-bin/repos/$COMP_REPO/$DMWM_ARCH?C=M;O=D" | grep -oP "(?<=>cms\+wmagent-dev\+).*(?=-1-1)" | head -1) 4 | export WMAGENTPY3_LATEST=$(curl -s "http://cmsrep.cern.ch/cgi-bin/repos/$COMP_REPO/$DMWM_ARCH?C=M;O=D" | grep -oP "(?<=>cms\+wmagentpy3-dev\+).*(?=-1-1)" | head -1) 5 | export CRABDEV_LATEST=$(curl -s "http://cmsrep.cern.ch/cgi-bin/repos/$COMP_REPO/$DMWM_ARCH?C=M;O=D" | grep -oP "(?<=>cms\+crab-devtools\+).*(?=-1-1)" | head -1) 6 | -------------------------------------------------------------------------------- /DMWM/setup-cmsbot.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash -e 2 | 3 | # For reference only, you cannot actually run this script because it has not run yet 4 | 5 | set -x 6 | 7 | start=`date +%s` 8 | 9 | rm -rf cms-bot || true 10 | if [ ! 
-d cms-bot ]; then 11 | git clone https://github.com/$CMS_BOT_REPO/cms-bot 12 | fi 13 | 14 | pushd $WORKSPACE/cms-bot/ 15 | 16 | wget https://pypi.python.org/packages/source/r/requests/requests-2.3.0.tar.gz#md5=7449ffdc8ec9ac37bbcd286003c80f00 17 | tar -xvf requests-2.3.0.tar.gz 18 | rm -rf requests || true 19 | mv requests-2.3.0/requests/ requests 20 | 21 | git checkout $CMS_BOT_BRANCH 22 | git pull --rebase origin $CMS_BOT_BRANCH 23 | 24 | popd 25 | 26 | end=`date +%s` 27 | runtime=$((end-start)) 28 | echo "Total time to setup cms-bot: $runtime" 29 | -------------------------------------------------------------------------------- /DMWM/setup-rucio.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash -e 2 | 3 | echo "AMR Updating the rucio.cfg file used by WMCore Jenkins" 4 | 5 | sed -i "s+RUCIO_HOST_OVERWRITE+$RUCIO_HOST+" $RUCIO_HOME/etc/rucio.cfg 6 | sed -i "s+RUCIO_AUTH_OVERWRITE+$RUCIO_AUTH+" $RUCIO_HOME/etc/rucio.cfg 7 | sed -i "s+\$X509_USER_CERT+$X509_USER_CERT+" $RUCIO_HOME/etc/rucio.cfg 8 | sed -i "s+\$X509_USER_KEY+$X509_USER_KEY+" $RUCIO_HOME/etc/rucio.cfg 9 | sed -i "s+\$X509_USER_PROXY+$X509_USER_PROXY+" $RUCIO_HOME/etc/rucio.cfg 10 | 11 | echo "Done updating rucio.cfg file" 12 | -------------------------------------------------------------------------------- /DMWM/setup-secrets.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash -e 2 | 3 | echo "Turning off tracing to avoid revealing secrets" 4 | set +x 5 | 6 | start=`date +%s` 7 | 8 | cp ~/.globus/dmwm-config.tmpl $WORKSPACE/wmas 9 | perl -p -i -e "s/THISHOSTNAME/`hostname`/" $WORKSPACE/wmas 10 | perl -p -i -e "s/srtest/dmwmtest/" $WORKSPACE/wmas 11 | . 
$WORKSPACE/wmas 12 | export WMAGENT_SECRETS_LOCATION=$WORKSPACE/wmas 13 | export COUCH_CERT_FILE=~/.globus/usercert.pem 14 | export COUCH_KEY_FILE=~/.globus/userkey.pem 15 | export X509_HOST_CERT=$COUCH_CERT_FILE 16 | export X509_HOST_KEY=$COUCH_KEY_FILE 17 | export X509_USER_CERT=$COUCH_CERT_FILE 18 | export X509_USER_KEY=$COUCH_KEY_FILE 19 | 20 | set -x 21 | voms-proxy-init -voms cms -out $WORKSPACE/x509up_u`id -u` 22 | export X509_USER_PROXY=$WORKSPACE/x509up_u`id -u` 23 | 24 | end=`date +%s` 25 | runtime=$((end-start)) 26 | 27 | echo "Total time to setup secrets: $runtime" 28 | -------------------------------------------------------------------------------- /DMWM/temporary-patches.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash -e 2 | 3 | pushd deployment 4 | git remote add ewv https://github.com/ericvaandering/deployment.git || true 5 | git fetch ewv 6 | git cherry-pick 2368295b048f9a343ab8023495a8a942f84a0539 || true 7 | popd 8 | 9 | echo "All temporary patches applied" 10 | -------------------------------------------------------------------------------- /DMWM/update-deployment.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash -e 2 | 3 | if [ ! -d deployment ]; then 4 | git clone https://github.com/dmwm/deployment.git 5 | fi 6 | 7 | pushd deployment 8 | git pull --rebase origin master 9 | popd 10 | -------------------------------------------------------------------------------- /DMWM/update-gh-pr-tags.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash -e 2 | 3 | pushd code 4 | git fetch --tags https://github.com/dmwm/$CODE_REPO.git "+refs/heads/*:refs/remotes/origin/*" 5 | git config remote.origin.url https://github.com/dmwm/$CODE_REPO.git 6 | git config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*" 7 | git fetch --tags https://github.com/dmwm/$CODE_REPO.git "+refs/pull/*:refs/remotes/origin/pr/*" 8 | export COMMIT=`git rev-parse "origin/pr/$ghprbPullId/merge^{commit}"` 9 | 10 | git checkout ${ghprbTargetBranch} # pick this up for later comparison in diff 11 | git checkout -f $COMMIT 12 | 13 | 14 | -------------------------------------------------------------------------------- /DMWM/update-gh-repo.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash -e 2 | 3 | if [ ! -d code ]; then 4 | git clone https://github.com/$WMCORE_REPO/$CODE_REPO.git code 5 | fi 6 | 7 | pushd code 8 | git pull --rebase origin $WMCORE_BRANCH 9 | popd 10 | -------------------------------------------------------------------------------- /DMWM/update-wmcore.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash -e 2 | 3 | if [ ! -d code ]; then 4 | git clone https://github.com/$WMCORE_REPO/WMCore.git code 5 | fi 6 | 7 | pushd code 8 | git pull --rebase origin $WMCORE_BRANCH 9 | popd 10 | -------------------------------------------------------------------------------- /FAQ.md: -------------------------------------------------------------------------------- 1 | ## How can I tell which version of an external is used? 2 | 3 | First lookup your release in [config.map](https://github.com/cms-sw/cms-bot/blob/master/config.map). 4 | 5 | Each line contains properties of a release queue. 
6 | 7 | `CMSDIST_TAG` will tell you which CMSDIST tag / branch to lookup in: 8 | 9 | 10 | 11 | Look up for the spec related to your external and you should find in the first 12 | rows either a line of the kind: 13 | 14 | Source: 15 | 16 | for example: 17 | 18 | Source: git+https://github.com/%github_user/root.git?obj=%{branch}/%{tag}&export=%{n}-%{realversion}&output=/%{n}-%{realversion}-%{tag}.tgz 19 | 20 | the `%{defined-variable}` gets expanded to their value, as required by rpm. In particular in many cases we have: 21 | 22 | - `%tag`: the hash commit to be used for the external. 23 | - `%branch`: the branch on which the commit its located. 24 | - `github_user`: the user owning the repository to be used. 25 | -------------------------------------------------------------------------------- /IBPageTail.txt: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /archive/create_json.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import sys, re, json 4 | from os import environ, popen 5 | from os.path import dirname, realpath 6 | from optparse import OptionParser 7 | 8 | parser = OptionParser() 9 | parser.add_option("--logfile") 10 | parser.add_option("--jsonfile") 11 | (options, args) = parser.parse_args() 12 | 13 | 14 | def extract_data(inputfile): 15 | list_of_dicts = [] 16 | with open(inputfile, "r") as file: 17 | first_char = file.read(1) 18 | if not first_char: 19 | print("Error: Input file is empty"), sys.exit(1) 20 | pattern = re.compile(r"^([a-z]+)\+([\w-]+)\+([\w.-]+)\s\(([\w]+)\)") 21 | matched_lines = [pattern.match(l) for l in file.readlines()] 22 | for line in matched_lines: 23 | if line: 24 | list_of_dicts.append( 25 | dict( 26 | package_type=line.group(1), 27 | name=line.group(2), 28 | ver_suffix=line.group(3), 29 | hashtag=line.group(4), 30 | ) 31 | ) 32 | 
return json.dumps(list_of_dicts, sort_keys=True, indent=2) 33 | 34 | 35 | with open(options.jsonfile, "w") as file: 36 | file.write(extract_data(options.logfile)) 37 | -------------------------------------------------------------------------------- /archive/deprecate_releases.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python 2 | from __future__ import print_function 3 | import sys 4 | 5 | if len(sys.argv) < 3: 6 | print("Usage: %s releases.map cmssw_version [cmssw_version [...]]" % sys.argv[0]) 7 | sys.exit(1) 8 | 9 | release_map = sys.argv[1] 10 | deprecate_list = sys.argv[2:] 11 | fd = open(release_map, "r") 12 | for line in fd.readlines(): 13 | release = line.split(";label=", 1)[1].split(";", 1)[0] 14 | if release in deprecate_list: 15 | line = line.replace("Announced", "Deprecated") 16 | print(line, end=" ") 17 | -------------------------------------------------------------------------------- /archive/get-git-tags.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from github import Github, GithubException 3 | from os.path import expanduser 4 | from optparse import OptionParser 5 | from datetime import datetime 6 | from sys import exit 7 | import re 8 | from socket import setdefaulttimeout 9 | 10 | setdefaulttimeout(120) 11 | 12 | if __name__ == "__main__": 13 | parser = OptionParser( 14 | usage="%prog -b|--branch -d|--date -t|--tag [-n|--dry-run]" 15 | ) 16 | parser.add_option( 17 | "-r", 18 | "--repository", 19 | dest="repo", 20 | help="Github repository e.g. cms-sw/cmssw", 21 | type=str, 22 | default="cms-sw/cmssw", 23 | ) 24 | parser.add_option( 25 | "-m", 26 | "--match", 27 | dest="match", 28 | help="Regexp to match tags e.g. 
#!/usr/bin/env python3
"""Collect summary statistics for locally built packages.

Walks the directory tree given as argv[1] looking for ``opts.json``
build-option files exactly six levels deep, pairs each with its build
``log`` and per-tool ``<tool>.json`` stats file, and prints the merged
records as JSON (one ``{"_source": {...}}`` entry per build whose log
ends with "exit 0").
"""
import sys
import json
import subprocess  # bug fix: was missing, subprocess.getstatusoutput() is used below
from os.path import exists, join

from es_utils import get_summary_stats_from_json_file

data = []
# Directories exactly 6 levels deep that hold an opts.json file.
e, o = subprocess.getstatusoutput(
    "find %s -maxdepth 6 -mindepth 6 -name opts.json -type f | sed 's|/opts.json$||'" % sys.argv[1]
)
for d in o.split("\n"):
    tool = d.split("/")[-2]
    jf = join(d, "opts.json")
    lf = join(d, "log")
    sf = join(d, "%s.json" % tool)
    if not exists(lf) or not exists(sf):
        continue
    # Keep only builds whose log's last line reports "exit 0" (success).
    e, c = subprocess.getstatusoutput("tail -1 %s | grep 'exit 0' | wc -l" % lf)
    if c == "0":
        continue
    jopts = {}
    with open(jf) as opts_dict_f:
        jopts = json.load(opts_dict_f)
    item = get_summary_stats_from_json_file(sf, 1)
    item.update(jopts)
    data.append({"_source": item})

print(json.dumps(data, sort_keys=True, indent=2))
gh = Github(login_or_token=open(expanduser(GH_TOKEN)).read().strip()) 9 | print("GitHub API rate limit: {0}".format(gh.get_rate_limit())) 10 | 11 | 12 | if __name__ == "__main__": 13 | main() 14 | -------------------------------------------------------------------------------- /archive/get_repo_authors.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python3 2 | from os.path import basename 3 | from sys import argv, exit 4 | from json import loads, dumps 5 | 6 | try: 7 | authors_info = {} 8 | repo = argv[1] 9 | err, output = subprocess.getstatusoutput( 10 | "curl -s https://api.github.com/repos/" + repo + "/stats/contributors" 11 | ) 12 | if err: 13 | print(output) 14 | exit(1) 15 | data = loads(output) 16 | for item in data: 17 | authors_info[item["author"]["login"]] = item["total"] 18 | if not authors_info: 19 | print(output) 20 | exit(1) 21 | print( 22 | basename(repo).upper().replace("-", "_") 23 | + "_AUTHORS=" 24 | + dumps(authors_info, sort_keys=True, indent=2) 25 | ) 26 | except IndexError: 27 | print("Repo Name Required ... 
Arugement missing !!!!") 28 | exit(1) 29 | -------------------------------------------------------------------------------- /archive/ib-upload-logs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from sys import argv 3 | from logUpdater import LogUpdater 4 | 5 | logger = LogUpdater(dirIn=argv[1]) 6 | logger.copyLogs(argv[2]) 7 | -------------------------------------------------------------------------------- /archive/merge-pull-request.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from argparse import ArgumentParser 3 | from github import Github 4 | from os.path import expanduser 5 | from sys import exit 6 | from socket import setdefaulttimeout 7 | 8 | setdefaulttimeout(120) 9 | 10 | if __name__ == "__main__": 11 | parser = ArgumentParser() 12 | parser.add_argument("pr", type=int) 13 | parser.add_argument("-m", dest="message", type=str, default=None) 14 | args = parser.parse_args() 15 | 16 | gh = Github(login_or_token=open(expanduser("~/.github-token")).read().strip()) 17 | try: 18 | pr = gh.get_repo("cms-sw/cmssw").get_pull(args.pr) 19 | except: 20 | print("Could not find pull request. 
#!/bin/env python3
"""Compute per-workflow wall-clock durations from a timing log.

Each input line carries a workflow id before the first "_" and two
``MON DD HH:MM:SS YYYY`` timestamps; the difference (first minus second
timestamp, in seconds) is written to the output file as a JSON dict
keyed by workflow id. Lines without exactly two timestamps are skipped.
"""
from datetime import datetime
import re, json
from argparse import ArgumentParser

# Matches timestamps like "JAN 02 10:01:40 2024" (month case-insensitive).
_DATE_RE = re.compile(
    r"[A-Z]{3}\s+[\d]{2}\s+[\d]{2}:[\d]{2}:[\d]{2}\s+[\d]{4}", re.IGNORECASE
)


def parse_workflow_times(lines):
    """Return {workflow: duration_seconds} for lines holding two timestamps."""
    dict_store = {}
    for line in lines:
        workflow = line.split("_")[0]
        match_date = _DATE_RE.findall(line)
        if len(match_date) != 2:
            continue
        t_end = datetime.strptime(match_date[0], "%b %d %H:%M:%S %Y")
        t_start = datetime.strptime(match_date[1], "%b %d %H:%M:%S %Y")
        # Bug fix: the original used timedelta.seconds, which silently drops
        # whole days and wraps to a bogus positive value for negative deltas;
        # total_seconds() gives the true duration.
        dict_store[workflow] = int((t_end - t_start).total_seconds())
    return dict_store


def main():
    parser = ArgumentParser()
    parser.add_argument("-i", "--input")
    parser.add_argument("-o", "--output")
    args = parser.parse_args()
    with open(args.input, "r") as fd_read:
        dict_store = parse_workflow_times(fd_read)
    with open(args.output, "w") as outfile:
        json.dump(dict_store, outfile)


if __name__ == "__main__":
    main()
0 12 | used = 0 13 | volumes = 0 14 | max_volume_len = 0 15 | max_path_len = 0 16 | for line in sys.stdin: 17 | info = line.strip().split() 18 | if info[2] in data: 19 | continue 20 | volumes += 1 21 | allocated = allocated + int(info[3]) 22 | used = used + int(info[4]) 23 | data[info[2]] = info 24 | if len(info[2]) > max_volume_len: 25 | max_volume_len = len(info[2]) 26 | if len(info[1]) > max_path_len: 27 | max_path_len = len(info[1]) 28 | max_volume_len = max_volume_len + 4 29 | max_path_len = max_path_len + 4 30 | 31 | print("Total Volumes :", volumes) 32 | print("Allocated Space:", int(allocated / 1000000), "GB") 33 | print("Used Space :", int(used / 1000000), "GB") 34 | for vol in sorted(data): 35 | msg = "{0:<" + str(max_volume_len) + "}{1:<" + str(max_path_len) + "}" 36 | print(msg.format(vol, data[vol][1]), data[vol][4] + "/" + data[vol][3]) 37 | -------------------------------------------------------------------------------- /build-release-watchers.yaml: -------------------------------------------------------------------------------- 1 | # Mapping between the release queues and the usernames of the people 2 | # that want to be aware of the build process 3 | CMSSW_7_3_X: 4 | CMSSW_7_3_DEVEL_X: 5 | CMSSW_7_2_X: 6 | CMSSW_7_1_X: 7 | CMSSW_7_0_X: 8 | CMSSW_6_2_X_SLHC: 9 | CMSSW_6_2_X: 10 | CMSSW_5_3_X: 11 | 12 | -------------------------------------------------------------------------------- /build_hosts.txt: -------------------------------------------------------------------------------- 1 | cmsbuild01.cern.ch 2 | cmsbuild02.cern.ch 3 | cmsbuild03.cern.ch 4 | cmsbuild04.cern.ch 5 | cmsbuild05.cern.ch 6 | cmsbuild06.cern.ch 7 | cmsbuild07.cern.ch 8 | cmsbuild08.cern.ch 9 | cmsbuild09.cern.ch 10 | cmsbuild10.cern.ch 11 | cmsbuild11.cern.ch 12 | cmsbuild12.cern.ch 13 | cmsbuild13.cern.ch 14 | cmsbuild14.cern.ch 15 | cmsbuild16.cern.ch 16 | cmsbuild17.cern.ch 17 | cmsbuild18.cern.ch 18 | cmsbuild19.cern.ch 19 | cmsbuild20.cern.ch 20 | cmsbuild22.cern.ch 21 | 
cmsbuild23.cern.ch 22 | cmsbuild24.cern.ch 23 | cmsbuild25.cern.ch 24 | cmsbuild30.cern.ch 25 | cmsbuild32.cern.ch 26 | vocms029.cern.ch 27 | vocms0315.cern.ch 28 | -------------------------------------------------------------------------------- /category-watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of cmssw categories to watch e.g. alca, db, core etc. 2 | # valid categories are available here http://cms-sw.github.io/categories.html 3 | tocheng: 4 | - alca 5 | mmusich: 6 | - alca 7 | - db 8 | PonIlya: 9 | - db 10 | yuanchao: 11 | - alca 12 | - db 13 | rsreds: 14 | - alca 15 | - db 16 | JanChyczynski: 17 | - db 18 | sbein: 19 | - fastsim 20 | -------------------------------------------------------------------------------- /checkDirSizes.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import print_function 3 | 4 | import sys 5 | from pickle import Pickler 6 | 7 | from _py2with3compatibility import run_cmd 8 | 9 | 10 | def doDu(what): 11 | error, out = run_cmd("du -k -s %s" % what) 12 | if error: 13 | print("Error while getting directory size.") 14 | sys.exit(1) 15 | results = [l.split() for l in out.split("\n")] 16 | return dict( 17 | [(pkg.strip().replace("src/", ""), int(sz.strip() * 1024)) for (sz, pkg) in results] 18 | ) 19 | 20 | 21 | if __name__ == "__main__": 22 | try: 23 | f = open("dirSizeInfo.pkl", "wb") 24 | pklr = Pickler(f, protocol=2) 25 | pklr.dump(doDu("src lib bin")) 26 | pklr.dump(doDu("src/*/*")) 27 | f.close() 28 | except Exception as e: 29 | print("ERROR during pickling results for dir size:", str(e)) 30 | sys.exit(1) 31 | print("Successfully pickled results for dir size !") 32 | -------------------------------------------------------------------------------- /cleanup-auto-build: -------------------------------------------------------------------------------- 1 | 
#!/bin/sh -ex 2 | 3 | # This script assumes cleans up the build area used for the build, it deletes: 4 | # /build/cmsbuild/auto-builds/$CMSSW_X_Y_Z-$ARCHITECTURE/ 5 | # CMSSW_X_Y_Z: the release that was build 6 | # ARCHITECTURE: architecture for the upload 7 | 8 | CMSSW_X_Y_Z=$1 9 | ARCHITECTURE=$2 10 | BUILD_DIR=$3 11 | WORKSPACE="${BUILD_DIR}/auto-builds/$CMSSW_X_Y_Z-$ARCHITECTURE" 12 | 13 | rm -r $WORKSPACE 14 | 15 | echo 'ALL_OK' 16 | -------------------------------------------------------------------------------- /cms-filename-checks.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import print_function 3 | from sys import argv 4 | from os.path import join, exists 5 | 6 | exceptions_regexp = [] 7 | 8 | uniq_paths = [] 9 | for file_path in [f.strip("\n").strip("/") for f in open(argv[1]).readlines()]: 10 | if not file_path or [r for r in exceptions_regexp if r.match(file_path)]: 11 | continue 12 | xpath = "" 13 | for sub_path in file_path.split("/"): 14 | xpath = join(xpath, sub_path) 15 | if not sub_path[:1].isdigit(): 16 | continue 17 | # If it exists then we allow to have files with [0-9] under it 18 | if exists(join(argv[2], xpath)): 19 | break 20 | if not xpath in uniq_paths: 21 | uniq_paths.append(xpath) 22 | break 23 | if uniq_paths: 24 | print("\n".join(uniq_paths)) 25 | -------------------------------------------------------------------------------- /cms_static.py: -------------------------------------------------------------------------------- 1 | GH_CMSSW_ORGANIZATION = "cms-sw" 2 | GH_CMSSW_REPO = "cmssw" 3 | GH_CMSDIST_REPO = "cmsdist" 4 | BUILD_REL = "^[Bb]uild[ ]+(CMSSW_[^ ]+)" 5 | CREATE_REPO = "^[Cc]reate[ ]+repository[ ]+([A-Z][0-9A-Za-z]+)[-/]([a-zA-Z][0-9A-Za-z]+)" 6 | NEW_ISSUE_PREFIX = "A new Issue was created by " 7 | NEW_PR_PREFIX = "A new Pull Request was created by " 8 | ISSUE_SEEN_MSG = "^A new (Pull Request|Issue) was created by " 9 | 
# Constants shared by the cms-bot tooling (validation patterns, bot
# messages, and Jenkins host names).
VALID_CMSDIST_BRANCHES = "^IB/CMSSW_.+$"
BACKPORT_STR = "- Backported from #"
CMSBUILD_GH_USER = "cmsbuild"
CMSBOT_IGNORE_MSG = "\\s*"
CMSBOT_NO_NOTIFY_MSG = "<(no-|)notify>\\s*"
CMSBOT_TECHNICAL_MSG = "cms-bot internal usage"
JENKINS_HOST = "cmsjenkins04"
CMS_JENKINS_HOST = "cmsjenkins02"
DMWM_JENKINS_HOST = "cmsjenkins11"
VALID_CMS_SW_REPOS_FOR_TESTS = [
    "cmssw",
    "cmsdist",
    "cmssdt-ib",
    "cmssdt-web",
    "cms-bot",
    "root",
    "cmssw-config",
    "pkgtools",
    "SCRAM",
    "cmssw-osenv",
    "cms-git-tools",
    "cms-common",
    "cms_oracleocci_abi_hack",
    "cms-docker",
    "siteconf",
]


def get_jenkins(prefix):
    """Return the base URL of the Jenkins instance that serves *prefix*.

    "cms-jenkins" and "dmwm-jenkins" map to their dedicated hosts; any
    other prefix goes to the default Jenkins host.
    """
    host_for_prefix = {
        "cms-jenkins": CMS_JENKINS_HOST,
        "dmwm-jenkins": DMWM_JENKINS_HOST,
    }
    jhost = host_for_prefix.get(prefix, JENKINS_HOST)
    return "http://%s.cern.ch:8080/%s" % (jhost, prefix)
;; 24 | esac 25 | done 26 | [ "$BLD_OPTS" != "" ] && arg="${arg} --build-options $(echo ${BLD_OPTS} | sed 's|^,||')" 27 | fi 28 | [ "$2" != "" ] && arg="${arg} --vectorization=$2" 29 | [ "${arg}" = "" ] || echo "${arg}" 30 | } 31 | 32 | function cmssw_default_target() 33 | { 34 | case $1 in 35 | *SKYLAKE*|*SANDYBRIDGE*|*HASWELL*|*MULTIARCHS*) echo auto ;; 36 | *) echo default ;; 37 | esac 38 | } 39 | -------------------------------------------------------------------------------- /cmssdt.sh: -------------------------------------------------------------------------------- 1 | CMSSDT_SERVER=cmssdt9-02.cern.ch 2 | -------------------------------------------------------------------------------- /cmssw-subprojcts/Stitched.filter: -------------------------------------------------------------------------------- 1 | DataFormats/Common 2 | DataFormats/FEDRawData 3 | DataFormats/FWLite 4 | DataFormats/Provenance 5 | DataFormats/StdDictionaries 6 | DataFormats/Streamer 7 | DataFormats/TestObjects 8 | DataFormats/WrappedStdDictionaries 9 | FWCore/Catalog 10 | FWCore/Common 11 | FWCore/Concurrency 12 | FWCore/FWLite 13 | FWCore/Framework 14 | FWCore/Integration 15 | FWCore/MessageLogger 16 | FWCore/MessageService 17 | FWCore/Modules 18 | FWCore/ParameterSet 19 | FWCore/ParameterSetReader 20 | FWCore/PluginManager 21 | FWCore/PrescaleService 22 | FWCore/PyDevParameterSet 23 | FWCore/Reflection 24 | FWCore/PythonParameterSet 25 | FWCore/SOA 26 | FWCore/ServiceRegistry 27 | FWCore/Services 28 | FWCore/SharedMemory 29 | FWCore/Skeletons 30 | FWCore/Sources 31 | FWCore/TFWLiteSelector 32 | FWCore/TFWLiteSelectorTest 33 | FWCore/TestProcessor 34 | FWCore/Utilities 35 | FWCore/Version 36 | IOMC/RandomEngine 37 | IOPool/Common 38 | IOPool/Input 39 | IOPool/Output 40 | IOPool/Provenance 41 | IOPool/SecondaryInput 42 | IOPool/Streamer 43 | IOPool/TFileAdaptor 44 | SimDataFormats/RandomEngine 45 | Utilities/General 46 | Utilities/RFIOAdaptor 47 | Utilities/StorageFactory 48 | 
Utilities/Testing 49 | Utilities/Xerces 50 | Utilities/XrdAdaptor 51 | -------------------------------------------------------------------------------- /cmssw-subprojcts/git_filter.cfg: -------------------------------------------------------------------------------- 1 | # This example filter configuration is set up to filter all (master) commits 2 | # on the linux kernel and split it into two branches, one containing the 3 | # source and the other containing the Documentation directory 4 | 5 | # Git repository to filter 6 | REPO: cmssw 7 | #REVN: range master~1000..master 8 | REVN: ref refs/heads/@BRANCH@ 9 | # Directory where the filter files are 10 | #BASE: modules/ 11 | BASE: 12 | # Filter config: filter name, filter file list 13 | FILT: @PROJECT@ @PROJECT_FILTER@ 14 | # Tag Prefix 15 | TPFX: @BRANCH@- 16 | -------------------------------------------------------------------------------- /cmssw_l2/commit.txt: -------------------------------------------------------------------------------- 1 | 94e50f7efbf3a0b0a26d62732a10282624c1f9d3 2 | cf031ff6502e597348fc448f85e077a5be8ec4e2 3 | 9c6c7b780f30478d69a59505842398b87aaa7e44 4 | e7c71669eeda3b513a92954295fc04f7dc58e3d9 5 | 81c7403e8b356a15a84fe6c3420adbc8214bca1d 6 | da59e226dcc1035e3b1fc480d67e10040024419c 7 | 40a7adfe61c17c466e51744e5b2876342837d3a3 8 | 36430c906dfcb3fff873ddce24daaf3999e1a670 9 | f4c7303c35a393f9f9efeb00dd56fdc22205c062 10 | 436b98497d8781b4547a2e3332f3eabfa340df26 11 | a6fdf78d2452d18181489212f74bea9225686189 12 | 4723b035408906a1e5f3f1a469b35d44966f8ddc 13 | 88940a2ff159dc40ec3796956f125959b3f3235d 14 | a9acc5a23090c16b328ea3e182dcbcaf3bf2ca73 15 | 90cc4db0b44dddb54f82fe77d7cd833219aadbe3 16 | 9f08b9a0bd8e6dd44ab4e6412f6a44bfcb8e9db8 17 | 8be973e1aa3c02111956b531586aef7644c690c0 18 | -------------------------------------------------------------------------------- /common/README.md: -------------------------------------------------------------------------------- 1 | # Here we keep code reusable 
between multiple scripts. 2 | -------------------------------------------------------------------------------- /common/get_cpu_number.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | ACTUAL_CPU=$(nproc) 3 | if [ "$ACTUAL_CPU" = "0" ] ; then ACTUAL_CPU=1; fi 4 | if [ "X$1" != "X" ] ; then let ACTUAL_CPU=$ACTUAL_CPU$1 ; fi 5 | echo ${ACTUAL_CPU} 6 | -------------------------------------------------------------------------------- /comparisons/makeDiff.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | f1=${1} 3 | f2=${2} 4 | fO=${3} 5 | lMod=${4} 6 | dOpt=${5} 7 | dirPattern=${6} 8 | dirPatternExclude=${7} 9 | echo "Running on f1 ${f1} f2 ${f2} fO ${fO} lMod ${lMod} dOpt ${dOpt} dirPattern ${dirPattern} dirPatternExclude ${dirPatternExclude}" 10 | echo -e "gROOT->SetStyle(\"Plain\");\n 11 | gSystem->Load(\"compareValHists_C.so\");\n 12 | f1=new TFile(\"${f1}\");\n 13 | f2 = new TFile(\"${f2}\");\n 14 | compareAll(f1,f2,${lMod},${dOpt}, \"${dirPattern}\", \"${dirPatternExclude}\");\n 15 | .qqqqqq" | root -l -b 16 | exit_code=$? 17 | [ $exit_code -eq 6 ] && exit_code=0 18 | exit $exit_code 19 | -------------------------------------------------------------------------------- /condor/autoload.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | SSH_JOBS=$(ls -d ${_CONDOR_SCRATCH_DIR}/.condor_ssh_to_job_* 2>/dev/null | wc -l) 3 | if [ ${SSH_JOBS} -gt 0 ] ; then 4 | if $CHECK_RUN ; then 5 | ps -u $(whoami) -o pid,start_time,rss,size,pcpu,cmd --forest 2>&1 >> node-check.status 6 | echo "[$(date)] Stopping node check job" >> node-check.status 7 | touch ${WORKSPACE}/.auto-stop 8 | wait 9 | CHECK_RUN=false 10 | ps -u $(whoami) -o pid,start_time,cmd --forest 2>&1 >> node-check.status 11 | echo "[$(date)] Stopped node check job" >> node-check.status 12 | fi 13 | elif ! 
$CHECK_RUN ; then 14 | CHECK_RUN=true 15 | ps -u $(whoami) -o pid,start_time,rss,size,pcpu,cmd --forest 2>&1 >> node-check.status 16 | rm -f ${WORKSPACE}/.auto-stop 17 | echo "[$(date)] Starting node check job" >> node-check.status 18 | $WORKSPACE/cache/cms-bot/condor/tests/node-check.sh > node-check.log 2>&1 & 19 | fi 20 | -------------------------------------------------------------------------------- /condor/connect.sub: -------------------------------------------------------------------------------- 1 | universe = @REQUEST_UNIVERSE@ 2 | +MaxRuntime = @REQUEST_MAXRUNTIME@ 3 | request_cpus = @REQUEST_CPUS@ 4 | executable = @SCRIPT_NAME@.sh 5 | transfer_input_files = @INPUT_FILES@ 6 | transfer_output_files = log.job 7 | should_transfer_files = yes 8 | when_to_transfer_output = on_exit 9 | output = logs/log.stdout 10 | error = logs/log.stdout 11 | log = logs/log.job 12 | -------------------------------------------------------------------------------- /condor/shutdown.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | JOBID=$(echo $1 | sed 's|\.[0-9]*$||') 3 | FORCE=false 4 | if [ "$2" = "true" ] ; then FORCE=true ; fi 5 | echo "Trying to shutdown the node" 6 | SCHEDD_NAME=$(condor_q ${JOBID}.0 -af:l GlobalJobId -global | grep '^GlobalJobId *=' | sed 's|.*= *||;s|#.*||') 7 | if [ "X${SCHEDD_NAME}" = "X" ] ; then 8 | echo "Job might already be terminated" 9 | exit 0 10 | fi 11 | for schd in ${SCHEDD_NAME} ; do 12 | export _CONDOR_SCHEDD_HOST=${schd} 13 | export _CONDOR_CREDD_HOST=${schd} 14 | condor_q 15 | if [ $(condor_q ${JOBID} | grep "^$(whoami) " | wc -l) -gt 0 ] ; then 16 | if ! 
$FORCE ; then 17 | timeout 300 condor_ssh_to_job ${JOBID} 'touch ./jenkins/.shut-down' || true 18 | sleep 120 19 | fi 20 | condor_rm ${JOBID} || true 21 | fi 22 | mkdir -p $WORKSPACE/../grid-create-node/logs 23 | condor_transfer_data $JOBID || true 24 | cat $WORKSPACE/../grid-create-node/logs/log.* || true 25 | rm -rf $WORKSPACE/../grid-create-node/logs 26 | condor_rm -forcex ${JOBID} || true 27 | done 28 | condor_q 29 | -------------------------------------------------------------------------------- /condor/submit.sub: -------------------------------------------------------------------------------- 1 | universe = @REQUEST_UNIVERSE@ 2 | +MaxRuntime = @REQUEST_MAXRUNTIME@ 3 | request_cpus = @REQUEST_CPUS@ 4 | executable = @SCRIPT_NAME@.sh 5 | transfer_input_files = @X509_PROXY_FILE@ 6 | should_transfer_files = yes 7 | when_to_transfer_output = on_exit 8 | output = @SCRIPT_NAME@.stdout 9 | error = @SCRIPT_NAME@.stdout 10 | log = @SCRIPT_NAME@.log 11 | getenv = True 12 | queue 1 13 | 14 | -------------------------------------------------------------------------------- /condor/tests/node-check.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | ls -drt ${_CONDOR_SCRATCH_DIR}/.condor_ssh_to_job_* 2>/dev/null | head -n -1 | xargs --no-run-if-empty echo rm -rf || true 3 | SCRIPT_DIR=$(dirname $0) 4 | MAX_CPUS=$(grep -i '^ *RequestCpus *=' ${_CONDOR_JOB_AD} | sed 's|.*= *||;s| ||g') 5 | MAX_MEMORY=$(grep -i '^ *RequestMemory *=' ${_CONDOR_JOB_AD} | sed 's|.*= *||;s| ||g') 6 | [ "${MAX_MEMORY}" = "" ] && let MAX_MEMORY=${MAX_CPUS}*2*1000 7 | let MEMORY_PER_CPU="${MAX_MEMORY}/(${MAX_CPUS}*2)" 8 | echo "start" > auto-load 9 | $SCRIPT_DIR/node-check.py ${MAX_CPUS} ${MEMORY_PER_CPU} > out.log 2>&1 & 10 | sleep 1 11 | while [ ! 
-f ${_CONDOR_SCRATCH_DIR}/jenkins/.auto-stop ] ; do sleep 1 ; done 12 | echo "exit" > auto-load 13 | wait 14 | rm -f ${_CONDOR_SCRATCH_DIR}/jenkins/.auto-stop 15 | -------------------------------------------------------------------------------- /crab/CMSSW_8_0_X: -------------------------------------------------------------------------------- 1 | CMSSW_5_3_X -------------------------------------------------------------------------------- /crab/CMSSW_9_4_X: -------------------------------------------------------------------------------- 1 | CMSSW_5_3_X -------------------------------------------------------------------------------- /crab/FrameworkJobReport.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /crab/crab-test/pset.py: -------------------------------------------------------------------------------- 1 | ../fake_pset.py -------------------------------------------------------------------------------- /crab/crab-test/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | scram build enable-multi-targets 3 | cp $(dirname $0)/../FrameworkJobReport.xml $WORKSPACE/ 4 | -------------------------------------------------------------------------------- /crab/multiarch/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | scram build enable-multi-targets 3 | -------------------------------------------------------------------------------- /crab/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | env > run.log 3 | ld.so --help | grep supported | grep x86-64-v 4 | which cmsRun 5 | cmsRun -j FrameworkJobReport.xml PSet.py >>run.log 2>&1 6 | 
-------------------------------------------------------------------------------- /crab/scram-build/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | scram build enable-multi-targets 3 | cp $(dirname $0)/../FrameworkJobReport.xml $WORKSPACE/ 4 | 5 | -------------------------------------------------------------------------------- /crab/script/pset.py: -------------------------------------------------------------------------------- 1 | ../fake_pset.py -------------------------------------------------------------------------------- /crab/script/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | pwd 3 | ls 4 | ld.so --help | grep supported | grep x86-64-v 5 | which cmsRun 6 | cmsRun --help >>run.log 7 | -------------------------------------------------------------------------------- /crab/script/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | cp $(dirname $0)/../FrameworkJobReport.xml $WORKSPACE/ 3 | -------------------------------------------------------------------------------- /crab/short-matrix/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | env > run.log 3 | ld.so --help | grep supported | grep x86-64-v 4 | mkdir matrix 5 | pushd matrix 6 | runTheMatrix.py -i all -s -j 3 -t 4 --ibeos >>../run.log 2>&1 || touch runall-report-step123-.log 7 | for f in $(find . 
-name '*' -type f) ; do 8 | case $f in 9 | *.xml|*.txt|*.log|*.py|*.json|*/cmdLog ) ;; 10 | * ) rm -rf $f ;; 11 | esac 12 | done 13 | popd 14 | mv matrix/runall-report-step123-.log matrix.log 15 | tar -czvf matrix.tar.gz matrix 16 | -------------------------------------------------------------------------------- /crab/short-matrix/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | scram build enable-multi-targets 3 | cp $(dirname $0)/../FrameworkJobReport.xml $WORKSPACE/ 4 | 5 | -------------------------------------------------------------------------------- /crab/short-matrix1/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | env > run.log 3 | ld.so --help | grep supported | grep x86-64-v 4 | mkdir matrix 5 | pushd matrix 6 | runTheMatrix.py --job-reports --command " -n 5 --customise Validation/Performance/TimeMemorySummary.customiseWithTimeMemorySummary " \ 7 | -i all -s -j 1 --ibeos >>../run.log 2>&1 || touch runall-report-step123-.log 8 | for f in $(find . 
-name '*' -type f) ; do 9 | case $f in 10 | *.xml|*.txt|*.log|*.py|*.json|*/cmdLog ) ;; 11 | * ) rm -rf $f ;; 12 | esac 13 | done 14 | popd 15 | tar -czvf matrix.tar.gz matrix >>run.log 2>&1 16 | mv matrix/runall-report-step123-.log matrix.log 17 | grep -E ' Step[0-9]' matrix.log || true 18 | grep ' tests passed' matrix.log || true 19 | -------------------------------------------------------------------------------- /crab/short-matrix1/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | scram build enable-multi-targets 3 | cp $(dirname $0)/../FrameworkJobReport.xml $WORKSPACE/ 4 | 5 | -------------------------------------------------------------------------------- /create-gh-release.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import sys, json 3 | from os.path import expanduser 4 | from _py2with3compatibility import Request, urlopen 5 | from cms_static import GH_CMSSW_ORGANIZATION, GH_CMSSW_REPO 6 | 7 | GH_TOKEN = open(expanduser("~/.github-token")).read().strip() 8 | release_name = sys.argv[1] 9 | branch = sys.argv[2] 10 | print("Creating release:\n %s based on %s" % (release_name, branch)) 11 | 12 | # creating releases will be available in the next version of pyGithub 13 | params = { 14 | "tag_name": release_name, 15 | "target_commitish": branch, 16 | "name": release_name, 17 | "body": "cms-bot is going to build this release", 18 | "draft": False, 19 | "prerelease": False, 20 | } 21 | 22 | request = Request( 23 | "https://api.github.com/repos/" + GH_CMSSW_ORGANIZATION + "/" + GH_CMSSW_REPO + "/releases", 24 | headers={"Authorization": "token " + GH_TOKEN}, 25 | ) 26 | request.get_method = lambda: "POST" 27 | print("--") 28 | try: 29 | print(urlopen(request, json.dumps(params).encode()).read().decode()) 30 | print("OK release", release_name, "created") 31 | except Exception as e: 32 | print("There was an error while creating the 
release:\n", e) 33 | -------------------------------------------------------------------------------- /cvmfs/cms-ci.cern.ch/cvmfsdirtab.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | export CVMFS_DIR="/cvmfs/$(basename $(dirname $0))" 3 | dirtab=$(dirname $0)/cvmfsdirtab.txt 4 | if [ -f "${dirtab}" ] ; then cat "${dirtab}" ; fi 5 | $(dirname $0)/../cvmfsdirtab.sh 'week*/PR_*' 6 | -------------------------------------------------------------------------------- /cvmfs/cms-ci.cern.ch/cvmfsdirtab.txt: -------------------------------------------------------------------------------- 1 | /* 2 | /jenkins-artifacts/ib-baseline-tests/* 3 | /week*/cms-* 4 | /week*/cms-*/*/*/*/CMSSW_* 5 | /week*/cms-*/*/*/*/CMSSW_*/src 6 | -------------------------------------------------------------------------------- /cvmfs/cms-ib-test.cern.ch: -------------------------------------------------------------------------------- 1 | cms-ib.cern.ch -------------------------------------------------------------------------------- /cvmfs/cms-ib.cern.ch/cvmfsdirtab.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | export CVMFS_DIR="/cvmfs/$(basename $(dirname $0))" 3 | dirtab=$(dirname $0)/cvmfsdirtab.txt 4 | if [ -f "${dirtab}" ] ; then cat "${dirtab}" ; fi 5 | $(dirname $0)/../cvmfsdirtab.sh 'nweek-*' 'sw/*/nweek-*' 'tests' 6 | -------------------------------------------------------------------------------- /cvmfs/cms-ib.cern.ch/cvmfsdirtab.txt: -------------------------------------------------------------------------------- 1 | /* 2 | /git/* 3 | -------------------------------------------------------------------------------- /cvmfs/cms.cern.ch/.cvmfsdirtab: -------------------------------------------------------------------------------- 1 | /* 2 | /sl*gcc*/cms/cmssw/* 3 | /sl*gcc*/cms/cmssw-patch/* 4 | /sl*gcc*/external/*/* 5 | /fc*gcc*/cms/cmssw/* 6 | /fc*gcc*/cms/cmssw-patch/* 7 | 
/fc*gcc*/external/*/* 8 | /phys_generator/gridpacks/slc* 9 | /osx*gcc*/cms/cmssw/* 10 | /osx*gcc*/cms/cmssw-patch/* 11 | /osx*gcc*/external/*/* 12 | /crab3/sl*gcc* 13 | /crab3/sl*gcc*/external/gcc/* 14 | /crab3/sl*gcc*/cms/crabclient/* 15 | /phedex/sl*gcc* 16 | /spacemon-client/sl*gcc* 17 | /share/cms/data-*/V* 18 | /external/* 19 | /external/tex 20 | /cc*gcc*/cms/cmssw/* 21 | /cc*gcc*/cms/cmssw-patch/* 22 | /cc*gcc*/external/*/* 23 | /cs*gcc*/cms/cmssw/* 24 | /cs*gcc*/cms/cmssw-patch/* 25 | /cs*gcc*/external/*/* 26 | /alma*gcc*/cms/cmssw/* 27 | /alma*gcc*/cms/cmssw-patch/* 28 | /alma*gcc*/external/*/* 29 | /el*gcc*/cms/cmssw/* 30 | /el*gcc*/cms/cmssw-patch/* 31 | /el*gcc*/external/*/* 32 | /el*gcc*/lcg/root 33 | /el*gcc*/cms/coral 34 | /el*gcc*/cms/*-tool-conf 35 | /offcomp-prod/premixPUlist 36 | -------------------------------------------------------------------------------- /cvmfs_deployment/abort_transaction.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | cd /tmp 3 | lock=~/cron_install_cmssw.lock 4 | rm -rf $lock 5 | 6 | cvmfs_server abort -f ${CVMFS_REPOSITORY} 7 | -------------------------------------------------------------------------------- /cvmfs_deployment/bootstrap_dir_for_arch.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | # bootstrap - check if it exists and install if doesn't 3 | 4 | INSTALL_PATH=$1 5 | SCRAM_ARCH=$2 6 | RPMS_REPO=$3 7 | 8 | # check if RPMS_REPO matches the one in install path 9 | if [ -f ${INSTALL_PATH}/common/cmspkg ] && [ $(grep "repository ${RPMS_REPO} " ${INSTALL_PATH}/common/cmspkg | wc -l) -eq 0 ] ; then 10 | echo "Install path is bootstraped for another RPM REPO, abort" 11 | exit 1 12 | fi 13 | 14 | if [ ! 
-f ${INSTALL_PATH}/${SCRAM_ARCH}/cms/cms-common/1.0/etc/profile.d/init.sh ] ; then 15 | mkdir -p $INSTALL_PATH 16 | rm -f ${INSTALL_PATH}/bootstrap.sh 17 | OPTS="" 18 | if [ "${USE_DEV_CMSPKG}" = "true" ] ; then 19 | OPTS="-dev" 20 | fi 21 | wget --tries=5 --waitretry=60 -O ${INSTALL_PATH}/bootstrap.sh http://cmsrep.cern.ch/cmssw/bootstrap.sh 22 | source $(dirname $0)/../dockerrun.sh 23 | export CMSPKG_OS_COMMAND="" 24 | dockerrun "sh -ex ${INSTALL_PATH}/bootstrap.sh -a ${SCRAM_ARCH} ${OPTS} -repository ${RPMS_REPO} -path ${INSTALL_PATH} setup" 25 | fi 26 | -------------------------------------------------------------------------------- /cvmfs_deployment/git-reference.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | source $(dirname $0)/utils.sh 3 | CVMFS_DIR=${CVMFS_BASEDIR}/git 4 | cvmfs_transaction /git 5 | for rep in ${REPOSITORY} ; do 6 | cd $WORKSPACE 7 | rm -rf checkout 8 | mkdir checkout 9 | cd checkout 10 | GH_REPO=${rep}.git 11 | REPO_NAME=$(basename ${GH_REPO}) 12 | git clone --bare https://github.com/${GH_REPO} ${REPO_NAME} 13 | pushd ${REPO_NAME} 14 | git repack -a -d --window=50 --max-pack-size=64M 15 | popd 16 | mkdir -p $(dirname ${CVMFS_DIR}/${GH_REPO}) 17 | rsync -a --delete ${REPO_NAME}/ ${CVMFS_DIR}/${GH_REPO}/ 18 | done 19 | cvmfs_server publish 20 | -------------------------------------------------------------------------------- /cvmfs_deployment/has_lease.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import json, sys, requests 3 | 4 | gw = sys.argv[1] 5 | path = sys.argv[2].strip("/") 6 | rep = requests.get(gw + "/leases") 7 | data = rep.json()["data"] 8 | ecode = 1 9 | for xentry in data.keys(): 10 | entry = xentry.strip("/") 11 | rest = "" 12 | if entry.startswith(path): 13 | rest = entry[len(path) :] 14 | elif path.startswith(entry): 15 | rest = path[len(entry) :] 16 | else: 17 | continue 18 | print(rest) 19 | 
if rest and rest[0] != "/": 20 | continue 21 | ecode = 0 22 | print("Yes, there is lease for %s" % entry) 23 | print(data[xentry]) 24 | break 25 | sys.exit(ecode) 26 | -------------------------------------------------------------------------------- /cvmfs_deployment/ib-install-siteconf.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | git clone --depth 1 https://github.com/cms-sw/siteconf.git SITECONF 3 | GIT_DIR=./SITECONF/.git git log -n 1 --pretty=format:"%H" > SITECONF/commit.id 4 | hostname > SITECONF//stratum0 5 | rm -rf ./SITECONF/.git 6 | source $(dirname $0)/utils.sh 7 | cvmfs_transaction SITECONF 8 | rsync -av --delete SITECONF/ ${CVMFS_BASEDIR}/SITECONF/ 9 | time cvmfs_server publish 10 | -------------------------------------------------------------------------------- /cvmfs_deployment/install-qemu.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | QEMU_RELS="https://github.com/multiarch/qemu-user-static/releases/download/" 3 | VER="$1" 4 | SET_LATEST=true 5 | [ "${2}" != "true" ] && SET_LATEST=false 6 | 7 | if [ "$VER" = "" ] ; then 8 | echo "ERROR: Missing qemu version." 9 | exit 1 10 | fi 11 | 12 | source $(dirname $0)/utils.sh 13 | INST_DIR="${CVMFS_BASEDIR}/proot" 14 | 15 | if [ ! 
-d ${INST_DIR}/${VER} ] ; then 16 | mkdir -p $VER 17 | for arch in ppc64le aarch64 ; do 18 | wget -O $VER/qemu-${arch} "${QEMU_RELS}/${VER}/qemu-${arch}-static" 19 | chmod +x $VER/qemu-${arch} 20 | done 21 | fi 22 | cvmfs_transaction /proot 23 | if [ -d $VER ] ; then 24 | mkdir -p $INST_DIR 25 | mv $VER $INST_DIR/$VER 26 | fi 27 | if $SET_LATEST ; then 28 | ln -sf $VER ${INST_DIR}/latest 29 | fi 30 | cvmfs_server publish 31 | -------------------------------------------------------------------------------- /cvmfs_deployment/publish_transaction.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | cd /tmp 3 | lock=~/cron_install_cmssw.lock 4 | ERR=0 5 | cvmfs_server publish ${CVMFS_REPOSITORY} || ERR=1 6 | if [ "$ERR" = "1" ] ; then cvmfs_server abort -f ${CVMFS_REPOSITORY} || ERR=1 ; fi 7 | rm -f $lock 8 | exit $ERR 9 | -------------------------------------------------------------------------------- /cvmfs_deployment/reseed_arch.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | # bootstrap - reseed an existing architecture 3 | 4 | INSTALL_PATH=$1 5 | SCRAM_ARCH=$2 6 | RPMS_REPO=$3 7 | 8 | # check if RPMS_REPO matches the one in install path 9 | if [ -f ${INSTALL_PATH}/common/cmspkg ] && [ $(grep "repository ${RPMS_REPO} " ${INSTALL_PATH}/common/cmspkg | wc -l) -eq 0 ] ; then 10 | echo "Install path is bootstraped for another RPM REPO, abort" 11 | exit 1 12 | fi 13 | 14 | if [ -f ${INSTALL_PATH}/${SCRAM_ARCH}/cms/cms-common/1.0/etc/profile.d/init.sh ] ; then 15 | rm -f ${INSTALL_PATH}/bootstrap.sh 16 | OPTS="" 17 | if [ "${USE_DEV_CMSPKG}" = "true" ] ; then 18 | OPTS="-dev" 19 | fi 20 | wget --tries=5 --waitretry=60 -O ${INSTALL_PATH}/bootstrap.sh http://cmsrep.cern.ch/cmssw/bootstrap.sh 21 | source $(dirname $0)/../dockerrun.sh 22 | export CMSPKG_OS_COMMAND="" 23 | dockerrun "sh -ex ${INSTALL_PATH}/bootstrap.sh -a ${SCRAM_ARCH} ${OPTS} -repository 
${RPMS_REPO} -path ${INSTALL_PATH} reseed" 24 | dockerrun "sh -ex ${INSTALL_PATH}/common/cmspkg -a ${SCRAM_ARCH} env -- rpm -q --provides system-base-import" 25 | fi 26 | -------------------------------------------------------------------------------- /cvmfs_deployment/root-WebGui-fix.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | dir=$1 3 | [ "$dir" != "" ] || dir="/cvmfs/cms.cern.ch" 4 | for r in $(find $dir -mindepth 4 -maxdepth 4 -path '*/lcg/root/6.*') ; do 5 | rver=$(basename $r | cut -d. -f2) 6 | [ $rver -ge 26 ] || continue 7 | f="${r}/etc/system.rootrc" 8 | if [ -e $r/etc/system.rootrc ] ; then 9 | [ $(grep '^ *WebGui.HttpLoopback: *no' $f | wc -l) -gt 0 ] || continue 10 | echo "Processing $f" 11 | if [ -e ${f}.original ] ; then 12 | cp ${f}.original $f 13 | else 14 | cp $f ${f}.original 15 | fi 16 | sed -i -e 's|WebGui.HttpLoopback: *no|WebGui.HttpLoopback: yes|' $f || true 17 | sed -i -e 's|ROOT::Experimental::RWebBrowserImp|TRootBrowser|;s|ROOT::RWebBrowserImp|TRootBrowser|' $f 18 | fi 19 | done 20 | -------------------------------------------------------------------------------- /cvmfs_deployment/start_transaction.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | cd /tmp 3 | cvmfs_repo=${CVMFS_REPOSITORY} 4 | if [ "$LOCK_CVMFS" != "false" ] ; then 5 | lock=~/cron_install_cmssw.lock 6 | CPID="" 7 | while [ "$CPID" != "JENKINS:$1" ] ; do 8 | while [ -f $lock ] ; do 9 | if [ $(cat $lock | tail -1 | grep '^JENKINS:' | wc -l) -gt 0 ] ; then 10 | rm -f $lock 11 | else 12 | echo Waiting for lock ... 
13 | sleep 30 14 | fi 15 | done 16 | echo "JENKINS:$1" > $lock 17 | sleep 1 18 | CPID=$(cat $lock | tail -1) 19 | done 20 | fi 21 | 22 | cvmfs_server transaction ${CVMFS_REPOSITORY} || ((cvmfs_server abort -f ${CVMFS_REPOSITORY} || rm -fR /var/spool/cvmfs/${cvmfs_repo}/is_publishing.lock) && cvmfs_server transaction ${CVMFS_REPOSITORY}) 23 | -------------------------------------------------------------------------------- /cvmfs_deployment/trasnaction.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | source $(dirname $0)/utils.sh 3 | cvmfs_transaction $1 4 | -------------------------------------------------------------------------------- /cvmfs_deployment/utils.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | export CVMFS_DEPLOYMENT_DIR=$(realpath $(dirname ${BASH_ARGV[0]})) 3 | export CVMFS_BASEDIR=/cvmfs/${CVMFS_REPOSITORY} 4 | export USE_CVMFS_GW=false 5 | export CVMFS_GATEWAY_API="" 6 | if grep '^CVMFS_UPSTREAM_STORAGE=gw' /etc/cvmfs/repositories.d/${CVMFS_REPOSITORY}/server.conf 2>/dev/null ; then 7 | export CVMFS_GATEWAY_API=$(grep '^CVMFS_UPSTREAM_STORAGE=gw' /etc/cvmfs/repositories.d/${CVMFS_REPOSITORY}/server.conf | sed 's|.*,||') 8 | export USE_CVMFS_GW=true 9 | fi 10 | 11 | function cvmfs_transaction() 12 | { 13 | if ${USE_CVMFS_GW} ; then 14 | local lease_path=${CVMFS_REPOSITORY}/$(echo $1 | sed -e 's|^//*||;s|//*$||') 15 | while true ; do 16 | cvmfs_server abort -f ${CVMFS_REPOSITORY} || true 17 | ls -l /var/spool/${CVMFS_BASEDIR}/ 18 | rm -f /var/spool/${CVMFS_BASEDIR}/is_publishing.lock 19 | rm -f /var/spool/${CVMFS_BASEDIR}/session_token 20 | rm -f /var/spool/${CVMFS_BASEDIR}/in_transaction.lock 21 | if ! 
${CVMFS_DEPLOYMENT_DIR}/has_lease.py ${CVMFS_GATEWAY_API} ${lease_path} ; then 22 | if cvmfs_server transaction ${lease_path} ; then break ; fi 23 | fi 24 | sleep 10 25 | done 26 | else 27 | cvmfs_server transaction ${CVMFS_REPOSITORY} || ((cvmfs_server abort -f ${CVMFS_REPOSITORY} || rm -fR /var/spool/${CVMFS_BASEDIR}/is_publishing.lock) && cvmfs_server transaction ${CVMFS_REPOSITORY}) 28 | fi 29 | } 30 | -------------------------------------------------------------------------------- /cvmfs_deployment/write_bookeeping_record.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | 3 | PACKAGE_NAME=$1 4 | ARCH=$2 5 | INSTALL_TYPE=$3 #can be package or data 6 | 7 | tstamp=$(echo $(date +%s --utc) $(date --utc)) 8 | 9 | if [[ $INSTALL_TYPE == "package" ]] ; then 10 | pkg=$(echo $PACKAGE_NAME | cut -d+ -f2) 11 | version=$(echo $PACKAGE_NAME | cut -d+ -f3) 12 | if [[ $pkg = "python" ]] ; then 13 | echo COMP+python+$version $ARCH $tstamp 14 | else 15 | echo $pkg $version $ARCH $tstamp 16 | fi 17 | fi 18 | -------------------------------------------------------------------------------- /das-utils/copy-ib-lfn-to-eos.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | export EOS_MGM_URL="root://eoscms.cern.ch" 3 | eos_cmd="/usr/bin/eos" 4 | eos_base="/store/user/cmsbuild" 5 | xrd_eos_base="root://eoscms.cern.ch//eos/cms" 6 | lfn=$1 7 | redirector=$2 8 | force=$3 9 | 10 | if [ "$redirector" = "" ] ; then redirector="root://cms-xrd-global.cern.ch"; fi 11 | eos_file="${eos_base}${lfn}" 12 | if [ "$force" != "true" ] ; then 13 | if ${eos_cmd} stat -f ${eos_file} >/dev/null 2>&1 ; then 14 | echo "Already exists: ${lfn}" 15 | exit 0 16 | fi 17 | fi 18 | eos_dir=$(dirname ${eos_base}/${lfn}) 19 | ${eos_cmd} mkdir -p ${eos_dir} 20 | ERR=0 21 | for rd in ${redirector} $(echo ${redirector} root://cms-xrd-global.cern.ch root://cmsxrootd.fnal.gov root://eoscms.cern.ch 
root://xrootd-cms.infn.it | tr ' ' '\n' | sort | uniq | grep 'root:' | grep -v "^${redirector}") ; do 22 | ERR=0 23 | xrdcp --force --posc -v ${rd}/${lfn} "${xrd_eos_base}/${eos_file}?eos.atomic=1" || ERR=1 24 | if [ $ERR -eq 0 ] ; then break ; fi 25 | done 26 | if [ $ERR -gt 0 ] ; then exit $ERR ; fi 27 | ${eos_cmd} stat -f ${eos_file} 28 | echo ALL_OK 29 | -------------------------------------------------------------------------------- /das-utils/dasgoclient: -------------------------------------------------------------------------------- 1 | das_client -------------------------------------------------------------------------------- /das-utils/order-das-files.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | from sys import stdin, exit 4 | 5 | from os.path import dirname, abspath 6 | 7 | sys.path.append(dirname(dirname(abspath(__file__)))) # in order to import cms-bot level modules 8 | from _py2with3compatibility import run_cmd 9 | 10 | all_dasfiles = [] 11 | new_order = [] 12 | for line in stdin: 13 | line = line.strip("\n") 14 | if line.startswith("/store/"): 15 | all_dasfiles.append(line) 16 | else: 17 | new_order.append(line) 18 | 19 | if not all_dasfiles: 20 | print("\n".join(new_order)) 21 | exit(0) 22 | 23 | eos_cmd = "EOS_MGM_URL=root://eoscms.cern.ch /usr/bin/eos" 24 | EOS_BASE = "/eos/cms/store/user/cmsbuild/store" 25 | eos_base_len = len(EOS_BASE) 26 | err, eos_files = run_cmd("%s find -f %s | sort" % (eos_cmd, EOS_BASE)) 27 | if err: 28 | print("\n".join(new_order)) 29 | exit(0) 30 | 31 | new_order = [] 32 | for eos_file in eos_files.split("\n"): 33 | eos_file = "/store" + eos_file[eos_base_len:] 34 | if eos_file in all_dasfiles: 35 | new_order.append(eos_file) 36 | for das_file in all_dasfiles: 37 | if not das_file in new_order: 38 | new_order.append(das_file) 39 | 40 | print("\n".join(new_order)) 41 | 
-------------------------------------------------------------------------------- /es/README.md: -------------------------------------------------------------------------------- 1 | # Scripts for Elasticsearch. 2 | -------------------------------------------------------------------------------- /es/es_close_index.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import sys 3 | from os.path import dirname, abspath 4 | 5 | cmsbot_dir = None 6 | if __file__: 7 | cmsbot_dir = dirname(dirname(abspath(__file__))) 8 | else: 9 | cmsbot_dir = dirname(dirname(abspath(sys.argv[0]))) 10 | sys.path.insert(0, cmsbot_dir) 11 | 12 | from es_utils import close_index 13 | 14 | for i in sys.argv[1:]: 15 | close_index(i) 16 | -------------------------------------------------------------------------------- /es/es_delete_indexes.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from os.path import dirname, abspath 3 | import sys 4 | 5 | cmsbot_dir = None 6 | if __file__: 7 | cmsbot_dir = dirname(dirname(abspath(__file__))) 8 | else: 9 | cmsbot_dir = dirname(dirname(abspath(sys.argv[0]))) 10 | sys.path.insert(0, cmsbot_dir) 11 | 12 | from es_utils import delete_index, find_indexes 13 | 14 | for i in sys.argv[1:]: 15 | idxs = find_indexes(i) 16 | if not "close" in idxs: 17 | continue 18 | for ix in sorted(idxs["close"]): 19 | print("Deleting ", ix) 20 | delete_index(ix) 21 | -------------------------------------------------------------------------------- /es/es_get_templates.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from os.path import dirname, abspath, join 3 | import sys 4 | 5 | cmsbot_dir = None 6 | if __file__: 7 | cmsbot_dir = dirname(dirname(abspath(__file__))) 8 | else: 9 | cmsbot_dir = dirname(dirname(abspath(sys.argv[0]))) 10 | sys.path.insert(0, cmsbot_dir) 11 | from 
_py2with3compatibility import run_cmd 12 | 13 | import json 14 | from es_utils import get_template 15 | 16 | tmpl = json.loads(get_template()) 17 | if "proxy-error" in tmpl: 18 | print("Error: ", tmpl["proxy-error"]) 19 | sys.exit(1) 20 | 21 | tmpl_dir = "%s/es/templates" % cmsbot_dir 22 | run_cmd("mkdir -p %s" % tmpl_dir) 23 | for t in tmpl: 24 | if not t.startswith("cmssdt-"): 25 | continue 26 | tfile = join(tmpl_dir, t + ".json") 27 | print("Saving: ", tfile) 28 | ref = open(tfile, "w") 29 | if ref: 30 | json.dump(tmpl[t], ref, indent=2, sort_keys=True, separators=(",", ": ")) 31 | ref.close() 32 | -------------------------------------------------------------------------------- /es/es_git_repo_size.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import subprocess 3 | import sys, json 4 | from time import time 5 | 6 | from os.path import dirname, abspath 7 | 8 | if __file__: 9 | cmsbot_dir = dirname(dirname(abspath(__file__))) 10 | else: 11 | cmsbot_dir = dirname(dirname(abspath(sys.argv[0]))) 12 | sys.path.insert(0, cmsbot_dir) 13 | from es_utils import send_payload 14 | 15 | repo = sys.argv[1] 16 | e, o = subprocess.getstatusoutput("git clone --bare https://github.com/%s.git repo" % repo) 17 | if e: 18 | print(o) 19 | sys.exit(1) 20 | 21 | e, size = subprocess.getstatusoutput( 22 | "du -k -s -c repo/objects/pack/ | grep total | awk '{print $1}'" 23 | ) 24 | if e: 25 | print(size) 26 | sys.exit(1) 27 | 28 | e, o = subprocess.getstatusoutput("ls -d repo/objects/pack/pack-*.pack") 29 | if e: 30 | print(o) 31 | sys.exit(1) 32 | 33 | rid = o.split("/")[-1][5:-5] 34 | 35 | payload = {} 36 | payload["repository"] = repo 37 | payload["size"] = int(size) 38 | payload["@timestamp"] = int(time() * 1000) 39 | index = "git-repository-size" 40 | document = "stats" 41 | send_payload(index, document, rid, json.dumps(payload)) 42 | -------------------------------------------------------------------------------- 
/es/es_open_index.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import sys 3 | from os.path import dirname, abspath 4 | 5 | cmsbot_dir = None 6 | if __file__: 7 | cmsbot_dir = dirname(dirname(abspath(__file__))) 8 | else: 9 | cmsbot_dir = dirname(dirname(abspath(sys.argv[0]))) 10 | sys.path.insert(0, cmsbot_dir) 11 | 12 | from es_utils import open_index 13 | 14 | for i in sys.argv[1:]: 15 | open_index(i) 16 | -------------------------------------------------------------------------------- /es/es_open_indexes.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import sys 3 | from os.path import dirname, abspath 4 | 5 | cmsbot_dir = None 6 | if __file__: 7 | cmsbot_dir = dirname(dirname(abspath(__file__))) 8 | else: 9 | cmsbot_dir = dirname(dirname(abspath(sys.argv[0]))) 10 | sys.path.insert(0, cmsbot_dir) 11 | 12 | from es_utils import get_indexes, open_index, find_indexes 13 | from cmsutils import epoch2week 14 | from time import time 15 | 16 | cur_week = int(epoch2week(time(), 1)) 17 | 18 | for i in sys.argv[1:]: 19 | idxs = find_indexes(i) 20 | if not "close" in idxs: 21 | continue 22 | for ix in sorted(idxs["close"]): 23 | print("Opening ", ix) 24 | open_index(ix) 25 | print(get_indexes(ix)) 26 | -------------------------------------------------------------------------------- /es/es_send_templates.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import json 3 | import sys 4 | from os.path import dirname, abspath, join, exists 5 | 6 | cmsbot_dir = None 7 | if __file__: 8 | cmsbot_dir = dirname(dirname(abspath(__file__))) 9 | else: 10 | cmsbot_dir = dirname(dirname(abspath(sys.argv[0]))) 11 | sys.path.insert(0, cmsbot_dir) 12 | 13 | from es_utils import send_template 14 | 15 | for tmpl in sys.argv[1:]: 16 | tmplfile = join(cmsbot_dir, "es", "templates", tmpl + 
".json") 17 | if not exists(tmplfile): 18 | print("ERROR: No such file: ", tmplfile) 19 | sys.exit(1) 20 | payload = json.load(open(tmplfile)) 21 | if not send_template(tmpl, payload=json.dumps(payload)): 22 | sys.exit(1) 23 | -------------------------------------------------------------------------------- /es/es_show_indexes.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import sys 3 | from os.path import dirname, abspath 4 | 5 | cmsbot_dir = None 6 | if __file__: 7 | cmsbot_dir = dirname(dirname(abspath(__file__))) 8 | else: 9 | cmsbot_dir = dirname(dirname(abspath(sys.argv[0]))) 10 | sys.path.insert(0, cmsbot_dir) 11 | 12 | from es_utils import get_indexes, find_indexes 13 | from cmsutils import epoch2week 14 | from time import time 15 | 16 | cur_week = int(epoch2week(time(), 1)) 17 | print(sys.argv) 18 | for i in sys.argv[1:]: 19 | idxs = find_indexes(i) 20 | for k in idxs: 21 | for ix in sorted(idxs[k]): 22 | print(get_indexes(ix)) 23 | -------------------------------------------------------------------------------- /es/templates/cmssdt-iwyu-logs.json: -------------------------------------------------------------------------------- 1 | { 2 | "aliases": {}, 3 | "mappings": { 4 | "cmssdt-iwyu-stats": { 5 | "properties": { 6 | "@timestamp": { 7 | "type": "date" 8 | }, 9 | "architecture": { 10 | "index": "not_analyzed", 11 | "type": "string" 12 | }, 13 | "excludes": { 14 | "type": "long" 15 | }, 16 | "files": { 17 | "type": "long" 18 | }, 19 | "includes": { 20 | "type": "long" 21 | }, 22 | "package": { 23 | "index": "not_analyzed", 24 | "type": "string" 25 | }, 26 | "pkg_url": { 27 | "index": "not_analyzed", 28 | "type": "string" 29 | }, 30 | "release-tag": { 31 | "index": "not_analyzed", 32 | "type": "string" 33 | } 34 | } 35 | } 36 | }, 37 | "order": 0, 38 | "settings": { 39 | "index": { 40 | "number_of_shards": "2" 41 | } 42 | }, 43 | "template": "cmssdt-iwyu" 44 | } 
-------------------------------------------------------------------------------- /es/templates/cmssdt-jenkins-builds.json: -------------------------------------------------------------------------------- 1 | { 2 | "aliases": {}, 3 | "mappings": { 4 | "cmssdt-jenkins-builds-data": { 5 | "properties": { 6 | "@timestamp": { 7 | "type": "date" 8 | }, 9 | "build_duration": { 10 | "type": "long" 11 | }, 12 | "build_number": { 13 | "type": "long" 14 | }, 15 | "build_result": { 16 | "index": "not_analyzed", 17 | "type": "string" 18 | }, 19 | "job_name": { 20 | "index": "not_analyzed", 21 | "type": "string" 22 | }, 23 | "job_status": { 24 | "index": "not_analyzed", 25 | "type": "string" 26 | }, 27 | "slave_node": { 28 | "index": "not_analyzed", 29 | "type": "string" 30 | }, 31 | "url": { 32 | "index": "not_analyzed", 33 | "type": "string" 34 | } 35 | } 36 | } 37 | }, 38 | "order": 0, 39 | "settings": { 40 | "index": { 41 | "number_of_shards": "1" 42 | } 43 | }, 44 | "template": "cmssdt-jenkins" 45 | } -------------------------------------------------------------------------------- /es/templates/cmssdt-timestamp.json: -------------------------------------------------------------------------------- 1 | { 2 | "aliases": {}, 3 | "mappings": { 4 | "cmssdt-timestamp": { 5 | "properties": { 6 | "@timestamp": { 7 | "type": "date" 8 | } 9 | } 10 | } 11 | }, 12 | "order": 0, 13 | "settings": { 14 | "index": { 15 | "number_of_shards": "1" 16 | } 17 | }, 18 | "template": "cmssdt-*" 19 | } -------------------------------------------------------------------------------- /es/templates/cmssdt-unittests.json: -------------------------------------------------------------------------------- 1 | { 2 | "aliases": {}, 3 | "mappings": { 4 | "cmssdt-unittests": { 5 | "properties": { 6 | "@timestamp": { 7 | "format": "yyyy-MM-dd HH:mm:ss", 8 | "type": "date" 9 | }, 10 | "architecture": { 11 | "index": "not_analyzed", 12 | "type": "string" 13 | }, 14 | "name": { 15 | "index": "not_analyzed", 16 | 
"type": "string" 17 | }, 18 | "package": { 19 | "index": "not_analyzed", 20 | "type": "string" 21 | }, 22 | "release": { 23 | "index": "not_analyzed", 24 | "type": "string" 25 | }, 26 | "status": { 27 | "type": "long" 28 | }, 29 | "url": { 30 | "index": "not_analyzed", 31 | "type": "string" 32 | } 33 | } 34 | } 35 | }, 36 | "order": 0, 37 | "settings": { 38 | "index": { 39 | "number_of_shards": "1" 40 | } 41 | }, 42 | "template": "cmssdt-ibs" 43 | } -------------------------------------------------------------------------------- /es_externals_stats.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from sys import argv 3 | from es_utils import es_send_external_stats 4 | 5 | if __name__ == "__main__": 6 | stats_json_f = argv[1] 7 | opts_json_f = argv[2] 8 | es_send_external_stats(stats_json_f, opts_json_f, 1) 9 | -------------------------------------------------------------------------------- /es_iwyu_logs.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python3 2 | import sys, json, os 3 | from es_utils import send_payload 4 | 5 | timestp = os.path.getmtime(sys.argv[1]) 6 | items = sys.argv[1].split("/")[:-1] 7 | arch = items[-1] 8 | rel = items[-2] 9 | try: 10 | data = json.loads(open(sys.argv[1]).read().strip()) 11 | except: 12 | print("json file not found/processed") 13 | exit(1) 14 | payload = {} 15 | payload["architecture"] = arch 16 | payload["release"] = rel 17 | payload["@timestamp"] = int(timestp * 1000) 18 | index = "iwyu" 19 | document = "iwyu-stats" 20 | id = False 21 | for item in data: 22 | payload["package"] = item 23 | files, includes, excludes = data[item] 24 | payload["files"] = files 25 | payload["includes"] = includes 26 | payload["excludes"] = excludes 27 | payload["url"] = ( 28 | "https://cmssdt.cern.ch/SDT/cgi-bin/buildlogs/iwyu/" 29 | + arch 30 | + "/" 31 | + rel 32 | + "/" 33 | + item 34 | + "/index.html" 35 | ) 36 | 
send_payload(index, document, id, json.dumps(payload)) 37 | -------------------------------------------------------------------------------- /fix-igprof-sql.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import print_function 3 | import sys 4 | import re 5 | 6 | unknown = 0 7 | 8 | 9 | def fix_file(line): 10 | global unknown 11 | m = re.match( 12 | '^(\\s*INSERT\\s+INTO\\s+files\\s+VALUES\\s+\\((\\d+),\\s*["])([^"]*)(["].*$)', line 13 | ) 14 | if m: 15 | xf = m.group(3) 16 | if xf: 17 | if xf[0] != "/": 18 | xf = "unknown-" + m.group(2) 19 | else: 20 | unknown += 1 21 | xf = "unknownfile-" + str(unknown) 22 | line = m.group(1) + xf + m.group(4) 23 | return line 24 | 25 | 26 | xline = "" 27 | for line in open(sys.argv[1]).readlines(): 28 | line = line.strip("\n") 29 | if xline: 30 | xline = xline + line 31 | if line.endswith(");"): 32 | line = fix_file(xline) 33 | xline = "" 34 | else: 35 | continue 36 | elif line.startswith("INSERT INTO files"): 37 | if not line.endswith(");"): 38 | xline = line 39 | continue 40 | else: 41 | line = fix_file(line) 42 | print(line) 43 | -------------------------------------------------------------------------------- /get-pr-changed-files: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | curl -s -L https://patch-diff.githubusercontent.com/raw/cms-sw/$1/pull/$2.patch | grep '^diff --git ' | sed 's|.* a/||;s| *b/.*||' | sort | uniq 3 | -------------------------------------------------------------------------------- /get-production-arch: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | CMS_BOT_DIR=$(dirname $0) 3 | CMSSW_X_Y_Z=$1 4 | RELEASE_SERIES=$2 5 | 6 | FULL_RELEASE=`echo $CMSSW_X_Y_Z | grep '_[a-zA-Z]*patch[1-9][0-9]*.*' | sed 's|_[a-zA-Z]*patch.*||'` 7 | if [ "X$FULL_RELEASE" != "X" ] ; then 8 | SCRAM_ARCH=`grep "label=$FULL_RELEASE;" 
${CMS_BOT_DIR}/releases.map | grep 'prodarch=1;' | sed 's|.*architecture=||;s|;.*||'` 9 | fi 10 | if [ "X$FULL_RELEASE" = "X" -o "X$SCRAM_ARCH" = "X" ] ; then 11 | if [ "X$RELEASE_SERIES" = "X" ] ; then RELEASE_SERIES=$(echo $CMSSW_X_Y_Z | sed 's/^\(CMSSW_[0-9][0-9]*_[0-9][0-9]*_\).*/\1X/') ; fi 12 | eval `cat ${CMS_BOT_DIR}/config.map | grep "RELEASE_QUEUE=$RELEASE_SERIES" | grep 'PROD_ARCH=1'` 13 | fi 14 | echo $SCRAM_ARCH 15 | 16 | -------------------------------------------------------------------------------- /get-relval-failures.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import argparse 3 | from es_utils import get_payload_wscroll 4 | 5 | parser = argparse.ArgumentParser() 6 | parser.add_argument("release", type=str, help="CMSSW Release") 7 | parser.add_argument("arch", type=str, help="Architecture ") 8 | args = parser.parse_args() 9 | 10 | print("Searching relval failures for %s/%s" % (args.release, args.arch)) 11 | 12 | query_relval_failures = """{ 13 | "query": {"bool": {"must": {"query_string": {"query": "release:%s AND architecture:%s AND NOT exitcode:0", "default_operator": "AND"}}}}, 14 | "from": 0, 15 | "size": 10000 16 | }""" % ( 17 | args.release, 18 | args.arch, 19 | ) 20 | 21 | content_hash = get_payload_wscroll("cmssdt-ib-matrix-*", query_relval_failures) 22 | 23 | if content_hash: 24 | if (not "hits" in content_hash) or (not "hits" in content_hash["hits"]): 25 | print("ERROR: ", content_hash) 26 | sys.exit(1) 27 | 28 | for hit in content_hash["hits"]["hits"]: 29 | relval = hit["_source"]["workflow"] 30 | step = hit["_source"]["step"] 31 | exitcode = hit["_source"]["exitcode"] 32 | print(f"WF:{relval}:{step}:{exitcode}") 33 | -------------------------------------------------------------------------------- /gpu_flavors.txt: -------------------------------------------------------------------------------- 1 | cuda 2 | rocm 
-------------------------------------------------------------------------------- /groups.yaml: -------------------------------------------------------------------------------- 1 | tracking-pog: 2 | - VinInn 3 | - mtosi 4 | - mmusich 5 | - VourMa 6 | btv-pog: 7 | - andrzejnovak 8 | - AlexDeMoor 9 | - Ming-Yan 10 | - Senphy 11 | - castaned 12 | ecal-offline: 13 | - wang0jin 14 | - thomreis 15 | - ReyerBand 16 | pixel-offline: 17 | - dkotlins 18 | - ferencek 19 | - tvami 20 | - mroguljic 21 | - tsusa 22 | trk-dqm: 23 | - arossi83 24 | - sroychow 25 | - richa2710 26 | tau-pog: 27 | - mbluj 28 | - azotz 29 | muon-reco: 30 | - CeliaFernandez 31 | - andrea21z 32 | -------------------------------------------------------------------------------- /ib-profiling-data.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | python3 -m venv venv 3 | source venv/bin/activate 4 | if [ $(python3 -c "import sys;print(sys.version_info[1])") -gt 6 ] ; then 5 | pip install CMSMonitoring==0.3.3 6 | else 7 | pip install --user CMSMonitoring==0.3.3 8 | fi 9 | python3 $(dirname $0)/ib-profiling-data.py 10 | -------------------------------------------------------------------------------- /ib-weeks: -------------------------------------------------------------------------------- 1 | nweek-02830 2 | nweek-02831 3 | nweek-02832 4 | nweek-02833 5 | nweek-02834 6 | nweek-02835 7 | nweek-02836 8 | nweek-02837 9 | nweek-02838 10 | nweek-02839 11 | nweek-02840 12 | nweek-02841 13 | nweek-02842 14 | nweek-02843 15 | nweek-02844 16 | nweek-02845 17 | nweek-02846 18 | nweek-02847 19 | nweek-02848 20 | nweek-02849 21 | nweek-02850 22 | nweek-02851 23 | nweek-02852 24 | nweek-02853 25 | nweek-02854 26 | nweek-02855 27 | nweek-02856 28 | nweek-02857 29 | nweek-02858 30 | nweek-02859 31 | nweek-02860 32 | nweek-02861 33 | nweek-02862 34 | nweek-02863 35 | nweek-02864 36 | nweek-02865 37 | nweek-02866 38 | nweek-02867 39 | nweek-02868 40 | nweek-02869 41 
| nweek-02870 42 | nweek-02871 43 | nweek-02872 44 | nweek-02873 45 | nweek-02874 46 | nweek-02875 47 | nweek-02876 48 | nweek-02877 49 | nweek-02878 50 | nweek-02879 51 | nweek-02880 52 | nweek-02881 53 | nweek-02882 54 | nweek-02883 55 | nweek-02884 56 | nweek-02885 57 | nweek-02886 58 | nweek-02887 59 | nweek-02888 60 | nweek-02889 61 | nweek-02890 62 | nweek-02891 63 | nweek-02892 64 | nweek-02893 65 | -------------------------------------------------------------------------------- /ib2buildlog.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import sys, time 3 | 4 | ib = sys.argv[1] 5 | ib_date = ib.split("_")[-1] 6 | week_day = time.strftime("%a", time.strptime(ib_date, "%Y-%m-%d-%H%M")).lower() 7 | day_hour = time.strftime("%H", time.strptime(ib_date, "%Y-%m-%d-%H%M")) 8 | ib_queue = ".".join(ib.split("_X_")[0].split("_")[1:]) 9 | print("%s/%s-%s-%s/%s" % (week_day, ib_queue, week_day, day_hour, ib)) 10 | -------------------------------------------------------------------------------- /ignore-releases-for-tests: -------------------------------------------------------------------------------- 1 | CMSSW_13_1_X_2023-04-03-0500 2 | -------------------------------------------------------------------------------- /ignore-webhooks: -------------------------------------------------------------------------------- 1 | cms-sw/cmssdt-ib 2 | cms-sw/logreader 3 | -------------------------------------------------------------------------------- /import-stitched.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | 3 | CMSSW_TAG=$1 4 | GIT_RELEASE=$2 5 | DRY_RUN=$3 6 | if [ "X$CMSSW_TAG" = "X" ] ; then 7 | echo "Usage: 0 CMSSW_version" 8 | exit 1 9 | fi 10 | if [ "X$GIT_RELEASE" != "X" ] ; then 11 | scram p $GIT_RELEASE 12 | pushd $GIT_RELEASE 13 | eval `scram runtime -sh` 14 | popd 15 | fi 16 | 17 | STITCHED_TAG=$(echo $CMSSW_TAG | sed 
's|CMSSW_|STITCHED_|') 18 | 19 | git clone git@github.com:cms-sw/Stitched.git 20 | pushd Stitched 21 | HAS_TAG=$(git tag | grep "^$STITCHED_TAG"'$' | wc -l) 22 | if [ $HAS_TAG -gt 0 ] ; then 23 | echo "CMSSW tag $CMSSW_TAG is already ported to stitched repo as $STITCHED_TAG" 24 | exit 0 25 | fi 26 | git checkout --orphan cmssw-tag 27 | git rm -rf . 28 | popd 29 | 30 | wget -O ${CMSSW_TAG}.tar.gz https://github.com/cms-sw/cmssw/archive/${CMSSW_TAG}.tar.gz 31 | tar -xzf ${CMSSW_TAG}.tar.gz 32 | rm -f ${CMSSW_TAG}.tar.gz 33 | wget -O packages.txt https://raw.githubusercontent.com/cms-sw/Stitched/master/packages.txt 34 | 35 | for pkg in $(cat packages.txt) ; do 36 | mkdir -p Stitched/$(dirname $pkg) 37 | mv cmssw-${CMSSW_TAG}/$pkg Stitched/$pkg 38 | done 39 | rm -rf cmssw-${CMSSW_TAG} ${GIT_RELEASE} 40 | 41 | pushd Stitched 42 | git add . 43 | git commit -a -m "Imported new tag $STITCHED_TAG" 44 | git tag $STITCHED_TAG 45 | if [ "X$DRY_RUN" = "X" ] ; then 46 | git push origin $STITCHED_TAG 47 | fi 48 | popd 49 | -------------------------------------------------------------------------------- /init.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -ex 2 | 3 | function Jenkins_GetCPU () 4 | { 5 | ACTUAL_CPU=$(nproc) 6 | if [ "X$1" != "X" ] ; then 7 | let ACTUAL_CPU=$ACTUAL_CPU*$1 || true 8 | fi 9 | echo $ACTUAL_CPU 10 | } 11 | 12 | -------------------------------------------------------------------------------- /jenkins-jobs/cmd-with-retry.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | cmd=$1 ; shift 3 | if [ -f retry.txt ] ; then 4 | rm -f retry.txt 5 | echo "Previously done repos" 6 | ls *.done | wc -l 7 | else 8 | rm -f *.done 9 | fi 10 | if ! 
$cmd ${1+"$@"} > run.log 2>&1 ; then 11 | cat run.log 12 | echo "Total done repos" 13 | ls *.done | wc -l 14 | if [ $(grep -E '(socket.timeout: timed out|ssl.SSLError:)' run.log | wc -l) -gt 0 ] ; then 15 | echo "ERROR: Socket timeout, going to retry" 16 | let RETRY_COUNT=$RETRY_COUNT+1 17 | echo "RETRY_COUNT=$RETRY_COUNT" > retry.txt 18 | sleep 60 19 | else 20 | exit 1 21 | fi 22 | else 23 | cat run.log 24 | rm -rf *.done 25 | fi 26 | -------------------------------------------------------------------------------- /jenkins-jobs/es-cmssw-afs-eos.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from __future__ import print_function 3 | from os.path import dirname, abspath 4 | import sys 5 | 6 | sys.path.append(dirname(dirname(abspath(__file__)))) 7 | from hashlib import sha1 8 | import json 9 | from es_utils import send_payload 10 | from _py2with3compatibility import run_cmd 11 | from cmsutils import cmsswIB2Week 12 | 13 | err, logs = run_cmd( 14 | "find /data/sdt/SDT/jenkins-artifacts/cmssw-afs-eos-comparison -mindepth 1 -maxdepth 1 -name '*.json' -type f" 15 | ) 16 | for jfile in logs.split("\n"): 17 | if not jfile: 18 | continue 19 | print("Processing file", jfile) 20 | payload = {} 21 | try: 22 | payload = json.load(open(jfile)) 23 | except ValueError as err: 24 | print(err) 25 | run_cmd("rm -f %s" % jfile) 26 | continue 27 | week, rel_sec = cmsswIB2Week(payload["release"]) 28 | payload["@timestamp"] = rel_sec * 1000 29 | id = sha1( 30 | ("%s-%s-%s" % (payload["release"], payload["architecture"], payload["fstype"])).encode() 31 | ).hexdigest() 32 | print(payload) 33 | if send_payload("cmssw-afs-eos-%s" % week, "build", id, json.dumps(payload)): 34 | run_cmd("rm -f %s" % jfile) 35 | -------------------------------------------------------------------------------- /jenkins-jobs/git/git-mirror-repository.py: -------------------------------------------------------------------------------- 1 | 
#!/usr/bin/env python3 2 | from __future__ import print_function 3 | from os.path import abspath, dirname 4 | from sys import argv, exit, path 5 | from os import environ 6 | 7 | path.append(dirname(dirname(dirname(abspath(__file__))))) # in order to import top level modules 8 | from _py2with3compatibility import run_cmd, Request, urlopen, quote_plus 9 | 10 | repo = argv[1] 11 | e, o = run_cmd( 12 | 'git ls-remote -h "https://:@gitlab.cern.ch:8443/%s" 2>&1 | grep "refs/heads/" | wc -l' % repo 13 | ) 14 | if o == "0": 15 | print("Mirror repository not found:", repo) 16 | exit(0) 17 | 18 | TOKEN_FILE = "/data/secrets/cmsbuild-gitlab-secret" 19 | if "GITLAB_SECRET_TOKEN" in environ: 20 | TOKEN_FILE = environ["GITLAB_SECRET_TOKEN"] 21 | url = "https://gitlab.cern.ch/api/v4/projects/%s/mirror/pull" % quote_plus(repo) 22 | headers = {"PRIVATE-TOKEN": open(TOKEN_FILE).read().strip()} 23 | request = Request(url, headers=headers) 24 | request.get_method = lambda: "POST" 25 | response = urlopen(request) 26 | print(response.read()) 27 | -------------------------------------------------------------------------------- /jenkins/add-cpu-labels.groovy: -------------------------------------------------------------------------------- 1 | slave = hudson.model.Hudson.instance.slaves.find { slave -> slave.nodeName.equals(args[0]) } 2 | def cur_lab = slave.labelString.replaceAll(/ +/,' ').trim() 3 | def release_build=cur_lab.contains("release-build"); 4 | if (!(cur_lab =~ /\s*no_label\s*/)) 5 | { 6 | def xlabs=[args[1], args[2], "auto-label"]; 7 | def items = args[2].split("_"); 8 | for (String y : items){xlabs.push(y);} 9 | if (args[2]!="") 10 | { 11 | if (slave.name =~ /^cmsbuild\d+$/) 12 | { 13 | xlabs.push(args[2]+"-cloud"); 14 | xlabs.push("cloud"); 15 | release_build=true; 16 | } 17 | if (args[1]!=""){xlabs.push(args[2]+"-"+args[1]);} 18 | if (release_build){xlabs.push(args[2]+"-release-build");} 19 | } 20 | if (release_build){xlabs.push("release-build");} 21 | if 
(args[3]=="docker") 22 | { 23 | xlabs.push("docker"); 24 | if (items.length==2) 25 | { 26 | xlabs.push("docker-"+items[1]); 27 | if (args[1]!=""){xlabs.push("docker-"+items[1]+"-"+args[1]);} 28 | } 29 | if (args[1]!=""){xlabs.push("docker-"+args[1]);} 30 | } 31 | new_lab = xlabs.join(" ").replaceAll(/\s\s+/,' ').trim(); 32 | println "New Labels:"+new_lab 33 | println "Cur Labels:"+cur_lab 34 | if (new_lab != cur_lab) 35 | { 36 | slave.setLabelString(new_lab) 37 | println "Changing labels: "+cur_lab+"->"+new_lab 38 | } 39 | } 40 | else 41 | { 42 | println "Not changing labels due to explicit 'no_label'" 43 | } 44 | -------------------------------------------------------------------------------- /jenkins/auto-nodes.txt: -------------------------------------------------------------------------------- 1 | nvidia=grid-create-gpu-node 2 | -------------------------------------------------------------------------------- /jenkins/auto-nodes/grid-create-gpu-node: -------------------------------------------------------------------------------- 1 | EXTRA_LABELS=auto-delete 2 | -------------------------------------------------------------------------------- /jenkins/blacklist-lxplus.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /jenkins/delete-build.groovy: -------------------------------------------------------------------------------- 1 | import jenkins.model.Jenkins 2 | 3 | projName=args[0]; 4 | buildId=arg[1].toInteger(); 5 | 6 | println "Removing build number "+buildId+" for "+projName; 7 | Jenkins.instance.getItemByFullName(projName).builds.findAll 8 | { 9 | it.number == buildId 10 | }.each 11 | { 12 | it.delete() 13 | }; 14 | -------------------------------------------------------------------------------- /jenkins/jenkins-projects-report.groovy: -------------------------------------------------------------------------------- 1 | /** 2 | * 
extracts metadata about jenkins jobs from jenkins and dumps in json format 3 | */ 4 | 5 | import jenkins.* 6 | import jenkins.model.* 7 | import hudson.* 8 | import hudson.model.* 9 | import groovy.json.* 10 | 11 | def info = [:] 12 | for (p in Jenkins.instance.projects) { 13 | def new_map = [:] 14 | new_map['job_name'] = p.name 15 | new_map['job_desc'] = p.description 16 | new_map['upstream'] = [] 17 | new_map['downstream'] = [] 18 | new_map['triggers_from'] = [] 19 | new_map['subprojects'] = [] 20 | 21 | p.getUpstreamProjects().each { project -> new_map['upstream'] << project.getDisplayName() } 22 | p.getDownstreamProjects().each { project -> new_map['downstream'] << project.getDisplayName() } 23 | p.getBuildTriggerUpstreamProjects().each { project -> new_map['triggers_from'] << project.getDisplayName() } 24 | 25 | for (b in p.builders) { 26 | if (b in hudson.plugins.parameterizedtrigger.TriggerBuilder) { 27 | b.configs.projects.each { project -> new_map['subprojects'] << project } 28 | } 29 | } 30 | info[p.name] = new_map 31 | } 32 | def json = JsonOutput.toJson(info) 33 | def file1 = new File('/tmp/report_gen.txt') 34 | file1.write json 35 | -------------------------------------------------------------------------------- /jenkins/kill-build-release.groovy: -------------------------------------------------------------------------------- 1 | version = args[0]; 2 | wspace = args[1]; 3 | dryrun="true"; 4 | try{dryrun = args[2];} 5 | catch (e) {dryrun="true";} 6 | 7 | for (it in jenkins.model.Jenkins.instance.getItem("build-release").builds) 8 | { 9 | params = it.getBuildVariables(); 10 | if (params['CMSSW_X_Y_Z']!=version){continue;} 11 | arch = params['ARCHITECTURE']; 12 | println "#"+it.getNumber()+" "+version+"/"+arch; 13 | ws = it.getWorkspace(); 14 | if (ws==null) 15 | { 16 | try{ 17 | wsFromfile = 'grep /var/lib/jenkins/jobs/build-release/builds/'+it.getNumber()+'/build.xml' 18 | ws = wsFromfile.execute().text.replaceAll(".*","").replaceAll(" *.*","") 19 | } 
20 | catch (e) {println "Error: Unable to find workspace"; continue;} 21 | } 22 | pfile = wspace+"/properties.kill-build-release-"+it.getNumber(); 23 | host_obj = it.getBuiltOn(); 24 | host = host_obj.getLauncher().getCommand().split(" ")[1]; 25 | if (!host.contains("@")){host=host+"@"+host_obj.getNodeName();} 26 | println "Creating property file:"+pfile; 27 | def out = new File(pfile); 28 | out << "CMSSW_X_Y_Z="+version+"\n"; 29 | out << "ARCHITECTURE="+arch+"\n"; 30 | out << "BUILD_DIR="+ws+"\n"; 31 | out << "BUILD_HOST="+host+"\n"; 32 | out << "DRY_RUN="+dryrun+"\n"; 33 | } 34 | 35 | -------------------------------------------------------------------------------- /jenkins/kill-build.groovy: -------------------------------------------------------------------------------- 1 | proj=args[0]; 2 | build_number=args[1].toInteger(); 3 | println "Checking running jobs for "+proj; 4 | for (it in jenkins.model.Jenkins.instance.getItem(proj).builds) 5 | { 6 | if (it.getNumber() != build_number){continue;} 7 | println "Found build with progress "+it.isInProgress() 8 | if (it.isInProgress() == true) 9 | { 10 | it.doStop(); 11 | println " Stopped Job"; 12 | } 13 | break; 14 | } 15 | 16 | -------------------------------------------------------------------------------- /jenkins/milkv-connect.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | TARGET=$1 3 | USER=cmsbuild 4 | SCRATCH=/home/smmuzaffar/cmsbld/${NODE_NAME} 5 | 6 | KTAB=${HOME}/keytabs/$(echo $TARGET | sed 's|@.*||').keytab 7 | if [ ! 
-f $KTAB ] ; then KTAB=${HOME}/keytabs/cmsbld.keytab ; fi 8 | export KRB5CCNAME=FILE:/tmp/krb5cc_${USER}_${NODE_NAME} 9 | kinit $USER@CERN.CH -k -t ${KTAB} 10 | klist || true 11 | KRB5_FILENAME=$(echo $KRB5CCNAME | sed 's|^FILE:||') 12 | 13 | SSH_OPTS="-q -o IdentitiesOnly=yes -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o ServerAliveInterval=60" 14 | SSH_CMD="ssh -p 2223 ${SSH_OPTS}" 15 | SCP_CMD="scp -P 2223 ${SSH_OPTS}" 16 | 17 | $SSH_CMD -n $TARGET "mkdir -p $SCRATCH" 18 | $SCP_CMD ${KRB5_FILENAME} $TARGET:${SCRATCH}/krb5cc_${USER} 19 | $SCP_CMD /var/lib/jenkins/slave.jar $TARGET:${SCRATCH}/slave.jar 20 | $SSH_CMD $TARGET "ssh milkv-2 java \ 21 | --add-opens java.base/java.lang=ALL-UNNAMED \ 22 | --add-opens java.base/java.lang.reflect=ALL-UNNAMED \ 23 | -jar ${SCRATCH}/slave.jar -jar-cache $WORKSPACE/tmp" 24 | -------------------------------------------------------------------------------- /jenkins/nodes-sanity-check.sh: -------------------------------------------------------------------------------- 1 | SINGULARITY=$1 2 | shift 3 | PATHS=$@ 4 | 5 | # Checking that paths are acessible 6 | for path in ${PATHS[@]}; do 7 | echo "Checking ${path} for host $(hostname)" 8 | ls ${path} >/dev/null 2>&1 && echo -e "... OK!" || echo "ERROR accessing ${path}" 9 | done 10 | 11 | arch=$(uname -r | grep -o "el[0-9]") 12 | 13 | if [[ $arch == "el7" ]]; then 14 | arch="cc7" 15 | fi 16 | 17 | if [ "$SINGULARITY" == "true" ]; then 18 | # Checking that singularity can start 19 | echo "Checking that singularity can start a container on $(hostname)" 20 | /cvmfs/cms.cern.ch/common/cmssw-${arch} --command-to-run ls >/dev/null 2>&1 && echo -e "... OK!" 
|| echo "ERROR starting singularity" 21 | fi 22 | -------------------------------------------------------------------------------- /jenkins/restart-slaves.groovy: -------------------------------------------------------------------------------- 1 | for (slave in hudson.model.Hudson.instance.slaves) 2 | { 3 | comp = slave.getComputer(); 4 | if (comp.isOffline()) 5 | { 6 | offCause = comp.getOfflineCause(); 7 | if (offCause == null){continue;} 8 | println slave.name+":"+offCause; 9 | if (offCause) 10 | { 11 | if (offCause.getClass() == org.jenkinsci.plugins.detection.unreliable.slave.BuildStatisticListener$1) 12 | { 13 | println "Trying to reconnect:"+slave.name; 14 | println comp.cliOnline(); 15 | } 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /jenkins/retry-build.groovy: -------------------------------------------------------------------------------- 1 | import jenkins.model.* 2 | import hudson.model.* 3 | 4 | projName=args[0]; 5 | buildId=args[1].toInteger(); 6 | 7 | println "Retrying build number "+buildId+" for "+projName; 8 | def job = Jenkins.instance.getItemByFullName(projName) 9 | def my_job = job.getBuildByNumber(buildId) 10 | 11 | def actions = my_job.getActions(ParametersAction) 12 | println actions 13 | job.scheduleBuild2(0, actions.toArray(new ParametersAction[actions.size()])) 14 | -------------------------------------------------------------------------------- /jenkins/set-slave-labels.groovy: -------------------------------------------------------------------------------- 1 | slave = hudson.model.Hudson.instance.slaves.find { slave -> slave.nodeName.equals(args[0]) } 2 | def cur_lab = slave.labelString.replaceAll(/ +/,' ').trim() 3 | def new_lab="" 4 | for(int i = 1;i"+new_lab 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /jenkins/test-jenkins-webhook: -------------------------------------------------------------------------------- 1 | 
#!/bin/bash -ex 2 | if [ "$HOST" = "" ] ; then 3 | if [ "$BUILD_CAUSE_UPSTREAMTRIGGER" = "true" -o "$ROOT_BUILD_CAUSE" = "MANUALTRIGGER" ] ; then 4 | JDATA="${JOB_NAME}:${BUILD_ID}" 5 | hname=$(hostname -f) 6 | JURL=$(echo $LOCAL_JENKINS_URL | sed "s|//localhost|//$hname|") 7 | echo "${JDATA}" > $WORKSPACE/../jenkins-wekhook.data 8 | uheader=$(grep '' $JENKINS_HOME/config.xml | sed 's|.* *||;s| *.*||') 9 | curl -s -k -f --retry 3 --retry-delay 5 --max-time 30 -X POST -d \ 10 | "{\"DATA\":\"${JDATA}\",\"JENKINS_SERVER\":\"${JURL}\",\"JENKINS_USER\":\"cmssdt\",\"USER_HEADER\":\"${uheader}\"}" \ 11 | --header 'Content-Type: application/json' \ 12 | https://cmssdt.cern.ch/SDT/cgi-bin/jenkins_webhook > out.txt 13 | cat out.txt 14 | grep 'Unable to start jenkins job' out.txt >/dev/null && exit 1 15 | exit 0 16 | fi 17 | fi 18 | HOSTNAME=$(hostname -I) 19 | IP_OK=false 20 | for ip in $(hostname -I) ; do 21 | [ "$ip" != "${HOST}" ] || IP_OK=true 22 | done 23 | $IP_OK || exit 1 24 | if [ -f $WORKSPACE/../jenkins-wekhook.data ] ; then 25 | ODATA=$(cat $WORKSPACE/../jenkins-wekhook.data) 26 | rm -f $WORKSPACE/../jenkins-wekhook.data 27 | if [ "${ODATA}" != "${DATA}" ] ; then 28 | exit 1 29 | fi 30 | fi 31 | -------------------------------------------------------------------------------- /jobs/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | eval `scram run -sh` 3 | cd ${CMSSW_BASE} 4 | export PATH=${CMSSW_BASE}/cms-bot/das-utils:${PATH} 5 | export PYTHONUNBUFFERED=1 6 | export CMS_PATH=/cvmfs/cms-ib.cern.ch 7 | if [ "X$CMS_SITE_OVERRIDE" == "X" ]; then 8 | CMS_SITE_OVERRIDE="local" 9 | fi 10 | export SITECONFIG_PATH=/cvmfs/cms-ib.cern.ch/SITECONF/$CMS_SITE_OVERRIDE 11 | voms-proxy-init -voms cms 12 | 13 | rm -rf all-pyRelval 14 | mkdir all-pyRelval 15 | for wfs in $(ls wf*of*) ; do 16 | rm -rf pyRelval* 17 | ${CMSSW_BASE}/cms-bot/jobs/create-relval-jobs.py $(cat $wfs) 18 | for type in cpu rss dynamic time ; do 19 | 
cp -r pyRelval pyRelval-${type} 20 | pushd pyRelval-${type} 21 | ${CMSSW_BASE}/cms-bot/jobs/jobscheduler.py -c 200 -m 95 -o ${type} 22 | popd 23 | mv pyRelval-${type} all-pyRelval/${wfs}-${type} 24 | sleep 600 25 | done 26 | rm -rf pyRelval* 27 | sleep 1200 28 | done 29 | -------------------------------------------------------------------------------- /jobs/stats.py: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | """:" 4 | python_cmd="python3" 5 | python -V >/dev/null 2>&1 && python_cmd="python" 6 | exec ${python_cmd} $0 ${1+"$@"} 7 | """ 8 | 9 | from __future__ import print_function 10 | import sys 11 | from os.path import getmtime, join, dirname, abspath 12 | 13 | sys.path.append(dirname(dirname(abspath(__file__)))) # in order to import cms-bot level modules 14 | from _py2with3compatibility import run_cmd 15 | 16 | cache = {} 17 | e, o = run_cmd("ls -d wf*of*") 18 | for d in o.split("\n"): 19 | s, s1 = d.split(".list-", 1) 20 | xt = int(getmtime(d) - getmtime(join(d, "jobs.json"))) 21 | if not s in cache: 22 | cache[s] = {} 23 | if not xt in cache[s]: 24 | cache[s][xt] = [] 25 | e, o = run_cmd("find %s -name 'workflow.log' -type f" % d) 26 | tp = 0 27 | tf = 0 28 | for l in o.split("\n"): 29 | e, o = run_cmd("grep 'tests passed' %s" % l) 30 | x = o.replace(" failed", "").split(" tests passed, ") 31 | tp = tp + sum([int(i) for i in x[0].split(" ")]) 32 | tf = tf + sum([int(i) for i in x[1].split(" ")]) 33 | cache[s][xt].append({"order": s1, "passed": tp, "failed": tf}) 34 | for s in sorted(cache.keys()): 35 | print(s) 36 | for xt in sorted(cache[s].keys()): 37 | for item in cache[s][xt]: 38 | print(" ", xt, " \t", item) 39 | -------------------------------------------------------------------------------- /keytab.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | PRINCIPAL=$1 3 | USER=$(echo $PRINCIPAL | sed 's|@.*||') 4 | echo -n "Enter Password for 
$PRINCIPAL: " 5 | stty -echo 6 | read PASSWD 7 | stty echo 8 | echo "" 9 | 10 | printf "%b" "addent -password -p $PRINCIPAL -k 1 -e rc4-hmac\n$PASSWD\naddent -password -p $PRINCIPAL -k 1 -e aes256-cts\n$PASSWD\nwkt ${USER}.keytab" | ktutil 11 | klist -k ${USER}.keytab 12 | -------------------------------------------------------------------------------- /kill-build-release: -------------------------------------------------------------------------------- 1 | #!/bin/sh -ex 2 | 3 | # This is used when the build is aborted from the issue comments. 4 | # build-release creates a filed called BUILD_PID, this file 5 | # contains the process id for the build. 6 | # When this script is excecuted, it reads the file and kills 7 | # the process with that process ID. 8 | # The file BUILD_PID must be in the workspace of the build 9 | 10 | CMSSW_X_Y_Z=$1 11 | ARCHITECTURE=$2 12 | WORKSPACE=$3 13 | PID_TO_KILL=`ps -awx 2>&1 | grep docker_launcher.sh | grep $CMSSW_X_Y_Z | grep $ARCHITECTURE | awk '{print $1}'` 14 | 15 | pushd $WORKSPACE 16 | if [ "X$PID_TO_KILL" = "X" ]; then 17 | PID_TO_KILL=`head -n 1 BUILD_PID` 18 | fi 19 | echo "Killing $PID_TO_KILL, the build was aborted in the github issue." >> buildSteps.log 20 | kill -9 $PID_TO_KILL 21 | popd 22 | -------------------------------------------------------------------------------- /lumi/connect.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | #usage $0 e.g. connect.sh el8 username@host.domain 3 | OS=$1 4 | TARGET=$2 5 | 6 | # Values from get_slot.sh 7 | USER=cmsbuild 8 | SESSION=$OS 9 | SLURM_ACCOUNT=project_462000245 10 | SCRATCH=/scratch/$SLURM_ACCOUNT/$USER/$SESSION 11 | 12 | KTAB=${HOME}/keytabs/$(echo $TARGET | sed 's|@.*||').keytab 13 | if [ ! 
-f $KTAB ] ; then KTAB=${HOME}/keytabs/cmsbld.keytab ; fi 14 | export KRB5CCNAME=FILE:/tmp/krb5cc_${USER}_${NODE_NAME} 15 | kinit $USER@CERN.CH -k -t ${KTAB} 16 | klist || true 17 | 18 | KRB5_FILENAME=$(echo $KRB5CCNAME | sed 's|^FILE:||') 19 | scp -p $SSH_OPTS ${KRB5_FILENAME} $TARGET:$SCRATCH 20 | 21 | echo "Launching LUMI node... for ${OS} using ${TARGET}" 22 | SSH_OPTS="-q -o IdentitiesOnly=yes -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o ServerAliveInterval=60" 23 | 24 | scp $SSH_OPTS /var/lib/jenkins/slave.jar ${TARGET}:~/cmsbuild/slave-${NODE_NAME}.jar 25 | ssh $SSH_OPTS ${TARGET} "~/cmsbuild/cms-bot/lumi/get_slot.sh ${OS} ${USER} ${NODE_NAME} ~/cmsbuild/slave-${NODE_NAME}.jar ${SLURM_ACCOUNT}" 26 | -------------------------------------------------------------------------------- /lumi/jenkins_java.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | 3 | JENKINS_JAR=$1 4 | JAVA_CMD="/etc/alternatives/jre_17/bin/java" 5 | [ -e /etc/alternatives/jre_21/bin/java ] && JAVA_CMD="/etc/alternatives/jre_21/bin/java" 6 | $JAVA_CMD --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED -jar ${JENKINS_JAR} -jar-cache $(dirname ${JENKINS_JAR})/tmp 7 | -------------------------------------------------------------------------------- /lumi/update_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | 3 | OS=$1 4 | ARCH=$2 5 | 6 | TAG=$(curl -s "https://hub.docker.com/v2/namespaces/cmssw/repositories/${OS}/tags?page_size=100" | jq -r '.results[].name' | grep "${ARCH}-d" | sort -r | head -n1) 7 | echo "The latest cmssw ${OS} tag is $TAG" 8 | if [ -f cmssw_${OS}:${TAG}.sif ]; then 9 | echo "Nothing to do" 10 | exit 0 11 | fi 12 | 13 | echo "Building cmssw_${OS}:${TAG}.sif" 14 | rm -f cmssw_${OS}:${TAG}-tmp.sif 15 | singularity build cmssw_${OS}:${TAG}-tmp.sif docker://cmssw/${OS}:${TAG} 16 | chmod 644 
cmssw_${OS}:${TAG}-tmp.sif 17 | mv cmssw_${OS}:${TAG}-tmp.sif cmssw_${OS}:${TAG}.sif 18 | ln -sf cmssw_${OS}:${TAG}.sif cmssw_${OS}.sif 19 | chown -h .project_462000245 cmssw_${OS}:${TAG}.sif cmssw_${OS}.sif 20 | echo "Done" 21 | -------------------------------------------------------------------------------- /lxr/checkout-version.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from os import utime 3 | from sys import exit 4 | from os.path import isfile, islink 5 | from subprocess import getstatusoutput as cmd 6 | 7 | e, total = cmd("find . -type f | grep -v '/.git/' |wc -l") 8 | e, o = cmd('git log --name-only --pretty=format:"T:%at"') 9 | if e: 10 | print(o) 11 | exit(1) 12 | 13 | cache = {} 14 | time = 0 15 | cnt = 0 16 | for l in o.split("\n"): 17 | if not l: 18 | continue 19 | if l[:2] == "T:": 20 | time = int(l[2:]) 21 | continue 22 | if l in cache: 23 | continue 24 | if isfile(l) and not islink(l): 25 | cnt += 1 26 | cache[l] = time 27 | utime(l, (time, time)) 28 | print("[%s/%s] %s: %s" % (cnt, total, l, time)) 29 | else: 30 | cache[l] = 0 31 | -------------------------------------------------------------------------------- /lxr/delete-index.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | source $(dirname $0)/version_utils.sh 3 | BASE_DIR=/data/lxr 4 | [ "X$1" = "X" ] && exit 1 5 | tag=$1 6 | delete_version ${BASE_DIR}/host_config/versions ${tag} 7 | sort_version ${BASE_DIR}/host_config/versions 8 | set_default ${BASE_DIR}/host_config/versions ${BASE_DIR}/host_config/default 9 | 10 | [ -d ${BASE_DIR}/src/$tag ] && rm -rf ${BASE_DIR}/src/$tag 11 | [ -d ${BASE_DIR}/glimpse_index/lxr/${tag} ] && rm -rf ${BASE_DIR}/glimpse_index/lxr/${tag} 12 | 13 | DOCKER_LXR=$(docker ps -a -q --filter 'name=lxr') 14 | if [ "X${DOCKER_LXR}" = "X" ] ; then 15 | ${BASE_DIR}/scripts/run_lxr.sh 16 | #wait for mysql server to come up 17 | sleep 120 18 | 
DOCKER_LXR=$(docker ps -a -q --filter 'name=lxr') 19 | fi 20 | docker exec -u lxr -t lxr /lxr/host_config/cleanup-db.sh "${tag}" 21 | 22 | -------------------------------------------------------------------------------- /lxr/generate-index.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | source $(dirname $0)/version_utils.sh 3 | BASE_DIR=/data/lxr 4 | [ "X$1" = "X" ] && exit 1 5 | tag=$1 6 | DOCKER_LXR=$(docker ps -a -q --filter 'name=lxr') 7 | if [ "X${DOCKER_LXR}" = "X" ] ; then 8 | ${BASE_DIR}/scripts/run_lxr.sh 9 | #wait for mysql server to come up 10 | sleep 120 11 | DOCKER_LXR=$(docker ps -a -q --filter 'name=lxr') 12 | fi 13 | rm -rf ${BASE_DIR}/glimpse_index/lxr/${tag} || true 14 | mkdir -p ${BASE_DIR}/glimpse_index/lxr/${tag} 15 | echo $tag >> ${BASE_DIR}/host_config/versions 16 | sort_version ${BASE_DIR}/host_config/versions 17 | docker exec -u lxr -t lxr /lxr/genxref --url=//localhost/lxr --version=$tag 18 | set_default ${BASE_DIR}/host_config/versions ${BASE_DIR}/host_config/default 19 | -------------------------------------------------------------------------------- /lxr/version_utils.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | function sort_version() 3 | { 4 | grep '_X$' ${1} | tr '_' ' ' | sort -rn --key 2,3 | tr ' ' '_' | uniq > ${1}.new 5 | grep '_X_' ${1} | tr '_' ' ' | sort -rn --key 2,3 | tr ' ' '_' | uniq >> ${1}.new 6 | grep -v '_X_' ${1} | grep -v '_X$' | tr '_' ' ' | sort -rn --key 2,3 | tr ' ' '_' | uniq >> ${1}.new 7 | mv ${1}.new ${1} 8 | } 9 | 10 | function delete_version() 11 | { 12 | if [ $(grep "^${2}$" ${1} | wc -l) -gt 0 ] ; then 13 | grep -v "${2}$" $1 > $1.new 14 | mv ${1}.new ${1} 15 | fi 16 | } 17 | 18 | function set_default() 19 | { 20 | default_version=$(grep '_X_' ${1} | head -1) 21 | if [ "${default_version}" = "" ] ; then 22 | default_version=$(head -1 ${1}) 23 | fi 24 | echo "${default_version}" > ${2} 
25 | } 26 | -------------------------------------------------------------------------------- /material_budget_ref.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | MATERIAL_BUDGET_REF = { 4 | "CMSSW_8_1_X": "CMSSW_8_1_X_2017-03-12-0000", 5 | "CMSSW_9_0_X": "CMSSW_9_0_X_2017-03-14-1100", 6 | "CMSSW_9_1_X": "CMSSW_9_0_X_2017-03-14-1100", 7 | "CMSSW_9_2_X": "CMSSW_9_0_X_2017-03-14-1100", 8 | "CMSSW_9_3_X": "CMSSW_9_0_X_2017-03-14-1100", 9 | "CMSSW_9_4_X": "CMSSW_9_0_X_2017-03-14-1100", 10 | "CMSSW_10_0_X": "CMSSW_9_0_X_2017-03-14-1100", 11 | "CMSSW_10_1_X": "CMSSW_9_0_X_2017-03-14-1100", 12 | "CMSSW_10_2_X": "CMSSW_9_0_X_2017-03-14-1100", 13 | "CMSSW_10_3_X": "CMSSW_9_0_X_2017-03-14-1100", 14 | "CMSSW_10_4_X": "CMSSW_9_0_X_2017-03-14-1100", 15 | "CMSSW_10_5_X": "CMSSW_9_0_X_2017-03-14-1100", 16 | "CMSSW_10_6_X": "CMSSW_9_0_X_2017-03-14-1100", 17 | "CMSSW_11_0_X": "CMSSW_9_0_X_2017-03-14-1100", 18 | "CMSSW_11_1_X": "CMSSW_9_0_X_2017-03-14-1100", 19 | "CMSSW_11_2_X": "CMSSW_9_0_X_2017-03-14-1100", 20 | } 21 | 22 | 23 | def get_ref(): 24 | from os import environ 25 | 26 | print(MATERIAL_BUDGET_REF["_".join(environ["CMSSW_VERSION"].split("_")[0:3]) + "_X"]) 27 | -------------------------------------------------------------------------------- /mirror-root: -------------------------------------------------------------------------------- 1 | #!/bin/sh -ex 2 | WORKSPACE=${WORKSPACE-$PWD} 3 | cd $WORKSPACE 4 | git clone https://github.com/root-mirror/root 5 | cd root 6 | git remote set-url origin https://github.com/root-mirror/root 7 | git remote add cms-sw git@github.com:cms-sw/root.git || git remote set-url cms-sw git@github.com:cms-sw/root.git 8 | git fetch origin --tags 9 | git fetch cms-sw 10 | git checkout master 11 | 12 | # Process tags 13 | TAGS=`git tag | grep -e 'v\(5-34-[2-9][0-9]\|6-[0-9][0-9]-[0-9][0-9]\)' | grep -v -e "-rc"` 14 | for t in $TAGS; do 15 | git show-ref -- $t | grep 
cms-sw/cms >/dev/null && continue 16 | echo cms/$t is missing 17 | git branch -D cms/$t || true 18 | git branch cms/$t $t 19 | git push cms-sw cms/$t:cms/$t 20 | done 21 | -------------------------------------------------------------------------------- /monitor_command.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function monitor(){ 4 | echo "=========== $1 ===============" 5 | date 6 | uptime 7 | free -g 8 | ps -u $(whoami) -o pid,start_time,pcpu,rss,size,vsize,cmd --forest 9 | } 10 | 11 | log="monitor_command_exit_code.txt" 12 | ("$@" || echo $? > ${log}) & 13 | [ "${MONITOR_COMMAND_LOG}" = "" ] && MONITOR_COMMAND_LOG="monitor_command.log" 14 | rm -f ${MONITOR_COMMAND_LOG} 15 | touch ${MONITOR_COMMAND_LOG} 16 | LOG_CHECK=$(date +%s) 17 | monitor start >> ${MONITOR_COMMAND_LOG} 2>&1 18 | while [ $(jobs -r -p | wc -l) -gt 0 ] ; do 19 | sleep 1 20 | CTIME=$(date +%s) 21 | let LOG_GAP=${CTIME}-${LOG_CHECK} 22 | if [ $LOG_GAP -lt 300 ] ; then continue ; fi 23 | LOG_CHECK=${CTIME} 24 | monitor >> ${MONITOR_COMMAND_LOG} 2>&1 25 | done 26 | wait 27 | monitor end >> ${MONITOR_COMMAND_LOG} 2>&1 28 | exit_code=0 29 | [ -e ${log} ] && exit_code=$(cat ${log}) 30 | rm -f ${log} 31 | cat ${MONITOR_COMMAND_LOG} 32 | exit $exit_code 33 | -------------------------------------------------------------------------------- /openstack/hg/env.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | this_dir=$(dirname ${BASH_SOURCE[0]}) 3 | source ${this_dir}/os.sh 4 | for d in $(echo $1 | tr '/' ' ' | grep -v '^$'); do 5 | this_dir="${this_dir}/${d}" 6 | if [ -e ${this_dir}.sh ] ; then 7 | source ${this_dir}.sh 8 | fi 9 | done 10 | -------------------------------------------------------------------------------- /openstack/hg/os.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -ex 2 | IMAGE_CC7="--cc7" 3 | IMAGE_SLC6="--slc6" 4 | 
IMAGE_CC8="--c8" 5 | IMAGE_ALMA8="--alma8" 6 | IMAGE_ALMA9="--alma9" 7 | IMAGE_RHEL8="--rhel8" 8 | IMAGE_RHEL9="--rhel9" 9 | DEFAULT_ADMIN_USER="cms-sdt-aibox-admins" 10 | DEFAULT_PUPPET_ENV="production" 11 | DEFAULT_SSH_KEY="cmsbuild" 12 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/cmssdt.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.xlarge" 2 | DEFAULT_IMAGE="$IMAGE_CC7" 3 | DEFAULT_VOLUME_TYPE="vault-500" 4 | DEFAULT_VOLUME_NAME="vdb" 5 | DEFAULT_VOLUME_SIZE="10000" 6 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/doxygen.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.medium" 2 | DEFAULT_IMAGE="$IMAGE_RHEL9" 3 | DEFAULT_VOLUME_NAME="vdb" 4 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/dxr.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.medium" 2 | DEFAULT_IMAGE="$IMAGE_RHEL9" 3 | DEFAULT_VOLUME_NAME="vdb" 4 | DEFAULT_VOLUME_SIZE="200" 5 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/hypernews.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.xlarge" 2 | DEFAULT_IMAGE="$IMAGE_CC7" 3 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/jenkins/cms.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.large" 2 | DEFAULT_IMAGE="$IMAGE_RHEL9" 3 | DEFAULT_FOREMAN_PARAMETERS="jenkins_prefix=cms-jenkins" 4 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/jenkins/dev.sh: 
-------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.large" 2 | DEFAULT_IMAGE="$IMAGE_RHEL9" 3 | DEFAULT_FOREMAN_PARAMETERS="jenkins_prefix=dev-jenkins" 4 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/jenkins/dmwm.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.large" 2 | DEFAULT_IMAGE="$IMAGE_RHEL9" 3 | DEFAULT_FOREMAN_PARAMETERS="jenkins_prefix=dmwm-jenkins" 4 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/jenkins/prod.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.2xlarge" 2 | DEFAULT_IMAGE="$IMAGE_RHEL9" 3 | DEFAULT_FOREMAN_PARAMETERS="jenkins_prefix=jenkins" 4 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/jenkins/user.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.medium" 2 | DEFAULT_IMAGE="$IMAGE_CC7" 3 | DEFAULT_VOLUME_NAME="vdb" 4 | DEFAULT_VOLUME_SIZE="100" 5 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/lxr.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.large" 2 | DEFAULT_IMAGE="$IMAGE_RHEL9" 3 | DEFAULT_VOLUME_NAME="vdb" 4 | DEFAULT_VOLUME_SIZE="100" 5 | DEFAULT_VOLUME_TYPE="io3" 6 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/puppet_test.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.small" 2 | DEFAULT_IMAGE="$IMAGE_SLC6" 3 | DEFAULT_VOLUME_NAME="" 4 | DEFAULT_VOLUME_SIZE="" 5 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/sdt/builder.sh: 
-------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="c4.2xlarge.eph" 2 | DEFAULT_IMAGE="$IMAGE_ALMA9" 3 | DEFAULT_PROJECT="CMS_SDT_CI" 4 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/sdt/builder/arm.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="a1.3xlarge" 2 | DEFAULT_PROJECT="CMS_Miscellaneous_ARM" 3 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/sdt/cmsdocker.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.medium" 2 | DEFAULT_IMAGE="$IMAGE_CC7" 3 | DEFAULT_VOLUME_NAME="vdb" 4 | DEFAULT_VOLUME_SIZE="500" 5 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/sdt/cmsrep.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.xlarge" 2 | DEFAULT_IMAGE="$IMAGE_CC7" 3 | DEFAULT_VOLUME_TYPE="vault-500" 4 | DEFAULT_VOLUME_NAME="vdb" 5 | DEFAULT_VOLUME_SIZE="10000" 6 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/sdt/cmsrep9.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.2xlarge" 2 | DEFAULT_IMAGE="$IMAGE_RHEL9" 3 | DEFAULT_VOLUME_TYPE="io1" 4 | DEFAULT_VOLUME_NAME="vdb" 5 | DEFAULT_VOLUME_SIZE="5000" 6 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/sdt/cmssdt.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.2xlarge" 2 | DEFAULT_IMAGE="$IMAGE_RHEL9" 3 | DEFAULT_VOLUME_TYPE="io1" 4 | DEFAULT_VOLUME_NAME="vdb" 5 | DEFAULT_VOLUME_SIZE="6000" 6 | -------------------------------------------------------------------------------- 
/openstack/hg/vocmssdt/sdt/cmsuser.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.2xlarge" 2 | DEFAULT_IMAGE="$IMAGE_CC7" 3 | DEFAULT_PROJECT="CMS_SDT_Build" 4 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/sdt/dev7.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.2xlarge" 2 | DEFAULT_IMAGE="$IMAGE_CC7" 3 | DEFAULT_VOLUME_NAME="vdb" 4 | DEFAULT_VOLUME_SIZE="" 5 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/sdt/dev8.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.2xlarge" 2 | DEFAULT_IMAGE="$IMAGE_ALMA8" 3 | DEFAULT_VOLUME_SIZE="2000" 4 | DEFAULT_VOLUME_TYPE="io3" 5 | DEFAULT_VOLUME_NAME="vdb" 6 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/sdt/dmwm.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.medium" 2 | DEFAULT_IMAGE="$IMAGE_CC7" 3 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/sdt/dmwm9.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.large" 2 | DEFAULT_IMAGE="$IMAGE_ALMA9" 3 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/sdt/docker.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.xlarge" 2 | DEFAULT_IMAGE="$IMAGE_CC7" 3 | -------------------------------------------------------------------------------- /openstack/hg/vocmssdt/sdt/docker8.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.xlarge" 2 | DEFAULT_IMAGE="$IMAGE_CC8" 3 | 
-------------------------------------------------------------------------------- /openstack/hg/vocmssdt/sdt/docker9.sh: -------------------------------------------------------------------------------- 1 | DEFAULT_FLAVOR="m2.xlarge" 2 | DEFAULT_IMAGE="$IMAGE_ALMA9" 3 | -------------------------------------------------------------------------------- /openstack/scripts/cmds/ai_bs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | pj=$(grep '^pj=' $1 | sed 's|^pj=||' | grep -E '^(CMS_SDT_Build|CMS_SDT_CI|CMS_Miscellaneous_ARM)$' || true) 3 | hg=$(grep '^hg=' $1 | sed 's|^hg=||' | grep '^[a-zA-Z0-9_-][a-zA-Z0-9_/-]*$' || true) 4 | opts=$(grep '^opts=' $1 | sed 's|^opts=||' | grep '^[a-zA-Z0-9_-][a-zA-Z0-9_=. -]*$' || true) 5 | if [ "${hg}" = "" ] ; then 6 | echo "ERROR: Missing hostgroup" 7 | exit 1 8 | fi 9 | if [ "$pj" = "" ] ; then pj="CMS_SDT_Build" ; fi 10 | source $(dirname $0)/setup-env.sh ${pj} 11 | ai-bs -g ${hg} ${opts} 12 | -------------------------------------------------------------------------------- /openstack/scripts/cmds/ai_kill.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | vm=$(grep '^vm=' $1 | sed 's|^vm=||' | grep '^[a-zA-Z0-9_-][a-zA-Z0-9_-]*$' || true) 3 | pj=$(grep '^pj=' $1 | sed 's|^pj=||' | grep '^[A-Z][A-Z_]*$' || true) 4 | [ "$pj" != "" ] || pj="$vm" 5 | if [ "${vm}" = "" ] ; then 6 | echo "ERROR: Wrong VM name" 7 | exit 1 8 | fi 9 | ERR=0 10 | source $(dirname $0)/setup-env.sh "${pj}" 11 | ai-kill ${vm} || ERR=1 12 | ai-foreman delhost --do-not-ask ${vm} || true 13 | exit $ERR 14 | -------------------------------------------------------------------------------- /openstack/scripts/cmds/setup-env.sh: -------------------------------------------------------------------------------- 1 | export OS_PROJECT_DOMAIN_ID=default 2 | export OS_REGION_NAME=cern 3 | export OS_IDENTITY_PROVIDER=sssd 4 | export OS_MUTUAL_AUTH=disabled 5 | 
export OS_IDENTITY_API_VERSION=3 6 | export OS_AUTH_TYPE=v3fedkerb 7 | export OS_PROTOCOL=kerberos 8 | export OS_AUTH_URL=https://keystone.cern.ch/v3 9 | export OS_VOLUME_API_VERSION=3 10 | export OS_USERNAME=$(whoami) 11 | case $1 in 12 | cmsbuild*|CMS_SDT_CI) 13 | export OS_TENANT_ID="dd21c071-cf05-4a5e-8197-0aa0a4d3c8c7" 14 | export OS_PROJECT_NAME="CMS SDT CI" 15 | ;; 16 | vocms-arm*|CMS_Miscellaneous_ARM) 17 | export OS_TENANT_ID=d9e1127a-94a0-4ced-a3ee-22707eab3449 18 | export OS_PROJECT_NAME="CMS Miscellaneous ARM" 19 | ;; 20 | *) 21 | export OS_TENANT_ID="63b9ceb9-4743-42a0-ab89-1a121443ab1d" 22 | export OS_PROJECT_NAME="CMS SDT Build" 23 | ;; 24 | esac 25 | export OS_TENANT_NAME="${OS_PROJECT_NAME}" 26 | -------------------------------------------------------------------------------- /openstack/scripts/cmds/showhost.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | hg=$(head -1 $1 | grep '^hg=vocmssdt/' | sed 's|^hg=||' | grep '^[a-zA-Z0-9_/-][a-zA-Z0-9_/-]*$' || true) 3 | if [ "$hg" = "" ] ; then 4 | echo "ERROR: Wrong hostgroup" 5 | exit 1 6 | fi 7 | ai-foreman -g "${hg}" --no-color --no-header -z Name -z OS showhost 8 | -------------------------------------------------------------------------------- /parse_jenkins_builds.json: -------------------------------------------------------------------------------- 1 | { 2 | "whitelist": ["ib-run-pr-unittests", "ib-run-pr-relvals", "ib-run-baseline"], 3 | "timeout": 3600, 4 | "custom": {} 5 | } 6 | -------------------------------------------------------------------------------- /pr_testing/cmssw-pr-package.spec: -------------------------------------------------------------------------------- 1 | ### RPM cms cmssw-pr-package 1.0 2 | ## NOCOMPILER 3 | ## NO_VERSION_SUFFIX 4 | 5 | Source: none 6 | 7 | ## INCLUDE cmssw-pr-data 8 | 9 | %prep 10 | 11 | %build 12 | 13 | %install 14 | mkdir -p %{i}/bin 15 | mkdir -p %{i}/lib 16 | mkdir -p %{i}/biglib 17 | 18 | cp 
-r %{release_dir}/{bin,lib,biglib} %{i}/ 19 | -------------------------------------------------------------------------------- /pr_testing/get_external_name.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | # gets repo name, then returns spec name depending on the rule 3 | PKG_REPO=$1 4 | PKG_NAME=$(echo ${PKG_REPO} | sed 's|.*/||') 5 | ARRAY=( 6 | # Array of packages to keep track of 7 | # "REPO_NAME:SPECK_NAME" 8 | "cms-externals/llvm-project:llvm" 9 | "cms-sw/SCRAM:SCRAMV1" 10 | "cms-externals/incubator-mxnet:mxnet-predict" 11 | "cms-externals/eigen-git-mirror:eigen" 12 | "cms-externals/tensorflow:tensorflow-sources" 13 | "cms-sw/cmssw-config:coral" 14 | "cms-externals/dxr:py3-dxr" 15 | ) 16 | 17 | case ${PKG_REPO} in 18 | cms-data/*) 19 | RECIPE_NAME="data-${PKG_NAME}" 20 | ;; 21 | *) 22 | RECIPE_NAME=${PKG_NAME} 23 | for repo in "${ARRAY[@]}" ; do 24 | KEY=${repo%%:*} 25 | VALUE=${repo#*:} 26 | if [ "${KEY}" == "${PKG_REPO}" ]; then 27 | RECIPE_NAME=${VALUE} 28 | fi 29 | done 30 | ;; 31 | esac 32 | 33 | echo ${RECIPE_NAME} 34 | -------------------------------------------------------------------------------- /pr_testing/merge_cms-bot_pr.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | # This script should check if there are cms-bot PR and merge into current repo 3 | # Kept as a minimum to avoid chicken and egg problem 4 | # --- 5 | # Constants 6 | SCRIPTPATH="$( cd "$(dirname "$0")" ; pwd -P )" # Absolute path to script 7 | CMS_BOT_DIR=$(dirname ${SCRIPTPATH}) # To get CMS_BOT dir path 8 | PR_TESTING_DIR=${CMS_BOT_DIR}/pr_testing 9 | source ${PR_TESTING_DIR}/_helper_functions.sh # general helper functions 10 | 11 | PULL_REQUESTS=$1 12 | PULL_REQUESTS=$(echo ${PULL_REQUESTS} | tr ',' ' ' | sed 's/ */ /g' | sed 's/^ *//;s/ *$//' ) # to make consistent separation in list 13 | UNIQ_REPOS=$(echo ${PULL_REQUESTS} | tr ' ' '\n' | sed 's|#.*||g' | sort 
| uniq | tr '\n' ' ' ) # Repos without pull number 14 | 15 | # Do git pull --rebase for only /cms-bot 16 | for U_REPO in $(echo ${UNIQ_REPOS} | tr ' ' '\n' | grep '/cms-bot' ); do 17 | FILTERED_PRS=$(echo ${PULL_REQUESTS} | tr ' ' '\n' | grep ${U_REPO} | tr '\n' ' ') 18 | for PR in ${FILTERED_PRS}; do 19 | git_clone_and_merge "$(get_cached_GH_JSON "${PR}")" 20 | done 21 | done 22 | 23 | -------------------------------------------------------------------------------- /pr_testing/retry-command.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | max_try=1 3 | if [ $(echo "${CMS_BOT_RETRY_COUNT}" | grep '^[1-9]$' | wc -l) -gt 0 ] ; then 4 | max_try=${CMS_BOT_RETRY_COUNT} 5 | fi 6 | while true ; do 7 | let max_try=$max_try-1 8 | echo "Running $@" 9 | $@ 10 | err=$? 11 | if [ $err -gt 0 ] ; then 12 | [ $max_try -gt 0 ] || exit $err 13 | else 14 | exit 0 15 | fi 16 | done 17 | -------------------------------------------------------------------------------- /prepare-repo-clone-for-port.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | #$0 PR_NUM pr-user/user-branch cms-user/cms-repo dest-branch 3 | PR_NUM=$1 4 | PR_USER=`echo $2 | sed 's|/.*||'` 5 | PR_BRANCH=`echo $2 | sed 's|.*/||'` 6 | REPO_NAME=`echo $3 | sed 's|.*/||'` 7 | CMS_USER=`echo $3 | sed 's|/.*||'` 8 | REPO_DES_BRANCH=$4 9 | 10 | REPO_REF="" 11 | if [ ! 
-d "${REPO_NAME}" ] ; then 12 | if [ -e /afs/cern.ch/cms/git-cmssw-mirror/${REPO_NAME}.git ] ; then 13 | REPO_REF="--reference /afs/cern.ch/cms/git-cmssw-mirror/${REPO_NAME}.git" 14 | fi 15 | git clone ${REPO_REF} git@github.com:${CMS_USER}/${REPO_NAME} ${REPO_NAME} 16 | fi 17 | cd ${REPO_NAME} 18 | git clean -fdx 19 | git checkout ${REPO_DES_BRANCH} 20 | git reset --hard origin/$REPO_DES_BRANCH 21 | git clean -fdx 22 | git pull --rebase 23 | 24 | CUR_BRANCH=`git branch | grep '^*' | sed 's|.* ||'` 25 | if [ "X${CUR_BRANCH}" != "X${REPO_DES_BRANCH}" ] ; then 26 | echo "Unable to checkout ${REPO_DES_BRANCH} branch" 27 | exit 1 28 | fi 29 | NEW_BRANCH=port-${PR_NUM}-`echo ${REPO_DES_BRANCH} | tr / -` 30 | git remote rm user || true 31 | git remote add user git@github.com:${PR_USER}/${REPO_NAME}.git 32 | git branch -D ${NEW_BRANCH} || true 33 | git branch -D ${PR_BRANCH} || true 34 | git fetch user ${PR_BRANCH}:${PR_BRANCH} 35 | git checkout -b ${NEW_BRANCH} 36 | 37 | -------------------------------------------------------------------------------- /process-partial-logs-relval.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from __future__ import print_function 3 | import os, sys 4 | from runPyRelValThread import PyRelValsThread 5 | 6 | path = sys.argv[1] 7 | newloc = os.path.dirname(path) + "/pyRelValMatrixLogs/run" 8 | os.system("mkdir -p " + newloc) 9 | ProcessLogs = PyRelValsThread(1, path, "1of1", newloc) 10 | print("Generating runall log file: %s" % path) 11 | ProcessLogs.update_runall() 12 | print("Generating relval time info") 13 | ProcessLogs.update_wftime() 14 | print("Parsing logs for workflows/steps") 15 | ProcessLogs.parseLog() 16 | print("Done") 17 | -------------------------------------------------------------------------------- /release-deploy-afs: -------------------------------------------------------------------------------- 1 | #!/bin/sh -ex 2 | CMSSW_X_Y_Z=$1 3 | ARCHITECTURE=$2 4 
| 5 | RPM_NAME=cmssw 6 | case $CMSSW_X_Y_Z in 7 | *patch*) RPM_NAME=cmssw-patch ;; 8 | esac 9 | 10 | BASERELEASE=$(echo $CMSSW_X_Y_Z | sed -e 's/_[a-zA-Z0-9]*patch[0-9].*//') 11 | while ! fs lq /afs/.cern.ch/cms/$ARCHITECTURE/cms/cmssw/$BASERELEASE | grep -E '[2-9]000000'; do 12 | echo Waiting for volume ; sleep 10 13 | done 14 | 15 | source /afs/cern.ch/cms/cmsset_default.sh 16 | /afs/.cern.ch/cms/common/cmspkg -a $ARCHITECTURE update 17 | /afs/.cern.ch/cms/common/cmspkg -a $ARCHITECTURE -f upgrade 18 | /afs/.cern.ch/cms/common/cmspkg -a $ARCHITECTURE install -y $(/afs/.cern.ch/cms/common/cmspkg -a $ARCHITECTURE search $CMSSW_X_Y_Z | sed -e 's|[ ].*||' | grep -e "^cms[+]$RPM_NAME[+]$CMSSW_X_Y_Z\$") 19 | /afs/.cern.ch/cms/common/cmspkg -a $ARCHITECTURE -f clean || true 20 | touch /afs/.cern.ch/cms/$ARCHITECTURE/tmp/$CMSSW_X_Y_Z 21 | echo 'ALL_OK' 22 | 23 | -------------------------------------------------------------------------------- /releases.py: -------------------------------------------------------------------------------- 1 | from milestones import * 2 | import re 3 | 4 | # Default development branch 5 | # Changes from master branch will be merge in to it 6 | # Any PR open against this will be automatically closed by cms-bot (Pr should be made for master branch) 7 | # For new release cycle just change this and make sure to add its milestone and production branches 8 | CMSSW_DEVEL_BRANCH = "CMSSW_15_1_X" 9 | 10 | RELEASE_BRANCH_MILESTONE["master"] = RELEASE_BRANCH_MILESTONE[CMSSW_DEVEL_BRANCH] 11 | RELEASE_BRANCH_PRODUCTION.append("master") 12 | USERS_TO_TRIGGER_HOOKS = set( 13 | SPECIAL_RELEASE_MANAGERS + [m for rel in RELEASE_MANAGERS for m in rel] 14 | ) 15 | 16 | 17 | def get_release_managers(branch): 18 | if branch in RELEASE_MANAGERS: 19 | return RELEASE_MANAGERS[branch] 20 | for exp in RELEASE_MANAGERS: 21 | if re.match(exp, branch): 22 | return RELEASE_MANAGERS[exp] 23 | return [] 24 | 25 | 26 | def is_closed_branch(branch): 27 | if branch in 
RELEASE_BRANCH_CLOSED: 28 | return True 29 | for exp in RELEASE_BRANCH_CLOSED: 30 | if re.match(exp, branch): 31 | return True 32 | return False 33 | -------------------------------------------------------------------------------- /repo_config.py: -------------------------------------------------------------------------------- 1 | from cms_static import GH_CMSSW_ORGANIZATION, GH_CMSSW_REPO, CMSBUILD_GH_USER, get_jenkins 2 | from os.path import dirname, abspath 3 | import os 4 | 5 | GH_TOKEN = os.getenv("GH_TOKEN_FILE", "~/.github-token") 6 | GH_TOKEN_READONLY = "~/.github-token-readonly" 7 | CONFIG_DIR = dirname(abspath(__file__)) 8 | CMSBUILD_USER = CMSBUILD_GH_USER 9 | GH_REPO_ORGANIZATION = GH_CMSSW_ORGANIZATION 10 | CREATE_EXTERNAL_ISSUE = True 11 | CHECK_DPG_POG = True 12 | NONBLOCKING_LABELS = True 13 | JENKINS_SERVER = get_jenkins("jenkins") 14 | IGNORE_ISSUES = { 15 | GH_CMSSW_ORGANIZATION + "/" + GH_CMSSW_REPO: [12368], 16 | } 17 | -------------------------------------------------------------------------------- /report-build-release-status: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | PYTHON_CMD="python" 3 | if which python3 >/dev/null 2>&1 ; then PYTHON_CMD="python3" ; fi 4 | for x in 0 1 2 ; do 5 | ${PYTHON_CMD} $(dirname $0)/report-build-release-status.py "$@" && exit 0 6 | sleep 30 7 | done 8 | exit 1 9 | 10 | -------------------------------------------------------------------------------- /report-pull-request-results: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | $(dirname $0)/report-pull-request-results.py "$@" 3 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/HLT_EcalLaserValidation/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/repos/EcalLaserValidation/HLT_EcalLaserValidation/__init__.py -------------------------------------------------------------------------------- /repos/EcalLaserValidation/HLT_EcalLaserValidation/categories.py: -------------------------------------------------------------------------------- 1 | from cms_static import GH_CMSSW_REPO as gh_cmssw 2 | from cms_static import GH_CMSDIST_REPO as gh_cmsdist 3 | from repo_config import GH_REPO_ORGANIZATION as gh_user 4 | from repo_config import CMSBUILD_USER, GH_REPO_NAME 5 | 6 | CMSSW_ORP = [] 7 | APPROVE_BUILD_RELEASE = list(set([] + CMSSW_ORP)) 8 | REQUEST_BUILD_RELEASE = APPROVE_BUILD_RELEASE 9 | TRIGGER_PR_TESTS = list(set(["smuzaffar"] + REQUEST_BUILD_RELEASE)) 10 | PR_HOLD_MANAGERS = [] 11 | 12 | COMMON_CATEGORIES = ["tests", "code-checks"] 13 | EXTERNAL_CATEGORIES = ["externals"] 14 | EXTERNAL_REPOS = [] 15 | 16 | CMSSW_REPOS = [gh_user + "/" + gh_cmssw] 17 | CMSDIST_REPOS = [gh_user + "/" + gh_cmsdist] 18 | CMSSW_ISSUES_TRACKERS = list(set(CMSSW_ORP)) 19 | COMPARISON_MISSING_MAP = [] 20 | 21 | # github_user:[list of categories] 22 | CMSSW_L2 = { 23 | CMSBUILD_USER: ["tests", "code-checks"], 24 | "degrutto": ["ecal-laser"], 25 | "ndaci": ["ecal-laser"], 26 | "zghiche": ["ecal-laser"], 27 | "gennai": ["ecal-laser"], 28 | "zhenbinwu": ["ecal-laser"], 29 | "wang-hui": ["ecal-laser"], 30 | "abrinke1": ["ecal-laser"], 31 | "mkovac": ["ecal-laser"], 32 | } 33 | 34 | CMSSW_CATEGORIES = { 35 | "ecal-laser": [GH_REPO_NAME], 36 | } 37 | 38 | USERS_TO_TRIGGER_HOOKS = set(TRIGGER_PR_TESTS + CMSSW_ISSUES_TRACKERS + list(CMSSW_L2.keys())) 39 | CMS_REPOS = set(CMSDIST_REPOS + CMSSW_REPOS + EXTERNAL_REPOS) 40 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/HLT_EcalLaserValidation/category-watchers.yaml: -------------------------------------------------------------------------------- 1 
| # Map between github username and the list of cmssw categories to watch e.g. alca, db, core etc. 2 | # valid categories are available here http://cms-sw.github.io/categories.html 3 | #Format 4 | #gh_user: 5 | #- category 6 | #- category 7 | 8 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/HLT_EcalLaserValidation/groups.yaml: -------------------------------------------------------------------------------- 1 | #Map between groups and github users, format of this file is 2 | #groupname: 3 | # - gh_user 4 | # - gh_user 5 | #groupname: 6 | # - gh_user 7 | # - gh_user 8 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/HLT_EcalLaserValidation/releases.py: -------------------------------------------------------------------------------- 1 | # Default development branch 2 | # Changes from master branch will be merge in to it 3 | # Any PR open against this will be automatically closed by cms-bot (Pr should be made for master branch) 4 | # For new release cycle just change this and make sure to add its milestone and production branches 5 | 6 | CMSSW_DEVEL_BRANCH = "CMSSW_10_0_X" 7 | RELEASE_BRANCH_MILESTONE = {} 8 | RELEASE_BRANCH_CLOSED = [] 9 | RELEASE_BRANCH_PRODUCTION = [] 10 | SPECIAL_RELEASE_MANAGERS = [] 11 | RELEASE_MANAGERS = {} 12 | USERS_TO_TRIGGER_HOOKS = set( 13 | SPECIAL_RELEASE_MANAGERS + [m for rel in RELEASE_MANAGERS for m in rel] 14 | ) 15 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/HLT_EcalLaserValidation/repo_config.py: -------------------------------------------------------------------------------- 1 | from cms_static import GH_CMSSW_ORGANIZATION, GH_CMSSW_REPO, CMSBUILD_GH_USER, get_jenkins 2 | from os.path import basename, dirname, abspath 3 | 4 | GH_TOKEN = "~/.github-token" 5 | GH_TOKEN_READONLY = "~/.github-token-readonly" 6 | CONFIG_DIR = 
dirname(abspath(__file__)) 7 | CMSBUILD_USER = "cmsbuild" 8 | GH_REPO_ORGANIZATION = "EcalLaserValidation" 9 | GH_REPO_NAME = "HLT_EcalLaserValidation" 10 | GH_REPO_FULLNAME = GH_REPO_ORGANIZATION + "/" + GH_REPO_NAME 11 | CREATE_EXTERNAL_ISSUE = False 12 | JENKINS_SERVER = get_jenkins("cms-jenkins") 13 | GITHUB_WEBHOOK_TOKEN = "U2FsdGVkX18OTa0HlxmA6uQ9oimETZqECqGDvkqQsEW/7jod1rl8AF1GnmAu0kGt" 14 | # GITHUB_WEBHOOK_TOKEN='U2FsdGVkX18uyTkiQtIOYUfVj2PQLV34u5hQAbfNhl8=' 15 | ADD_LABELS = False 16 | ADD_WEB_HOOK = False 17 | JENKINS_UPLOAD_DIRECTORY = "EcalLaserValidation/HLT_EcalLaserValidation" 18 | JENKINS_NOTIFICATION_EMAIL = "" 19 | OPEN_ISSUE_FOR_PUSH_TESTS = True 20 | IGNORE_ISSUES = [] 21 | # Valid Web hooks 22 | VALID_WEB_HOOKS = ["push"] 23 | # Set the Jenkins slave label is your tests needs special machines to run. 24 | JENKINS_SLAVE_LABEL = "slc6 && amd64 && cmsbuild" 25 | 26 | 27 | def file2Package(filename): 28 | return GH_REPO_NAME 29 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/HLT_EcalLaserValidation/run-pr-tests: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | ERR=0 3 | ./steps.sh || ERR=1 4 | CMSSW_DIR=$(ls -d CMSSW_*) 5 | if [ "X${CMSSW_DIR}" = "X" ] ; then exit $ERR; fi 6 | mv ${CMSSW_DIR}/src ${WORKSPACE}/upload/${CMSSW_DIR} 7 | find ${WORKSPACE}/upload/${CMSSW_DIR} -name '*.root' -type f | xargs --no-run-if-empty rm -f 8 | JOB_SUMMARY_LOG="${WORKSPACE}/summary.txt" 9 | echo "## Differences found for various paths" > ${JOB_SUMMARY_LOG} 10 | for diff_file in $(find ${WORKSPACE}/upload/${CMSSW_DIR} -name '*_diff.log' -type f) ; do 11 | path=$(basename $diff_file | sed 's|_diff.log$||') 12 | echo "### $path" >> ${JOB_SUMMARY_LOG} 13 | if [ -s ${diff_file} ] ; then 14 | echo '```' >> ${JOB_SUMMARY_LOG} 15 | cat ${diff_file} >> ${JOB_SUMMARY_LOG} 16 | echo '```' >> ${JOB_SUMMARY_LOG} 17 | else 18 | echo " - There were no 
differences" >> ${JOB_SUMMARY_LOG} 19 | fi 20 | done 21 | exit $ERR 22 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/HLT_EcalLaserValidation/super-users.yaml: -------------------------------------------------------------------------------- 1 | #List of super users, format is 2 | #- ghuser 3 | #- ghuser 4 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/HLT_EcalLaserValidation/watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of packages to watch. 2 | # Package is a regular expression which will be automatically delimited by ".*" 3 | # Format of this file is 4 | #ghuser: 5 | # - subsystem1/ 6 | # - subsystem/package1 7 | #ghuser: 8 | # - subsystem2/package1 9 | # - subsystem2/package2 10 | smuzaffar: 11 | - HLT_EcalLaserValidation 12 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/L1T_EcalLaserValidation/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/repos/EcalLaserValidation/L1T_EcalLaserValidation/__init__.py -------------------------------------------------------------------------------- /repos/EcalLaserValidation/L1T_EcalLaserValidation/categories.py: -------------------------------------------------------------------------------- 1 | from cms_static import GH_CMSSW_REPO as gh_cmssw 2 | from cms_static import GH_CMSDIST_REPO as gh_cmsdist 3 | from repo_config import GH_REPO_ORGANIZATION as gh_user 4 | from repo_config import CMSBUILD_USER, GH_REPO_NAME 5 | 6 | CMSSW_ORP = [] 7 | APPROVE_BUILD_RELEASE = list(set([] + CMSSW_ORP)) 8 | REQUEST_BUILD_RELEASE = APPROVE_BUILD_RELEASE 9 | TRIGGER_PR_TESTS = list(set(["smuzaffar"] + REQUEST_BUILD_RELEASE)) 10 | 
PR_HOLD_MANAGERS = [] 11 | 12 | COMMON_CATEGORIES = ["tests", "code-checks"] 13 | EXTERNAL_CATEGORIES = ["externals"] 14 | EXTERNAL_REPOS = [] 15 | 16 | CMSSW_REPOS = [gh_user + "/" + gh_cmssw] 17 | CMSDIST_REPOS = [gh_user + "/" + gh_cmsdist] 18 | CMSSW_ISSUES_TRACKERS = list(set(CMSSW_ORP)) 19 | COMPARISON_MISSING_MAP = [] 20 | 21 | # github_user:[list of categories] 22 | CMSSW_L2 = { 23 | CMSBUILD_USER: ["tests", "code-checks"], 24 | "degrutto": ["ecal-laser"], 25 | "ndaci": ["ecal-laser"], 26 | "zghiche": ["ecal-laser"], 27 | "gennai": ["ecal-laser"], 28 | "zhenbinwu": ["ecal-laser"], 29 | "wang-hui": ["ecal-laser"], 30 | "abrinke1": ["ecal-laser"], 31 | "mkovac": ["ecal-laser"], 32 | } 33 | 34 | CMSSW_CATEGORIES = { 35 | "ecal-laser": [GH_REPO_NAME], 36 | } 37 | 38 | USERS_TO_TRIGGER_HOOKS = set(TRIGGER_PR_TESTS + CMSSW_ISSUES_TRACKERS + list(CMSSW_L2.keys())) 39 | CMS_REPOS = set(CMSDIST_REPOS + CMSSW_REPOS + EXTERNAL_REPOS) 40 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/L1T_EcalLaserValidation/category-watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of cmssw categories to watch e.g. alca, db, core etc. 
2 | # valid categories are available here http://cms-sw.github.io/categories.html 3 | #Format 4 | #gh_user: 5 | #- category 6 | #- category 7 | 8 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/L1T_EcalLaserValidation/groups.yaml: -------------------------------------------------------------------------------- 1 | #Map between groups and github users, format of this file is 2 | #groupname: 3 | # - gh_user 4 | # - gh_user 5 | #groupname: 6 | # - gh_user 7 | # - gh_user 8 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/L1T_EcalLaserValidation/releases.py: -------------------------------------------------------------------------------- 1 | # Default development branch 2 | # Changes from master branch will be merge in to it 3 | # Any PR open against this will be automatically closed by cms-bot (Pr should be made for master branch) 4 | # For new release cycle just change this and make sure to add its milestone and production branches 5 | 6 | CMSSW_DEVEL_BRANCH = "CMSSW_10_0_X" 7 | RELEASE_BRANCH_MILESTONE = {} 8 | RELEASE_BRANCH_CLOSED = [] 9 | RELEASE_BRANCH_PRODUCTION = [] 10 | SPECIAL_RELEASE_MANAGERS = [] 11 | RELEASE_MANAGERS = {} 12 | USERS_TO_TRIGGER_HOOKS = set( 13 | SPECIAL_RELEASE_MANAGERS + [m for rel in RELEASE_MANAGERS for m in rel] 14 | ) 15 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/L1T_EcalLaserValidation/repo_config.py: -------------------------------------------------------------------------------- 1 | from cms_static import GH_CMSSW_ORGANIZATION, GH_CMSSW_REPO, CMSBUILD_GH_USER, get_jenkins 2 | from os.path import basename, dirname, abspath 3 | 4 | GH_TOKEN = "~/.github-token" 5 | GH_TOKEN_READONLY = "~/.github-token-readonly" 6 | CONFIG_DIR = dirname(abspath(__file__)) 7 | CMSBUILD_USER = "cmsbuild" 8 | GH_REPO_ORGANIZATION = "EcalLaserValidation" 9 | 
GH_REPO_NAME = "L1T_EcalLaserValidation" 10 | GH_REPO_FULLNAME = GH_REPO_ORGANIZATION + "/" + GH_REPO_NAME 11 | CREATE_EXTERNAL_ISSUE = False 12 | JENKINS_SERVER = get_jenkins("cms-jenkins") 13 | GITHUB_WEBHOOK_TOKEN = "U2FsdGVkX18FTI2p/ZkGhERFC/gPJhXtW+bjAF9xtuWoJIDhv3B+ifsXz3gWm5Xq" 14 | ADD_LABELS = False 15 | ADD_WEB_HOOK = False 16 | JENKINS_UPLOAD_DIRECTORY = "EcalLaserValidation/L1T_EcalLaserValidation" 17 | JENKINS_NOTIFICATION_EMAIL = "" 18 | OPEN_ISSUE_FOR_PUSH_TESTS = True 19 | IGNORE_ISSUES = [] 20 | # Valid Web hooks 21 | VALID_WEB_HOOKS = ["push"] 22 | # Set the Jenkins slave label is your tests needs special machines to run. 23 | JENKINS_SLAVE_LABEL = "slc6 && amd64 && cmsbuild" 24 | 25 | 26 | def file2Package(filename): 27 | return GH_REPO_NAME 28 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/L1T_EcalLaserValidation/run-pr-tests: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | ./new.sh 3 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/L1T_EcalLaserValidation/super-users.yaml: -------------------------------------------------------------------------------- 1 | #List of super users, format is 2 | #- ghuser 3 | #- ghuser 4 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/L1T_EcalLaserValidation/watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of packages to watch. 
2 | # Package is a regular expression which will be automatically delimited by ".*" 3 | # Format of this file is 4 | #ghuser: 5 | # - subsystem1/ 6 | # - subsystem/package1 7 | #ghuser: 8 | # - subsystem2/package1 9 | # - subsystem2/package2 10 | smuzaffar: 11 | - HLT_EcalLaserValidation 12 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/RECO_EcalPulseShapeValidation/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/repos/EcalLaserValidation/RECO_EcalPulseShapeValidation/__init__.py -------------------------------------------------------------------------------- /repos/EcalLaserValidation/RECO_EcalPulseShapeValidation/categories.py: -------------------------------------------------------------------------------- 1 | from cms_static import GH_CMSSW_REPO as gh_cmssw 2 | from cms_static import GH_CMSDIST_REPO as gh_cmsdist 3 | from repo_config import GH_REPO_ORGANIZATION as gh_user 4 | from repo_config import CMSBUILD_USER, GH_REPO_NAME 5 | 6 | CMSSW_ORP = [] 7 | APPROVE_BUILD_RELEASE = list(set([] + CMSSW_ORP)) 8 | REQUEST_BUILD_RELEASE = APPROVE_BUILD_RELEASE 9 | TRIGGER_PR_TESTS = list(set(["smuzaffar", CMSBUILD_USER] + REQUEST_BUILD_RELEASE)) 10 | PR_HOLD_MANAGERS = [] 11 | 12 | COMMON_CATEGORIES = ["tests", "code-checks"] 13 | EXTERNAL_CATEGORIES = ["externals"] 14 | EXTERNAL_REPOS = [] 15 | 16 | CMSSW_REPOS = [gh_user + "/" + gh_cmssw] 17 | CMSDIST_REPOS = [gh_user + "/" + gh_cmsdist] 18 | CMSSW_ISSUES_TRACKERS = list(set(CMSSW_ORP)) 19 | COMPARISON_MISSING_MAP = [] 20 | 21 | # github_user:[list of categories] 22 | CMSSW_L2 = { 23 | CMSBUILD_USER: ["tests", "code-checks"], 24 | "emanueledimarco": ["ecal-pulse-shape"], 25 | } 26 | 27 | CMSSW_CATEGORIES = { 28 | "ecal-pulse-shape": [GH_REPO_NAME], 29 | } 30 | 31 | USERS_TO_TRIGGER_HOOKS = set(TRIGGER_PR_TESTS + 
CMSSW_ISSUES_TRACKERS + list(CMSSW_L2.keys())) 32 | CMS_REPOS = set(CMSDIST_REPOS + CMSSW_REPOS + EXTERNAL_REPOS) 33 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/RECO_EcalPulseShapeValidation/category-watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of cmssw categories to watch e.g. alca, db, core etc. 2 | # valid categories are available here http://cms-sw.github.io/categories.html 3 | #Format 4 | #gh_user: 5 | #- category 6 | #- category 7 | 8 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/RECO_EcalPulseShapeValidation/groups.yaml: -------------------------------------------------------------------------------- 1 | #Map between groups and github users, format of this file is 2 | #groupname: 3 | # - gh_user 4 | # - gh_user 5 | #groupname: 6 | # - gh_user 7 | # - gh_user 8 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/RECO_EcalPulseShapeValidation/releases.py: -------------------------------------------------------------------------------- 1 | # Default development branch 2 | # Changes from master branch will be merge in to it 3 | # Any PR open against this will be automatically closed by cms-bot (Pr should be made for master branch) 4 | # For new release cycle just change this and make sure to add its milestone and production branches 5 | 6 | CMSSW_DEVEL_BRANCH = "CMSSW_10_2_X" 7 | RELEASE_BRANCH_MILESTONE = {} 8 | RELEASE_BRANCH_CLOSED = [] 9 | RELEASE_BRANCH_PRODUCTION = [] 10 | SPECIAL_RELEASE_MANAGERS = [] 11 | RELEASE_MANAGERS = {} 12 | USERS_TO_TRIGGER_HOOKS = set( 13 | SPECIAL_RELEASE_MANAGERS + [m for rel in RELEASE_MANAGERS for m in rel] 14 | ) 15 | -------------------------------------------------------------------------------- 
/repos/EcalLaserValidation/RECO_EcalPulseShapeValidation/repo_config.py: -------------------------------------------------------------------------------- 1 | from cms_static import GH_CMSSW_ORGANIZATION, GH_CMSSW_REPO, CMSBUILD_GH_USER, get_jenkins 2 | from os.path import basename, dirname, abspath 3 | 4 | GH_TOKEN = "~/.github-token" 5 | GH_TOKEN_READONLY = "~/.github-token-readonly" 6 | CONFIG_DIR = dirname(abspath(__file__)) 7 | CMSBUILD_USER = "cmsbuild" 8 | GH_REPO_ORGANIZATION = "EcalLaserValidation" 9 | GH_REPO_NAME = "RECO_EcalPulseShapeValidation" 10 | GH_REPO_FULLNAME = GH_REPO_ORGANIZATION + "/" + GH_REPO_NAME 11 | CREATE_EXTERNAL_ISSUE = False 12 | JENKINS_SERVER = get_jenkins("cms-jenkins") 13 | GITHUB_WEBHOOK_TOKEN = "U2FsdGVkX1+r+XWzRjZHPgURrshDykGdtONgxUa7XBof1Nh1/BiWgt3IyWXu4t60" 14 | ADD_LABELS = False 15 | ADD_WEB_HOOK = False 16 | JENKINS_UPLOAD_DIRECTORY = "EcalLaserValidation/RECO_EcalPulseShapeValidation" 17 | JENKINS_NOTIFICATION_EMAIL = "" 18 | OPEN_ISSUE_FOR_PUSH_TESTS = True 19 | IGNORE_ISSUES = [] 20 | # Valid Web hooks 21 | VALID_WEB_HOOKS = ["push"] 22 | # Set the Jenkins slave label is your tests needs special machines to run. 
23 | JENKINS_SLAVE_LABEL = "slc6 && amd64 && cmsbuild" 24 | 25 | 26 | def file2Package(filename): 27 | return GH_REPO_NAME 28 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/RECO_EcalPulseShapeValidation/run-pr-tests: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | $WORKSPACE/userrepo/run-pr-tests.sh "$@" 3 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/RECO_EcalPulseShapeValidation/super-users.yaml: -------------------------------------------------------------------------------- 1 | #List of super users, format is 2 | #- ghuser 3 | #- ghuser 4 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/RECO_EcalPulseShapeValidation/watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of packages to watch. 2 | # Package is a regular expression which will be automatically delimited by ".*" 3 | # Format of this file is 4 | #ghuser: 5 | # - subsystem1/ 6 | # - subsystem/package1 7 | #ghuser: 8 | # - subsystem2/package1 9 | # - subsystem2/package2 10 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/TPG_EcalLaserValidation/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/repos/EcalLaserValidation/TPG_EcalLaserValidation/__init__.py -------------------------------------------------------------------------------- /repos/EcalLaserValidation/TPG_EcalLaserValidation/category-watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of cmssw categories to watch e.g. 
alca, db, core etc. 2 | # valid categories are available here http://cms-sw.github.io/categories.html 3 | #Format 4 | #gh_user: 5 | #- category 6 | #- category 7 | 8 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/TPG_EcalLaserValidation/check-for-valid-push: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | if [ ! -e ToRun/NewToRun.txt ] ; then 3 | echo "Missing file: ToRun/NewToRun.txt" 4 | exit 1 5 | fi 6 | echo "OK to start test" 7 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/TPG_EcalLaserValidation/groups.yaml: -------------------------------------------------------------------------------- 1 | #Map between groups and github users, format of this file is 2 | #groupname: 3 | # - gh_user 4 | # - gh_user 5 | #groupname: 6 | # - gh_user 7 | # - gh_user 8 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/TPG_EcalLaserValidation/releases.py: -------------------------------------------------------------------------------- 1 | # Default development branch 2 | # Changes from master branch will be merge in to it 3 | # Any PR open against this will be automatically closed by cms-bot (Pr should be made for master branch) 4 | # For new release cycle just change this and make sure to add its milestone and production branches 5 | 6 | CMSSW_DEVEL_BRANCH = "CMSSW_10_0_X" 7 | RELEASE_BRANCH_MILESTONE = {} 8 | RELEASE_BRANCH_CLOSED = [] 9 | RELEASE_BRANCH_PRODUCTION = [] 10 | SPECIAL_RELEASE_MANAGERS = [] 11 | RELEASE_MANAGERS = {} 12 | USERS_TO_TRIGGER_HOOKS = set( 13 | SPECIAL_RELEASE_MANAGERS + [m for rel in RELEASE_MANAGERS for m in rel] 14 | ) 15 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/TPG_EcalLaserValidation/repo_config.py: 
-------------------------------------------------------------------------------- 1 | from cms_static import GH_CMSSW_ORGANIZATION, GH_CMSSW_REPO, CMSBUILD_GH_USER, get_jenkins 2 | from os.path import basename, dirname, abspath 3 | 4 | GH_TOKEN = "~/.github-token" 5 | GH_TOKEN_READONLY = "~/.github-token-readonly" 6 | CONFIG_DIR = dirname(abspath(__file__)) 7 | CMSBUILD_USER = "cmsbuild" 8 | GH_REPO_ORGANIZATION = "EcalLaserValidation" 9 | GH_REPO_NAME = "TPG_EcalLaserValidation" 10 | GH_REPO_FULLNAME = GH_REPO_ORGANIZATION + "/" + GH_REPO_NAME 11 | CREATE_EXTERNAL_ISSUE = False 12 | JENKINS_SERVER = get_jenkins("cms-jenkins") 13 | GITHUB_WEBHOOK_TOKEN = "U2FsdGVkX1+GGHoH9PB4G9rRvEuoKejWnx1zWHOV39VGswFB1rX9s7F3HFdaTtcs" 14 | ADD_LABELS = False 15 | ADD_WEB_HOOK = False 16 | JENKINS_UPLOAD_DIRECTORY = "EcalLaserValidation/TPG_EcalLaserValidation" 17 | JENKINS_NOTIFICATION_EMAIL = "" 18 | OPEN_ISSUE_FOR_PUSH_TESTS = True 19 | IGNORE_ISSUES = [] 20 | # Valid Web hooks 21 | VALID_WEB_HOOKS = ["push"] 22 | # Set the Jenkins slave label if your tests needs special machines to run. 
23 | JENKINS_SLAVE_LABEL = "slc6 && amd64 && cmsbuild" 24 | 25 | 26 | def file2Package(filename): 27 | return GH_REPO_NAME 28 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/TPG_EcalLaserValidation/run-pr-tests: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | ./new.sh 3 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/TPG_EcalLaserValidation/super-users.yaml: -------------------------------------------------------------------------------- 1 | #List of super users, format is 2 | #- ghuser 3 | #- ghuser 4 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/TPG_EcalLaserValidation/watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of packages to watch. 2 | # Package is a regular expression which will be automatically delimited by ".*" 3 | # Format of this file is 4 | #ghuser: 5 | # - subsystem1/ 6 | # - subsystem/package1 7 | #ghuser: 8 | # - subsystem2/package1 9 | # - subsystem2/package2 10 | smuzaffar: 11 | - HLT_EcalLaserValidation 12 | -------------------------------------------------------------------------------- /repos/EcalLaserValidation/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/repos/EcalLaserValidation/__init__.py -------------------------------------------------------------------------------- /repos/HcalConditionsAutomatization/ConditionsValidation/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/repos/HcalConditionsAutomatization/ConditionsValidation/__init__.py -------------------------------------------------------------------------------- /repos/HcalConditionsAutomatization/ConditionsValidation/categories.py: -------------------------------------------------------------------------------- 1 | from cms_static import GH_CMSSW_REPO as gh_cmssw 2 | from cms_static import GH_CMSDIST_REPO as gh_cmsdist 3 | from repo_config import GH_REPO_ORGANIZATION as gh_user 4 | from repo_config import CMSBUILD_USER, GH_REPO_NAME 5 | 6 | CMSSW_ORP = [] 7 | APPROVE_BUILD_RELEASE = list(set([] + CMSSW_ORP)) 8 | REQUEST_BUILD_RELEASE = APPROVE_BUILD_RELEASE 9 | TRIGGER_PR_TESTS = list(set(["smuzaffar"] + REQUEST_BUILD_RELEASE)) 10 | PR_HOLD_MANAGERS = [] 11 | 12 | COMMON_CATEGORIES = ["tests", "code-checks"] 13 | EXTERNAL_CATEGORIES = ["externals"] 14 | EXTERNAL_REPOS = [] 15 | 16 | CMSSW_REPOS = [gh_user + "/" + gh_cmssw] 17 | CMSDIST_REPOS = [gh_user + "/" + gh_cmsdist] 18 | CMSSW_ISSUES_TRACKERS = list(set(CMSSW_ORP)) 19 | COMPARISON_MISSING_MAP = [] 20 | 21 | # github_user:[list of categories] 22 | CMSSW_L2 = { 23 | CMSBUILD_USER: ["tests", "code-checks"], 24 | "GilsonCS": ["hcal-conditions"], 25 | } 26 | 27 | CMSSW_CATEGORIES = { 28 | "hcal-conditions": [GH_REPO_NAME], 29 | } 30 | 31 | USERS_TO_TRIGGER_HOOKS = set(TRIGGER_PR_TESTS + CMSSW_ISSUES_TRACKERS + list(CMSSW_L2.keys())) 32 | CMS_REPOS = set(CMSDIST_REPOS + CMSSW_REPOS + EXTERNAL_REPOS) 33 | -------------------------------------------------------------------------------- /repos/HcalConditionsAutomatization/ConditionsValidation/category-watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of cmssw categories to watch e.g. alca, db, core etc. 
2 | # valid categories are available here http://cms-sw.github.io/categories.html 3 | #Format 4 | #gh_user: 5 | #- category 6 | #- category 7 | 8 | -------------------------------------------------------------------------------- /repos/HcalConditionsAutomatization/ConditionsValidation/groups.yaml: -------------------------------------------------------------------------------- 1 | #Map between groups and github users, format of this file is 2 | #groupname: 3 | # - gh_user 4 | # - gh_user 5 | #groupname: 6 | # - gh_user 7 | # - gh_user 8 | -------------------------------------------------------------------------------- /repos/HcalConditionsAutomatization/ConditionsValidation/releases.py: -------------------------------------------------------------------------------- 1 | # Default development branch 2 | # Changes from master branch will be merge in to it 3 | # Any PR open against this will be automatically closed by cms-bot (Pr should be made for master branch) 4 | # For new release cycle just change this and make sure to add its milestone and production branches 5 | 6 | CMSSW_DEVEL_BRANCH = "CMSSW_10_0_X" 7 | RELEASE_BRANCH_MILESTONE = {} 8 | RELEASE_BRANCH_CLOSED = [] 9 | RELEASE_BRANCH_PRODUCTION = [] 10 | SPECIAL_RELEASE_MANAGERS = [] 11 | RELEASE_MANAGERS = {} 12 | USERS_TO_TRIGGER_HOOKS = set( 13 | SPECIAL_RELEASE_MANAGERS + [m for rel in RELEASE_MANAGERS for m in rel] 14 | ) 15 | -------------------------------------------------------------------------------- /repos/HcalConditionsAutomatization/ConditionsValidation/repo_config.py: -------------------------------------------------------------------------------- 1 | from cms_static import GH_CMSSW_ORGANIZATION, GH_CMSSW_REPO, CMSBUILD_GH_USER, get_jenkins 2 | from os.path import basename, dirname, abspath 3 | 4 | GH_TOKEN = "~/.github-token-cmsbot" 5 | GH_TOKEN_READONLY = "~/.github-token-readonly" 6 | CONFIG_DIR = dirname(abspath(__file__)) 7 | CMSBUILD_USER = "cmsbot" 8 | GH_REPO_ORGANIZATION = 
"HcalConditionsAutomatization" 9 | GH_REPO_NAME = "ConditionsValidation" 10 | GH_REPO_FULLNAME = GH_REPO_ORGANIZATION + "/" + GH_REPO_NAME 11 | CREATE_EXTERNAL_ISSUE = False 12 | JENKINS_SERVER = get_jenkins("cms-jenkins") 13 | ADD_LABELS = False 14 | ADD_WEB_HOOK = True 15 | JENKINS_UPLOAD_DIRECTORY = "HcalConditionsAutomatization/ConditionsValidation" 16 | JENKINS_NOTIFICATION_EMAIL = "" 17 | OPEN_ISSUE_FOR_PUSH_TESTS = True 18 | IGNORE_ISSUES = [] 19 | # Valid Web hooks 20 | VALID_WEB_HOOKS = ["push"] 21 | # Set the Jenkins slave label is your tests needs special machines to run. 22 | JENKINS_SLAVE_LABEL = "lxplus" 23 | 24 | 25 | def file2Package(filename): 26 | return GH_REPO_NAME 27 | -------------------------------------------------------------------------------- /repos/HcalConditionsAutomatization/ConditionsValidation/run-pr-tests: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | ./new.sh 3 | -------------------------------------------------------------------------------- /repos/HcalConditionsAutomatization/ConditionsValidation/super-users.yaml: -------------------------------------------------------------------------------- 1 | #List of super users, format is 2 | #- ghuser 3 | #- ghuser 4 | -------------------------------------------------------------------------------- /repos/HcalConditionsAutomatization/ConditionsValidation/watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of packages to watch. 
2 | # Package is a regular expression which will be automatically delimited by ".*" 3 | # Format of this file is 4 | #ghuser: 5 | # - subsystem1/ 6 | # - subsystem/package1 7 | #ghuser: 8 | # - subsystem2/package1 9 | # - subsystem2/package2 10 | smuzaffar: 11 | - ConditionsValidation 12 | 13 | -------------------------------------------------------------------------------- /repos/HcalConditionsAutomatization/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /repos/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/repos/__init__.py -------------------------------------------------------------------------------- /repos/cms_patatrack/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/repos/cms_patatrack/__init__.py -------------------------------------------------------------------------------- /repos/cms_patatrack/cmssw/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/repos/cms_patatrack/cmssw/__init__.py -------------------------------------------------------------------------------- /repos/cms_patatrack/cmssw/categories.py: -------------------------------------------------------------------------------- 1 | from cms_static import GH_CMSSW_REPO as gh_cmssw 2 | from cms_static import GH_CMSDIST_REPO as gh_cmsdist 3 | from repo_config import GH_REPO_ORGANIZATION as gh_user 4 | from repo_config import CMSBUILD_USER 5 | from categories_map import CMSSW_CATEGORIES 6 | 7 | CMSSW_ORP = [] 8 | APPROVE_BUILD_RELEASE = list(set([] + CMSSW_ORP)) 9 | 
REQUEST_BUILD_RELEASE = APPROVE_BUILD_RELEASE 10 | TRIGGER_PR_TESTS = list(set(["smuzaffar", "felicepantaleo"] + REQUEST_BUILD_RELEASE)) 11 | PR_HOLD_MANAGERS = [] 12 | 13 | COMMON_CATEGORIES = ["tests", "code-checks"] 14 | EXTERNAL_CATEGORIES = ["externals"] 15 | EXTERNAL_REPOS = [] 16 | 17 | CMSSW_REPOS = [gh_user + "/" + gh_cmssw] 18 | CMSDIST_REPOS = [gh_user + "/" + gh_cmsdist] 19 | CMSSW_ISSUES_TRACKERS = list(set(CMSSW_ORP)) 20 | COMPARISON_MISSING_MAP = [] 21 | 22 | # github_user:[list of categories] 23 | CMSSW_L2 = { 24 | CMSBUILD_USER: ["tests", "code-checks"], 25 | "fwyzard": list(CMSSW_CATEGORIES.keys()), 26 | } 27 | 28 | USERS_TO_TRIGGER_HOOKS = set(TRIGGER_PR_TESTS + CMSSW_ISSUES_TRACKERS + list(CMSSW_L2.keys())) 29 | CMS_REPOS = set(CMSDIST_REPOS + CMSSW_REPOS + EXTERNAL_REPOS) 30 | -------------------------------------------------------------------------------- /repos/cms_patatrack/cmssw/category-watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of cmssw categories to watch e.g. alca, db, core etc. 
2 | # valid categories are available here http://cms-sw.github.io/categories.html 3 | #Format 4 | #gh_user: 5 | #- category 6 | #- category 7 | 8 | -------------------------------------------------------------------------------- /repos/cms_patatrack/cmssw/groups.yaml: -------------------------------------------------------------------------------- 1 | #Map between groups and github users, format of this file is 2 | #groupname: 3 | # - gh_user 4 | # - gh_user 5 | #groupname: 6 | # - gh_user 7 | # - gh_user 8 | -------------------------------------------------------------------------------- /repos/cms_patatrack/cmssw/releases.py: -------------------------------------------------------------------------------- 1 | # Default development branch 2 | # Changes from master branch will be merge in to it 3 | # Any PR open against this will be automatically closed by cms-bot (Pr should be made for master branch) 4 | # For new release cycle just change this and make sure to add its milestone and production branches 5 | 6 | CMSSW_DEVEL_BRANCH = "CMSSW_10_1_X" 7 | RELEASE_BRANCH_MILESTONE = {} 8 | RELEASE_BRANCH_CLOSED = [] 9 | RELEASE_BRANCH_PRODUCTION = [] 10 | SPECIAL_RELEASE_MANAGERS = [] 11 | RELEASE_MANAGERS = {} 12 | USERS_TO_TRIGGER_HOOKS = set( 13 | SPECIAL_RELEASE_MANAGERS + [m for rel in RELEASE_MANAGERS for m in rel] 14 | ) 15 | -------------------------------------------------------------------------------- /repos/cms_patatrack/cmssw/super-users.yaml: -------------------------------------------------------------------------------- 1 | #List of super users, format is 2 | #- ghuser 3 | #- ghuser 4 | -------------------------------------------------------------------------------- /repos/cms_patatrack/cmssw/watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of packages to watch. 
2 | # Package is a regular expression which will be automatically delimited by ".*" 3 | # Format of this file is 4 | #ghuser: 5 | # - subsystem1/ 6 | # - subsystem/package1 7 | #ghuser: 8 | # - subsystem2/package1 9 | # - subsystem2/package2 10 | -------------------------------------------------------------------------------- /repos/cms_sw/cms_docker/categories.py: -------------------------------------------------------------------------------- 1 | _file_items = __file__.split("/") 2 | _default_bot_dir = "/".join(_file_items[0:-4]) 3 | exec(open("%s/%s" % (_default_bot_dir, _file_items[-1])).read()) 4 | 5 | # Override default issue trackers for cms-docker repo 6 | # Only notify CMS_SDT members 7 | CMSSW_ISSUES_TRACKERS = CMS_SDT[:] 8 | 9 | CMSSW_CONTAINERS = [ 10 | "alma8", 11 | "cc7", 12 | "cc8", 13 | "cms", 14 | "cmssw", 15 | "cs8", 16 | "cs9", 17 | "docker-lxr", 18 | "docker-vtune", 19 | "el8", 20 | "el9", 21 | "el10", 22 | "rocky8", 23 | "slc5", 24 | "slc6", 25 | "ubi8", 26 | ] 27 | 28 | CMSSW_LABELS = {} 29 | for item in CMSSW_CONTAINERS: 30 | CMSSW_LABELS[item] = [item + "/"] 31 | -------------------------------------------------------------------------------- /repos/cms_sw/cms_docker/githublabels.py: -------------------------------------------------------------------------------- 1 | _file_items = __file__.split("/") 2 | _default_bot_dir = "/".join(_file_items[0:-4]) 3 | exec(open("%s/%s" % (_default_bot_dir, _file_items[-1])).read()) 4 | 5 | from categories import CMSSW_CONTAINERS 6 | 7 | for data in CMSSW_CONTAINERS: 8 | TYPE_COMMANDS[data] = [LABEL_COLORS["doc"], data, "mtype"] 9 | 10 | for arch in ["x86_64", "ppc64le", "aarch64", "amd64"]: 11 | TYPE_COMMANDS[arch] = [LABEL_COLORS["doc"], "%s-[0-9a-f]+" % arch, "mtype", True] 12 | TYPE_COMMANDS[arch + "-cms-docker"] = [ 13 | LABEL_COLORS["doc"], 14 | "%s-([a-z]+-|)(queued|building|done|error)" % arch, 15 | "mtype", 16 | True, 17 | "state", 18 | ] 19 | 
-------------------------------------------------------------------------------- /repos/cms_sw/cms_docker/repo_config.py: -------------------------------------------------------------------------------- 1 | _file_items = __file__.split("/") 2 | _default_bot_dir = "/".join(_file_items[0:-4]) 3 | exec(open("%s/%s" % (_default_bot_dir, _file_items[-1])).read()) 4 | 5 | CONFIG_DIR = _default_bot_dir 6 | CHECK_DPG_POG = False 7 | -------------------------------------------------------------------------------- /repos/iarspider_cmssw/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/repos/iarspider_cmssw/__init__.py -------------------------------------------------------------------------------- /repos/iarspider_cmssw/cmsdist/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/repos/iarspider_cmssw/cmsdist/__init__.py -------------------------------------------------------------------------------- /repos/iarspider_cmssw/cmsdist/category-watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of cmssw categories to watch e.g. alca, db, core etc. 
2 | # valid categories are available here http://cms-sw.github.io/categories.html 3 | tocheng: 4 | - alca 5 | mmusich: 6 | - alca 7 | - db 8 | silviodonato: 9 | - hlt 10 | PonIlya: 11 | - db 12 | yuanchao: 13 | - alca 14 | - db 15 | rsreds: 16 | - alca 17 | - db 18 | 19 | -------------------------------------------------------------------------------- /repos/iarspider_cmssw/cmsdist/groups.yaml: -------------------------------------------------------------------------------- 1 | tracking-pog: 2 | - VinInn 3 | - mtosi 4 | - mmusich 5 | - VourMa 6 | btv-pog: 7 | - JyothsnaKomaragiri 8 | - emilbols 9 | - andrzejnovak 10 | - AlexDeMoor 11 | - Ming-Yan 12 | - Senphy 13 | ecal-offline: 14 | - wang0jin 15 | - thomreis 16 | - ReyerBand 17 | pixel-offline: 18 | - dkotlins 19 | - ferencek 20 | - tvami 21 | - mroguljic 22 | trk-dqm: 23 | - arossi83 24 | - sroychow 25 | tau-pog: 26 | - mbluj 27 | - azotz 28 | muon-reco: 29 | - CeliaFernandez 30 | - andrea21z 31 | -------------------------------------------------------------------------------- /repos/iarspider_cmssw/cmsdist/releases.py: -------------------------------------------------------------------------------- 1 | from milestones import * 2 | import re 3 | 4 | # Default development branch 5 | # Changes from master branch will be merge in to it 6 | # Any PR open against this will be automatically closed by cms-bot (Pr should be made for master branch) 7 | # For new release cycle just change this and make sure to add its milestone and production branches 8 | CMSSW_DEVEL_BRANCH = "CMSSW_14_1_X" 9 | 10 | RELEASE_BRANCH_MILESTONE["master"] = RELEASE_BRANCH_MILESTONE[CMSSW_DEVEL_BRANCH] 11 | RELEASE_BRANCH_PRODUCTION.append("master") 12 | USERS_TO_TRIGGER_HOOKS = set( 13 | SPECIAL_RELEASE_MANAGERS + [m for rel in RELEASE_MANAGERS for m in rel] 14 | ) 15 | 16 | 17 | def get_release_managers(branch): 18 | if branch in RELEASE_MANAGERS: 19 | return RELEASE_MANAGERS[branch] 20 | for exp in RELEASE_MANAGERS: 21 | if re.match(exp, 
branch): 22 | return RELEASE_MANAGERS[exp] 23 | return [] 24 | 25 | 26 | def is_closed_branch(branch): 27 | if branch in RELEASE_BRANCH_CLOSED: 28 | return True 29 | for exp in RELEASE_BRANCH_CLOSED: 30 | if re.match(exp, branch): 31 | return True 32 | return False 33 | -------------------------------------------------------------------------------- /repos/iarspider_cmssw/cmssw/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/repos/iarspider_cmssw/cmssw/__init__.py -------------------------------------------------------------------------------- /repos/iarspider_cmssw/cmssw/category-watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of cmssw categories to watch e.g. alca, db, core etc. 2 | # valid categories are available here http://cms-sw.github.io/categories.html 3 | tocheng: 4 | - alca 5 | mmusich: 6 | - alca 7 | - db 8 | silviodonato: 9 | - hlt 10 | PonIlya: 11 | - db 12 | yuanchao: 13 | - alca 14 | - db 15 | rsreds: 16 | - alca 17 | - db 18 | 19 | -------------------------------------------------------------------------------- /repos/iarspider_cmssw/cmssw/groups.yaml: -------------------------------------------------------------------------------- 1 | tracking-pog: 2 | - VinInn 3 | - mtosi 4 | - mmusich 5 | - VourMa 6 | btv-pog: 7 | - JyothsnaKomaragiri 8 | - emilbols 9 | - andrzejnovak 10 | - AlexDeMoor 11 | - Ming-Yan 12 | - Senphy 13 | ecal-offline: 14 | - wang0jin 15 | - thomreis 16 | - ReyerBand 17 | pixel-offline: 18 | - dkotlins 19 | - ferencek 20 | - tvami 21 | - mroguljic 22 | trk-dqm: 23 | - arossi83 24 | - sroychow 25 | tau-pog: 26 | - mbluj 27 | - azotz 28 | muon-reco: 29 | - CeliaFernandez 30 | - andrea21z 31 | -------------------------------------------------------------------------------- 
/repos/iarspider_cmssw/cmssw/releases.py: -------------------------------------------------------------------------------- 1 | from milestones import * 2 | import re 3 | 4 | # Default development branch 5 | # Changes from master branch will be merge in to it 6 | # Any PR open against this will be automatically closed by cms-bot (Pr should be made for master branch) 7 | # For new release cycle just change this and make sure to add its milestone and production branches 8 | CMSSW_DEVEL_BRANCH = "CMSSW_14_1_X" 9 | 10 | RELEASE_BRANCH_MILESTONE["master"] = RELEASE_BRANCH_MILESTONE[CMSSW_DEVEL_BRANCH] 11 | RELEASE_BRANCH_PRODUCTION.append("master") 12 | USERS_TO_TRIGGER_HOOKS = set( 13 | SPECIAL_RELEASE_MANAGERS + [m for rel in RELEASE_MANAGERS for m in rel] 14 | ) 15 | 16 | 17 | def get_release_managers(branch): 18 | if branch in RELEASE_MANAGERS: 19 | return RELEASE_MANAGERS[branch] 20 | for exp in RELEASE_MANAGERS: 21 | if re.match(exp, branch): 22 | return RELEASE_MANAGERS[exp] 23 | return [] 24 | 25 | 26 | def is_closed_branch(branch): 27 | if branch in RELEASE_BRANCH_CLOSED: 28 | return True 29 | for exp in RELEASE_BRANCH_CLOSED: 30 | if re.match(exp, branch): 31 | return True 32 | return False 33 | -------------------------------------------------------------------------------- /repos/smuzaffar/SCRAM/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/repos/smuzaffar/SCRAM/__init__.py -------------------------------------------------------------------------------- /repos/smuzaffar/SCRAM/repo_config.py: -------------------------------------------------------------------------------- 1 | from cms_static import get_jenkins 2 | from os.path import basename, dirname, abspath 3 | 4 | # GH read/write token: Use default ~/.github-token-cmsbot 5 | GH_TOKEN = "~/.github-token-cmsbot" 6 | # GH readonly token: Use default ~/.github-token-readonly 
7 | GH_TOKEN_READONLY = "~/.github-token-readonly" 8 | CONFIG_DIR = dirname(abspath(__file__)) 9 | # GH bot user: Use default cmsbot 10 | CMSBUILD_USER = "cmsbot" 11 | GH_REPO_ORGANIZATION = "smuzaffar" 12 | 13 | GITHUB_WEBHOOK_TOKEN = "U2FsdGVkX1/yGRI4T5Xuk69SIVHNLg1fgE1+BU1eiRemkuUdkmqIZD0ICUVaEuO2" 14 | REQUEST_PROCESSOR = "simple-cms-bot" 15 | TRIGGER_PR_TESTS = [] 16 | VALID_WEB_HOOKS = ["issue_comment"] 17 | WEBHOOK_PAYLOAD = True 18 | JENKINS_SERVER = get_jenkins("cms-jenkins") 19 | -------------------------------------------------------------------------------- /repos/smuzaffar/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/repos/smuzaffar/__init__.py -------------------------------------------------------------------------------- /repos/smuzaffar/cmssw/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/repos/smuzaffar/cmssw/__init__.py -------------------------------------------------------------------------------- /repos/smuzaffar/cmssw/categories.py: -------------------------------------------------------------------------------- 1 | from cms_static import GH_CMSSW_REPO as gh_cmssw 2 | from cms_static import GH_CMSDIST_REPO as gh_cmsdist 3 | from repo_config import GH_REPO_ORGANIZATION as gh_user 4 | from repo_config import CMSBUILD_USER 5 | from categories_map import CMSSW_CATEGORIES 6 | 7 | CMSSW_ORP = [] 8 | APPROVE_BUILD_RELEASE = list(set([] + CMSSW_ORP)) 9 | REQUEST_BUILD_RELEASE = APPROVE_BUILD_RELEASE 10 | TRIGGER_PR_TESTS = list(set([] + REQUEST_BUILD_RELEASE)) 11 | PR_HOLD_MANAGERS = [] 12 | 13 | COMMON_CATEGORIES = ["tests", "code-checks"] 14 | EXTERNAL_CATEGORIES = ["externals"] 15 | EXTERNAL_REPOS = [] 16 | 17 | CMSSW_REPOS = [gh_user + "/" + gh_cmssw] 18 | CMSDIST_REPOS = 
[gh_user + "/" + gh_cmsdist] 19 | CMSSW_ISSUES_TRACKERS = list(set(CMSSW_ORP)) 20 | COMPARISON_MISSING_MAP = [] 21 | 22 | # github_user:[list of categories] 23 | CMSSW_L2 = { 24 | CMSBUILD_USER: ["tests", "code-checks"], 25 | gh_user: list(CMSSW_CATEGORIES.keys()), 26 | } 27 | 28 | USERS_TO_TRIGGER_HOOKS = set(TRIGGER_PR_TESTS + CMSSW_ISSUES_TRACKERS + list(CMSSW_L2.keys())) 29 | CMS_REPOS = set(CMSDIST_REPOS + CMSSW_REPOS + EXTERNAL_REPOS) 30 | -------------------------------------------------------------------------------- /repos/smuzaffar/cmssw/category-watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of cmssw categories to watch e.g. alca, db, core etc. 2 | # valid categories are available here http://cms-sw.github.io/categories.html 3 | #Format 4 | #gh_user: 5 | #- category 6 | #- category 7 | 8 | -------------------------------------------------------------------------------- /repos/smuzaffar/cmssw/groups.yaml: -------------------------------------------------------------------------------- 1 | #Map between groups and github users, format of this file is 2 | #groupname: 3 | # - gh_user 4 | # - gh_user 5 | #groupname: 6 | # - gh_user 7 | # - gh_user 8 | -------------------------------------------------------------------------------- /repos/smuzaffar/cmssw/releases.py: -------------------------------------------------------------------------------- 1 | # Default development branch 2 | # Changes from master branch will be merge in to it 3 | # Any PR open against this will be automatically closed by cms-bot (Pr should be made for master branch) 4 | # For new release cycle just change this and make sure to add its milestone and production branches 5 | 6 | CMSSW_DEVEL_BRANCH = "CMSSW_10_0_X" 7 | RELEASE_BRANCH_MILESTONE = {} 8 | RELEASE_BRANCH_CLOSED = [] 9 | RELEASE_BRANCH_PRODUCTION = [] 10 | SPECIAL_RELEASE_MANAGERS = [] 11 | RELEASE_MANAGERS = {} 12 | 
USERS_TO_TRIGGER_HOOKS = set( 13 | SPECIAL_RELEASE_MANAGERS + [m for rel in RELEASE_MANAGERS for m in rel] 14 | ) 15 | -------------------------------------------------------------------------------- /repos/smuzaffar/cmssw/super-users.yaml: -------------------------------------------------------------------------------- 1 | #List of super users, format is 2 | #- ghuser 3 | #- ghuser 4 | -------------------------------------------------------------------------------- /repos/smuzaffar/cmssw/watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of packages to watch. 2 | # Package is a regular expression which will be automatically delimited by ".*" 3 | # Format of this file is 4 | #ghuser: 5 | # - subsystem1/ 6 | # - subsystem/package1 7 | #ghuser: 8 | # - subsystem2/package1 9 | # - subsystem2/package2 10 | -------------------------------------------------------------------------------- /repos/smuzaffar/int_build/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/repos/smuzaffar/int_build/__init__.py -------------------------------------------------------------------------------- /repos/smuzaffar/int_build/categories.py: -------------------------------------------------------------------------------- 1 | from cms_static import GH_CMSSW_REPO as gh_cmssw 2 | from cms_static import GH_CMSDIST_REPO as gh_cmsdist 3 | from repo_config import GH_REPO_ORGANIZATION as gh_user 4 | from repo_config import CMSBUILD_USER 5 | 6 | CMSSW_ORP = [] 7 | APPROVE_BUILD_RELEASE = list(set([] + CMSSW_ORP)) 8 | REQUEST_BUILD_RELEASE = APPROVE_BUILD_RELEASE 9 | TRIGGER_PR_TESTS = list(set([] + REQUEST_BUILD_RELEASE)) 10 | PR_HOLD_MANAGERS = [] 11 | 12 | COMMON_CATEGORIES = ["tests", "code-checks"] 13 | EXTERNAL_CATEGORIES = ["externals"] 14 | EXTERNAL_REPOS = [] 15 | 16 | 
CMSSW_REPOS = [gh_user + "/" + gh_cmssw] 17 | CMSDIST_REPOS = [gh_user + "/" + gh_cmsdist] 18 | CMSSW_ISSUES_TRACKERS = list(set(CMSSW_ORP)) 19 | COMPARISON_MISSING_MAP = [] 20 | 21 | # github_user:[list of categories] 22 | CMSSW_L2 = { 23 | CMSBUILD_USER: ["tests", "code-checks"], 24 | gh_user: [gh_user], 25 | } 26 | 27 | CMSSW_CATEGORIES = { 28 | gh_user: [gh_user], 29 | } 30 | 31 | USERS_TO_TRIGGER_HOOKS = set(TRIGGER_PR_TESTS + CMSSW_ISSUES_TRACKERS + list(CMSSW_L2.keys())) 32 | CMS_REPOS = set(CMSDIST_REPOS + CMSSW_REPOS + EXTERNAL_REPOS) 33 | -------------------------------------------------------------------------------- /repos/smuzaffar/int_build/category-watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of cmssw categories to watch e.g. alca, db, core etc. 2 | # valid categories are available here http://cms-sw.github.io/categories.html 3 | #Format 4 | #gh_user: 5 | #- category 6 | #- category 7 | 8 | -------------------------------------------------------------------------------- /repos/smuzaffar/int_build/groups.yaml: -------------------------------------------------------------------------------- 1 | #Map between groups and github users, format of this file is 2 | #groupname: 3 | # - gh_user 4 | # - gh_user 5 | #groupname: 6 | # - gh_user 7 | # - gh_user 8 | -------------------------------------------------------------------------------- /repos/smuzaffar/int_build/releases.py: -------------------------------------------------------------------------------- 1 | # Default development branch 2 | # Changes from master branch will be merge in to it 3 | # Any PR open against this will be automatically closed by cms-bot (Pr should be made for master branch) 4 | # For new release cycle just change this and make sure to add its milestone and production branches 5 | 6 | CMSSW_DEVEL_BRANCH = "CMSSW_10_0_X" 7 | RELEASE_BRANCH_MILESTONE = {} 8 | RELEASE_BRANCH_CLOSED = [] 9 
| RELEASE_BRANCH_PRODUCTION = [] 10 | SPECIAL_RELEASE_MANAGERS = [] 11 | RELEASE_MANAGERS = {} 12 | USERS_TO_TRIGGER_HOOKS = set( 13 | SPECIAL_RELEASE_MANAGERS + [m for rel in RELEASE_MANAGERS for m in rel] 14 | ) 15 | -------------------------------------------------------------------------------- /repos/smuzaffar/int_build/repo_config.py: -------------------------------------------------------------------------------- 1 | from cms_static import GH_CMSSW_ORGANIZATION, GH_CMSSW_REPO, CMSBUILD_GH_USER, get_jenkins 2 | from os.path import basename, dirname, abspath 3 | 4 | GH_TOKEN = "~/.github-token-cmsbot" 5 | GH_TOKEN_READONLY = "~/.github-token-readonly" 6 | CONFIG_DIR = dirname(abspath(__file__)) 7 | CMSBUILD_USER = "cmsbot" 8 | GH_REPO_ORGANIZATION = basename(dirname(CONFIG_DIR)) 9 | GH_REPO_FULLNAME = "smuzaffar/int-build" 10 | CREATE_EXTERNAL_ISSUE = False 11 | JENKINS_SERVER = get_jenkins("cms-jenkins") 12 | GITHUB_WEBHOOK_TOKEN = "U2FsdGVkX1+GEHdp/Cmu73+ctvrzSGXc9OvL+8bZyjOe6ZPkqr/GIPgpJHiEp+hR" 13 | ADD_LABELS = False 14 | ADD_WEB_HOOK = False 15 | IGNORE_ISSUES = [] 16 | 17 | 18 | def file2Package(filename): 19 | return GH_REPO_ORGANIZATION 20 | -------------------------------------------------------------------------------- /repos/smuzaffar/int_build/run-pr-tests: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | echo OK 3 | echo -e "+1\n\nAll OK" > $WORKSPACE/result.txt 4 | 5 | -------------------------------------------------------------------------------- /repos/smuzaffar/int_build/super-users.yaml: -------------------------------------------------------------------------------- 1 | #List of super users, format is 2 | #- ghuser 3 | #- ghuser 4 | -------------------------------------------------------------------------------- /repos/smuzaffar/int_build/watchers.yaml: -------------------------------------------------------------------------------- 1 | # Map between github username and the list of 
packages to watch. 2 | # Package is a regular expression which will be automatically delimited by ".*" 3 | # Format of this file is 4 | #ghuser: 5 | # - subsystem1/ 6 | # - subsystem/package1 7 | #ghuser: 8 | # - subsystem2/package1 9 | # - subsystem2/package2 10 | -------------------------------------------------------------------------------- /rucio/deploy/oidc/etc/rucio.cfg: -------------------------------------------------------------------------------- 1 | [client] 2 | rucio_host = https://cms-rucio.cern.ch 3 | auth_host = https://cms-rucio-auth.cern.ch 4 | request_retries = 3 5 | auth_type = oidc 6 | oidc_issuer = cms 7 | ca_cert = /cvmfs/cms.cern.ch/grid/etc/grid-security/certificates/ 8 | -------------------------------------------------------------------------------- /rucio/deploy/rucio.cfg: -------------------------------------------------------------------------------- 1 | # Copyright European Organization for Nuclear Research (CERN) 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # You may not use this file except in compliance with the License. 
#!/bin/bash

""":"
python_cmd="python3"
python -V >/dev/null 2>&1 && python_cmd="python"
exec ${python_cmd} $0 ${1+"$@"}
"""

from __future__ import print_function
from sys import exit, argv
from os import environ
from cmsutils import cmsRunProcessCount, doCmd
from logUpdater import LogUpdater

# Abort early unless a CMSSW developer environment (cmsenv) is active:
# both CMSSW_BASE and SCRAM_ARCH must be set before running this script.
if ("CMSSW_BASE" not in environ) or ("SCRAM_ARCH" not in environ):
    print(
        "ERROR: Unable to find the release environment, please make sure you have set the cmssw environment before calling this script"
    )
    exit(1)

# Optional first CLI argument: timeout (seconds) for the addOnTests.py run.
# Fall back to 2 hours when missing or not an integer.
timeout = 7200
try:
    timeout = int(argv[1])
except (IndexError, ValueError):
    timeout = 7200
logger = LogUpdater(environ["CMSSW_BASE"])
# Run the add-on tests under `timeout`; redirect stdout to the log FIRST so
# that the subsequent 2>&1 also points stderr at addOnTests.log (the previous
# "2>&1 >addOnTests.log" order left stderr on the terminal).
ret = doCmd(
    "cd %s; rm -rf addOnTests; timeout %s addOnTests.py -j %s >addOnTests.log 2>&1"
    % (environ["CMSSW_BASE"], timeout, cmsRunProcessCount)
)
# Bundle the per-test logs and push them to the results area.
doCmd("cd " + environ["CMSSW_BASE"] + "/addOnTests/logs; zip -r addOnTests.zip *.log")
logger.updateAddOnTestsLogs()
$CMSSW_RELEASE_BASE/src/Validation/Geometry/test/dddvsdb/runDDDvsDBGeometryValidation.sh auto:run2_mc GeometryExtended2015 GeometryExtended2015 14 | 15 | rm -rf $RELEASE_FORMAT 16 | 17 | -------------------------------------------------------------------------------- /run-ib-iwyu: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | CMS_BOT_DIR=$(dirname $0) 3 | case $CMS_BOT_DIR in /*) ;; *) CMS_BOT_DIR=$(pwd)/${CMS_BOT_DIR} ;; esac 4 | cd $LOCALRT 5 | rm -rf src 6 | cp -r $CMSSW_RELEASE_BASE/src src 7 | find src -maxdepth 2 -type l -exec rm -f {} \; 8 | CPUS=`nproc` 9 | BUILD_LOG=yes scram b -k -j $CPUS compile COMPILER=iwyu || true 10 | scram build -f buildlog 11 | if [ -d tmp/$SCRAM_ARCH/cache/log/src ] ; then 12 | for logfile in `find tmp/$SCRAM_ARCH/cache/log/src -name 'build.log' -type f` ; do 13 | DIR=`echo $logfile | cut -d/ -f6,7` 14 | mkdir -p iwyu/$DIR 15 | ${CMS_BOT_DIR}/parse_iwyu_logs.py $logfile $CMSSW_VERSION >iwyu/$DIR/index.html 16 | cp $logfile iwyu/$DIR 17 | done 18 | fi 19 | 20 | if [ ! 
$(ls -A $LOCALRT/iwyu | wc -l) -eq 0 ]; then 21 | pushd $LOCALRT/iwyu 22 | tar czf iwyu.tar.gz * 23 | popd 24 | mkdir $WORKSPACE/results 25 | mv $LOCALRT/iwyu/iwyu.tar.gz $WORKSPACE/results 26 | mv $LOCALRT/stats.json $WORKSPACE/results 27 | sed -i 's/}{/,/g' $WORKSPACE/results/stats.json 28 | source $CMS_BOT_DIR/jenkins-artifacts 29 | send_jenkins_artifacts $WORKSPACE/results/ iwyu/${RELEASE_FORMAT}/${ARCHITECTURE} 30 | fi 31 | 32 | -------------------------------------------------------------------------------- /run-ib-material-budget: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | CMS_BOT_DIR=$(dirname $0) 3 | case $CMS_BOT_DIR in /*) ;; *) CMS_BOT_DIR=$(pwd)/${CMS_BOT_DIR} ;; esac 4 | 5 | eval `scram run -sh` 6 | mkdir $LOCALRT/run 7 | cd $LOCALRT/run 8 | 9 | $CMS_BOT_DIR/run-material-budget 10 | 11 | source $CMS_BOT_DIR/jenkins-artifacts 12 | send_jenkins_artifacts $LOCALRT/run material-budget/$CMSSW_VERSION/$SCRAM_ARCH || exit 1 13 | 14 | -------------------------------------------------------------------------------- /run-ib-python3: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | 3 | cd $CMSSW_BASE 4 | rm -rf src cfipython 5 | curl -k -L -s -o src.tar.gz https://github.com/cms-sw/cmssw/archive/${CMSSW_VERSION}.tar.gz 6 | tar -xzf src.tar.gz 7 | rm -rf src.tar.gz 8 | mv cmssw-${CMSSW_VERSION} src 9 | mkdir -p ${CMSSW_BASE}/cfipython/${SCRAM_ARCH}/ 10 | rsync -a $CMSSW_RELEASE_BASE/cfipython/${SCRAM_ARCH}/ ${CMSSW_BASE}/cfipython/${SCRAM_ARCH}/ 11 | find ${CMSSW_BASE}/cfipython/${SCRAM_ARCH}/ -type l | xargs --no-run-if-empty rm -f 12 | echo '
' >  python3.html
13 | CMD_python=$(which python3) scram b -r -v -k -j $(nproc) CompilePython >> python3.html 2>&1
14 | grep -n 'Error compiling' python3.html | while IFS= read -r line; do
15 |   LN=$(echo "$line" | sed 's|:.*||')
16 |   PKG=$(echo "$line" | sed 's|[^/]*/||' | cut -d/ -f1,2 | sed 's|/||')
17 |   sed -i -e "${LN}s|\(.*\)|

\1

|" python3.html 18 | done 19 | echo '
' >> python3.html 20 | 21 | -------------------------------------------------------------------------------- /run-material-budget: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | 3 | VGEO_DIR=$CMSSW_BASE/src/Validation/Geometry 4 | if [ ! -e $VGEO_DIR ] ; then VGEO_DIR=$CMSSW_RELEASE_BASE/src/Validation/Geometry ; fi 5 | 6 | cmsRun ${VGEO_DIR}/python/single_neutrino_cfg.py >$LOCALRT/single_neutrino_cfg.log 2>&1 7 | #Remove big plugin paths 8 | export LD_LIBRARY_PATH=$(echo $LD_LIBRARY_PATH | tr ':' '\n' | grep -v /biglib/$SCRAM_ARCH | tr '\n' ':' | sed 's|:$||') 9 | 10 | cmsRun ${VGEO_DIR}/test/runP_Tracker_cfg.py >$LOCALRT/runP_Tracker_cfg.log 2>&1 11 | mkdir Images 12 | 13 | if [ -e ${VGEO_DIR}/test/MaterialBudget.py ] ; then 14 | cp ${VGEO_DIR}/test/MaterialBudget.py . 15 | python MaterialBudget.py -s -d Tracker || true 16 | else 17 | cp ${VGEO_DIR}/test/MaterialBudget.C . 18 | root -b -q 'MaterialBudget.C("Tracker")' || true 19 | fi 20 | -------------------------------------------------------------------------------- /spack/backport.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | UPSTREAM_SPACK=${UPSTREAM_SPACK:-"/home/razumov/Work/_CMS/vanilla_spack"} 3 | ######################################################################################################################### 4 | [ -d ${UPSTREAM_SPACK} ] || (echo "Invalid upstream Spack location: ${UPSTREAM_SPACK}"; exit 2) 5 | SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" 6 | cd $(dirname $SCRIPT_DIR) 7 | [ $# -lt 1 ] && (echo "Usage: backport.sh [ ...]"; exit 1) 8 | for pname in "$@" 9 | do 10 | updir=${UPSTREAM_SPACK}/var/spack/repos/builtin/packages/${pname} 11 | [ ! -d ${updir} -o ! 
-f ${updir}/package.py ] && (echo "Can't find recipe for $pname"; exit 2) 12 | cp -rf ${updir} repos/backport/packages 13 | cp -rf repos/backport/packages/${pname} spack/var/spack/repos/builtin/packages 14 | done 15 | -------------------------------------------------------------------------------- /spack/cvmfsInstall.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | export CMSARCH=${CMSARCH:-slc7_amd64_gcc900} 3 | export SCRAM_ARCH=$CMSARCH 4 | export CVMFS_REPOSITORY=cms-ib.cern.ch 5 | export BASEDIR=/cvmfs/$CVMFS_REPOSITORY 6 | export USE_SINGULARITY=true 7 | export WORKDIR=$WORKSPACE 8 | 9 | weekno=$(tail -1 $WORKSPACE/cms-bot/ib-weeks) 10 | export RPM_INSTALL_PREFIX=$BASEDIR/$weekno/spack 11 | 12 | rm -f ${WORKSPACE}/fail 13 | 14 | cd $WORKSPACE/cms-bot 15 | ./spack/bootstrap.sh 16 | ./cvmfs_deployment/start_transaction.sh 17 | 18 | # Check if the transaction really happened 19 | if [ `touch $BASEDIR/is_writable 2> /dev/null; echo "$?"` -eq 0 ]; then 20 | rm $BASEDIR/is_writable 21 | else 22 | echo CVMFS filesystem is not writable. Aborting. 23 | echo " " | mail -s "$CVMFS_REPOSITORY cannot be set to transaction" cms-sdt-logs@cern.ch 24 | exit 1 25 | fi 26 | 27 | # Use dockerrun since we may need to use qemu 28 | source ${WORKSPACE}/cms-bot/dockerrun.sh ; dockerrun ${WORKSPACE}/cms-bot/spack/install.sh 29 | exit_code=$? 
#!/bin/bash -x
# Build CMS externals with Spack inside a Singularity/Docker container and
# optionally publish the build cache. Fails (and leaves ${WORKSPACE}/fail)
# when the containerized build reports an error.
export CMSARCH=${CMSARCH:-slc7_amd64_gcc900}
export SCRAM_ARCH=$CMSARCH
export USE_SINGULARITY=true
export WORKDIR=${WORKSPACE}
# Derive a default container image from the architecture string when the
# caller did not provide one (e.g. slc7_amd64_... -> cmssw/cc7:x86_64).
# Quote the expansion: an unset/spacey DOCKER_IMG previously broke the test.
if [ "x$DOCKER_IMG" = "x" ]; then
  arch="$(echo $CMSARCH | cut -d_ -f2)"
  os=$(echo $CMSARCH | cut -d_ -f1 | sed 's|slc7|cc7|')
  if [ "${os}" = "rhel8" ] ; then os="ubi8" ; fi
  DOCKER_IMG="cmssw/${os}:${arch}"
  if [ "${arch}" = "amd64" ] ; then
    DOCKER_IMG="cmssw/${os}:x86_64"
  fi
fi
export DOCKER_IMG

rm -f ${WORKSPACE}/fail

${WORKSPACE}/cms-bot/spack/bootstrap.sh

${WORKSPACE}/cms-bot/docker_launcher.sh ${WORKSPACE}/cms-bot/spack/build.sh
if [ -e ${WORKSPACE}/fail ]; then
  echo "Build failed, uploading monitor data"
  tar -zcf ${WORKSPACE}/monitor.tar.gz ${WORKSPACE}/monitor
  scp ${WORKSPACE}/monitor.tar.gz cmsbuild@lxplus:/eos/user/r/razumov/www/CMS/mirror
  rm ${WORKSPACE}/monitor.tar.gz
  touch ${WORKSPACE}/fail
  exit 1
fi
# Quote the default expansion so the test is well-formed when unset.
if [ "${UPLOAD_BUILDCACHE-x}" = "true" ]; then
  echo "Prepare mirror and buildcache"
  # TODO: create mirror and sync to s3
  # TODO: push gpg key to mirror (broken in 0.17, should be working in 0.18)
  bin/spack -e ${SPACK_ENV_NAME} buildcache create -r -a --mirror-url s3://cms-spack/
fi
echo "All done"
PrintGeomInfoAction::update 3 | edm::ConcurrentHadronizerFilter ${OVERRIDE_DIR}/${cmd} 11 | echo "LD_PRELOAD='' exec ${sys_cmd} \"\$@\"" >> ${OVERRIDE_DIR}/${cmd} 12 | chmod +x ${OVERRIDE_DIR}/${cmd} 13 | fi 14 | done 15 | -------------------------------------------------------------------------------- /system-tools/nproc/nproc: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo ${JENKINS_AGENT_CORES} 3 | -------------------------------------------------------------------------------- /templates/performance-summary-plots-list: -------------------------------------------------------------------------------- 1 | # This is a list that contains the workflows,steps, and 2 | # parameter to be reported on the webpage. When read, 3 | # it will be used to generate the json file with the 4 | # urls of the results on graphite. 5 | 5_1_TTbar+TTbarFS+HARVESTFS step1_TTbar+TTbarFS+HARVESTFS AvgEventCPU 6 | 5_1_TTbar+TTbarFS+HARVESTFS step1_TTbar+TTbarFS+HARVESTFS PeakValueRss 7 | 202_0_TTbar+TTbarINPUT+DIGIPU1+RECOPU1+HARVEST step2_TTbar+TTbar+DIGIPU1+RECOPU1+HARVEST AvgEventCPU 8 | 202_0_TTbar+TTbarINPUT+DIGIPU1+RECOPU1+HARVEST step2_TTbar+TTbar+DIGIPU1+RECOPU1+HARVEST PeakValueRss 9 | 202_0_TTbar+TTbarINPUT+DIGIPU1+RECOPU1+HARVEST step3_TTbar+TTbar+DIGIPU1+RECOPU1+HARVEST AvgEventCPU 10 | 202_0_TTbar+TTbarINPUT+DIGIPU1+RECOPU1+HARVEST step3_TTbar+TTbar+DIGIPU1+RECOPU1+HARVEST PeakValueRss 11 | 400_0_TTbar+TTbarFSPU+HARVESTFS step1_TTbar+TTbarFSPU+HARVESTFS AvgEventCPU 12 | 400_0_TTbar+TTbarFSPU+HARVESTFS step1_TTbar+TTbarFSPU+HARVESTFS PeakValueRss 13 | -------------------------------------------------------------------------------- /tests/Framework.patch: -------------------------------------------------------------------------------- 1 | --- Framework.py.orig 2025-04-03 10:01:22.739783757 +0200 2 | +++ Framework.py 2025-04-03 10:03:32.558079128 +0200 3 | @@ -241,6 +241,7 @@ 4 | 5 | class BasicTestCase(unittest.TestCase): 
6 | recordMode = False 7 | + recordActionMode = False 8 | tokenAuthMode = False 9 | jwtAuthMode = False 10 | retry = None 11 | @@ -349,15 +350,15 @@ 12 | 13 | if self.tokenAuthMode: 14 | self.g = github.Github( 15 | - self.oauth_token, retry=self.retry, pool_size=self.pool_size 16 | + self.oauth_token, retry=self.retry, per_page=100 17 | ) 18 | elif self.jwtAuthMode: 19 | self.g = github.Github( 20 | - jwt=self.jwt, retry=self.retry, pool_size=self.pool_size 21 | + jwt=self.jwt, retry=self.retry, per_page=100 22 | ) 23 | else: 24 | self.g = github.Github( 25 | - self.login, self.password, retry=self.retry, pool_size=self.pool_size 26 | + self.login, self.password, retry=self.retry, per_page=100 27 | ) 28 | 29 | 30 | @@ -379,3 +380,7 @@ 31 | 32 | def setPoolSize(pool_size): 33 | BasicTestCase.pool_size = pool_size 34 | + 35 | + 36 | +def activateRecordActionMode(): # pragma no cover (Function useful only when recording new tests, not used during automated tests) 37 | + BasicTestCase.recordActionMode = True 38 | -------------------------------------------------------------------------------- /tests/PRActionData/TestProcessPr.test_assign_from.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "load-bot-cache", 4 | "data": { 5 | "commits": {}, 6 | "emoji": {}, 7 | "signatures": {} 8 | } 9 | }, 10 | { 11 | "type": "emoji", 12 | "data": [ 13 | 2769192218, 14 | "+1", 15 | true 16 | ] 17 | }, 18 | { 19 | "type": "create-comment", 20 | "data": "New categories assigned: core\n\nDr15Jones,makortel,smuzaffar you have been requested to review this Pull request/Issue and eventually sign? 
Thanks" 21 | }, 22 | { 23 | "type": "add-label", 24 | "data": [ 25 | "core-pending", 26 | "pending-signatures" 27 | ] 28 | }, 29 | { 30 | "type": "remove-label", 31 | "data": [ 32 | "pending-assignment" 33 | ] 34 | }, 35 | { 36 | "type": "edit-comment", 37 | "data": "cms-bot internal usage" 38 | }, 39 | { 40 | "type": "save-bot-cache", 41 | "data": { 42 | "commits": {}, 43 | "emoji": { 44 | "2769192218": "+1" 45 | }, 46 | "signatures": {} 47 | } 48 | } 49 | ] -------------------------------------------------------------------------------- /tests/PRActionData/TestProcessPr.test_assign_from_invalid.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "load-bot-cache", 4 | "data": { 5 | "commits": {}, 6 | "emoji": { 7 | "2769192218": "+1" 8 | }, 9 | "signatures": {} 10 | } 11 | }, 12 | { 13 | "type": "add-label", 14 | "data": [ 15 | "pending-assignment" 16 | ] 17 | }, 18 | { 19 | "type": "remove-label", 20 | "data": [ 21 | "core-pending", 22 | "pending-signatures" 23 | ] 24 | }, 25 | { 26 | "type": "save-bot-cache", 27 | "data": { 28 | "commits": {}, 29 | "emoji": { 30 | "2769192218": "+1" 31 | }, 32 | "signatures": {} 33 | } 34 | } 35 | ] -------------------------------------------------------------------------------- /tests/PRActionData/TestProcessPr.test_create_repo.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "create-property-file", 4 | "data": { 5 | "filename": "query-new-data-repo-issues-30.properties", 6 | "data": { 7 | "ISSUE_NUMBER": "30" 8 | } 9 | } 10 | }, 11 | { 12 | "type": "add-label", 13 | "data": [ 14 | "pending-assignment" 15 | ] 16 | }, 17 | { 18 | "type": "remove-label", 19 | "data": [] 20 | }, 21 | { 22 | "type": "create-comment", 23 | "data": "cms-bot internal usage" 24 | }, 25 | { 26 | "type": "save-bot-cache", 27 | "data": { 28 | "emoji": {}, 29 | "signatures": {}, 30 | "commits": {} 31 | } 32 | }, 33 | { 34 | "type": 
"create-comment", 35 | "data": "A new Issue was created by @iarspider.\n\n@Dr15Jones, @iarspider, @makortel, @smuzaffar can you please review it and eventually sign/assign? Thanks.\n\ncms-bot commands are listed here\n" 36 | } 37 | ] -------------------------------------------------------------------------------- /tests/PRActionData/TestProcessPr.test_empty_pr.json: -------------------------------------------------------------------------------- 1 | [] -------------------------------------------------------------------------------- /tests/PRActionData/TestProcessPr.test_future_commit.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "update-milestone", 4 | "data": { 5 | "id": 4, 6 | "title": "CMSSW_14_1_X" 7 | } 8 | } 9 | ] -------------------------------------------------------------------------------- /tests/PRActionData/TestProcessPr.test_many_commits_warn.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "update-milestone", 4 | "data": { 5 | "id": 4, 6 | "title": "CMSSW_14_1_X" 7 | } 8 | }, 9 | { 10 | "type": "create-comment", 11 | "data": "This PR contains many commits (151 >= 150) and will not be processed. Please ensure you have selected the correct target branch and consider squashing unnecessary commits.\nDr15Jones, iarspider, makortel, smuzaffar, to re-enable processing of this PR, you can write `+commit-count` in a comment. Thanks." 
12 | } 13 | ] -------------------------------------------------------------------------------- /tests/PRActionData/TestProcessPr.test_new_issue.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "add-label", 4 | "data": [ 5 | "pending-assignment" 6 | ] 7 | }, 8 | { 9 | "type": "remove-label", 10 | "data": [] 11 | }, 12 | { 13 | "type": "create-comment", 14 | "data": "cms-bot internal usage" 15 | }, 16 | { 17 | "type": "save-bot-cache", 18 | "data": { 19 | "emoji": {}, 20 | "signatures": {}, 21 | "commits": {} 22 | } 23 | }, 24 | { 25 | "type": "create-comment", 26 | "data": "A new Issue was created by iarspider.\n\nDr15Jones, iarspider, makortel, smuzaffar can you please review it and eventually sign/assign? Thanks.\n\ncms-bot commands are listed here\n" 27 | } 28 | ] -------------------------------------------------------------------------------- /tests/PRActionData/TestProcessPr.test_orp_issue.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "emoji", 4 | "data": [ 5 | 2858276137, 6 | "+1", 7 | true 8 | ] 9 | }, 10 | { 11 | "type": "create-comment", 12 | "data": "New categories assigned: orp\n\n@iarspider you have been requested to review this Pull request/Issue and eventually sign? Thanks" 13 | }, 14 | { 15 | "type": "add-label", 16 | "data": [ 17 | "orp-pending", 18 | "pending-signatures" 19 | ] 20 | }, 21 | { 22 | "type": "remove-label", 23 | "data": [] 24 | }, 25 | { 26 | "type": "create-comment", 27 | "data": "cms-bot internal usage" 28 | }, 29 | { 30 | "type": "save-bot-cache", 31 | "data": { 32 | "emoji": { 33 | "2858276137": "+1" 34 | }, 35 | "signatures": {}, 36 | "commits": {} 37 | } 38 | }, 39 | { 40 | "type": "create-comment", 41 | "data": "A new Issue was created by @iarspider.\n\n@Dr15Jones, @iarspider, @makortel, @smuzaffar can you please review it and eventually sign/assign? 
Thanks.\n\ncms-bot commands are listed here\n" 42 | } 43 | ] -------------------------------------------------------------------------------- /tests/PRActionData/TestProcessPr.test_pr_develop_branch.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "edit-pr", 4 | "data": { 5 | "base": "master", 6 | "res": null 7 | } 8 | }, 9 | { 10 | "type": "create-comment", 11 | "data": "@iarspider, CMSSW_14_1_X branch is closed for direct updates. cms-bot is going to move this PR to master branch.\nIn future, please use cmssw master branch to submit your changes.\n" 12 | } 13 | ] -------------------------------------------------------------------------------- /tests/PRActionData/TestProcessPr.test_warn_many_files.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "update-milestone", 4 | "data": { 5 | "id": 4, 6 | "title": "CMSSW_14_1_X" 7 | } 8 | }, 9 | { 10 | "type": "create-comment", 11 | "data": "This PR touches many files (1510 >= 1500) and will not be processed. Please ensure you have selected the correct target branch and consider splitting this PR into several.\nDr15Jones, iarspider, makortel, smuzaffar, to re-enable processing of this PR, you can write `+file-count` in a comment. Thanks." 
12 | } 13 | ] -------------------------------------------------------------------------------- /tests/README.md: -------------------------------------------------------------------------------- 1 | # Testing process_pr.py 2 | 3 | ## To replay the tests 4 | * Run `./run_pr_tests.sh [test_name]`, will run all tests if `test_name` is not given 5 | 6 | ## To record a new test: 7 | 8 | ### Setup 9 | * Run tests in replay mode at least once to create venv 10 | * Create `GithubCredentials.py` in top-level directory (**not** in `tests/`) with contents: 11 | 12 | ```py 13 | login = "" 14 | password = "" 15 | oauth_token = "" 16 | jwt = "" 17 | app_id = "" 18 | app_private_key = "" 19 | ``` 20 | 21 | * Also write oauth token in `~/.github-token` 22 | 23 | ### Recording tests 24 | 25 | * Prepare (or update) a (cmssw) PR with desired state 26 | * Implement a new test in `test_process_pr.py`. For most tests, only a call to `self.runTest(prId=...)` is needed. 27 | * Run `./process-pull-request.py -n -r ` to check bot behaviour 28 | * Run `pytest --auth_with_token --record -k test_draft_pr_opened test_process_pr.py` to record PR state and bot actions 29 | * Check recorded actions (`PRActionData/TestProcessPr..json`) 30 | * Make bot actually perform actions: `./process-pull-request.py -r ` -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cms-sw/cms-bot/a86c80b6135b23c1ac3440bfbe9df727fa97e33c/tests/__init__.py -------------------------------------------------------------------------------- /tests/coverage_process_pr.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | coverage run --include=../process_pr.py -m pytest --auth_with_token test_process_pr.py 3 | coverage html 4 | if [ $(hostname) = "cmspc001" ]; then 5 | ssh lxplus rm -rf /eos/user/r/razumov/www/htmlcov 
6 | scp -r htmlcov lxplus:/eos/user/r/razumov/www/ 7 | fi -------------------------------------------------------------------------------- /tests/record_test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -xe 2 | 3 | case $2 in 4 | 1) 5 | FLAGS="--record" 6 | ;; 7 | 2) 8 | FLAGS="--record_action" 9 | ;; 10 | 3) 11 | FLAGS="--record --record_action" 12 | ;; 13 | *) 14 | echo "USAGE: record_test []"; 15 | exit 1 16 | ;; 17 | esac 18 | 19 | pytest -Wignore::DeprecationWarning --log-disable=github.Requester --log-cli-level=DEBUG -k $1 $FLAGS --auth_with_token test_process_pr.py 20 | ./verify_load_cache.py -------------------------------------------------------------------------------- /tests/run_pr_tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -xe 2 | INSTALL_REQS=0 3 | if [ ! -d venv ]; then 4 | python3 -m venv venv 5 | INSTALL_REQS=1 6 | fi 7 | 8 | source venv/bin/activate 9 | if [ $INSTALL_REQS -eq 1 ]; then 10 | python3 -m pip install --upgrade pip 11 | pip install -r test-requirements.txt 12 | fi 13 | 14 | if [ ! 
#!/usr/bin/env python3
"""Sanity-check config.map: every ';'-separated entry must be KEY=VALUE with a known key."""

import re

# Alternation of every key name allowed in a config.map entry.
KEYS_RE = "(IB_TEST_TYPE|MULTIARCH_OPTS|BUILD_OPTS|CMS_BOT_BRANCH|CVMFS_INSTALL_IMAGE|DEBUG_EXTERNALS|SKIP_TESTS|REQUIRED_TEST|FORCE_FULL_IB|SLAVE_LABELS|SINGULARITY|IB_ONLY|BUILD_DAY|NO_IB|SCRAM_ARCH|RELEASE_QUEUE|BUILD_PATCH_RELEASE|PKGTOOLS_TAG|CMSDIST_TAG|RELEASE_BRANCH|ADDITIONAL_TESTS|PR_TESTS|DISABLED|ALWAYS_TAG_CMSSW|DO_STATIC_CHECKS|PROD_ARCH|ENABLE_DEBUG|PRS_TEST_CLANG|MESOS_QUEUE|DO_NOT_INSTALL|BUILD_HOUR|IB_WEB_PAGE|DOCKER_IMG|SPACK)"


def check_line(line):
    """Validate one non-empty config.map line.

    Raises AssertionError when an entry lacks '=' or uses an unknown key.
    """
    line = line.strip(";")
    for entry in line.split(";"):
        assert "=" in entry
        # maxsplit=1: values may legally contain '=' themselves.
        (key, value) = entry.split("=", 1)
        # fullmatch (not match) so keys that merely START with a known
        # key name, e.g. "SCRAM_ARCHX", are rejected.
        assert re.fullmatch(KEYS_RE, key)


if __name__ == "__main__":
    with open("config.map") as cfg:
        for line in cfg.read().split("\n"):
            if not line:
                continue
            check_line(line)
def process(opts):
    """Trigger the configured Jenkins job, forwarding each NAME=VALUE parameter."""
    # Build the Jenkins "parameter" payload; split on the first '=' only so
    # values containing '=' survive intact.
    parameters = [
        {"name": name, "value": value}
        for name, value in (item.split("=", 1) for item in opts.params)
    ]
    payload = json.dumps({"parameter": parameters})
    build_jobs(opts.server, [(payload, opts.job)], headers={}, user=opts.user)
#!/usr/bin/env python3
"""Refresh the cached list of GitHub webhook source IP ranges.

Writes https://api.github.com/meta "hooks" entries one per line to a temp
file, then atomically moves it into place; exits 1 (and cleans up) when the
API returned no entries.
"""
from _py2with3compatibility import urlopen
from json import loads
from os import system
from sys import exit

ip_file = "/data/sdt/github-hook-meta.txt"
cnt = 0
with open("%s.tmp" % ip_file, "w") as ref:
    # read() the whole response body: the reply is a single JSON document and
    # is not guaranteed to arrive as exactly one line (readlines()[0] silently
    # truncated any multi-line response).
    for m in loads(urlopen("https://api.github.com/meta").read())["hooks"]:
        ref.write("%s\n" % m)
        cnt += 1
if cnt:
    system("mv %s.tmp %s" % (ip_file, ip_file))
else:
    system("rm -f %s.tmp" % ip_file)
    exit(1)
-maxdepth 4 -mindepth 4 -name log -type f | sed 's|^./||') ; do 18 | dir=$(dirname $log) 19 | mkdir -p ${LOCAL_LOGDIR}/${dir} 20 | mv $log ${LOCAL_LOGDIR}/${dir}/ 21 | [ -e ${dir}/src-logs.tgz ] && mv ${dir}/src-logs.tgz ${LOCAL_LOGDIR}/${dir}/ 22 | json=$(basename $(dirname $dir)).json 23 | [ -e "${dir}/${json}" ] && mv ${dir}/${json} ${LOCAL_LOGDIR}/${dir}/ 24 | [ -e "${dir}/opts.json" ] && mv ${dir}/opts.json ${LOCAL_LOGDIR}/${dir}/ 25 | done 26 | popd 27 | fi 28 | source $CMS_BOT_DIR/jenkins-artifacts || true 29 | send_jenkins_artifacts ${LOCAL_LOGDIR}/ ${LOG_DES}/ || true 30 | 31 | -------------------------------------------------------------------------------- /weekly-cmsset_default.sh: -------------------------------------------------------------------------------- 1 | XARCH=$(echo $1 | cut -d_ -f2) 2 | [ "${XARCH}" != "amd64" ] || XARCH=x86_64 3 | IB_WEEK_ENV=$(ls -d /cvmfs/cms-ib.cern.ch/sw/$XARCH/nweek-*/cmsset_default.sh | tail -1) 4 | source ${IB_WEEK_ENV} || true 5 | which git-cms-addpkg 6 | 7 | --------------------------------------------------------------------------------