├── .gitignore ├── automation ├── README.md ├── aql-example │ ├── excercises │ │ ├── 1-latestDockerApp.aql │ │ ├── 2-listDockerAppDependencies.aql │ │ ├── 3-latestDockerAppProperties.aql │ │ └── 4-listCriticalVulnerabiliitesOnProd.aql │ └── solution │ │ ├── 1-latestDockerApp.aql │ │ ├── 2-listDockerAppDependencies.aql │ │ ├── 3-latestDockerAppProperties.aql │ │ ├── 4-listCriticalVulnerabiliitesOnProd.aql │ │ └── latestDockerAppBuild.aql ├── catchup.sh ├── docker-app │ ├── Dockerfile │ ├── Jenkinsfile │ ├── app-test │ │ ├── promote-gradleWar.json │ │ └── retag.json │ ├── appmodules-download.json │ └── retag.json ├── docker-framework │ ├── Dockerfile │ ├── Jenkinsfile │ ├── README.md │ ├── automation-docker-prod-local.json │ ├── automation-gradle-release-local.json │ ├── framework-download.json │ ├── framework-test │ │ └── Dockerfile │ ├── gradeWar-download.json │ ├── local-repository-template │ ├── retag.json │ └── tomcat │ │ ├── framework-download-template │ │ ├── framework-download.json │ │ ├── framework-upload.json │ │ ├── framework-verify.json │ │ └── server.xml ├── gradle-example │ ├── dev │ │ └── Jenkinsfile │ └── release │ │ └── Jenkinsfile ├── maven-example │ └── Jenkinsfile └── preventUnapproved │ ├── PreventUnapproved.groovy │ ├── PreventUnapprovedTest.groovy │ └── README.md ├── bintray ├── Dockerfile ├── Jenkinsfile ├── bintrayrocks │ ├── amaze-1.0-x86.tgz │ ├── amaze-1.0.tgz │ └── exec │ │ ├── README.md │ │ └── bintrayrocks.sh ├── commands.list ├── distrib │ └── Jenkinsfile ├── eula.md ├── jfrog └── prep.sh ├── conan └── exercises │ ├── Dockerfile │ ├── catchup.sh │ ├── consumer │ ├── .gitignore │ ├── CMakeLists.txt │ ├── conanfile.txt │ └── timer.cpp │ ├── consumer_gcc │ ├── .gitignore │ ├── conanfile.txt │ └── timer.cpp │ ├── create │ └── README │ ├── create_sources │ └── README │ ├── gtest │ ├── .gitignore │ ├── consumer │ │ ├── CMakeLists.txt │ │ ├── conanfile.txt │ │ └── example.cpp │ └── package │ │ ├── CMakeLists.txt │ │ ├── conanfile.py │ │ ├── hello.cpp │ │ ├── hello.h │ │ └── test.cpp │ ├── gtest_build_requires │ ├── .gitignore │ ├── consumer │ │ ├── CMakeLists.txt │ │ ├── conanfile.txt │ │ ├── example.cpp │ │ └── test_profile.txt │ └── package │ │ ├── CMakeLists.txt │ │ ├── conanfile.py │ │ ├── hello.cpp │ │ ├── hello.h │ │ └── test.cpp │ ├── header_only │ ├── .gitignore │ └── example.cpp │ ├── jenkins │ └── Jenkinsfile │ ├── linux-armv7 │ └── profile_arm │ ├── CMakeLists.txt │ ├── arm_gcc_debug.profile │ ├── conanfile.py │ └── example.c ├── dockerlfc ├── build.gradle ├── gradlew ├── gradlew.bat ├── settings.gradle └── src │ └── swampup │ ├── DemoSwampUpTraining.groovy │ ├── latestDockerApp.aql │ ├── latestDockerAppReleaseApproval.aql │ ├── listPropertiesOnDeployedDockerApp.aql │ ├── qaApprovalCriteria.aql │ └── testDockerApp.groovy ├── essentials_of_jfrog_artifactory ├── .gitignore ├── README.md ├── aql.json ├── commands-unix.list ├── commands-windows.list ├── group.json ├── jenkins-user.json ├── license..bat ├── license.bat ├── license.sh ├── permission-target-jenkins.json ├── permission-target-read.json ├── permission-target-release-eng.json ├── permission-target-write.json ├── promote.json ├── repository-release-local-config.json ├── repository-release-virtual-config.json ├── repository-release-virtual-config2.json ├── repository-remote-config.json ├── repository-snapshot-local-config.json ├── repository-snapshot-virtual-config.json ├── repository-staging-local-config.json ├── update-permission-target-jenkins.json ├── update-user.json └── user.json ├── jfmc └── 
repository │ ├── CreateGenericLocalRepo │ ├── CreateMultiPushReplicationPeers │ ├── CreateVirtualRepo │ ├── UpdateRepoPushReplication │ └── test_script ├── scale └── buildSrc │ └── src │ └── main │ └── resources │ └── dind │ └── list.sh ├── security ├── commands-unix.list ├── group.json ├── group_admin.json ├── token.json └── user.json └── src └── org └── jfrog └── MyArtifactory.groovy /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store* 2 | # intellij generated files 3 | /.idea 4 | production/ 5 | 6 | # gradle dependencies 7 | dockerlfc/.gradle/ 8 | dockerlfc/gradle/ 9 | -------------------------------------------------------------------------------- /automation/README.md: -------------------------------------------------------------------------------- 1 | # swampup 2 | Automation Training - Swampup Edition 3 | ===================================== 4 | 5 | Url Links - substitute the IP address assigned from Orbitera. 6 | ----- 7 | ```XML 8 | [Artifactory Main HA] - http://35.185.192.7/artifactory 9 | [Artifactory Primary] - http://35.185.192.7:8081/artifactory 10 | [Aritfactory Secondary] - http://35.185.192.7:8082/artifactory 11 | [Artifacatory DR] - http://35.185.192.7:8083/artifactory 12 | [Artifactory Pro -1] - http://35.185.192.7:8084/artifactory 13 | [Artifactory Pro -2] - http://35.185.192.7:8085/artifactory 14 | [Jenkins] - http://35.185.192.7:8088/artifactory 15 | [Xray] - http://35.185.192.7:8080/web 16 | [jfmc] - http://35.185.192.7:8080 17 | [Jenkins HTTP Proxy] - http://35.185.192.7:8086/artifactory 18 | [automation-docker-virtual] - http://35.185.192.7:5002 19 | [docker-virtual] - http://35.185.192.7:5001 20 | [automation-docker-prod-local] - http://35.185.192.7:5003 21 | [swampup scripts] - https://github.com/jfrogtraining/swampup 22 | [project-example] - https://github.com/jfrogtraining/project-examples.git branch swampup201 23 | [Artifactory User Plugins] - https://www.jfrog.com/confluence/display/RTF/User+Plugins 24 | [Artifactory Public API] - for use with Artifactory User Plugin - http://repo.jfrog.org/artifactory/oss-releases-local/org/artifactory/artifactory-papi/%5BRELEASE%5D/artifactory-papi-%5BRELEASE%5D-javadoc.jar!/index.html 25 | [Artifactory User Plugin] - https://github.com/JFrogDev/artifactory-user-plugins 26 | ``` 27 | 28 | API Documentation Links 29 | ----------------------- 30 | 31 | ```XML 32 | [Artifactory REST API ] - https://www.jfrog.com/confluence/display/RTF/Artifactory+REST+API also on Artifactory 5.x dashboard 33 | [Retrieve LATEST Documentation ] - https://www.jfrog.com/confluence/display/RTF/Artifactory+REST+API#ArtifactoryRESTAPI-RetrieveLatestArtifact 34 | [JFrog CLI] - https://www.jfrog.com/confluence/display/CLI/CLI+for+JFrog+Artifactory 35 | [FileSpec] - https://www.jfrog.com/confluence/display/CLI/CLI+for+JFrog+Artifactory#CLIforJFrogArtifactory-UsingFileSpecs 36 | [Jenkins Aritfactory Plugin - DSL] - https://wiki.jenkins-ci.org/display/JENKINS/Artifactory+-+Working+With+the+Pipeline+Jenkins+Plugin 37 | [Jenkins Artifactory Pipeline examples] - https://github.com/JFrogDev/project-examples/tree/master/jenkins-pipeline-examples 38 | [Aritfactory Query Language] - https://www.jfrog.com/confluence/display/RTF/Artifactory+Query+Language 39 | [Aritfactory Client Java] - https://github.com/JFrogDev/artifactory-client-java 40 | [Artifactory User Plugin] - https://www.jfrog.com/confluence/display/RTF/User+Plugins 41 | [Artifactory Public API] - 
http://repo.jfrog.org/artifactory/oss-releases-local/org/artifactory/artifactory-papi/%5BRELEASE%5D/artifactory-papi-%5BRELEASE%5D-javadoc.jar!/index.html 42 | [Artifactory User Plugins on GitHub] - https://github.com/JFrogDev/artifactory-user-plugins 43 | [Spock test framework for Artifactory User plugin] - http://spockframework.org/spock/docs/1.1/index.html 44 | ``` 45 | Download Slide: Automation 46 | --------------- 47 | Click the link below - 48 | https://dl.bintray.com/jfrog/Training-Presentations/Advanced-Automation-With-JFrog-Artifactory.pdf?expiry=1496361600000&id=K8v%2BJBItDfdcU9%2BBa2lxho%2Fg%2FqmA%2F0CR8Tm5UYJ4YeuuqQ1NZiCIC9J6TIMtlTR3fZXLLXsklEZVJG2pjcWZlA%3D%3D&signature=faU6JLBsLILR%2BZr86O9MfFhwNMqQQtuzfAlD2bjDod5Qlk7rxi1eOss7f17ivA0Am0m9w4Zoc%2FZvVkSN4uvKtA%3D%3D 49 | 50 | 51 | Download Slide: Advanced CI 52 | --------------- 53 | Click the link below -- 54 | https://dl.bintray.com/jfrog/Training-Presentations/Advanced%20CI%20-%20Commit%20to%20deployment%20for%20docker_V2.pdf?expiry=1496361600000&id=K8v%2BJBItDfdcU9%2BBa2lxhh1LiT3lYxM1CXYoDHYbvIjEf%2BFfL5XXjp4U42osv%2BGeSr%2BFhsyA6LA2iq0xjbPIvw%3D%3D&signature=I%2BYTJHgxtX6oAbv094DEb1JwszGXmHeKqVC0aRKe4EsPfERQeFtK1idA0KnWYlnMqckfEXnBOBLsZLg%2FszXUZg%3D%3D 55 | 56 | Exercise 1 - Set Up 57 | --------------------- 58 | Introduction to the Artifactory API and the JFrog CLI. The automation-docker-framework builds will fail unless the automation-docker-prod-local repository has been created. 59 | 60 | - Clone github/jfrogtraining/swampup 61 | - Create the automation-docker-prod-local repository using the Artifactory API; hint: see swampup/docker-framework/local-repository-template; use your API key. 62 | - Manually enable Xray scanning for automation-docker-prod-local and configure an Xray build watch and repository watch for automation-docker-prod-local. 63 | - Develop a JFrog CLI file spec to download the following files and upload them to the tomcat-local repository on your Artifactory HA instance. Use the tomcat-virtual repo in your file spec. Details: 64 | - Files to download and upload to the Artifactory HA instance - 65 | * i. tomcat-local/java/jdk-8u91-linux-x64.tar.gz 66 | * ii. tomcat-local/org/apache/apache-tomcat/apache-tomcat-8.5.5.tar.gz 67 | * Source of these files - a. http://jfrog.local:8084/artifactory; Repository: tomcat-local 68 | - Verify your work by running - "jfrog rt s --server-id=artifactory-ha --spec framework-verify.json" 69 | - Execute the following builds 70 | * 1. gradle-example 71 | * 2. automation-docker-framework 72 | * 3. automation-docker-app 73 | - References 74 | * 1. jfrog rt c show 75 | * 2. jfrog rt c swampup-automation --url=http://jfrog.local:8084/artifactory --user=admin --password=password 76 | * 3. jfrog rt c artifactory-ha --url=http://jfrog.local/artifactory --user=admin --password=password 77 | * 4. jfrog rt dl --server-id=swampup-automation --spec framework-download.json 78 | * 5. jfrog rt u --server-id=artifactory-ha --spec framework-upload.json 79 | 80 | ```XML 81 | [Framework-download.json] - downloads the Java JDK and Tomcat 82 | [Framework-upload.json] - uploads the Java JDK and Tomcat to the Artifactory HA instance 83 | [Framework-verify.json] - verifies the upload succeeded; otherwise the docker builds will fail. 84 | [To include a json file in a curl command: curl …] use -T 85 | [catchup.sh] - runs all of exercise 1 86 | ``` 87 | 88 | Contents of this directory 89 | -------------------------- 90 | The docker-framework has its own readme.md file. A command-level sketch of exercise 1 follows below.
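The steps of exercise 1 can be strung together from the commands already used in automation/catchup.sh and the file specs under docker-framework/tomcat. A minimal sketch, assuming the repository is cloned to $HOME/swampup, jfrog.local resolves to your training VM, and you substitute your own API key and admin password:

```bash
cd $HOME/swampup/automation/docker-framework

# Create the automation-docker-prod-local repository through the Repositories API
# (the same call catchup.sh makes for the gradle repo, pointed at the docker repo config).
curl -H "X-JFrog-Art-Api:${APIKEY}" \
     -H "Content-Type: application/vnd.org.jfrog.artifactory.repositories.LocalRepositoryConfiguration+json" \
     -X PUT "http://jfrog.local/artifactory/api/repositories/automation-docker-prod-local" \
     -T automation-docker-prod-local.json

# Configure the two CLI server IDs referenced in the exercise.
jfrog rt c swampup-automation --url=http://jfrog.local:8084/artifactory --user=admin --password=password
jfrog rt c artifactory-ha --url=http://jfrog.local/artifactory --user=admin --password=password

# Pull the JDK and Tomcat from the Pro instance, push them to the HA instance, then verify.
cd tomcat
jfrog rt dl --server-id=swampup-automation --spec framework-download.json
jfrog rt u --server-id=artifactory-ha --spec framework-upload.json
jfrog rt s --server-id=artifactory-ha --spec framework-verify.json
```

catchup.sh wraps the same flow in a menu if you fall behind during the class.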
91 | 92 | maven-example 93 | ------------- 94 | The following files in the maven-example folder 95 | 96 | ```XML 97 | [jenkinsfile] - maven build enabling XRay Scan; property insertion, test cases results 98 | ``` 99 | 100 | gradle-example 101 | -------------- 102 | The dev folder is not used at the moment. 103 | 104 | ```XML 105 | [release/jenkinsfile] - gradle build; property insertion 106 | ``` 107 | -------------------------------------------------------------------------------- /automation/aql-example/excercises/1-latestDockerApp.aql: -------------------------------------------------------------------------------- 1 | items.find ( 2 | { 3 | "repo":{"$eq":"automation-docker-prod-local"}, 4 | "@docker.repoName":{"$eq":"docker-app"} 5 | } 6 | ).include("name", "property.*") 7 | 8 | 9 | -------------------------------------------------------------------------------- /automation/aql-example/excercises/2-listDockerAppDependencies.aql: -------------------------------------------------------------------------------- 1 | items.find ( 2 | { 3 | "repo":{"$eq":"automation-docker-prod-local"}, 4 | "@docker.repoName":{"$eq":"docker-app"}, 5 | "@docker.manifest":{"$eq":"latest"}, 6 | "@qa.functional-test":{"$eq":"pass"} 7 | } 8 | ).include("name", "@docker.repoName", "@docker.manifest", "artifact.module.build") 9 | -------------------------------------------------------------------------------- /automation/aql-example/excercises/3-latestDockerAppProperties.aql: -------------------------------------------------------------------------------- 1 | items.find ( 2 | { 3 | "repo":{"$eq":"automation-docker-prod-local"}, 4 | "@docker.repoName":{"$eq":"docker-app"}, 5 | "@docker.manifest":{"$eq":"latest"}, 6 | "@qa.functional-test":{"$eq":"pass"} 7 | } 8 | ).include("name", "artifact.module.build.name", "artifact.module.build.number") 9 | -------------------------------------------------------------------------------- /automation/aql-example/excercises/4-listCriticalVulnerabiliitesOnProd.aql: -------------------------------------------------------------------------------- 1 | items.find ( 2 | { 3 | "repo": {"$match":"*-prod-local"} 4 | } 5 | ) 6 | .include("repo", "name", "path", "artifact.module.build") 7 | -------------------------------------------------------------------------------- /automation/aql-example/solution/1-latestDockerApp.aql: -------------------------------------------------------------------------------- 1 | items.find ( 2 | { 3 | "repo":{"$eq":"automation-docker-prod-local"}, 4 | "@docker.repoName":{"$eq":"docker-app"}, 5 | "@docker.manifest":{"$eq":"latest"}, 6 | "@qa.functional-test":{"$eq":"pass"} 7 | } 8 | ).include("name", "artifact.module.build.name", "artifact.module.build.number") 9 | -------------------------------------------------------------------------------- /automation/aql-example/solution/2-listDockerAppDependencies.aql: -------------------------------------------------------------------------------- 1 | items.find ( 2 | { 3 | "repo":{"$eq":"automation-docker-prod-local"}, 4 | "@docker.repoName":{"$eq":"docker-app"}, 5 | "@docker.manifest":{"$eq":"latest"}, 6 | "@qa.functional-test":{"$eq":"pass"} 7 | } 8 | ).include("name", "@docker.repoName", "@docker.manifest", "artifact.module.build", "artifact.module.build.module.dependency") 9 | -------------------------------------------------------------------------------- /automation/aql-example/solution/3-latestDockerAppProperties.aql: -------------------------------------------------------------------------------- 1 | items.find 
( 2 | { 3 | "repo":{"$eq":"automation-docker-prod-local"}, 4 | "@docker.repoName":{"$eq":"docker-app"}, 5 | "@docker.manifest":{"$eq":"latest"}, 6 | "@qa.functional-test":{"$eq":"pass"} 7 | } 8 | ).include("name", "artifact.module.build.name", "artifact.module.build.number", "property.*") 9 | -------------------------------------------------------------------------------- /automation/aql-example/solution/4-listCriticalVulnerabiliitesOnProd.aql: -------------------------------------------------------------------------------- 1 | items.find ( 2 | { 3 | "repo": {"$match":"*-prod-local"}, 4 | "$and" : [{"property.key" : {"$match" : 5 | "*.alert.topSeverity"}}, 6 | {"property.value" : {"$eq" : "Critical"}}]}) 7 | .include("repo", "name", "path", "artifact.module.build") 8 | -------------------------------------------------------------------------------- /automation/aql-example/solution/latestDockerAppBuild.aql: -------------------------------------------------------------------------------- 1 | builds.find ( 2 | { 3 | "module.artifact.item.@docker.manifest":{"$eq":"latest"}, 4 | "module.artifact.item.@functional-test":{"$eq":"pass"}, 5 | "module.artifact.item.repo":{"$eq":"automation-docker-prod-local"}, 6 | "module.artifact.item.path":{"$eq":"docker-app/latest"} 7 | } 8 | ).include("name","number", "module.artifact.item") 9 | -------------------------------------------------------------------------------- /automation/catchup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | RED='\033[0;41;30m' 4 | STD='\033[0;0;39m' 5 | # change per student 6 | APIKEY='AKCp2WWshJKjZjguhB3vD2u3RMwHA7gmxWUohWVhs1FqacHBAzKaiL2pp24NNUEhWHm5Dd4JY' 7 | 8 | exercise1() { 9 | echo "performing exercise 1" 10 | cd $HOME/swampup/automation/docker-framework 11 | # make sure student change /etc/hosts on the VM with jfrog.local and the IP address from Orbitera. 12 | curl -H "X-JFrog-Art-Api:${APIKEY}" -H "Content-Type:application/vnd.org.jfrog.artifactory.repositories.LocalRepositoryConfiguration+json" -X PUT "http://jfrog.local/artifactory/api/repositories/automation-gradle-release-local" -T automation-gradle-release-local.json 13 | cd tomcat 14 | jfrog rt c clear 15 | # Change password per student 16 | jfrog rt c swampup-automation --url=http://jfrog.local:8084/artifactory --user=admin --password=password 17 | jfrog rt c artifactory-ha --url=http://jfrog.local/artifactory --user=admin --password=password 18 | jfrog rt dl --server-id=swampup-automation --spec framework-download.json 19 | jfrog rt u --server-id=artifactory-ha --spec framework-upload.json 20 | jfrog rt s --server-id=artifactory-ha --spec framework-verify.json 21 | cd $HOME/swampup/automation 22 | echo "Should see both java jdk and apache-tomcat dependencies returned" 23 | echo "exercise 1 completed" 24 | } 25 | 26 | read_options(){ 27 | local choice 28 | read -p "Enter choice [ 1 - 4] " choice 29 | case $choice in 30 | 1) exercise1 ;; 31 | 2) exit 0 ;; 32 | *) echo -e "${RED}Error...${STD}" && sleep 2 33 | esac 34 | } 35 | 36 | # function to display menus 37 | show_menus() { 38 | echo "~~~~~~~~~~~~~~~~~~~~~~~~~~" 39 | echo " Automation Catch Up Menu " 40 | echo "~~~~~~~~~~~~~~~~~~~~~~~~~~" 41 | echo "1. Excercise 1" 42 | echo "2. 
Exit" 43 | } 44 | 45 | while true 46 | do 47 | show_menus 48 | read_options 49 | done 50 | -------------------------------------------------------------------------------- /automation/docker-app/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM jfrog.local:5003/docker-framework:latest 2 | 3 | MAINTAINER Stanley Fong stanleyf@jfrog.com 4 | 5 | ADD war/*.war /home/exec/tomcat/webapps/swampup.war 6 | 7 | CMD /bin/bash -c cd /home/exec; /home/exec/tomcat/bin/catalina.sh run 8 | -------------------------------------------------------------------------------- /automation/docker-app/Jenkinsfile: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env groovy 2 | 3 | node ('master') { 4 | git url: 'https://github.com/jfrogtraining/swampup' 5 | def rtServer = Artifactory.newServer url: SERVER_URL, credentialsId: CREDENTIALS 6 | def buildInfo = Artifactory.newBuildInfo() 7 | def tagDockerApp 8 | def rtDocker 9 | buildInfo.env.capture = true 10 | 11 | stage ('Dependencies') { 12 | dir ('automation/docker-app') { 13 | try { 14 | println "Gather Released Docker Framework and Gradle War file" 15 | sh 'rm -rf war' 16 | def downloadAppSpec = readFile "${env.WORKSPACE}/automation/docker-app/appmodules-download.json" 17 | rtServer.download(downloadAppSpec, buildInfo ) 18 | } catch (Exception e) { 19 | println "Caught Exception during resolution. Message ${e.message}" 20 | throw e 21 | } 22 | } 23 | } 24 | 25 | stage ('build & deploy') { 26 | dir ('automation/docker-app') { 27 | tagDockerApp = "${ARTDOCKER_REGISTRY}/docker-app:${env.BUILD_NUMBER}" 28 | docker.build(tagDockerApp) 29 | withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: CREDENTIALS, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD']]) { 30 | rtDocker = Artifactory.docker server: rtServer 31 | rtDocker.push(tagDockerApp, REPO, buildInfo) 32 | rtServer.publishBuildInfo buildInfo 33 | } 34 | } 35 | } 36 | 37 | stage ('test') { 38 | dir('automation/docker-app/app-test') { 39 | sh 'docker rmi '+tagDockerApp+' || true' 40 | rtDocker.pull (tagDockerApp) 41 | if (testApp(tagDockerApp)) { 42 | println "Setting property and promotion" 43 | updateProperty ("qa.functional-test=pass") 44 | } else { 45 | updateProperty ("qa.functional-test=fail; qa.failed-test=page-not-loaded") 46 | currentBuild.result = 'UNSTABLE' 47 | return 48 | } 49 | } 50 | } 51 | 52 | stage('Xray Scan') { 53 | if (XRAY_SCAN == "YES") { 54 | def xrayConfig = [ 55 | 'buildName' : env.JOB_NAME, 56 | 'buildNumber' : env.BUILD_NUMBER, 57 | 'failBuild' : false 58 | ] 59 | def xrayResults = rtServer.xrayScan xrayConfig 60 | echo xrayResults as String 61 | echo 'how am I doing?' 62 | } else { 63 | println "No Xray scan performed. 
To enable set XRAY_SCAN = YES" 64 | } 65 | // sleep 60 66 | } 67 | 68 | stage ('promotion') { 69 | dir('automation/docker-app/app-test') { 70 | def promotionConfig = [ 71 | 'buildName' : env.JOB_NAME, 72 | 'buildNumber' : env.BUILD_NUMBER, 73 | 'targetRepo' : PROMOTE_REPO, 74 | 'comment' : 'App works with latest released version of gradle swampup app, tomcat and jdk', 75 | 'sourceRepo' : SOURCE_REPO, 76 | 'status' : 'Released', 77 | 'includeDependencies': false, 78 | 'copy' : true 79 | ] 80 | rtServer.promote promotionConfig 81 | reTagLatest (SOURCE_REPO) 82 | reTagLatest (PROMOTE_REPO) 83 | } 84 | 85 | // promote war file from gradle-release to gradle-prod 86 | 87 | } 88 | } 89 | 90 | def testApp (tag) { 91 | def result = true 92 | docker.image(tag).withRun('-p 8181:8181') {c -> 93 | sleep 10 94 | def stdout = sh(script: 'curl "http://localhost:8181/swampup/"', returnStdout: true) 95 | if (stdout.contains("Welcome Docker Lifecycle Training")) { 96 | println "*** Passed Test: " + stdout 97 | } else { 98 | println "*** Failed Test: " + stdout 99 | result = false 100 | } 101 | } 102 | sh "docker rmi ${tag}" 103 | return result 104 | } 105 | 106 | def reTagLatest (targetRepo) { 107 | def BUILD_NUMBER = env.BUILD_NUMBER 108 | sh 'sed -E "s/@/$BUILD_NUMBER/" retag.json > retag_out.json' 109 | switch (targetRepo) { 110 | case PROMOTE_REPO : 111 | sh 'sed -E "s/TARGETREPO/${PROMOTE_REPO}/" retag_out.json > retaga_out.json' 112 | break 113 | case SOURCE_REPO : 114 | sh 'sed -E "s/TARGETREPO/${SOURCE_REPO}/" retag_out.json > retaga_out.json' 115 | break 116 | } 117 | sh 'cat retaga_out.json' 118 | withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: CREDENTIALS, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD']]) { 119 | def curlString = "curl -u " + env.USERNAME + ":" + env.PASSWORD + " " + SERVER_URL 120 | def regTagStr = curlString + "/api/docker/$targetRepo/v2/promote -X POST -H 'Content-Type: application/json' -T retaga_out.json" 121 | println "Curl String is " + regTagStr 122 | sh regTagStr 123 | } 124 | } 125 | 126 | def updateProperty (property) { 127 | withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: CREDENTIALS, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD']]) { 128 | def curlString = "curl -u " + env.USERNAME + ":" + env.PASSWORD + " " + "-X PUT " + SERVER_URL 129 | def updatePropStr = curlString + "/api/storage/${SOURCE_REPO}/docker-app/${env.BUILD_NUMBER}?properties=${property}" 130 | println "Curl String is " + updatePropStr 131 | sh updatePropStr 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /automation/docker-app/app-test/promote-gradleWar.json: -------------------------------------------------------------------------------- 1 | { 2 | "status": "Released", 3 | "comment" : "App Image has passed test.", 4 | "ciUser": "jfrog", 5 | "dryRun" : false, 6 | "targetRepo" : "gradle-prod-local", 7 | "sourceRepo" : "gradle-release-local", 8 | "includeDependencies" : false, 9 | "copy" : false, 10 | "failFast": false 11 | } -------------------------------------------------------------------------------- /automation/docker-app/app-test/retag.json: -------------------------------------------------------------------------------- 1 | { 2 | "targetRepo" : "TARGETREPO", 3 | "dockerRepository" : "docker-app", 4 | "tag" : "@", 5 | "targetTag" : "latest", 6 | "copy": true 7 | } -------------------------------------------------------------------------------- 
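The retag.json template above is what the Jenkinsfile's reTagLatest step sends to Artifactory's Docker promote endpoint after filling in the build number and target repository. A stand-alone sketch of the same call, assuming admin credentials and automation-docker-prod-local as the PROMOTE_REPO value:

```bash
BUILD_NUMBER=42                              # hypothetical build to retag
TARGET_REPO=automation-docker-prod-local     # assumed value of PROMOTE_REPO

# Substitute the placeholders the same way reTagLatest does with sed.
sed -e "s/@/${BUILD_NUMBER}/" -e "s/TARGETREPO/${TARGET_REPO}/" retag.json > retag_out.json

# POST the spec to the Docker promote API; "copy": true keeps the numbered tag
# and adds a "latest" tag alongside it.
curl -u admin:password \
     -X POST -H 'Content-Type: application/json' \
     -T retag_out.json \
     "http://jfrog.local/artifactory/api/docker/${TARGET_REPO}/v2/promote"
```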
/automation/docker-app/appmodules-download.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": [ 3 | { 4 | "pattern": "gradle-release-local/org/jfrog/example/gradle/webservice/*/*.war", 5 | "target": "war/webservice.war", 6 | "props": "unit-test=pass", 7 | "flat": "true", 8 | "build": "gradle-release/LATEST" 9 | } 10 | ] 11 | } -------------------------------------------------------------------------------- /automation/docker-app/retag.json: -------------------------------------------------------------------------------- 1 | 2 | { 3 | "targetRepo" : "TARGETREPO", 4 | "dockerRepository" : "docker-app", 5 | "tag" : "@", 6 | "targetTag" : "latest", 7 | "copy": true 8 | } -------------------------------------------------------------------------------- /automation/docker-framework/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM jfrog.local:5002/ubuntu:latest 2 | 3 | MAINTAINER Stanley Fong stanleyf@jfrog.com 4 | 5 | RUN /bin/bash -c cd ~; cd /home ; mkdir -p exec 6 | ADD jdk/jdk-8-linux-x64.tar.gz /home/exec 7 | RUN /bin/bash -c cd /home/exec; mv /home/exec/jdk* /home/exec/jdk8 8 | ENV JAVA_HOME=/home/exec/jdk8 9 | RUN sed "/securerandom.source=/{s/file:\/dev\/random/file:\/dev\/urandom/}" /home/exec/jdk8/jre/lib/security/java.security -i 10 | 11 | ADD tomcat/apache-tomcat-8.tar.gz /home/exec 12 | RUN /bin/bash -c cd /home/exec; mv /home/exec/apache-tomcat-8.* /home/exec/tomcat 13 | ADD tomcat/server.xml /home/exec/tomcat/conf 14 | ENV CATALINA_HOME=/home/exec/tomcat 15 | ENV TEST_ENV=2 16 | 17 | CMD /bin/bash -c cd /home/exec; /home/exec/tomcat/bin/catalina.sh run 18 | 19 | -------------------------------------------------------------------------------- /automation/docker-framework/Jenkinsfile: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env groovy 2 | 3 | node ('master') { 4 | git url: 'https://github.com/jfrogtraining/swampup' 5 | def rtServer = Artifactory.newServer url: SERVER_URL, credentialsId: CREDENTIALS 6 | def buildInfo = Artifactory.newBuildInfo() 7 | def tagName 8 | buildInfo.env.capture = true 9 | 10 | stage('Dependencies') { 11 | dir('automation/docker-framework') { 12 | try { 13 | println "Gather Java and Tomcat" 14 | 15 | def downloadFrameworkSpec = readFile "${env.WORKSPACE}/automation/docker-framework/framework-download.json" 16 | 17 | def downloadSpec = """{ 18 | "files": [ 19 | { 20 | "pattern": "tomcat-local/java/jdk-8u91-linux-x64.tar.gz", 21 | "target": "jdk/jdk-8-linux-x64.tar.gz", 22 | "flat":"true" 23 | }, 24 | { 25 | "pattern": "tomcat-local/org/apache/apache-tomcat/apache-tomcat-8.0.32.tar.gz", 26 | "target": "tomcat/apache-tomcat-8.tar.gz", 27 | "flat":"true" 28 | } 29 | ] 30 | }""" 31 | 32 | rtServer.download (downloadSpec, buildInfo) 33 | if (fileExists('jdk/jdk-8-linux-x64.tar.gz') && fileExists('tomcat/apache-tomcat-8.tar.gz')) { 34 | println "Downloaded dependencies" 35 | } else { 36 | println "Missing Dependencies either jdk or tomcat - see listing below:" 37 | sh 'ls -d */*' 38 | throw new FileNotFoundException("Missing Dependencies") 39 | } 40 | } catch (Exception e) { 41 | println "Caught exception during resolution. 
Message ${e.message}" 42 | throw e 43 | } 44 | } 45 | } 46 | stage('build') { 47 | dir ('automation/docker-framework') { 48 | withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: CREDENTIALS, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD']]) { 49 | def rtDocker = Artifactory.docker server: rtServer 50 | tagName = "${ARTDOCKER_REGISTRY}/docker-framework:${env.BUILD_NUMBER}" 51 | docker.build(tagName) 52 | rtDocker.push(tagName, REPO, buildInfo) 53 | rtServer.publishBuildInfo buildInfo 54 | } 55 | } 56 | } 57 | 58 | stage('test') { 59 | dir('automation/docker-framework/framework-test') { 60 | def gradleWarDownload1 = readFile "${env.WORKSPACE}/automation/docker-framework/gradeWar-download.json" 61 | 62 | def gradleWarDownload = """{ 63 | "files": [ 64 | { 65 | "pattern": "gradle-release-local/org/jfrog/example/gradle/webservice/1.1.2/*.war", 66 | "target": "war/webservice.war", 67 | "props": "unit-test=pass", 68 | "flat": "true" 69 | } 70 | ] 71 | }""" 72 | 73 | rtServer.download(gradleWarDownload) 74 | updateDockerFile() 75 | def tagDockerFramework = "${ARTDOCKER_REGISTRY}/docker-framework-test:${env.BUILD_NUMBER}" 76 | docker.build(tagDockerFramework) 77 | if (testFramework(tagDockerFramework)) { 78 | println "Setting property and promotion" 79 | updateProperty ("qa.functional-test=pass") 80 | sh "docker rmi ${tagName}" 81 | } else { 82 | updateProperty ("qa.functional-test=fail; qa.failed-test=page-not-loaded") 83 | currentBuild.result = 'UNSTABLE' 84 | sh "docker rmi ${tagName}" 85 | return 86 | } 87 | } 88 | } 89 | 90 | stage('Xray Scan') { 91 | if (XRAY_SCAN == "YES") { 92 | def xrayConfig = [ 93 | 'buildName' : env.JOB_NAME, 94 | 'buildNumber' : env.BUILD_NUMBER, 95 | 'failBuild' : false 96 | ] 97 | def xrayResults = rtServer.xrayScan xrayConfig 98 | echo xrayResults as String 99 | } else { 100 | println "No Xray scan performed. 
To enable set XRAY_SCAN = YES" 101 | } 102 | //sleep 60 103 | } 104 | 105 | stage ('promote') { 106 | dir ('automation/docker-framework') { 107 | def promotionConfig = [ 108 | 'buildName' : env.JOB_NAME, 109 | 'buildNumber' : env.BUILD_NUMBER, 110 | 'targetRepo' : PROMOTE_REPO, 111 | 'comment' : 'Framework test with latest version of application', 112 | 'sourceRepo' : SOURCE_REPO, 113 | 'status' : 'Released', 114 | 'includeDependencies': false, 115 | 'copy' : true 116 | ] 117 | rtServer.promote promotionConfig 118 | reTagLatest (SOURCE_REPO) 119 | reTagLatest (PROMOTE_REPO) 120 | } 121 | } 122 | } 123 | 124 | def updateDockerFile () { 125 | def BUILD_NUMBER = env.BUILD_NUMBER 126 | sh 'sed -i "s/docker-framework:latest/docker-framework:$BUILD_NUMBER/" Dockerfile' 127 | } 128 | 129 | def reTagLatest (targetRepo) { 130 | def BUILD_NUMBER = env.BUILD_NUMBER 131 | sh 'sed -E "s/@/$BUILD_NUMBER/" retag.json > retag_out.json' 132 | switch (targetRepo) { 133 | case PROMOTE_REPO : 134 | sh 'sed -E "s/TARGETREPO/${PROMOTE_REPO}/" retag_out.json > retaga_out.json' 135 | break 136 | case SOURCE_REPO : 137 | sh 'sed -E "s/TARGETREPO/${SOURCE_REPO}/" retag_out.json > retaga_out.json' 138 | break 139 | } 140 | sh 'cat retaga_out.json' 141 | withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: CREDENTIALS, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD']]) { 142 | def curlString = "curl -u " + env.USERNAME + ":" + env.PASSWORD + " " + SERVER_URL 143 | def regTagStr = curlString + "/api/docker/$targetRepo/v2/promote -X POST -H 'Content-Type: application/json' -T retaga_out.json" 144 | println "Curl String is " + regTagStr 145 | sh regTagStr 146 | } 147 | } 148 | 149 | def testFramework (tag) { 150 | def result = true 151 | docker.image(tag).withRun('-p 8181:8181') {c -> 152 | sleep 10 153 | def stdout = sh(script: 'curl "http://localhost:8181/swampup/"', returnStdout: true) 154 | if (stdout.contains("Welcome Docker Lifecycle Training")) { 155 | println "*** Passed Test: " + stdout 156 | } else { 157 | println "*** Failed Test: " + stdout 158 | result = false 159 | } 160 | } 161 | sh "docker rmi ${tag}" 162 | return result 163 | } 164 | 165 | def updateProperty (property) { 166 | withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: CREDENTIALS, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD']]) { 167 | def curlString = "curl -u " + env.USERNAME + ":" + env.PASSWORD + " " + "-X PUT " + SERVER_URL 168 | def updatePropStr = curlString + "/api/storage/${SOURCE_REPO}/docker-framework/${env.BUILD_NUMBER}?properties=${property}" 169 | println "Curl String is " + updatePropStr 170 | sh updatePropStr 171 | } 172 | } 173 | -------------------------------------------------------------------------------- /automation/docker-framework/README.md: -------------------------------------------------------------------------------- 1 | Automation Training - Docker Framework 2 | ====================================== 3 | This folder contains scripts used for the Automation SwampUp Training exercises. 4 | 5 | Script Details 6 | -------------- 7 | The following files in the tomcat folder 8 | 9 | ```XML 10 | [framework-download.json] fileSpec - download java jdk and tomcat for docker-framework build. Used with JFrogCli 11 | [framework-upload.json] fileSpec - upload java jdk and tomcat to artifactory with properties. Used with JFrogCli 12 | [framework-verify.json] fileSpec - verify the jdk and tomcat are uploaded correctly. 
Used with JFrogCli 13 | [framework-download-template] - fileSpec skeleton used to create the fileSpec for exercise 1. 14 | ``` 15 | 16 | The following files are in the docker-framework folder 17 | ```XML 18 | [local-repository-template] json file to create a local repository 19 | [automation-docker-prod-local.json] - json file to create the repo needed for exercise 1. 20 | ``` 21 | 22 | Jenkins Details 23 | --------------- 24 | The following files are used for the docker-framework build 25 | 26 | ```XML 27 | [Dockerfile] Puts Apache Tomcat, the Java JDK and Ubuntu into a docker container 28 | [Jenkinsfile] pipeline code for the docker-framework build 29 | [retag.json] Tags the latest build with the LATEST tag. 30 | [framework-download.json] fileSpec to download the Java JDK and Tomcat based on properties 31 | [gradeWar-download.json] fileSpec to download the latest war file from the gradle build 32 | ``` 33 | -------------------------------------------------------------------------------- /automation/docker-framework/automation-docker-prod-local.json: -------------------------------------------------------------------------------- 1 | { 2 | "key": "automation-docker-prod-local", 3 | "rclass" : "local", 4 | "packageType": "docker", 5 | "description": "Ready for production release Framework Repository", 6 | "notes": "SwampUp Automation Training", 7 | "includesPattern": "**/*", 8 | "excludesPattern": "", 9 | "repoLayoutRef" : "simple-default" 10 | } 11 | -------------------------------------------------------------------------------- /automation/docker-framework/automation-gradle-release-local.json: -------------------------------------------------------------------------------- 1 | { 2 | "key": "automation-gradle-release-local", 3 | "rclass" : "local", 4 | "packageType": "gradle", 5 | "description": "Ready for production release Framework Repository", 6 | "notes": "SwampUp Automation Training", 7 | "includesPattern": "**/*", 8 | "excludesPattern": "", 9 | "repoLayoutRef" : "simple-default" 10 | } 11 | -------------------------------------------------------------------------------- /automation/docker-framework/framework-download.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": [ 3 | { 4 | "aql": { 5 | "items.find": { 6 | "repo": "tomcat-local", 7 | "$and": [ 8 | { 9 | "path": {"$match": "java"}, 10 | "name": {"$match": "jdk-8u91-linux-x64.tar.gz"} 11 | } 12 | ] 13 | } 14 | }, 15 | "target": "jdk/jdk-8-linux-x64.tar.gz", 16 | "flat": "true" 17 | }, 18 | { 19 | "aql": { 20 | "items.find": { 21 | "repo": "tomcat-local", 22 | "$and": [ 23 | { 24 | "path": {"$match": "org/apache/apache-tomcat"}, 25 | "name": {"$match": "apache-tomcat-8.0.32.tar.gz"} 26 | } 27 | ] 28 | } 29 | }, 30 | "target": "tomcat/apache-tomcat-8.tar.gz", 31 | "flat": "true" 32 | } 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /automation/docker-framework/framework-test/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM jfrog.local:5002/docker-framework:latest 2 | 3 | MAINTAINER Stanley Fong stanleyf@jfrog.com 4 | 5 | ADD war/*.war /home/exec/tomcat/webapps/swampup.war 6 | 7 | CMD /bin/bash -c cd /home/exec; /home/exec/tomcat/bin/catalina.sh run 8 | -------------------------------------------------------------------------------- /automation/docker-framework/gradeWar-download.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": [ 3 | { 4 | "pattern": 
"gradle-release-local/org/jfrog/example/gradle/webservice/*/*.war", 5 | "target": "war/webservice.war", 6 | "props": "unit-test=pass", 7 | "flat": "true", 8 | "build": "gradle-release/LATEST" 9 | } 10 | ] 11 | } -------------------------------------------------------------------------------- /automation/docker-framework/local-repository-template: -------------------------------------------------------------------------------- 1 | { 2 | - "key": "local-repo1", 3 | + "rclass" : "local", 4 | + "packageType": "maven" | "gradle" | "ivy" | "sbt" | "nuget" | "gems" | "npm" | "bower" | "debian" | "composer" | "pypi" | "docker" | "vagrant" | "gitlfs" | "yum" | "conan" | "generic" 5 | - "description": "The local repository public description", 6 | - "notes": "Some internal notes", 7 | - "includesPattern": "**/*" (default), 8 | - "excludesPattern": "" (default), 9 | - "repoLayoutRef" : "maven-2-default", 10 | - "debianTrivialLayout" : false, 11 | - "checksumPolicyType": "client-checksums" (default) | "server-generated-checksums" 12 | - "handleReleases": true (default), 13 | - "handleSnapshots": true (default), 14 | - "maxUniqueSnapshots": 0 (default), 15 | - "maxUniqueTags": 0 (default) 16 | - "snapshotVersionBehavior": "unique" | "non-unique" (default) | "deployer", 17 | - "suppressPomConsistencyChecks": false (default), 18 | - "blackedOut": false (default), 19 | - "propertySets": ["ps1", "ps2"], 20 | - "archiveBrowsingEnabled" : false, 21 | - "calculateYumMetadata" : false, 22 | - "yumRootDepth" : 0, 23 | - "dockerApiVersion" : "V2" (default) 24 | } 25 | -------------------------------------------------------------------------------- /automation/docker-framework/retag.json: -------------------------------------------------------------------------------- 1 | { 2 | "targetRepo" : "TARGETREPO", 3 | "dockerRepository" : "docker-framework", 4 | "tag" : "@", 5 | "targetTag" : "latest", 6 | "copy": true 7 | } -------------------------------------------------------------------------------- /automation/docker-framework/tomcat/framework-download-template: -------------------------------------------------------------------------------- 1 | { 2 | "files": [ 3 | { 4 | "pattern": "", 5 | "target": "", 6 | }, 7 | { 8 | "pattern": "", 9 | "target": "", 10 | }, 11 | ] 12 | } 13 | 14 | { 15 | "files": [ 16 | { 17 | "aql": { 18 | "items.find": { 19 | "repo": "", 20 | "$and": [ 21 | { 22 | "path": {"$match": ""}, 23 | "name": {"$match": ""}, 24 | "property.key" : {"$match" : ""}, 25 | "property.value" : {"$ne" : ""} 26 | } 27 | ] 28 | } 29 | } 30 | }, 31 | { 32 | "aql": { 33 | "items.find": { 34 | "repo": "", 35 | "$and": [ 36 | { 37 | "path": {"$match": ""}, 38 | "name": {"$match": ""}, 39 | "property.key" : {"$match" : ""}, 40 | "property.value" : {"$ne" : ""} 41 | } 42 | ] 43 | } 44 | } 45 | } 46 | ] 47 | } -------------------------------------------------------------------------------- /automation/docker-framework/tomcat/framework-download.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": [ 3 | { 4 | "pattern": "tomcat-local/java/jdk-8u91-linux-x64.tar.gz", 5 | "target": "tomcat-local/java/jdk-8u91-linux-x64.tar.gz", 6 | "flat": "true" 7 | }, 8 | { 9 | "pattern": "tomcat-local/org/apache/apache-tomcat/apache-tomcat-8.0.32.tar.gz", 10 | "target": "tomcat-local/org/apache/apache-tomcat/apache-tomcat-8.0.32.tar.gz", 11 | "flat": "true" 12 | } 13 | ] 14 | } -------------------------------------------------------------------------------- 
/automation/docker-framework/tomcat/framework-upload.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": [ 3 | { 4 | "pattern": "tomcat-local/java/jdk-8u91-linux-x64.tar.gz", 5 | "target": "tomcat-local/java/jdk-8u91-linux-x64.tar.gz", 6 | "props": "security-approval=yes;approver=stanley", 7 | "flat": "true" 8 | }, 9 | { 10 | "pattern": "tomcat-local/org/apache/apache-tomcat/apache-tomcat-8.0.32.tar.gz", 11 | "target": "tomcat-local/org/apache/apache-tomcat/apache-tomcat-8.0.32.tar.gz", 12 | "props": "security-approval=yes;approver=stanley", 13 | "flat" : "true" 14 | } 15 | ] 16 | } -------------------------------------------------------------------------------- /automation/docker-framework/tomcat/framework-verify.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": [ 3 | { 4 | "aql": { 5 | "items.find": { 6 | "repo": "tomcat-local", 7 | "$and": [ 8 | { 9 | "path": {"$match": "java"}, 10 | "name": {"$match": "jdk-8u91-linux-x64.tar.gz"}, 11 | "property.key" : {"$match" : "security-approval"}, 12 | "property.value" : {"$eq" : "yes"}, 13 | "property.key" : {"$match" : "approver"}, 14 | "property.value" : {"$eq" : "stanley"} 15 | } 16 | ] 17 | } 18 | } 19 | }, 20 | { 21 | "aql": { 22 | "items.find": { 23 | "repo": "tomcat-local", 24 | "$and": [ 25 | { 26 | "path": {"$match": "org/apache/apache-tomcat"}, 27 | "name": {"$match": "apache-tomcat-8.0.32.tar.gz"}, 28 | "property.key" : {"$match" : "security-approval"}, 29 | "property.value" : {"$eq" : "yes"}, 30 | "property.key" : {"$match" : "approver"}, 31 | "property.value" : {"$eq" : "stanley"} 32 | } 33 | ] 34 | } 35 | } 36 | } 37 | ] 38 | } 39 | -------------------------------------------------------------------------------- /automation/docker-framework/tomcat/server.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /automation/gradle-example/dev/Jenkinsfile: -------------------------------------------------------------------------------- 1 | node { 2 | def server = Artifactory.newServer url: SERVER_URL, credentialsId: CREDENTIALS 3 | def rtGradle = Artifactory.newGradleBuild() 4 | 5 | stage 'Build' 6 | git url: 'https://github.com/jfrogtraining/project-examples.git', branch: 'swampup2017' 7 | 8 | stage 'Artifactory configuration' 9 | rtGradle.tool = GRADLE_TOOL // Tool name from Jenkins configuration 10 | rtGradle.deployer repo:'gradle-dev', server: server 11 | rtGradle.resolver repo:'libs-release', server: server 12 | rtGradle.deployer.addProperty("unit-test", "pass").addProperty("qa-team", "platform", "ui") 13 | def buildInfo = Artifactory.newBuildInfo() 14 | buildInfo.env.collect() 15 | 16 | stage 'Exec Gradle' 17 | buildInfo = rtGradle.run rootDir: "gradle-examples/4/gradle-example-ci-server/", buildFile: 'build.gradle', tasks: 'clean artifactoryPublish' 18 | 19 | stage 'Publish & Scan' 20 | server.publishBuildInfo buildInfo 21 | if (XRAY_SCAN == "YES") { 22 | scanBuild () 23 | } 24 | } 25 | 26 | def scanBuild () { 27 | def scanConfig = [ 28 | 'buildName' : buildInfo.name, 29 | 'buildNumber' : buildInfo.number 30 | ] 31 | def scanResult = server.xrayScan scanConfig 32 | echo xrayResults as String 33 | } -------------------------------------------------------------------------------- /automation/gradle-example/release/Jenkinsfile: 
-------------------------------------------------------------------------------- 1 | node { 2 | def server = Artifactory.newServer url: SERVER_URL, credentialsId: CREDENTIALS 3 | def rtGradle = Artifactory.newGradleBuild() 4 | 5 | stage 'Build' 6 | git url: 'https://github.com/jfrogtraining/project-examples.git', branch: 'swampup2017' 7 | 8 | stage 'Artifactory configuration' 9 | rtGradle.tool = GRADLE_TOOL // Tool name from Jenkins configuration 10 | rtGradle.deployer repo:DEPLOY_REPO, server: server 11 | rtGradle.resolver repo:'libs-release', server: server 12 | rtGradle.deployer.addProperty("unit-test", "pass").addProperty("qa-team", "platform", "ui") 13 | def buildInfo = Artifactory.newBuildInfo() 14 | buildInfo.env.capture = true 15 | 16 | stage 'Exec Gradle' 17 | if(CLEAN_REPO == "YES") { 18 | sh 'rm -rf ~/.gradle/caches' 19 | } 20 | rtGradle.run rootDir: "gradle-examples/4/gradle-example-ci-server/", buildFile: 'build.gradle', tasks: 'clean artifactoryPublish', buildInfo: buildInfo 21 | 22 | stage 'Publish & Scan' 23 | server.publishBuildInfo buildInfo 24 | if (XRAY_SCAN == "YES") { 25 | def scanConfig = [ 26 | 'buildName' : env.JOB_NAME, 27 | 'buildNumber' : env.BUILD_NUMBER, 28 | 'failBuild' : false 29 | ] 30 | def scanResult = server.xrayScan scanConfig 31 | echo scanResult as String 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /automation/maven-example/Jenkinsfile: -------------------------------------------------------------------------------- 1 | node { 2 | def server = Artifactory.newServer url: SERVER_URL, credentialsId: CREDENTIALS 3 | def rtMaven = Artifactory.newMavenBuild() 4 | 5 | stage 'Build' 6 | git url: 'https://github.com/jfrogdev/project-examples.git' 7 | 8 | stage 'Artifactory configuration' 9 | rtMaven.tool = MAVEN_TOOL // Tool name from Jenkins configuration 10 | rtMaven.deployer releaseRepo:'automation-mvn-solution-local', snapshotRepo:'automation-mvn-sol-snapshot-local', server: server 11 | rtMaven.resolver releaseRepo:'libs-release', snapshotRepo:'libs-snapshot', server: server 12 | rtMaven.deployer.addProperty("unit-test", "pass").addProperty("qa-team", "platform", "ui") 13 | def buildInfo = Artifactory.newBuildInfo() 14 | buildInfo.env.capture = true 15 | 16 | stage 'Exec Maven' 17 | rtMaven.run pom: 'maven-example/pom.xml', goals: 'clean install', buildInfo: buildInfo 18 | 19 | stage 'Publish & Scan' 20 | step([$class: 'JUnitResultArchiver', testResults: '**/target/surefire-reports/TEST-*.xml']) 21 | if (reportOnTestsForBuild ()) { 22 | currentBuild.result = 'UNSTABLE' 23 | } 24 | server.publishBuildInfo buildInfo 25 | if (XRAY_SCAN == "YES") { 26 | def scanConfig = [ 27 | 'buildName' : buildInfo.name, 28 | 'buildNumber' : buildInfo.number, 29 | 'failBuild' : false 30 | ] 31 | def scanResult = server.xrayScan scanConfig 32 | echo scanResult as String 33 | } 34 | } 35 | 36 | @NonCPS 37 | def reportOnTestsForBuild () { 38 | def failedTests = [] 39 | def build = manager.build 40 | if (build.getAction(hudson.tasks.junit.TestResultAction.class) == null) { 41 | println "No Tests" 42 | return true 43 | } 44 | def result = build.getAction(hudson.tasks.junit.TestResultAction.class).result 45 | if ((result == null)) { 46 | println "No test results" 47 | return true 48 | } else { 49 | println "Failed test count: " + result.getFailCount() 50 | println "Passed test count: " + result.getPassCount() 51 | failedTests = result.getFailedTests() 52 | failedTests.each { test -> 53 | println test.name 54 | } 55 | return 
(result.getFailCount()) 56 | } 57 | } -------------------------------------------------------------------------------- /automation/preventUnapproved/PreventUnapproved.groovy: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2014 JFrog Ltd. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | /** 18 | * 19 | * @author jbaruch 20 | * @since 20/08/12 21 | */ 22 | 23 | download { 24 | altResponse { request, responseRepoPath -> 25 | def artifactStatus = repositories.getProperties(responseRepoPath).getFirst('approver.status') 26 | if (artifactStatus && artifactStatus != 'approved') { 27 | status = 403 28 | message = 'This artifact wasn\'t approved yet, please use the Approver application.' 29 | log.warn "You asked for an unapproved artifact: $responseRepoPath. 403 in da face!" 30 | } 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /automation/preventUnapproved/PreventUnapprovedTest.groovy: -------------------------------------------------------------------------------- 1 | import groovyx.net.http.HttpResponseException 2 | import spock.lang.Specification 3 | 4 | import org.jfrog.artifactory.client.model.repository.settings.impl.MavenRepositorySettingsImpl 5 | import static org.jfrog.artifactory.client.ArtifactoryClient.create 6 | 7 | class PreventUnapprovedTest extends Specification { 8 | def 'prevent unapproved test'() { 9 | setup: 10 | def baseurl = 'http://localhost:8088/artifactory' 11 | def artifactory = create(baseurl, 'admin', 'password') 12 | 13 | def builder = artifactory.repositories().builders() 14 | def local = builder.localRepositoryBuilder().key('maven-local') 15 | .repositorySettings(new MavenRepositorySettingsImpl()).build() 16 | artifactory.repositories().create(0, local) 17 | 18 | when: 19 | def artifact = new ByteArrayInputStream("$status artifact".bytes) 20 | artifactory.repository('maven-local').upload(status, artifact).doUpload() 21 | artifactory.repository('maven-local').file(status).properties().addProperty('approver.status', status).doSet() 22 | 23 | then: 24 | testDownload(artifactory.repository('maven-local'), status, "$status artifact", approved) 25 | 26 | cleanup: 27 | artifactory.repository('maven-local').delete() 28 | 29 | where: 30 | status | approved 31 | 'approved' | true 32 | 'rejected' | false 33 | } 34 | 35 | def testDownload(repo, status, content, approved) { 36 | try { 37 | repo.download(status).doDownload().text == content && approved 38 | } catch (HttpResponseException ex) { 39 | !approved 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /automation/preventUnapproved/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jfrogtraining/swampup/58ab0a71c6564dd8b474fd20983ea5009168d1a4/automation/preventUnapproved/README.md 
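PreventUnapproved.groovy above returns a 403 for any download whose approver.status property is not "approved"; PreventUnapprovedTest.groovy drives the same check through the Artifactory Java client. A quick manual check with curl, assuming the plugin is loaded on the instance at localhost:8088 and using an illustrative maven-local repository and dummy.jar artifact:

```bash
# Deploy a test artifact and mark it rejected via the Properties API.
curl -u admin:password -T ./dummy.jar \
     "http://localhost:8088/artifactory/maven-local/dummy.jar"
curl -u admin:password -X PUT \
     "http://localhost:8088/artifactory/api/storage/maven-local/dummy.jar?properties=approver.status=rejected"

# The altResponse hook should now refuse the download with a 403.
curl -u admin:password -o /dev/null -w "%{http_code}\n" \
     "http://localhost:8088/artifactory/maven-local/dummy.jar"

# Flip the property to approved and the same download succeeds.
curl -u admin:password -X PUT \
     "http://localhost:8088/artifactory/api/storage/maven-local/dummy.jar?properties=approver.status=approved"
curl -u admin:password -o /dev/null -w "%{http_code}\n" \
     "http://localhost:8088/artifactory/maven-local/dummy.jar"
```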
-------------------------------------------------------------------------------- /bintray/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM jfrog.local:5001/busybox 2 | 3 | MAINTAINER markg@jfrog.com 4 | 5 | ADD jfrog /root/ 6 | 7 | COPY ./swampup-bintray-1.0-x86.tgz /root/ 8 | 9 | RUN /bin/tar -xvf /root/swampup-bintray-1.0-x86.tgz 10 | 11 | RUN mv /exec /root/exec 12 | 13 | CMD /root/exec/bintrayrocks.sh 14 | -------------------------------------------------------------------------------- /bintray/Jenkinsfile: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env groovy 2 | 3 | node ('master') { 4 | git url: 'https://github.com/jfrogtraining/swampup.git' 5 | def rtServer = Artifactory.newServer url: "http://jfrog.local/artifactory", credentialsId: CREDENTIALS 6 | def buildInfo = Artifactory.newBuildInfo() 7 | def tagDockerApp 8 | def rtDocker 9 | buildInfo.env.capture = true 10 | 11 | stage('build & deploy') { 12 | dir ('bintray') { 13 | sh './prep.sh' 14 | tagDockerApp = "jfrog.local:5001/swampup-bintray:${env.BUILD_NUMBER}" 15 | docker.build(tagDockerApp) 16 | withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: CREDENTIALS, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD']]) { 17 | rtDocker = Artifactory.docker("${env.USERNAME}", "${env.PASSWORD}") 18 | rtDocker.push(tagDockerApp, 'docker-stage-local', buildInfo) 19 | rtServer.publishBuildInfo buildInfo 20 | } 21 | 22 | def uploadSpec = """{ 23 | "files": [ 24 | { 25 | "pattern": "swampup*.*", 26 | "target": "generic-local/swampup/swampup-bintray/swampup-bintray-${env.BUILD_NUMBER}-amd64.tgz" 27 | } 28 | ] 29 | }""" 30 | rtServer.upload(uploadSpec, buildInfo) 31 | } 32 | } 33 | 34 | } 35 | -------------------------------------------------------------------------------- /bintray/bintrayrocks/amaze-1.0-x86.tgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jfrogtraining/swampup/58ab0a71c6564dd8b474fd20983ea5009168d1a4/bintray/bintrayrocks/amaze-1.0-x86.tgz -------------------------------------------------------------------------------- /bintray/bintrayrocks/amaze-1.0.tgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jfrogtraining/swampup/58ab0a71c6564dd8b474fd20983ea5009168d1a4/bintray/bintrayrocks/amaze-1.0.tgz -------------------------------------------------------------------------------- /bintray/bintrayrocks/exec/README.md: -------------------------------------------------------------------------------- 1 | This program provides an explanation of everything you need to know about bintray. -------------------------------------------------------------------------------- /bintray/bintrayrocks/exec/bintrayrocks.sh: -------------------------------------------------------------------------------- 1 | echo 'Bintray Rocks!' 2 | -------------------------------------------------------------------------------- /bintray/commands.list: -------------------------------------------------------------------------------- 1 | ./prep.sh 2 | export BT_ORG= 3 | jfrog bt stream $BT_ORG 4 | #Create swamup-dist repo 5 | ## use swampup product 6 | ## save Repo, We will come back and edit later other tabs. 
7 | #GPG key commands 8 | # gpg --gen-key 9 | # gpg --list-secret-keys --keyid-format LONG 10 | # gpg --armor --export 11 | # If you have more than one key than specify your key after --export 12 | # If you GPG complains about it needing more bytes, open a second terminal. 13 | # sudo find / -type f | xargs grep somerandomstring > /dev/null 14 | # Copy your GPG key, beginning with -----BEGIN PGP PUBLIC KEY BLOCK----- and ending with -----END PGP PUBLIC KEY BLOCK----- 15 | # Create Layout: 16 | ##[orgPath]/[module]/[module]-[baseRev]-[architecture<.*>].[ext] 17 | jfrog bt acc-keys --org=$BT_ORG create swampup-key 18 | jfrog bt ent --access=r --keys=swampup-key create $BT_ORG/swampup-gen 19 | jfrog bt ent --access=r --keys=swampup-key create $BT_ORG/swampup-reg 20 | https://bintray.com/jfrog-int/product/amazing-product/1.0/accept_eula?user=swampup-key@$BT_ORG 21 | git clone https://github.com/JFrogDev/bintray-scripts.git 22 | -------------------------------------------------------------------------------- /bintray/distrib/Jenkinsfile: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env groovy 2 | 3 | node ('master') { 4 | git url: 'https://github.com/jfrogtraining/swampup.git' 5 | def rtServer = Artifactory.newServer url: "http://jfrog.local/artifactory", credentialsId: CREDENTIALS 6 | def buildInfo = Artifactory.newBuildInfo() 7 | def tagDockerApp 8 | def rtDocker 9 | buildInfo.env.capture = true 10 | 11 | stage('build & deploy') { 12 | dir ('bintray') { 13 | sh './prep.sh' 14 | tagDockerApp = "jfrog.local:5001/swampup-bintray:${env.BUILD_NUMBER}" 15 | docker.build(tagDockerApp) 16 | withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: CREDENTIALS, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD']]) { 17 | rtDocker = Artifactory.docker("${env.USERNAME}", "${env.PASSWORD}") 18 | rtDocker.push(tagDockerApp, 'docker-stage-local', buildInfo) 19 | rtServer.publishBuildInfo buildInfo 20 | } 21 | 22 | def uploadSpec = """{ 23 | "files": [ 24 | { 25 | "pattern": "swampup*.*", 26 | "target": "generic-local/swampup/swampup-bintray/swampup-bintray-${env.BUILD_NUMBER}-amd64.tgz" 27 | } 28 | ] 29 | }""" 30 | rtServer.upload(uploadSpec, buildInfo) 31 | } 32 | } 33 | stage('distribute') { 34 | def distributionConfig = [ 35 | // Mandatory parameters 36 | 'buildName' : buildInfo.name, 37 | 'buildNumber' : buildInfo.number, 38 | 'targetRepo' : 'swampup-dryrun-2', 39 | 40 | // Optional parameters 41 | 'publish' : true, // Default: true. If true, artifacts are published when deployed to Bintray. 42 | 'overrideExistingFiles' : false, // Default: false. If true, Artifactory overwrites builds already existing in the target path in Bintray. 43 | 'async' : false, // Default: false. If true, the build will be distributed asynchronously. Errors and warnings may be viewed in the Artifactory log. 44 | ] 45 | rtServer.distribute distributionConfig 46 | } 47 | 48 | } 49 | -------------------------------------------------------------------------------- /bintray/eula.md: -------------------------------------------------------------------------------- 1 | # A Complete Fake License Agreement 2 | JFrog Demo is a compact and smart client that provides a simple interface that automates access to *Artifactory*, *Bintray* and *Mission Control* through their respective REST APIs. 3 | By using the JFrog CLI, you can greatly simplify your automation scripts making them more readable and easier to maintain. 
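For instance, collecting files by wildcard and previewing the transfer with a dry run is a single command; the source pattern and repository path below are illustrative:

```bash
# Preview which files the wildcard would upload, without transferring anything.
jfrog rt u "build/libs/*.war" generic-local/swampup/ --dry-run

# Drop --dry-run to perform the actual multi-threaded, checksum-optimized upload.
jfrog rt u "build/libs/*.war" generic-local/swampup/
```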
4 | Several features of the JFrog CLI makes your scripts more efficient and reliable: 5 | 6 | - Multi-threaded upload and download of artifacts make builds run faster 7 | - Checksum optimization reduces redundant file transfers 8 | - Wildcards and regular expressions give you an easy way to collect all the artifacts you wish to upload or download. 9 | - "Dry run" gives you a preview of file transfer operations before you actually run them 10 | 11 | # Download and Installation 12 | 13 | You can get the executable directly from the [JFrog CLI Download Page](https://www.jfrog.com/getcli/), or you can download the source files from this GitHub project and build it yourself. 14 | 15 | On Mac you can run: 16 | ```` 17 | $ brew install jfrog-cli-go 18 | ```` 19 | 20 | ## Building the Executable 21 | 22 | JFrog CLI is written in the [Go programming language](https://golang.org/), so to build the CLI yourself, you first need to have Go installed and configured on your machine. 23 | 24 | ### Setup Go 25 | 26 | To download and install `Go`, please refer to the [Go documentation](https://golang.org/doc/install). 27 | Please download `Go 1.6` or above. 28 | 29 | Navigate to the directory where you want to create the jfrog-cli-go project, and set the value of the GOPATH environment variable to the full path of this directory. 30 | 31 | ### Download and Build the CLI 32 | 33 | To download the jfrog-cli-go project, execute the following command: 34 | ```` 35 | $ go get github.com/jfrogdev/jfrog-cli-go/... 36 | ```` 37 | Go will download and build the project on your machine. Once complete, you will find the JFrog CLI executable under your `$GOPATH/bin` directory. 38 | 39 | # Using JFrog CLI with Artifactory, Bintray and Mission Control 40 | JFrog CLI can be used for a variety of functions with Artifactory, Bintray and Mission Control, and has a dedicated set of commands for each product. To learn how to use JFrog CLI, please refer to the relevant documentation through the corresponding links below: 41 | * [Using JFrog CLI with Artifactory](https://www.jfrog.com/confluence/display/RTF/JFrog+CLI) 42 | * [Using JFrog CLI with Bintray](https://bintray.com/docs/usermanual/cli/cli_jfrogcli.html) 43 | * [Using JFrog CLI with Mission Control](https://www.jfrog.com/confluence/display/MC/JFrog+CLI) 44 | 45 | # Accepting this agrees that the JFrog CLI is cool! 46 | -------------------------------------------------------------------------------- /bintray/jfrog: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jfrogtraining/swampup/58ab0a71c6564dd8b474fd20983ea5009168d1a4/bintray/jfrog -------------------------------------------------------------------------------- /bintray/prep.sh: -------------------------------------------------------------------------------- 1 | cd bintrayrocks 2 | tar -cvf swampup-bintray-1.0-x86.tgz exec 3 | cd .. 4 | cp bintrayrocks/swampup-bintray-1.0-x86.tgz . 5 | 6 | -------------------------------------------------------------------------------- /conan/exercises/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM dockcross/linux-armv7 2 | ENV DEFAULT_DOCKCROSS_IMAGE conanclass/linux-armv7 3 | ADD . 
/work 4 | -------------------------------------------------------------------------------- /conan/exercises/catchup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | curdir=$(pwd) 4 | RED='\033[0;51;30m' 5 | STD='\033[0;0;39m' 6 | APIKEY='AKCp2WWshJKjZjguhB3vD2u3RMwHA7gmxWUohWVhs1FqacHBAzKaiL2pp24NNUEhWHm5Dd4JY' 7 | 8 | consumer() { 9 | echo "performing Exercise 2 (consumer, with CMake)" 10 | cd consumer 11 | rm -rf build 12 | mkdir -p build 13 | cd build 14 | conan install .. 15 | cmake .. -DCMAKE_BUILD_TYPE=Release 16 | cmake --build . 17 | cd bin 18 | ./timer 19 | conan search 20 | conan search zlib/1.2.11@conan/stable 21 | } 22 | 23 | consumer_debug() { 24 | echo "performing Exercise 3 (consumer, with build_type Debug)" 25 | cd consumer 26 | rm -rf build 27 | mkdir -p build 28 | cd build 29 | conan install .. -s build_type=Debug 30 | cmake .. -DCMAKE_BUILD_TYPE=Debug 31 | cmake --build . 32 | cd bin 33 | ./timer 34 | conan search 35 | conan search zlib/1.2.11@conan/stable 36 | } 37 | 38 | consumer_gcc() { 39 | echo "performing Exercise 4 (consumer, with GCC)" 40 | cd consumer_gcc 41 | conan install . -g gcc 42 | g++ timer.cpp @conanbuildinfo.gcc -o timer --std=c++11 43 | ./timer 44 | } 45 | 46 | create() { 47 | echo "performing Exercise 5 (Create a Conan Package)" 48 | cd create 49 | conan new Hello/0.1 50 | conan create . user/testing 51 | conan search 52 | conan search Hello/0.1@user/testing 53 | conan create . user/testing -s build_type=Debug 54 | conan search Hello/0.1@user/testing 55 | conan new Hello/0.1 -t 56 | conan create . user/testing 57 | } 58 | 59 | create_sources() { 60 | echo "performing Exercise 6 (Create Package with sources)" 61 | cd create_sources 62 | conan new Hello/0.1 -t -s 63 | conan create . user/testing 64 | conan create . user/testing -s build_type=Debug 65 | } 66 | 67 | upload_artifactory() { 68 | echo "performing Exercise 7 (Upload packages to artifactory)" 69 | conan upload Hello/0.1@user/testing -r artifactory --all 70 | conan search -r=artifactory 71 | conan search Hello/0.1@user/testing -r=artifactory 72 | conan remove Hello/0.1@user/testing -f 73 | cd create_sources 74 | conan test test_package Hello/0.1@user/testing 75 | conan test test_package Hello/0.1@user/testing -s build_type=Debug 76 | conan upload "*" -r=artifactory --all --confirm 77 | conan remove "*" -f 78 | cd ../consumer/build 79 | conan install .. 80 | } 81 | 82 | cross_build_hello(){ 83 | cd create_sources 84 | less ../profile_arm/arm_gcc_debug.profile 85 | conan create . user/testing -pr=../profile_arm/arm_gcc_debug.profile 86 | conan search 87 | conan search Hello/0.1@user/testing 88 | } 89 | 90 | profile_arm_compiler() { 91 | cd profile_arm 92 | rm -rf build 93 | mkdir -p build 94 | cd build 95 | conan install .. --profile ../arm_gcc_debug.profile 96 | conan install .. -pr=../arm_gcc_debug.profile --build missing 97 | conan search zlib/1.2.11@conan/stable 98 | conan build .. 99 | ls bin/example && echo "Example built ok!" 
100 | } 101 | 102 | package_header_only(){ 103 | cd header_only 104 | conan new picojson/1.3.0 -i -t 105 | cp example.cpp test_package 106 | 107 | echo 'from conans import ConanFile 108 | 109 | class PicojsonConan(ConanFile): 110 | name = "picojson" 111 | version = "1.3.0" 112 | license = "The 2-Clause BSD License" 113 | url = "https://github.com/kazuho/picojson" 114 | # No settings/options are necessary, this is header only 115 | 116 | def source(self): 117 | self.run("git clone https://github.com/kazuho/picojson.git") 118 | 119 | def package(self): 120 | self.copy("*.h", "include")' > conanfile.py 121 | 122 | conan create . user/testing 123 | } 124 | 125 | gtest() { 126 | conan remote add conan-center https://conan.bintray.com 127 | cd gtest/package 128 | conan create . user/testing 129 | cd ../consumer 130 | conan install . 131 | conan remove "gtest*" -f 132 | conan install . 133 | } 134 | 135 | gtest_build_require() { 136 | cd gtest/package 137 | conan create . user/testing 138 | conan remove "gtest*" -f 139 | cd ../consumer 140 | conan install . 141 | } 142 | 143 | cmake_build_require() { 144 | cd gtest/package 145 | echo 'message(STATUS "CMAKE VERSION ${CMAKE_VERSION}")' >> CMakeLists.txt 146 | conan create . user/testing 147 | echo 'include(default) 148 | [build_requires] 149 | cmake_installer/3.3.2@conan/stable' > myprofile 150 | conan create . user/testing -pr=myprofile 151 | } 152 | 153 | read_options(){ 154 | local choice 155 | cd ${curdir} 156 | read -p "Enter choice: " choice 157 | case $choice in 158 | 2) consumer ;; 159 | 3) consumer_debug ;; 160 | 4) consumer_gcc ;; 161 | 5) create ;; 162 | 6) create_sources ;; 163 | 7) upload_artifactory ;; 164 | 8) cross_build_hello ;; 165 | 9) profile_arm_compiler ;; 166 | 11) gtest ;; 167 | 12) gtest_build_require ;; 168 | 13) cmake_build_require ;; 169 | 14) package_header_only ;; 170 | -1) exit 0 ;; 171 | *) echo -e "${RED}Not valid option! ${STD}" && sleep 2 172 | esac 173 | } 174 | 175 | 176 | # function to display menus 177 | show_menus() { 178 | echo "~~~~~~~~~~~~~~~~~~~~~~~~~~" 179 | echo " Automation Catch Up Menu " 180 | echo "~~~~~~~~~~~~~~~~~~~~~~~~~~" 181 | echo "2. Exercise 2 (Consume with CMake)" 182 | echo "3. Exercise 3 (Consume with CMake, with different build_type, Debug)" 183 | echo "4. Exercise 4 (Consume with GCC)" 184 | echo "5. Exercise 5 (Create a conan package)" 185 | echo "6. Exercise 6 (Create package with sources)" 186 | echo "7. Exercise 7 (Upload packages to artifactory)" 187 | echo "8. Exercise 8 (Cross build to ARM - RPI)" 188 | echo "9. Exercise 9 (Cross build zlib dependency to ARM)" 189 | echo "11. Exercise 11 (Use Gtest as a require)" 190 | echo "12. Exercise 12 (Use Gtest as a build_require)" 191 | echo "13. Exercise 13 (CMake as build require)" 192 | echo "14. Exercise 14 (Create a package for a header only library)" 193 | echo "-1. 
Exit" 194 | } 195 | 196 | while true 197 | do 198 | show_menus 199 | read_options 200 | done 201 | -------------------------------------------------------------------------------- /conan/exercises/consumer/.gitignore: -------------------------------------------------------------------------------- 1 | build 2 | -------------------------------------------------------------------------------- /conan/exercises/consumer/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8) 2 | project(BoostPoco) 3 | add_compile_options(-std=c++11) 4 | 5 | include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake) 6 | conan_basic_setup() 7 | 8 | add_executable(timer timer.cpp) 9 | target_link_libraries(timer ${CONAN_LIBS}) 10 | 11 | 12 | # if(EXISTS ${CMAKE_BINARY_DIR}/conanbuildinfo.cmake) 13 | # include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake) 14 | # conan_basic_setup() 15 | # endif() 16 | 17 | # But we could use CMake find_package as well, or link with libraries individually 18 | #find_package(Boost REQUIRED regex) 19 | 20 | #if(Boost_FOUND) 21 | # include_directories(${Boost_INCLUDE_DIRS}) 22 | # target_link_libraries(timer ${Boost_LIBRARIES}) 23 | #endif() 24 | 25 | #target_link_libraries(timer ${CONAN_LIBS_POCO}) 26 | -------------------------------------------------------------------------------- /conan/exercises/consumer/conanfile.txt: -------------------------------------------------------------------------------- 1 | [requires] 2 | boost/1.67.0@conan/stable 3 | Poco/1.9.0@pocoproject/stable 4 | 5 | [generators] 6 | cmake 7 | 8 | [options] 9 | Boost:shared=False 10 | Poco:shared=False 11 | -------------------------------------------------------------------------------- /conan/exercises/consumer/timer.cpp: -------------------------------------------------------------------------------- 1 | #include "Poco/Timer.h" 2 | #include "Poco/Thread.h" 3 | #include "Poco/Stopwatch.h" 4 | 5 | #include <boost/regex.hpp> 6 | #include <iostream> 7 | #include <string> 8 | 9 | using Poco::Timer; 10 | using Poco::TimerCallback; 11 | using Poco::Thread; 12 | using Poco::Stopwatch; 13 | 14 | class TimerExample{ 15 | public: 16 | TimerExample(){ _sw.start();} 17 | void onTimer(Timer& timer){ 18 | std::cout << "Callback called after " << _sw.elapsed()/1000 << " milliseconds."
<< std::endl; 19 | } 20 | private: 21 | Stopwatch _sw; 22 | }; 23 | 24 | int main(int argc, char** argv){ 25 | TimerExample example; 26 | Timer timer(250, 500); 27 | timer.start(TimerCallback(example, &TimerExample::onTimer)); 28 | 29 | Thread::sleep(3000); 30 | timer.stop(); 31 | 32 | std::string s = "correct@email.com", s2="bademail"; 33 | boost::regex expr{"\\b[a-zA-Z0-9._%-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,4}\\b"}; 34 | std::cout << std::boolalpha << boost::regex_match(s, expr) << '\n'; 35 | std::cout << std::boolalpha << boost::regex_match(s2, expr) << '\n'; 36 | 37 | return 0; 38 | } 39 | -------------------------------------------------------------------------------- /conan/exercises/consumer_gcc/.gitignore: -------------------------------------------------------------------------------- 1 | build 2 | conanbuildinfo* 3 | conaninfo* 4 | timer -------------------------------------------------------------------------------- /conan/exercises/consumer_gcc/conanfile.txt: -------------------------------------------------------------------------------- 1 | [requires] 2 | boost/1.67.0@conan/stable 3 | Poco/1.9.0@pocoproject/stable 4 | 5 | [generators] 6 | cmake 7 | 8 | [options] 9 | Boost:shared=False 10 | Poco:shared=False 11 | -------------------------------------------------------------------------------- /conan/exercises/consumer_gcc/timer.cpp: -------------------------------------------------------------------------------- 1 | #include "Poco/Timer.h" 2 | #include "Poco/Thread.h" 3 | #include "Poco/Stopwatch.h" 4 | 5 | #include <boost/regex.hpp> 6 | #include <iostream> 7 | #include <string> 8 | 9 | using Poco::Timer; 10 | using Poco::TimerCallback; 11 | using Poco::Thread; 12 | using Poco::Stopwatch; 13 | 14 | class TimerExample{ 15 | public: 16 | TimerExample(){ _sw.start();} 17 | void onTimer(Timer& timer){ 18 | std::cout << "Callback called after " << _sw.elapsed()/1000 << " milliseconds." << std::endl; 19 | } 20 | private: 21 | Stopwatch _sw; 22 | }; 23 | 24 | int main(int argc, char** argv){ 25 | TimerExample example; 26 | Timer timer(250, 500); 27 | timer.start(TimerCallback(example, &TimerExample::onTimer)); 28 | 29 | Thread::sleep(3000); 30 | timer.stop(); 31 | 32 | std::string s = "correct@email.com", s2="bademail"; 33 | boost::regex expr{"\\b[a-zA-Z0-9._%-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,4}\\b"}; 34 | std::cout << std::boolalpha << boost::regex_match(s, expr) << '\n'; 35 | std::cout << std::boolalpha << boost::regex_match(s2, expr) << '\n'; 36 | 37 | return 0; 38 | } 39 | -------------------------------------------------------------------------------- /conan/exercises/create/README: -------------------------------------------------------------------------------- 1 | This folder is empty on purpose. 2 | Files will be created with the "conan new" command. 3 | The folder is still necessary so that the "catchup" script works. -------------------------------------------------------------------------------- /conan/exercises/create_sources/README: -------------------------------------------------------------------------------- 1 | This folder is empty on purpose. 2 | Files will be created with the "conan new" command. 3 | The folder is still necessary so that the "catchup" script works.
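For reference, the flow these two folders support (Exercises 5 and 6 in catchup.sh) looks roughly like this when run by hand from create/ or create_sources/ respectively; the Hello/0.1 reference and the user/testing channel simply mirror the catchup script:
$ conan new Hello/0.1 -t              # Exercise 5: generates a recipe plus a test_package/
$ conan new Hello/0.1 -t -s           # Exercise 6: same, with the sources embedded in the recipe
$ conan create . user/testing         # builds the package and runs test_package against it
$ conan create . user/testing -s build_type=Debug
$ conan search Hello/0.1@user/testing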
-------------------------------------------------------------------------------- /conan/exercises/gtest/.gitignore: -------------------------------------------------------------------------------- 1 | test_package/build 2 | *.pyc 3 | conanbuildinfo* 4 | conaninfo* 5 | -------------------------------------------------------------------------------- /conan/exercises/gtest/consumer/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | project(PackageTest CXX) 2 | cmake_minimum_required(VERSION 2.8.12) 3 | 4 | include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake) 5 | conan_basic_setup() 6 | 7 | add_executable(example example.cpp) 8 | target_link_libraries(example ${CONAN_LIBS}) 9 | 10 | # CTest is a testing tool that can be used to test your project. 11 | # enable_testing() 12 | # add_test(NAME example 13 | # WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/bin 14 | # COMMAND example) 15 | -------------------------------------------------------------------------------- /conan/exercises/gtest/consumer/conanfile.txt: -------------------------------------------------------------------------------- 1 | [requires] 2 | Hello/0.1@user/testing 3 | 4 | [generators] 5 | cmake 6 | 7 | -------------------------------------------------------------------------------- /conan/exercises/gtest/consumer/example.cpp: -------------------------------------------------------------------------------- 1 | #include <iostream> 2 | #include "hello.h" 3 | 4 | int main() { 5 | hello(); 6 | } 7 | -------------------------------------------------------------------------------- /conan/exercises/gtest/package/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | PROJECT(MyHello) 2 | cmake_minimum_required(VERSION 2.8) 3 | 4 | include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake) 5 | conan_basic_setup() 6 | 7 | add_library(hello STATIC hello.cpp) 8 | 9 | add_executable(runUnitTests test.cpp) 10 | target_link_libraries(runUnitTests hello ${CONAN_LIBS}) 11 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") 12 | add_test(runUnitTests runUnitTests) 13 | -------------------------------------------------------------------------------- /conan/exercises/gtest/package/conanfile.py: -------------------------------------------------------------------------------- 1 | from conans import ConanFile, CMake 2 | 3 | 4 | class HelloConan(ConanFile): 5 | name = "Hello" 6 | version = "0.1" 7 | settings = "os", "compiler", "build_type", "arch" 8 | generators = "cmake" 9 | exports_sources = "*" 10 | requires = "gtest/1.8.0@bincrafters/stable" 11 | default_options = "gtest:shared=False" 12 | 13 | def build(self): 14 | cmake = CMake(self) 15 | cmake.configure() 16 | cmake.build() 17 | self.run("bin/runUnitTests") 18 | 19 | def package(self): 20 | self.copy("*.h", dst="include") 21 | self.copy("*.a", dst="lib", keep_path=False) 22 | 23 | def package_info(self): 24 | self.cpp_info.libs = ["hello"] 25 | -------------------------------------------------------------------------------- /conan/exercises/gtest/package/hello.cpp: -------------------------------------------------------------------------------- 1 | #include "hello.h" 2 | 3 | string message(){ 4 | return "Hello World!"; 5 | } 6 | 7 | void hello(){ 8 | cout << message() << endl; 9 | } -------------------------------------------------------------------------------- /conan/exercises/gtest/package/hello.h: -------------------------------------------------------------------------------- 1 | #include <iostream> 2 | #include <string> 3 | using namespace
std; 4 | 5 | string message(); 6 | void hello(); -------------------------------------------------------------------------------- /conan/exercises/gtest/package/test.cpp: -------------------------------------------------------------------------------- 1 | // In the test file 2 | #include "gtest/gtest.h" 3 | #include "hello.h" 4 | 5 | TEST(SalutationTest, Static) { 6 | EXPECT_EQ(string("Hello World!"), message()); 7 | } 8 | -------------------------------------------------------------------------------- /conan/exercises/gtest_build_requires/.gitignore: -------------------------------------------------------------------------------- 1 | test_package/build 2 | *.pyc 3 | conanbuildinfo* 4 | conaninfo* 5 | -------------------------------------------------------------------------------- /conan/exercises/gtest_build_requires/consumer/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | project(PackageTest CXX) 2 | cmake_minimum_required(VERSION 2.8.12) 3 | 4 | include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake) 5 | conan_basic_setup() 6 | 7 | add_executable(example example.cpp) 8 | target_link_libraries(example ${CONAN_LIBS}) 9 | 10 | # CTest is a testing tool that can be used to test your project. 11 | # enable_testing() 12 | # add_test(NAME example 13 | # WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/bin 14 | # COMMAND example) 15 | -------------------------------------------------------------------------------- /conan/exercises/gtest_build_requires/consumer/conanfile.txt: -------------------------------------------------------------------------------- 1 | [requires] 2 | hello/1.0@lasote/testing 3 | 4 | -------------------------------------------------------------------------------- /conan/exercises/gtest_build_requires/consumer/example.cpp: -------------------------------------------------------------------------------- 1 | #include <iostream> 2 | #include "hello.h" 3 | 4 | int main() { 5 | hello(); 6 | } 7 | -------------------------------------------------------------------------------- /conan/exercises/gtest_build_requires/consumer/test_profile.txt: -------------------------------------------------------------------------------- 1 | [env] 2 | TEST_ENABLED=1 3 | 4 | [options] 5 | gtest:shared=False 6 | 7 | [build_requires] 8 | hello*: gtest/1.8.0@lasote/stable 9 | -------------------------------------------------------------------------------- /conan/exercises/gtest_build_requires/package/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | PROJECT(MyHello) 2 | cmake_minimum_required(VERSION 2.8) 3 | 4 | include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake) 5 | conan_basic_setup() 6 | 7 | add_library(hello STATIC hello.cpp) 8 | 9 | if(TEST_ENABLED) 10 | add_executable(runUnitTests test.cpp) 11 | target_link_libraries(runUnitTests hello ${CONAN_LIBS}) 12 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") 13 | add_test(runUnitTests runUnitTests) 14 | endif() 15 | 16 | 17 | -------------------------------------------------------------------------------- /conan/exercises/gtest_build_requires/package/conanfile.py: -------------------------------------------------------------------------------- 1 | import os 2 | from conans import ConanFile, CMake 3 | 4 | 5 | class HelloConan(ConanFile): 6 | name = "hello" 7 | version = "1.0" 8 | settings = "os", "compiler", "build_type", "arch" 9 | generators = "cmake" 10 | exports_sources = "*" 11 | 12 | def build(self): 13 | cmake = CMake(self) 14 | 15 | if os.environ.get("TEST_ENABLED") == "1": 16 |
cmake.definitions["TEST_ENABLED"] = "1" 17 | 18 | cmake.configure() 19 | cmake.build() 20 | 21 | if os.environ.get("TEST_ENABLED") == "1": 22 | self.run("bin/runUnitTests") 23 | 24 | def package(self): 25 | self.copy("*.h", dst="include") 26 | self.copy("*.a", dst="lib", keep_path=False) 27 | 28 | def package_info(self): 29 | self.cpp_info.libs = ["hello"] 30 | -------------------------------------------------------------------------------- /conan/exercises/gtest_build_requires/package/hello.cpp: -------------------------------------------------------------------------------- 1 | #include "hello.h" 2 | 3 | string message(){ 4 | return "Hello World!"; 5 | } 6 | 7 | void hello(){ 8 | cout << message() << endl; 9 | } -------------------------------------------------------------------------------- /conan/exercises/gtest_build_requires/package/hello.h: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | using namespace std; 4 | 5 | string message(); 6 | void hello(); -------------------------------------------------------------------------------- /conan/exercises/gtest_build_requires/package/test.cpp: -------------------------------------------------------------------------------- 1 | // In the test file 2 | #include 3 | #include "hello.h" 4 | 5 | TEST(SalutationTest, Static) { 6 | EXPECT_EQ(string("Hello World!"), message()); 7 | } 8 | -------------------------------------------------------------------------------- /conan/exercises/header_only/.gitignore: -------------------------------------------------------------------------------- 1 | conanfile.py 2 | test_package/* 3 | -------------------------------------------------------------------------------- /conan/exercises/header_only/example.cpp: -------------------------------------------------------------------------------- 1 | #include "picojson/picojson.h" 2 | 3 | int main(void) { 4 | 5 | const char* json = "{\"a\":1}"; 6 | picojson::value v; 7 | std::string err; 8 | const char* json_end = picojson::parse(v, json, json + strlen(json), &err); 9 | if (! err.empty()) { 10 | std::cerr << err << std::endl; 11 | } 12 | std::cout << "Json parsed ok!" << std::endl; 13 | 14 | return 0; 15 | } 16 | 17 | -------------------------------------------------------------------------------- /conan/exercises/jenkins/Jenkinsfile: -------------------------------------------------------------------------------- 1 | def artifactory_name = "artifactory-ha" 2 | def artifactory_repo = "myconanrepo" 3 | 4 | node { 5 | def server = Artifactory.server artifactory_name 6 | def client = Artifactory.newConanClient() 7 | def serverName = client.remote.add server: server, repo: artifactory_repo 8 | stage("Get recipe"){ 9 | checkout scm 10 | } 11 | 12 | stage("Build package"){ 13 | client.run(command: "create . team/stable") 14 | } 15 | 16 | stage("Upload packages"){ 17 | String command = "upload * --all -r ${serverName} --confirm" 18 | def b = client.run(command: command) 19 | server.publishBuildInfo b 20 | } 21 | } -------------------------------------------------------------------------------- /conan/exercises/linux-armv7: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DEFAULT_DOCKCROSS_IMAGE=conanclass/linux-armv7 4 | 5 | #------------------------------------------------------------------------------ 6 | # Helpers 7 | # 8 | err() { 9 | echo -e >&2 ERROR: $@\\n 10 | } 11 | 12 | die() { 13 | err $@ 14 | exit 1 15 | } 16 | 17 | has() { 18 | # eg. 
has command update 19 | local kind=$1 20 | local name=$2 21 | 22 | type -t $kind:$name | grep -q function 23 | } 24 | 25 | #------------------------------------------------------------------------------ 26 | # Command handlers 27 | # 28 | command:update-image() { 29 | docker pull $FINAL_IMAGE 30 | } 31 | 32 | help:update-image() { 33 | echo Pull the latest $FINAL_IMAGE . 34 | } 35 | 36 | command:update-script() { 37 | if cmp -s <( docker run --rm $FINAL_IMAGE ) $0; then 38 | echo $0 is up to date 39 | else 40 | echo -n Updating $0 '... ' 41 | docker run --rm $FINAL_IMAGE > $0 && echo ok 42 | fi 43 | } 44 | 45 | help:update-image() { 46 | echo Update $0 from $FINAL_IMAGE . 47 | } 48 | 49 | command:update() { 50 | command:update-image 51 | command:update-script 52 | } 53 | 54 | help:update() { 55 | echo Pull the latest $FINAL_IMAGE, and then update $0 from that. 56 | } 57 | 58 | command:help() { 59 | if [[ $# != 0 ]]; then 60 | if ! has command $1; then 61 | err \"$1\" is not an dockcross command 62 | command:help 63 | elif ! has help $1; then 64 | err No help found for \"$1\" 65 | else 66 | help:$1 67 | fi 68 | else 69 | cat >&2 < 88 | ENDHELP 89 | exit 1 90 | fi 91 | } 92 | 93 | #------------------------------------------------------------------------------ 94 | # Option processing 95 | # 96 | special_update_command='' 97 | while [[ $# != 0 ]]; do 98 | case $1 in 99 | 100 | --) 101 | shift 102 | break 103 | ;; 104 | 105 | --args|-a) 106 | ARG_ARGS="$2" 107 | shift 2 108 | ;; 109 | 110 | --config|-c) 111 | ARG_CONFIG="$2" 112 | shift 2 113 | ;; 114 | 115 | --image|-i) 116 | ARG_IMAGE="$2" 117 | shift 2 118 | ;; 119 | update|update-image|update-script) 120 | special_update_command=$1 121 | break 122 | ;; 123 | -*) 124 | err Unknown option \"$1\" 125 | command:help 126 | exit 127 | ;; 128 | 129 | *) 130 | break 131 | ;; 132 | 133 | esac 134 | done 135 | 136 | # The precedence for options is: 137 | # 1. command-line arguments 138 | # 2. environment variables 139 | # 3. defaults 140 | 141 | # Source the config file if it exists 142 | DEFAULT_DOCKCROSS_CONFIG=~/.dockcross 143 | FINAL_CONFIG=${ARG_CONFIG-${DOCKCROSS_CONFIG-$DEFAULT_DOCKCROSS_CONFIG}} 144 | 145 | [[ -f "$FINAL_CONFIG" ]] && source "$FINAL_CONFIG" 146 | 147 | # Set the docker image 148 | FINAL_IMAGE=${ARG_IMAGE-${DOCKCROSS_IMAGE-$DEFAULT_DOCKCROSS_IMAGE}} 149 | 150 | # Handle special update command 151 | if [ "$special_update_command" != "" ]; then 152 | case $special_update_command in 153 | 154 | update) 155 | command:update 156 | exit $? 157 | ;; 158 | 159 | update-image) 160 | command:update-image 161 | exit $? 162 | ;; 163 | 164 | update-script) 165 | command:update-script 166 | exit $? 167 | ;; 168 | 169 | esac 170 | fi 171 | 172 | # Set the docker run extra args (if any) 173 | FINAL_ARGS=${ARG_ARGS-${DOCKCROSS_ARGS}} 174 | 175 | # Bash on Ubuntu on Windows 176 | UBUNTU_ON_WINDOWS=$([ -e /proc/version ] && grep -l Microsoft /proc/version || echo "") 177 | # MSYS, Git Bash, etc. 
178 | MSYS=$([ -e /proc/version ] && grep -l MINGW /proc/version || echo "") 179 | 180 | if [ -z "$UBUNTU_ON_WINDOWS" -a -z "$MSYS" ]; then 181 | USER_IDS="-e BUILDER_UID=$( id -u ) -e BUILDER_GID=$( id -g ) -e BUILDER_USER=$( id -un ) -e BUILDER_GROUP=$( id -gn )" 182 | fi 183 | 184 | # Change the PWD when working in Docker on Windows 185 | if [ -n "$UBUNTU_ON_WINDOWS" ]; then 186 | HOST_PWD=$PWD 187 | HOST_PWD=${HOST_PWD/\/mnt\//} 188 | HOST_PWD=${HOST_PWD/\//:\/} 189 | elif [ -n "$MSYS" ]; then 190 | HOST_PWD=$PWD 191 | HOST_PWD=${HOST_PWD/\//} 192 | HOST_PWD=${HOST_PWD/\//:\/} 193 | else 194 | HOST_PWD=$PWD 195 | fi 196 | 197 | # Mount Additional Volumes 198 | if [ -z "$SSH_DIR" ]; then 199 | SSH_DIR="$HOME/.ssh" 200 | fi 201 | 202 | HOST_VOLUMES= 203 | if [ -e "$SSH_DIR" ]; then 204 | HOST_VOLUMES+="-v $SSH_DIR:/home/$(id -un)/.ssh" 205 | fi 206 | 207 | #------------------------------------------------------------------------------ 208 | # Now, finally, run the command in a container 209 | # 210 | tty -s && TTY_ARGS=-ti || TTY_ARGS= 211 | CONTAINER_NAME=dockcross_$RANDOM 212 | docker run $TTY_ARGS --name $CONTAINER_NAME \ 213 | $FINAL_ARGS \ 214 | $FINAL_IMAGE "$@" 215 | run_exit_code=$? 216 | 217 | # Attempt to delete container 218 | rm_output=$(docker rm -f $CONTAINER_NAME 2>&1) 219 | rm_exit_code=$? 220 | if [[ $rm_exit_code != 0 ]]; then 221 | if [[ "$CIRCLECI" == "true" ]] && [[ $rm_output == *"Driver btrfs failed to remove"* ]]; then 222 | : # Ignore error because of https://circleci.com/docs/docker-btrfs-error/ 223 | else 224 | echo "$rm_output" 225 | exit $rm_exit_code 226 | fi 227 | fi 228 | 229 | exit $run_exit_code 230 | 231 | ################################################################################ 232 | # 233 | # This image is not intended to be run manually. 234 | # 235 | # To create a dockcross helper script for the 236 | # dockcross/linux-armv7 image, run: 237 | # 238 | # docker run --rm dockcross/linux-armv7 > dockcross-linux-armv7 239 | # chmod +x dockcross-linux-armv7 240 | # 241 | # You may then wish to move the dockcross script to your PATH. 
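# The conanclass/linux-armv7 image used by these exercises is built FROM
# dockcross/linux-armv7 (see conan/exercises/Dockerfile), so the same pattern
# should work for it as well, for example:
#
#   docker run --rm conanclass/linux-armv7 > ./linux-armv7
#   chmod +x ./linux-armv7
#   ./linux-armv7 bash -c "conan create . user/testing -pr=profile_arm/arm_gcc_debug.profile"
#
# (a sketch only: it assumes the custom image keeps the dockcross entrypoint that
# prints this script, and that conan is available inside the container, as the
# exercises above expect)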
242 | # 243 | ################################################################################ 244 | -------------------------------------------------------------------------------- /conan/exercises/profile_arm/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8) 2 | project(ZlibARM) 3 | 4 | include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake) 5 | conan_basic_setup() 6 | 7 | add_executable(example example.c) 8 | target_link_libraries(example ${CONAN_LIBS}) 9 | -------------------------------------------------------------------------------- /conan/exercises/profile_arm/arm_gcc_debug.profile: -------------------------------------------------------------------------------- 1 | [settings] 2 | os=Linux 3 | compiler=gcc 4 | compiler.version=4.9 5 | compiler.libcxx=libstdc++ 6 | build_type=Debug 7 | arch=armv7 8 | os_build=Linux 9 | arch_build=x86_64 10 | 11 | [env] 12 | CC=arm-linux-gnueabihf-gcc 13 | CXX=arm-linux-gnueabihf-g++ 14 | -------------------------------------------------------------------------------- /conan/exercises/profile_arm/conanfile.py: -------------------------------------------------------------------------------- 1 | from conans.model.conan_file import ConanFile 2 | from conans import CMake 3 | 4 | class DefaultNameConan(ConanFile): 5 | settings = "os", "compiler", "arch", "build_type" 6 | generators = "cmake" 7 | requires = "zlib/1.2.11@conan/stable" 8 | 9 | def build(self): 10 | cmake = CMake(self) 11 | cmake.configure() 12 | cmake.build() -------------------------------------------------------------------------------- /conan/exercises/profile_arm/example.c: -------------------------------------------------------------------------------- 1 | /* enough.c -- determine the maximum size of inflate's Huffman code tables over 2 | * all possible valid and complete Huffman codes, subject to a length limit. 3 | * Copyright (C) 2007, 2008, 2012 Mark Adler 4 | * Version 1.4 18 August 2012 Mark Adler 5 | */ 6 | 7 | /* Version history: 8 | 1.0 3 Jan 2007 First version (derived from codecount.c version 1.4) 9 | 1.1 4 Jan 2007 Use faster incremental table usage computation 10 | Prune examine() search on previously visited states 11 | 1.2 5 Jan 2007 Comments clean up 12 | As inflate does, decrease root for short codes 13 | Refuse cases where inflate would increase root 14 | 1.3 17 Feb 2008 Add argument for initial root table size 15 | Fix bug for initial root table size == max - 1 16 | Use a macro to compute the history index 17 | 1.4 18 Aug 2012 Avoid shifts more than bits in type (caused endless loop!) 18 | Clean up comparisons of different types 19 | Clean up code indentation 20 | */ 21 | 22 | /* 23 | Examine all possible Huffman codes for a given number of symbols and a 24 | maximum code length in bits to determine the maximum table size for zilb's 25 | inflate. Only complete Huffman codes are counted. 26 | 27 | Two codes are considered distinct if the vectors of the number of codes per 28 | length are not identical. So permutations of the symbol assignments result 29 | in the same code for the counting, as do permutations of the assignments of 30 | the bit values to the codes (i.e. only canonical codes are counted). 31 | 32 | We build a code from shorter to longer lengths, determining how many symbols 33 | are coded at each length. At each step, we have how many symbols remain to 34 | be coded, what the last code length used was, and how many bit patterns of 35 | that length remain unused. 
Then we add one to the code length and double the 36 | number of unused patterns to graduate to the next code length. We then 37 | assign all portions of the remaining symbols to that code length that 38 | preserve the properties of a correct and eventually complete code. Those 39 | properties are: we cannot use more bit patterns than are available; and when 40 | all the symbols are used, there are exactly zero possible bit patterns 41 | remaining. 42 | 43 | The inflate Huffman decoding algorithm uses two-level lookup tables for 44 | speed. There is a single first-level table to decode codes up to root bits 45 | in length (root == 9 in the current inflate implementation). The table 46 | has 1 << root entries and is indexed by the next root bits of input. Codes 47 | shorter than root bits have replicated table entries, so that the correct 48 | entry is pointed to regardless of the bits that follow the short code. If 49 | the code is longer than root bits, then the table entry points to a second- 50 | level table. The size of that table is determined by the longest code with 51 | that root-bit prefix. If that longest code has length len, then the table 52 | has size 1 << (len - root), to index the remaining bits in that set of 53 | codes. Each subsequent root-bit prefix then has its own sub-table. The 54 | total number of table entries required by the code is calculated 55 | incrementally as the number of codes at each bit length is populated. When 56 | all of the codes are shorter than root bits, then root is reduced to the 57 | longest code length, resulting in a single, smaller, one-level table. 58 | 59 | The inflate algorithm also provides for small values of root (relative to 60 | the log2 of the number of symbols), where the shortest code has more bits 61 | than root. In that case, root is increased to the length of the shortest 62 | code. This program, by design, does not handle that case, so it is verified 63 | that the number of symbols is less than 2^(root + 1). 64 | 65 | In order to speed up the examination (by about ten orders of magnitude for 66 | the default arguments), the intermediate states in the build-up of a code 67 | are remembered and previously visited branches are pruned. The memory 68 | required for this will increase rapidly with the total number of symbols and 69 | the maximum code length in bits. However this is a very small price to pay 70 | for the vast speedup. 71 | 72 | First, all of the possible Huffman codes are counted, and reachable 73 | intermediate states are noted by a non-zero count in a saved-results array. 74 | Second, the intermediate states that lead to (root + 1) bit or longer codes 75 | are used to look at all sub-codes from those junctures for their inflate 76 | memory usage. (The amount of memory used is not affected by the number of 77 | codes of root bits or less in length.) Third, the visited states in the 78 | construction of those sub-codes and the associated calculation of the table 79 | size is recalled in order to avoid recalculating from the same juncture. 80 | Beginning the code examination at (root + 1) bit codes, which is enabled by 81 | identifying the reachable nodes, accounts for about six of the orders of 82 | magnitude of improvement for the default arguments. About another four 83 | orders of magnitude come from not revisiting previous states. 
Out of 84 | approximately 2x10^16 possible Huffman codes, only about 2x10^6 sub-codes 85 | need to be examined to cover all of the possible table memory usage cases 86 | for the default arguments of 286 symbols limited to 15-bit codes. 87 | 88 | Note that an unsigned long long type is used for counting. It is quite easy 89 | to exceed the capacity of an eight-byte integer with a large number of 90 | symbols and a large maximum code length, so multiple-precision arithmetic 91 | would need to replace the unsigned long long arithmetic in that case. This 92 | program will abort if an overflow occurs. The big_t type identifies where 93 | the counting takes place. 94 | 95 | An unsigned long long type is also used for calculating the number of 96 | possible codes remaining at the maximum length. This limits the maximum 97 | code length to the number of bits in a long long minus the number of bits 98 | needed to represent the symbols in a flat code. The code_t type identifies 99 | where the bit pattern counting takes place. 100 | */ 101 | 102 | #include 103 | #include 104 | #include 105 | #include 106 | 107 | #define local static 108 | 109 | /* special data types */ 110 | typedef unsigned long long big_t; /* type for code counting */ 111 | typedef unsigned long long code_t; /* type for bit pattern counting */ 112 | struct tab { /* type for been here check */ 113 | size_t len; /* length of bit vector in char's */ 114 | char *vec; /* allocated bit vector */ 115 | }; 116 | 117 | /* The array for saving results, num[], is indexed with this triplet: 118 | 119 | syms: number of symbols remaining to code 120 | left: number of available bit patterns at length len 121 | len: number of bits in the codes currently being assigned 122 | 123 | Those indices are constrained thusly when saving results: 124 | 125 | syms: 3..totsym (totsym == total symbols to code) 126 | left: 2..syms - 1, but only the evens (so syms == 8 -> 2, 4, 6) 127 | len: 1..max - 1 (max == maximum code length in bits) 128 | 129 | syms == 2 is not saved since that immediately leads to a single code. left 130 | must be even, since it represents the number of available bit patterns at 131 | the current length, which is double the number at the previous length. 132 | left ends at syms-1 since left == syms immediately results in a single code. 133 | (left > sym is not allowed since that would result in an incomplete code.) 134 | len is less than max, since the code completes immediately when len == max. 135 | 136 | The offset into the array is calculated for the three indices with the 137 | first one (syms) being outermost, and the last one (len) being innermost. 138 | We build the array with length max-1 lists for the len index, with syms-3 139 | of those for each symbol. There are totsym-2 of those, with each one 140 | varying in length as a function of sym. See the calculation of index in 141 | count() for the index, and the calculation of size in main() for the size 142 | of the array. 143 | 144 | For the deflate example of 286 symbols limited to 15-bit codes, the array 145 | has 284,284 entries, taking up 2.17 MB for an 8-byte big_t. More than 146 | half of the space allocated for saved results is actually used -- not all 147 | possible triplets are reached in the generation of valid Huffman codes. 148 | */ 149 | 150 | /* The array for tracking visited states, done[], is itself indexed identically 151 | to the num[] array as described above for the (syms, left, len) triplet. 
152 | Each element in the array is further indexed by the (mem, rem) doublet, 153 | where mem is the amount of inflate table space used so far, and rem is the 154 | remaining unused entries in the current inflate sub-table. Each indexed 155 | element is simply one bit indicating whether the state has been visited or 156 | not. Since the ranges for mem and rem are not known a priori, each bit 157 | vector is of a variable size, and grows as needed to accommodate the visited 158 | states. mem and rem are used to calculate a single index in a triangular 159 | array. Since the range of mem is expected in the default case to be about 160 | ten times larger than the range of rem, the array is skewed to reduce the 161 | memory usage, with eight times the range for mem than for rem. See the 162 | calculations for offset and bit in beenhere() for the details. 163 | 164 | For the deflate example of 286 symbols limited to 15-bit codes, the bit 165 | vectors grow to total approximately 21 MB, in addition to the 4.3 MB done[] 166 | array itself. 167 | */ 168 | 169 | /* Globals to avoid propagating constants or constant pointers recursively */ 170 | local int max; /* maximum allowed bit length for the codes */ 171 | local int root; /* size of base code table in bits */ 172 | local int large; /* largest code table so far */ 173 | local size_t size; /* number of elements in num and done */ 174 | local int *code; /* number of symbols assigned to each bit length */ 175 | local big_t *num; /* saved results array for code counting */ 176 | local struct tab *done; /* states already evaluated array */ 177 | 178 | /* Index function for num[] and done[] */ 179 | #define INDEX(i,j,k) (((size_t)((i-1)>>1)*((i-2)>>1)+(j>>1)-1)*(max-1)+k-1) 180 | 181 | /* Free allocated space. Uses globals code, num, and done. */ 182 | local void cleanup(void) 183 | { 184 | size_t n; 185 | 186 | if (done != NULL) { 187 | for (n = 0; n < size; n++) 188 | if (done[n].len) 189 | free(done[n].vec); 190 | free(done); 191 | } 192 | if (num != NULL) 193 | free(num); 194 | if (code != NULL) 195 | free(code); 196 | } 197 | 198 | /* Return the number of possible Huffman codes using bit patterns of lengths 199 | len through max inclusive, coding syms symbols, with left bit patterns of 200 | length len unused -- return -1 if there is an overflow in the counting. 201 | Keep a record of previous results in num to prevent repeating the same 202 | calculation. Uses the globals max and num. 
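   A small worked example: count(2, 1, 2) is 1, since two symbols can only get the
   code lengths {1,1}; and for any max >= 3, count(4, 1, 2) is 2, because four
   symbols admit exactly the length vectors {2,2,2,2} and {1,2,3,3}.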
*/ 203 | local big_t count(int syms, int len, int left) 204 | { 205 | big_t sum; /* number of possible codes from this juncture */ 206 | big_t got; /* value returned from count() */ 207 | int least; /* least number of syms to use at this juncture */ 208 | int most; /* most number of syms to use at this juncture */ 209 | int use; /* number of bit patterns to use in next call */ 210 | size_t index; /* index of this case in *num */ 211 | 212 | /* see if only one possible code */ 213 | if (syms == left) 214 | return 1; 215 | 216 | /* note and verify the expected state */ 217 | assert(syms > left && left > 0 && len < max); 218 | 219 | /* see if we've done this one already */ 220 | index = INDEX(syms, left, len); 221 | got = num[index]; 222 | if (got) 223 | return got; /* we have -- return the saved result */ 224 | 225 | /* we need to use at least this many bit patterns so that the code won't be 226 | incomplete at the next length (more bit patterns than symbols) */ 227 | least = (left << 1) - syms; 228 | if (least < 0) 229 | least = 0; 230 | 231 | /* we can use at most this many bit patterns, lest there not be enough 232 | available for the remaining symbols at the maximum length (if there were 233 | no limit to the code length, this would become: most = left - 1) */ 234 | most = (((code_t)left << (max - len)) - syms) / 235 | (((code_t)1 << (max - len)) - 1); 236 | 237 | /* count all possible codes from this juncture and add them up */ 238 | sum = 0; 239 | for (use = least; use <= most; use++) { 240 | got = count(syms - use, len + 1, (left - use) << 1); 241 | sum += got; 242 | if (got == (big_t)0 - 1 || sum < got) /* overflow */ 243 | return (big_t)0 - 1; 244 | } 245 | 246 | /* verify that all recursive calls are productive */ 247 | assert(sum != 0); 248 | 249 | /* save the result and return it */ 250 | num[index] = sum; 251 | return sum; 252 | } 253 | 254 | /* Return true if we've been here before, set to true if not. Set a bit in a 255 | bit vector to indicate visiting this state. Each (syms,len,left) state 256 | has a variable size bit vector indexed by (mem,rem). The bit vector is 257 | lengthened if needed to allow setting the (mem,rem) bit. */ 258 | local int beenhere(int syms, int len, int left, int mem, int rem) 259 | { 260 | size_t index; /* index for this state's bit vector */ 261 | size_t offset; /* offset in this state's bit vector */ 262 | int bit; /* mask for this state's bit */ 263 | size_t length; /* length of the bit vector in bytes */ 264 | char *vector; /* new or enlarged bit vector */ 265 | 266 | /* point to vector for (syms,left,len), bit in vector for (mem,rem) */ 267 | index = INDEX(syms, left, len); 268 | mem -= 1 << root; 269 | offset = (mem >> 3) + rem; 270 | offset = ((offset * (offset + 1)) >> 1) + rem; 271 | bit = 1 << (mem & 7); 272 | 273 | /* see if we've been here */ 274 | length = done[index].len; 275 | if (offset < length && (done[index].vec[offset] & bit) != 0) 276 | return 1; /* done this! 
*/ 277 | 278 | /* we haven't been here before -- set the bit to show we have now */ 279 | 280 | /* see if we need to lengthen the vector in order to set the bit */ 281 | if (length <= offset) { 282 | /* if we have one already, enlarge it, zero out the appended space */ 283 | if (length) { 284 | do { 285 | length <<= 1; 286 | } while (length <= offset); 287 | vector = realloc(done[index].vec, length); 288 | if (vector != NULL) 289 | memset(vector + done[index].len, 0, length - done[index].len); 290 | } 291 | 292 | /* otherwise we need to make a new vector and zero it out */ 293 | else { 294 | length = 1 << (len - root); 295 | while (length <= offset) 296 | length <<= 1; 297 | vector = calloc(length, sizeof(char)); 298 | } 299 | 300 | /* in either case, bail if we can't get the memory */ 301 | if (vector == NULL) { 302 | fputs("abort: unable to allocate enough memory\n", stderr); 303 | cleanup(); 304 | exit(1); 305 | } 306 | 307 | /* install the new vector */ 308 | done[index].len = length; 309 | done[index].vec = vector; 310 | } 311 | 312 | /* set the bit */ 313 | done[index].vec[offset] |= bit; 314 | return 0; 315 | } 316 | 317 | /* Examine all possible codes from the given node (syms, len, left). Compute 318 | the amount of memory required to build inflate's decoding tables, where the 319 | number of code structures used so far is mem, and the number remaining in 320 | the current sub-table is rem. Uses the globals max, code, root, large, and 321 | done. */ 322 | local void examine(int syms, int len, int left, int mem, int rem) 323 | { 324 | int least; /* least number of syms to use at this juncture */ 325 | int most; /* most number of syms to use at this juncture */ 326 | int use; /* number of bit patterns to use in next call */ 327 | 328 | /* see if we have a complete code */ 329 | if (syms == left) { 330 | /* set the last code entry */ 331 | code[len] = left; 332 | 333 | /* complete computation of memory used by this code */ 334 | while (rem < left) { 335 | left -= rem; 336 | rem = 1 << (len - root); 337 | mem += rem; 338 | } 339 | assert(rem == left); 340 | 341 | /* if this is a new maximum, show the entries used and the sub-code */ 342 | if (mem > large) { 343 | large = mem; 344 | printf("max %d: ", mem); 345 | for (use = root + 1; use <= max; use++) 346 | if (code[use]) 347 | printf("%d[%d] ", code[use], use); 348 | putchar('\n'); 349 | fflush(stdout); 350 | } 351 | 352 | /* remove entries as we drop back down in the recursion */ 353 | code[len] = 0; 354 | return; 355 | } 356 | 357 | /* prune the tree if we can */ 358 | if (beenhere(syms, len, left, mem, rem)) 359 | return; 360 | 361 | /* we need to use at least this many bit patterns so that the code won't be 362 | incomplete at the next length (more bit patterns than symbols) */ 363 | least = (left << 1) - syms; 364 | if (least < 0) 365 | least = 0; 366 | 367 | /* we can use at most this many bit patterns, lest there not be enough 368 | available for the remaining symbols at the maximum length (if there were 369 | no limit to the code length, this would become: most = left - 1) */ 370 | most = (((code_t)left << (max - len)) - syms) / 371 | (((code_t)1 << (max - len)) - 1); 372 | 373 | /* occupy least table spaces, creating new sub-tables as needed */ 374 | use = least; 375 | while (rem < use) { 376 | use -= rem; 377 | rem = 1 << (len - root); 378 | mem += rem; 379 | } 380 | rem -= use; 381 | 382 | /* examine codes from here, updating table space as we go */ 383 | for (use = least; use <= most; use++) { 384 | code[len] = use; 385 | 
examine(syms - use, len + 1, (left - use) << 1, 386 | mem + (rem ? 1 << (len - root) : 0), rem << 1); 387 | if (rem == 0) { 388 | rem = 1 << (len - root); 389 | mem += rem; 390 | } 391 | rem--; 392 | } 393 | 394 | /* remove entries as we drop back down in the recursion */ 395 | code[len] = 0; 396 | } 397 | 398 | /* Look at all sub-codes starting with root + 1 bits. Look at only the valid 399 | intermediate code states (syms, left, len). For each completed code, 400 | calculate the amount of memory required by inflate to build the decoding 401 | tables. Find the maximum amount of memory required and show the code that 402 | requires that maximum. Uses the globals max, root, and num. */ 403 | local void enough(int syms) 404 | { 405 | int n; /* number of remaing symbols for this node */ 406 | int left; /* number of unused bit patterns at this length */ 407 | size_t index; /* index of this case in *num */ 408 | 409 | /* clear code */ 410 | for (n = 0; n <= max; n++) 411 | code[n] = 0; 412 | 413 | /* look at all (root + 1) bit and longer codes */ 414 | large = 1 << root; /* base table */ 415 | if (root < max) /* otherwise, there's only a base table */ 416 | for (n = 3; n <= syms; n++) 417 | for (left = 2; left < n; left += 2) 418 | { 419 | /* look at all reachable (root + 1) bit nodes, and the 420 | resulting codes (complete at root + 2 or more) */ 421 | index = INDEX(n, left, root + 1); 422 | if (root + 1 < max && num[index]) /* reachable node */ 423 | examine(n, root + 1, left, 1 << root, 0); 424 | 425 | /* also look at root bit codes with completions at root + 1 426 | bits (not saved in num, since complete), just in case */ 427 | if (num[index - 1] && n <= left << 1) 428 | examine((n - left) << 1, root + 1, (n - left) << 1, 429 | 1 << root, 0); 430 | } 431 | 432 | /* done */ 433 | printf("done: maximum of %d table entries\n", large); 434 | } 435 | 436 | /* 437 | Examine and show the total number of possible Huffman codes for a given 438 | maximum number of symbols, initial root table size, and maximum code length 439 | in bits -- those are the command arguments in that order. The default 440 | values are 286, 9, and 15 respectively, for the deflate literal/length code. 441 | The possible codes are counted for each number of coded symbols from two to 442 | the maximum. The counts for each of those and the total number of codes are 443 | shown. The maximum number of inflate table entires is then calculated 444 | across all possible codes. Each new maximum number of table entries and the 445 | associated sub-code (starting at root + 1 == 10 bits) is shown. 446 | 447 | To count and examine Huffman codes that are not length-limited, provide a 448 | maximum length equal to the number of symbols minus one. 449 | 450 | For the deflate literal/length code, use "enough". For the deflate distance 451 | code, use "enough 30 6". 452 | 453 | This uses the %llu printf format to print big_t numbers, which assumes that 454 | big_t is an unsigned long long. If the big_t type is changed (for example 455 | to a multiple precision type), the method of printing will also need to be 456 | updated. 
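   Note that in this training copy the built-in default for the number of symbols
   is reduced to 10 (see the start of main() below), so running "enough" with no
   arguments returns almost immediately; run "enough 286 9 15" to reproduce the
   full deflate literal/length case described above (which should report a maximum
   of 852 table entries, the limit zlib hard-codes as ENOUGH_LENS; "enough 30 6"
   should similarly yield 592, ENOUGH_DISTS).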
457 | */ 458 | int main(int argc, char **argv) 459 | { 460 | 461 | int syms; /* total number of symbols to code */ 462 | int n; /* number of symbols to code for this run */ 463 | big_t got; /* return value of count() */ 464 | big_t sum; /* accumulated number of codes over n */ 465 | code_t word; /* for counting bits in code_t */ 466 | 467 | /* set up globals for cleanup() */ 468 | code = NULL; 469 | num = NULL; 470 | done = NULL; 471 | 472 | /* get arguments -- default to the deflate literal/length code */ 473 | syms = 10; 474 | root = 9; 475 | max = 15; 476 | 477 | if (argc > 1) { 478 | syms = atoi(argv[1]); 479 | if (argc > 2) { 480 | root = atoi(argv[2]); 481 | if (argc > 3) 482 | max = atoi(argv[3]); 483 | } 484 | } 485 | if (argc > 4 || syms < 2 || root < 1 || max < 1) { 486 | fputs("invalid arguments, need: [sym >= 2 [root >= 1 [max >= 1]]]\n", 487 | stderr); 488 | return 1; 489 | } 490 | 491 | /* if not restricting the code length, the longest is syms - 1 */ 492 | if (max > syms - 1) 493 | max = syms - 1; 494 | 495 | /* determine the number of bits in a code_t */ 496 | for (n = 0, word = 1; word; n++, word <<= 1) 497 | ; 498 | 499 | /* make sure that the calculation of most will not overflow */ 500 | if (max > n || (code_t)(syms - 2) >= (((code_t)0 - 1) >> (max - 1))) { 501 | fputs("abort: code length too long for internal types\n", stderr); 502 | return 1; 503 | } 504 | 505 | /* reject impossible code requests */ 506 | if ((code_t)(syms - 1) > ((code_t)1 << max) - 1) { 507 | fprintf(stderr, "%d symbols cannot be coded in %d bits\n", 508 | syms, max); 509 | return 1; 510 | } 511 | 512 | /* allocate code vector */ 513 | code = calloc(max + 1, sizeof(int)); 514 | if (code == NULL) { 515 | fputs("abort: unable to allocate enough memory\n", stderr); 516 | return 1; 517 | } 518 | 519 | /* determine size of saved results array, checking for overflows, 520 | allocate and clear the array (set all to zero with calloc()) */ 521 | if (syms == 2) /* iff max == 1 */ 522 | num = NULL; /* won't be saving any results */ 523 | else { 524 | size = syms >> 1; 525 | if (size > ((size_t)0 - 1) / (n = (syms - 1) >> 1) || 526 | (size *= n, size > ((size_t)0 - 1) / (n = max - 1)) || 527 | (size *= n, size > ((size_t)0 - 1) / sizeof(big_t)) || 528 | (num = calloc(size, sizeof(big_t))) == NULL) { 529 | fputs("abort: unable to allocate enough memory\n", stderr); 530 | cleanup(); 531 | return 1; 532 | } 533 | } 534 | 535 | /* count possible codes for all numbers of symbols, add up counts */ 536 | sum = 0; 537 | for (n = 2; n <= syms; n++) { 538 | got = count(n, 1, 2); 539 | sum += got; 540 | if (got == (big_t)0 - 1 || sum < got) { /* overflow */ 541 | fputs("abort: can't count that high!\n", stderr); 542 | cleanup(); 543 | return 1; 544 | } 545 | printf("%llu %d-codes\n", got, n); 546 | } 547 | printf("%llu total codes for 2 to %d symbols", sum, syms); 548 | if (max < syms - 1) 549 | printf(" (%d-bit length limit)\n", max); 550 | else 551 | puts(" (no length limit)"); 552 | 553 | /* allocate and clear done array for beenhere() */ 554 | if (syms == 2) 555 | done = NULL; 556 | else if (size > ((size_t)0 - 1) / sizeof(struct tab) || 557 | (done = calloc(size, sizeof(struct tab))) == NULL) { 558 | fputs("abort: unable to allocate enough memory\n", stderr); 559 | cleanup(); 560 | return 1; 561 | } 562 | 563 | /* find and show maximum inflate table usage */ 564 | if (root > max) /* reduce root to max length */ 565 | root = max; 566 | if ((code_t)syms < ((code_t)1 << (root + 1))) 567 | enough(syms); 568 | else 569 | 
puts("cannot handle minimum code lengths > root"); 570 | 571 | /* done */ 572 | cleanup(); 573 | return 0; 574 | } 575 | -------------------------------------------------------------------------------- /dockerlfc/build.gradle: -------------------------------------------------------------------------------- 1 | group 'org.jfrog.test' 2 | version '1.0-SNAPSHOT' 3 | 4 | apply plugin: 'groovy' 5 | apply plugin: 'java' 6 | 7 | sourceCompatibility = 1.5 8 | 9 | repositories { 10 | jcenter() 11 | } 12 | 13 | dependencies { 14 | compile 'org.codehaus.groovy:groovy-all:2.3.11' 15 | compile 'org.codehaus.groovy.modules.http-builder:http-builder:0.7.1' 16 | compile 'org.jfrog.artifactory.client:artifactory-java-client-services:+' 17 | compile 'org.jfrog.artifactory.client:artifactory-java-client-api:2.3.4' 18 | testCompile group: 'junit', name: 'junit', version: '4.11' 19 | } 20 | -------------------------------------------------------------------------------- /dockerlfc/gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ############################################################################## 4 | ## 5 | ## Gradle start up script for UN*X 6 | ## 7 | ############################################################################## 8 | 9 | # Attempt to set APP_HOME 10 | # Resolve links: $0 may be a link 11 | PRG="$0" 12 | # Need this for relative symlinks. 13 | while [ -h "$PRG" ] ; do 14 | ls=`ls -ld "$PRG"` 15 | link=`expr "$ls" : '.*-> \(.*\)$'` 16 | if expr "$link" : '/.*' > /dev/null; then 17 | PRG="$link" 18 | else 19 | PRG=`dirname "$PRG"`"/$link" 20 | fi 21 | done 22 | SAVED="`pwd`" 23 | cd "`dirname \"$PRG\"`/" >/dev/null 24 | APP_HOME="`pwd -P`" 25 | cd "$SAVED" >/dev/null 26 | 27 | APP_NAME="Gradle" 28 | APP_BASE_NAME=`basename "$0"` 29 | 30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 31 | DEFAULT_JVM_OPTS="" 32 | 33 | # Use the maximum available, or set MAX_FD != -1 to use that value. 34 | MAX_FD="maximum" 35 | 36 | warn ( ) { 37 | echo "$*" 38 | } 39 | 40 | die ( ) { 41 | echo 42 | echo "$*" 43 | echo 44 | exit 1 45 | } 46 | 47 | # OS specific support (must be 'true' or 'false'). 48 | cygwin=false 49 | msys=false 50 | darwin=false 51 | nonstop=false 52 | case "`uname`" in 53 | CYGWIN* ) 54 | cygwin=true 55 | ;; 56 | Darwin* ) 57 | darwin=true 58 | ;; 59 | MINGW* ) 60 | msys=true 61 | ;; 62 | NONSTOP* ) 63 | nonstop=true 64 | ;; 65 | esac 66 | 67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 68 | 69 | # Determine the Java command to use to start the JVM. 70 | if [ -n "$JAVA_HOME" ] ; then 71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 72 | # IBM's JDK on AIX uses strange locations for the executables 73 | JAVACMD="$JAVA_HOME/jre/sh/java" 74 | else 75 | JAVACMD="$JAVA_HOME/bin/java" 76 | fi 77 | if [ ! -x "$JAVACMD" ] ; then 78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 79 | 80 | Please set the JAVA_HOME variable in your environment to match the 81 | location of your Java installation." 82 | fi 83 | else 84 | JAVACMD="java" 85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 86 | 87 | Please set the JAVA_HOME variable in your environment to match the 88 | location of your Java installation." 89 | fi 90 | 91 | # Increase the maximum file descriptors if we can. 
92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 93 | MAX_FD_LIMIT=`ulimit -H -n` 94 | if [ $? -eq 0 ] ; then 95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 96 | MAX_FD="$MAX_FD_LIMIT" 97 | fi 98 | ulimit -n $MAX_FD 99 | if [ $? -ne 0 ] ; then 100 | warn "Could not set maximum file descriptor limit: $MAX_FD" 101 | fi 102 | else 103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 104 | fi 105 | fi 106 | 107 | # For Darwin, add options to specify how the application appears in the dock 108 | if $darwin; then 109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 110 | fi 111 | 112 | # For Cygwin, switch paths to Windows format before running java 113 | if $cygwin ; then 114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 116 | JAVACMD=`cygpath --unix "$JAVACMD"` 117 | 118 | # We build the pattern for arguments to be converted via cygpath 119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 120 | SEP="" 121 | for dir in $ROOTDIRSRAW ; do 122 | ROOTDIRS="$ROOTDIRS$SEP$dir" 123 | SEP="|" 124 | done 125 | OURCYGPATTERN="(^($ROOTDIRS))" 126 | # Add a user-defined pattern to the cygpath arguments 127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 129 | fi 130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 131 | i=0 132 | for arg in "$@" ; do 133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 135 | 136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 138 | else 139 | eval `echo args$i`="\"$arg\"" 140 | fi 141 | i=$((i+1)) 142 | done 143 | case $i in 144 | (0) set -- ;; 145 | (1) set -- "$args0" ;; 146 | (2) set -- "$args0" "$args1" ;; 147 | (3) set -- "$args0" "$args1" "$args2" ;; 148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 154 | esac 155 | fi 156 | 157 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules 158 | function splitJvmOpts() { 159 | JVM_OPTS=("$@") 160 | } 161 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS 162 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" 163 | 164 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" 165 | -------------------------------------------------------------------------------- /dockerlfc/gradlew.bat: -------------------------------------------------------------------------------- 1 | @if "%DEBUG%" == "" @echo off 2 | @rem ########################################################################## 3 | @rem 4 | @rem Gradle startup script for Windows 5 | @rem 6 | @rem ########################################################################## 7 | 8 | @rem Set local scope for the variables with windows NT shell 9 | if "%OS%"=="Windows_NT" setlocal 10 | 11 | set DIRNAME=%~dp0 12 | if 
"%DIRNAME%" == "" set DIRNAME=. 13 | set APP_BASE_NAME=%~n0 14 | set APP_HOME=%DIRNAME% 15 | 16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 17 | set DEFAULT_JVM_OPTS= 18 | 19 | @rem Find java.exe 20 | if defined JAVA_HOME goto findJavaFromJavaHome 21 | 22 | set JAVA_EXE=java.exe 23 | %JAVA_EXE% -version >NUL 2>&1 24 | if "%ERRORLEVEL%" == "0" goto init 25 | 26 | echo. 27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 28 | echo. 29 | echo Please set the JAVA_HOME variable in your environment to match the 30 | echo location of your Java installation. 31 | 32 | goto fail 33 | 34 | :findJavaFromJavaHome 35 | set JAVA_HOME=%JAVA_HOME:"=% 36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 37 | 38 | if exist "%JAVA_EXE%" goto init 39 | 40 | echo. 41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 42 | echo. 43 | echo Please set the JAVA_HOME variable in your environment to match the 44 | echo location of your Java installation. 45 | 46 | goto fail 47 | 48 | :init 49 | @rem Get command-line arguments, handling Windows variants 50 | 51 | if not "%OS%" == "Windows_NT" goto win9xME_args 52 | if "%@eval[2+2]" == "4" goto 4NT_args 53 | 54 | :win9xME_args 55 | @rem Slurp the command line arguments. 56 | set CMD_LINE_ARGS= 57 | set _SKIP=2 58 | 59 | :win9xME_args_slurp 60 | if "x%~1" == "x" goto execute 61 | 62 | set CMD_LINE_ARGS=%* 63 | goto execute 64 | 65 | :4NT_args 66 | @rem Get arguments from the 4NT Shell from JP Software 67 | set CMD_LINE_ARGS=%$ 68 | 69 | :execute 70 | @rem Setup the command line 71 | 72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 73 | 74 | @rem Execute Gradle 75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 76 | 77 | :end 78 | @rem End local scope for the variables with windows NT shell 79 | if "%ERRORLEVEL%"=="0" goto mainEnd 80 | 81 | :fail 82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 83 | rem the _cmd.exe /c_ return code! 84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 85 | exit /b 1 86 | 87 | :mainEnd 88 | if "%OS%"=="Windows_NT" endlocal 89 | 90 | :omega 91 | -------------------------------------------------------------------------------- /dockerlfc/settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'dockerlfc' 2 | 3 | -------------------------------------------------------------------------------- /dockerlfc/src/swampup/DemoSwampUpTraining.groovy: -------------------------------------------------------------------------------- 1 | package swampup 2 | 3 | import org.jfrog.artifactory.client.Artifactory 4 | import org.jfrog.artifactory.client.ArtifactoryClient 5 | 6 | 7 | /** 8 | * Created by stanleyf on 10/05/2017. 
9 | */ 10 | class DemoSwampUpTraining extends GroovyTestCase { 11 | def artifactoryUrl = 'http://35.197.6.224/artifactory' 12 | Artifactory artifactory = ArtifactoryClient.create(artifactoryUrl, "admin", "password") 13 | 14 | def testApp = new testDockerApp (artifactory) 15 | String testLatestApp 16 | 17 | void setUp() { 18 | super.setUp() 19 | } 20 | 21 | void test1_testLatestDockerApp () { 22 | testLatestApp = "/Users/stanleyf/git/swampup/dockerlfc/src/swampup/latestDockerApp.aql" 23 | testApp.setAqlFile(testLatestApp) 24 | testApp.runTest() 25 | } 26 | 27 | void test2_giveApprovalForRelease () { 28 | testLatestApp = "/Users/stanleyf/git/swampup/dockerlfc/src/swampup/qaApprovalCriteria.aql" 29 | testApp.setAqlFile(testLatestApp) 30 | testApp.sendApproval() 31 | } 32 | 33 | void test3_devOpsDeployRelease () { 34 | testLatestApp = "/Users/stanleyf/git/swampup/dockerlfc/src/swampup/latestDockerAppReleaseApproval.aql" 35 | testApp.setAqlFile(testLatestApp) 36 | testApp.downloadToProduction() 37 | } 38 | 39 | void test4_whereDeployed () { 40 | testLatestApp = "/Users/stanleyf/git/swampup/dockerlfc/src/swampup/listPropertiesOnDeployedDockerApp.aql" 41 | testApp.setAqlFile(testLatestApp) 42 | testApp.listPropertiesDeployedDockerApp() 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /dockerlfc/src/swampup/latestDockerApp.aql: -------------------------------------------------------------------------------- 1 | items.find ( 2 | { 3 | "repo":{"$eq":"automation-docker-prod-local"}, 4 | "@docker.repoName":{"$eq":"docker-app"}, 5 | "@docker.manifest":{"$eq":"latest"}, 6 | "@qa.functional-test":{"$eq":"pass"} 7 | } 8 | ).include("name", "artifact.module.build.name", "artifact.module.build.number", "property.*") 9 | -------------------------------------------------------------------------------- /dockerlfc/src/swampup/latestDockerAppReleaseApproval.aql: -------------------------------------------------------------------------------- 1 | items.find ( 2 | { 3 | "repo":{"$eq":"automation-docker-prod-local"}, 4 | "@docker.repoName":{"$eq":"docker-app"}, 5 | "@docker.manifest":{"$eq":"latest"}, 6 | "@qa.release-approved":{"$eq":"yes"} 7 | } 8 | ).include("name", "artifact.module.build.name", "artifact.module.build.number", "property.*") 9 | -------------------------------------------------------------------------------- /dockerlfc/src/swampup/listPropertiesOnDeployedDockerApp.aql: -------------------------------------------------------------------------------- 1 | items.find ( 2 | { 3 | "repo":{"$eq":"automation-docker-prod-local"}, 4 | "@docker.repoName":{"$eq":"docker-app"}, 5 | "@docker.manifest":{"$eq":"latest"}, 6 | "@devops.deployed":{"$eq":"true"} 7 | } 8 | ).include("name", "artifact.module.build.name", "artifact.module.build.number", "property.*") -------------------------------------------------------------------------------- /dockerlfc/src/swampup/qaApprovalCriteria.aql: -------------------------------------------------------------------------------- 1 | items.find ( 2 | { 3 | "repo":{"$eq":"automation-docker-prod-local"}, 4 | "@docker.repoName":{"$eq":"docker-app"}, 5 | "@docker.manifest":{"$eq":"latest"}, 6 | "@qa.functional-test":{"$eq":"pass"}, 7 | "@qa.manual-test":{"$eq":"pass"} 8 | } 9 | ).include("name", "artifact.module.build.name", "artifact.module.build.number", "property.*") 10 | -------------------------------------------------------------------------------- /dockerlfc/src/swampup/testDockerApp.groovy: 
-------------------------------------------------------------------------------- 1 | package swampup 2 | 3 | import org.jfrog.artifactory.client.Artifactory 4 | import org.jfrog.artifactory.client.ArtifactoryRequest 5 | import org.jfrog.artifactory.client.impl.ArtifactoryRequestImpl 6 | 7 | /** 8 | * Created by stanleyf on 25/04/2017. 9 | */ 10 | class testDockerApp { 11 | def buildName 12 | def buildNumber 13 | def response 14 | Artifactory artifactory 15 | String aqlFile 16 | 17 | testDockerApp(Artifactory artifactory) { 18 | this.artifactory = artifactory 19 | } 20 | 21 | public def setAqlFile (String aqlFile) { 22 | this.aqlFile = aqlFile 23 | } 24 | 25 | // run simulated tests and update the properties. 26 | public def runTest () { 27 | def testPropertyMap = [:] 28 | response = aqlQueryRequest() 29 | getBuildInfo() 30 | if (runAppTestSuite()) { 31 | testPropertyMap.put ("qa.manual-test","pass") 32 | testPropertyMap.put("qa.test-team","platform") 33 | } else { 34 | testPropertyMap.put ("qa.manual-test","fail") 35 | testPropertyMap.put ("qa.test-team","platform") 36 | testPropertyMap.put ("qa.jira-tickets","jira-123") 37 | } 38 | updateTestProperty(testPropertyMap) 39 | println "Test Complete" 40 | } 41 | 42 | 43 | public def downloadToProduction () { 44 | def testPropertyMap = [:] 45 | response = aqlQueryRequest() 46 | getBuildInfo() 47 | testPropertyMap.put("devops.deployed","true") 48 | testPropertyMap.put("devops.deploy","data-center-1") 49 | updateTestProperty(testPropertyMap) 50 | } 51 | 52 | 53 | public def sendApproval () { 54 | def testPropertyMap = [:] 55 | response = aqlQueryRequest() 56 | if (checkApprovalCriteria()) { 57 | testPropertyMap.put ("qa.release-approved","yes") 58 | testPropertyMap.put ("qa.approver","swampup-qa") 59 | updateTestProperty(testPropertyMap) 60 | println "Release Approved" 61 | } else { 62 | testPropertyMap.put ("qa.release-approved","no") 63 | updateTestProperty(testPropertyMap) 64 | println "Release NOT Approved" 65 | } 66 | } 67 | 68 | 69 | def updateTestProperty (def properties) { 70 | properties.each {it -> 71 | artifactory.repository("automation-docker-prod-local") 72 | .folder("docker-app/${buildNumber}") 73 | .properties() 74 | .addProperty(it.key as String, it.value as String) 75 | .doSet(true); 76 | 77 | artifactory.repository("automation-docker-prod-local") 78 | .folder("docker-app/latest") 79 | .properties() 80 | .addProperty(it.key as String, it.value as String) 81 | .doSet(true); 82 | } 83 | } 84 | 85 | 86 | def listPropertiesDeployedDockerApp () { 87 | response = aqlQueryRequest() 88 | getBuildInfo() 89 | List properties = response.results[0].properties 90 | properties.each {it -> 91 | println (it.key + ":" + it.value) 92 | } 93 | } 94 | 95 | // verify no xray critical errors; the aql checks for other criteria. 96 | def checkApprovalCriteria () { 97 | List properties = response.results[0].properties 98 | def approved = true 99 | properties.each { it -> 100 | switch (it.key) { 101 | case "build.name" : 102 | buildName = it.value 103 | println "Jenkins Build Name: " + it.value 104 | break 105 | case "build.number" : 106 | buildNumber = it.value 107 | println "Jenkins Build Number: " + it.value 108 | break 109 | case ~/xray.*\.alert\.topSeverity/ : // pattern case: the Xray property key carries an id, so a literal "xray*" string never matches 110 | if (it.value == "Critical") { 111 | approved = false 112 | } 113 | break 114 | } 115 | } 116 | return approved 117 | } 118 | 119 | // placeholder for tests.
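// (editor's note, not in the original class code) runAppTestSuite below is a stub that always returns true; a real suite
// would exercise the container that was just started and assert on the response, e.g. something along the lines of:
// assert new URL("http://localhost/swampup").text != null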
120 | def static runAppTestSuite() { 121 | println "Docker Application started: http://localhost/swampup; Please run your tests" 122 | return true 123 | } 124 | 125 | // prints the build name and number under test - since the 'latest' tag is retrieved, the matching build number has to be looked up. 126 | def getBuildInfo () { 127 | List properties = response.results[0].properties 128 | properties.each { it -> 129 | switch (it.key) { 130 | case "build.name" : 131 | buildName = it.value 132 | println "Jenkins Build Name: " + it.value 133 | break 134 | case "build.number" : 135 | buildNumber = it.value 136 | println "Jenkins Build Number: " + it.value 137 | break 138 | } 139 | } 140 | } 141 | 142 | // AQL request to artifactory 143 | def aqlQueryRequest () { 144 | def aqlQuery = new File (aqlFile).text 145 | ArtifactoryRequest aqlRequest = new ArtifactoryRequestImpl() 146 | .apiUrl("api/search/aql") 147 | .method(ArtifactoryRequest.Method.POST) 148 | .requestType(ArtifactoryRequest.ContentType.TEXT) 149 | .responseType(ArtifactoryRequest.ContentType.JSON) 150 | .requestBody(aqlQuery); 151 | return artifactory.restCall(aqlRequest) 152 | } 153 | } 154 | -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | *.lic 3 | *.iml 4 | *.tgz 5 | *.zip 6 | *.gz 7 | .idea/ 8 | real/* -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/README.md: -------------------------------------------------------------------------------- 1 | # introduction-to-artifactory-scripts -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/aql.json: -------------------------------------------------------------------------------- 1 | items.find( 2 | { 3 | "@product.name" : {"$eq" : "PiedPiper"}, 4 | "@product.version" : {"$eq" : "1.0"} 5 | }) -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/commands-unix.list: -------------------------------------------------------------------------------- 1 | ############################################################ 2 | ##FIRST COMMANDS : GET YOUR ARTIFACTORY ## 3 | ############################################################ 4 | #keep your git directory in memory for later commands 5 | export SCRIPT_DIR=$(pwd) 6 | 7 | #Set your workspace directory for the class 8 | export WORKSPACE_CLASS= 9 | 10 | #define a user login to connect to artifactory (further usage) 11 | export USER_LOGIN= 12 | 13 | #Download the following archive from the common Artifactory for the class 14 | cd $WORKSPACE_CLASS 15 | 16 | curl -H "X-JFrog-Art-Api:AKCp2UNgwajwpoz5PghBirpgR5xafggbFyCkSU4DTqK2fQPd9thVcar7wHqk2BNmYjg1km6vJ" -O http://192.168.1.139:8081/artifactory/jswampup-remote/artifactory/artifactory-pro-4.7.6.zip 17 | 18 | ################################################################## 19 | ## OVERVIEW SECTION ## 20 | ## START ARTIFACTORY AND CONNECT ## 21 | ################################################################## 22 | 23 | #unzip the archive 24 | unzip artifactory-pro-4.7.6.zip -d $WORKSPACE_CLASS 25 | 26 | #get and deploy the license 27 | sh $SCRIPT_DIR/license.sh 28 | 29 | #open a new terminal and navigate to your WORKSPACE_CLASS 30 | #launch Artifactory 31 | cd artifactory-pro-4.7.6/bin 32 | 33 | sh artifactory.sh 34 |
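#(editor's note) optional sanity check, not part of the original class script: once artifactory.sh is up, you can verify the
#instance answers before moving on; the port below assumes the same localhost:9091 used throughout this list
curl -uadmin:password http://localhost:9091/artifactory/api/system/ping
#a healthy instance simply returns OK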
35 | ################################################################## 36 | ## OVERVIEW SECTION ## 37 | ##OPTIONAL (if you did not use the UI): Create a non-admin user## 38 | ################################################################## 39 | #First edit user.json file and provide an email address and password 40 | #Create your first user (provide the user login in the url) 41 | 42 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/users/$USER_LOGIN -H "content-type: application/vnd.org.jfrog.artifactory.security.User+json" -T $SCRIPT_DIR/user.json 43 | 44 | ################################################################################ 45 | ## REPOSITORIES SECTION ## 46 | ##OPTIONAL (if you did not use the UI): Let's create repositories (a solution)## 47 | ################################################################################ 48 | #Create local maven repository for snapshots 49 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/repositories/my-mvn-snapshot-local -H "content-type: application/vnd.org.jfrog.artifactory.repositories.LocalRepositoryConfiguration+json" -T $SCRIPT_DIR/repository-snapshot-local-config.json 50 | 51 | #Create local maven repository for staging releases 52 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/repositories/my-mvn-staging-local -H "content-type: application/vnd.org.jfrog.artifactory.repositories.LocalRepositoryConfiguration+json" -T $SCRIPT_DIR/repository-staging-local-config.json 53 | 54 | #Create remote maven repository (proxy for jcenter) 55 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/repositories/remote-repo -H "content-type: application/vnd.org.jfrog.artifactory.repositories.RemoteRepositoryConfiguration+json" -T $SCRIPT_DIR/repository-remote-config.json 56 | 57 | #Create virtual maven repository for snapshots 58 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/repositories/my-mvn-snapshots -H "content-type: application/json" -T $SCRIPT_DIR/repository-snapshot-virtual-config.json 59 | 60 | #Create virtual maven repository for releases 61 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/repositories/my-mvn-releases -H "content-type: application/vnd.org.jfrog.artifactory.repositories.VirtualRepositoryConfiguration+json" -T $SCRIPT_DIR/repository-release-virtual-config.json 62 | 63 | ################################################################################ 64 | ## SECURITY SECTION ## 65 | ##OPTIONAL (if you did not use the UI): Let's start with security (a solution)## 66 | ################################################################################ 67 | #Create a group for dev team 68 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/groups/dev-team -H "content-type: application/vnd.org.jfrog.artifactory.security.Group+json" -T $SCRIPT_DIR/group.json 69 | 70 | #update user and add it to dev-team group 71 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/users/$USER_LOGIN -H "content-type: application/vnd.org.jfrog.artifactory.security.User+json" -T $SCRIPT_DIR/update-user.json 72 | 73 | #create permission target for read external, third party libs and local artifacts (both snapshots and releases) 74 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/permissions/dev-team-read -H "content-type: application/vnd.org.jfrog.artifactory.security.PermissionTarget+json" -T $SCRIPT_DIR/permission-target-read.json 75 | 76 | #create permission target for
write snapshots only 77 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/permissions/dev-team-write -H "content-type: application/vnd.org.jfrog.artifactory.security.PermissionTarget+json" -T $SCRIPT_DIR/permission-target-write.json 78 | 79 | #Remove the "Anything" permission to prevent the dev team from consuming artifacts from other projects 80 | curl -uadmin:password -X DELETE http://localhost:9091/artifactory/api/security/permissions/Anything 81 | 82 | ################################################################################ 83 | ## WORKING WITH BUILD TOOLS ## 84 | ################################################################################ 85 | #clone the project-example repository 86 | cd $WORKSPACE_CLASS 87 | 88 | git clone https://simarsingh@bitbucket.org/simarsingh/project-examples.git 89 | 90 | cd project-examples 91 | 92 | cd maven-example 93 | 94 | mvn install 95 | ################################################################################ 96 | ## WORKING WITH CI SERVER (AFTERNOON) ## 97 | ################################################################################ 98 | #open a new command line 99 | #download preconfigured jenkins 100 | cd $HOME 101 | 102 | #for Linux users, use the following: 103 | #curl -o swampup.zip -H "X-JFrog-Art-Api:AKCp2UNgwajwpoz5PghBirpgR5xafggbFyCkSU4DTqK2fQPd9thVcar7wHqk2BNmYjg1km6vJ" http://192.168.1.139:8081/artifactory/jswampup-remote/jenkins/ubuntu/jenkins.zip 104 | curl -o swampup.zip -H "X-JFrog-Art-Api:AKCp2UNgwajwpoz5PghBirpgR5xafggbFyCkSU4DTqK2fQPd9thVcar7wHqk2BNmYjg1km6vJ" http://192.168.1.139:8081/artifactory/jswampup-remote/jenkins/mac/swampup.zip 105 | 106 | unzip swampup.zip 107 | 108 | export JENKINS_HOME=$HOME/swampup/jenkins 109 | 110 | cd swampup 111 | 112 | java -jar jenkins.war --httpPort=9090 113 | 114 | ################################################################################ 115 | ## Working with build tools and CI Server ## 116 | ##OPTIONAL (if you did not use the UI): Let's create a new user ## 117 | ################################################################################ 118 | #create a user for jenkins 119 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/users/jenkins -H "content-type: application/vnd.org.jfrog.artifactory.security.User+json" -T $SCRIPT_DIR/jenkins-user.json 120 | 121 | #create permission for jenkins 122 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/permissions/jenkins-perm -H "content-type: application/vnd.org.jfrog.artifactory.security.PermissionTarget+json" -T $SCRIPT_DIR/permission-target-jenkins.json 123 | 124 | ############################################################################### 125 | ## LET'S PROMOTE ## 126 | ##OPTIONAL (if you did not use the UI): Need some place to promote to!
## 127 | ################################################################################ 128 | #Create local maven repository for releases 129 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/repositories/my-mvn-release-local -H "content-type: application/vnd.org.jfrog.artifactory.repositories.LocalRepositoryConfiguration+json" -T $SCRIPT_DIR/repository-release-local-config.json 130 | 131 | #update virtual repository for releases 132 | curl -uadmin:password -X POST http://localhost:9091/artifactory/api/repositories/my-mvn-releases -H "content-type: application/vnd.org.jfrog.artifactory.repositories.VirtualRepositoryConfiguration+json" -T $SCRIPT_DIR/repository-release-virtual-config2.json 133 | 134 | #update permission target for jenkins user 135 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/permissions/jenkins-perm -H "content-type: application/vnd.org.jfrog.artifactory.security.PermissionTarget+json" -T $SCRIPT_DIR/update-permission-target-jenkins.json 136 | 137 | ################################################################################ 138 | ## LET'S AUTOMATE PROMOTION ## 139 | ################################################################################ 140 | #Create local maven repository for production (can be done through the UI) 141 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/repositories/my-mvn-prod-local -H "content-type: application/vnd.org.jfrog.artifactory.repositories.LocalRepositoryConfiguration+json" -T $SCRIPT_DIR/repository-release-local-config.json 142 | 143 | #update permission target for user (can be done through the UI) 144 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/permissions/release-eng-perm -H "content-type: application/vnd.org.jfrog.artifactory.security.PermissionTarget+json" -T $SCRIPT_DIR/permission-target-release-eng.json 145 | 146 | #promote the build to production 147 | curl -H "content-type: application/json" -H "X-JFrog-Art-Api:" -X POST http://localhost:9091/artifactory/api/build/promote// -T $SCRIPT_DIR/promote.json 148 | 149 | ################################################################################ 150 | ## LET'S USE THE CLI ## 151 | ################################################################################ 152 | #download the CLI from our central Artifactory (in real life do it from bintray) 153 | curl -H "X-JFrog-Art-Api:AKCp2UNgwajwpoz5PghBirpgR5xafggbFyCkSU4DTqK2fQPd9thVcar7wHqk2BNmYjg1km6vJ" -O http://192.168.1.139:8081/artifactory/jswampup-remote/cli-1.2/mac/jfrog 154 | 155 | cd $WORKSPACE_CLASS 156 | #upload some files with properties: product.version=1.0 and product.name=PiedPiper 157 | ./jfrog rt upload "artifactory-*.zip" "my-generic-local" --url=http://localhost:9091/artifactory --user=admin --password=password --props="product.name=PiedPiper;product.version=1.0" 158 | 159 | ################################################################################ 160 | ## USE AQL ## 161 | ################################################################################ 162 | curl -H "Content-Type:application/json" -H "X-JFrog-Art-Api:" -X POST http://localhost:9091/artifactory/api/search/aql -T $SCRIPT_DIR/aql.json 163 | -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/commands-windows.list: -------------------------------------------------------------------------------- 1 | ############################################################ 2 | ##FIRST 
COMMANDS : GET YOUR ARTIFACTORY ## 3 | ############################################################ 4 | #keep your git directory in memory for later commands 5 | set SCRIPT_DIR=%cd% 6 | 7 | #Set your workspace directory for the class 8 | set WORKSPACE_CLASS= 9 | 10 | #define a user login to connect to artifactory (further usage) 11 | set USER_LOGIN= 12 | 13 | #Download the following archive from the common Artifactory for the class 14 | cd %WORKSPACE_CLASS% 15 | 16 | curl -H "X-JFrog-Art-Api:AKCp2UNgwajwpoz5PghBirpgR5xafggbFyCkSU4DTqK2fQPd9thVcar7wHqk2BNmYjg1km6vJ" -O http://192.168.1.139:8081/artifactory/jswampup-remote/artifactory/artifactory-pro-4.7.6.zip 17 | 18 | ################################################################## 19 | ## OVERVIEW SECTION ## 20 | ## START ARTIFACTORY AND CONNECT ## 21 | ################################################################## 22 | 23 | #unzip the archive 24 | jar xf artifactory-pro-4.7.6.zip 25 | 26 | #get and deploy the license 27 | %SCRIPT_DIR%\license.bat 28 | 29 | #open a new command line and navigate to your WORKSPACE_CLASS directory 30 | #launch Artifactory 31 | cd artifactory-pro-4.7.6\bin 32 | 33 | artifactory.bat 34 | 35 | ################################################################## 36 | ## OVERVIEW SECTION ## 37 | ##OPTIONAL (if you did not use the UI): Create a non-admin user## 38 | ################################################################## 39 | #First edit user.json file and provide an email address and password 40 | #Create your first user 41 | 42 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/users/%USER_LOGIN% -H "content-type: application/vnd.org.jfrog.artifactory.security.User+json" -T %SCRIPT_DIR%\user.json 43 | 44 | ################################################################################ 45 | ## REPOSITORIES SECTION ## 46 | ##OPTIONAL (if you did not use the UI): Let's create repositories (a solution)## 47 | ################################################################################ 48 | #Create local maven repository for snapshots 49 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/repositories/my-mvn-snapshot-local -H "content-type: application/vnd.org.jfrog.artifactory.repositories.LocalRepositoryConfiguration+json" -T %SCRIPT_DIR%\repository-snapshot-local-config.json 50 | 51 | #Create local maven repository for staging releases 52 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/repositories/my-mvn-staging-local -H "content-type: application/vnd.org.jfrog.artifactory.repositories.LocalRepositoryConfiguration+json" -T %SCRIPT_DIR%\repository-staging-local-config.json 53 | 54 | #Create remote maven repository (proxy for jcenter) 55 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/repositories/remote-repo -H "content-type: application/vnd.org.jfrog.artifactory.repositories.RemoteRepositoryConfiguration+json" -T %SCRIPT_DIR%\repository-remote-config.json 56 | 57 | #Create virtual maven repository for snapshots 58 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/repositories/my-mvn-snapshots -H "content-type: application/vnd.org.jfrog.artifactory.repositories.VirtualRepositoryConfiguration+json" -T %SCRIPT_DIR%\repository-snapshot-virtual-config.json 59 | 60 | #Create virtual maven repository for releases 61 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/repositories/my-mvn-releases -H "content-type: 
application/vnd.org.jfrog.artifactory.repositories.VirtualRepositoryConfiguration+json" -T %SCRIPT_DIR%\repository-release-virtual-config.json 62 | 63 | ################################################################################ 64 | ## SECURITY SECTION ## 65 | ##OPTIONAL (if you did not use the UI): Let's start with security (a solution)## 66 | ################################################################################ 67 | #Create a group for dev team 68 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/groups/dev-team -H "content-type: application/vnd.org.jfrog.artifactory.security.Group+json" -T %SCRIPT_DIR%\group.json 69 | 70 | #update user and add it to dev-team group 71 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/users/%USER_LOGIN% -H "content-type: application/vnd.org.jfrog.artifactory.security.User+json" -T %SCRIPT_DIR%\update-user.json 72 | 73 | #create permission target for read external, third party libs and local artifacts (both snapshots and releases) 74 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/permissions/dev-team-read -H "content-type: application/vnd.org.jfrog.artifactory.security.PermissionTarget+json" -T %SCRIPT_DIR%\permission-target-read.json 75 | 76 | #create permission target for write snapshots only 77 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/permissions/dev-team-write -H "content-type: application/vnd.org.jfrog.artifactory.security.PermissionTarget+json" -T %SCRIPT_DIR%\permission-target-write.json 78 | 79 | #Remove the "Anything" permission to prevent the dev team from consuming artifacts from other projects 80 | curl -uadmin:password -X DELETE http://localhost:9091/artifactory/api/security/permissions/Anything 81 | 82 | ################################################################################ 83 | ## WORKING WITH BUILD TOOLS ## 84 | ################################################################################ 85 | #clone the project-example repository 86 | cd %WORKSPACE_CLASS% 87 | 88 | git clone https://simarsingh@bitbucket.org/simarsingh/project-examples.git 89 | 90 | cd project-examples 91 | 92 | cd maven-example 93 | 94 | mvn install 95 | ################################################################################ 96 | ## WORKING WITH CI SERVER (AFTERNOON) ## 97 | ################################################################################ 98 | #open a new command line in admin mode 99 | #download preconfigured jenkins and launch it 100 | cd c:\ 101 | cd \ 102 | 103 | curl -H "X-JFrog-Art-Api:AKCp2UNgwajwpoz5PghBirpgR5xafggbFyCkSU4DTqK2fQPd9thVcar7wHqk2BNmYjg1km6vJ" -O http://192.168.1.139:8081/artifactory/jswampup-remote/jenkins/windows/swampupjenkins.zip 104 | 105 | jar xf swampupjenkins.zip 106 | 107 | cd swampupjenkins 108 | 109 | set JENKINS_HOME=%cd%\jenkins 110 | 111 | java -jar jenkins.war --httpPort=9090 112 | 113 | ################################################################################ 114 | ## WORKING WITH BUILD TOOLS AND CI SERVER ## 115 | ##OPTIONAL (if you did not use the UI): Let's create a new user ## 116 | ################################################################################ 117 | #create a user for jenkins 118 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/users/jenkins -H "content-type: application/vnd.org.jfrog.artifactory.security.User+json" -T %SCRIPT_DIR%\jenkins-user.json 119 | 120 | #create permission for jenkins 121 | curl 
-uadmin:password -X PUT http://localhost:9091/artifactory/api/security/permissions/jenkins-perm -H "content-type: application/vnd.org.jfrog.artifactory.security.PermissionTarget+json" -T %SCRIPT_DIR%\permission-target-jenkins.json 122 | 123 | ################################################################################ 124 | ## LET'S PROMOTE ## 125 | ##OPTIONAL (if you did not use the UI): Need some place to promote to! ## 126 | ################################################################################ 127 | #Create local maven repository for releases 128 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/repositories/my-mvn-release-local -H "content-type: application/vnd.org.jfrog.artifactory.repositories.LocalRepositoryConfiguration+json" -T %SCRIPT_DIR%\repository-release-local-config.json 129 | 130 | #update virtual repository for releases 131 | curl -uadmin:password -X POST http://localhost:9091/artifactory/api/repositories/my-mvn-releases -H "content-type: application/json" -T %SCRIPT_DIR%\repository-release-virtual-config2.json 132 | 133 | #update permission target for jenkins user 134 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/permissions/jenkins-perm -H "content-type: application/vnd.org.jfrog.artifactory.security.PermissionTarget+json" -T %SCRIPT_DIR%\update-permission-target-jenkins.json 135 | 136 | ################################################################################ 137 | ## LET'S AUTOMATE PROMOTION ## 138 | ################################################################################ 139 | #Create local maven repository for production (can be done through the UI) 140 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/repositories/my-mvn-prod-local -H "content-type: application/vnd.org.jfrog.artifactory.repositories.LocalRepositoryConfiguration+json" -T %SCRIPT_DIR%\repository-release-local-config.json 141 | 142 | #update permission target for user (can be done through the UI) 143 | curl -uadmin:password -X PUT http://localhost:9091/artifactory/api/security/permissions/release-eng-perm -H "content-type: application/vnd.org.jfrog.artifactory.security.PermissionTarget+json" -T %SCRIPT_DIR%\permission-target-release-eng.json 144 | 145 | #promote the build to production 146 | curl -H "content-type: application/json" -H "X-JFrog-Art-Api:" -X POST http://localhost:9091/artifactory/api/build/promote// -T %SCRIPT_DIR%\promote.json 147 | 148 | ################################################################################ 149 | ## LET'S USE THE CLI ## 150 | ################################################################################ 151 | #download the CLI from our central Artifactory (in real life do it from bintray) 152 | curl -H "X-JFrog-Art-Api:AKCp2UNgwajwpoz5PghBirpgR5xafggbFyCkSU4DTqK2fQPd9thVcar7wHqk2BNmYjg1km6vJ" -O http://192.168.1.139:8081/artifactory/jswampup-remote/cli-1.2/win/jfrog.exe 153 | 154 | cd %WORKSPACE_CLASS% 155 | #upload some files with properties: product.version=1.0 and product.name=PiedPiper 156 | jfrog.exe rt upload "artifactory-*.zip" "my-generic-local" --url=http://localhost:9091/artifactory --user=admin --password=password --props="product.name=PiedPiper;product.version=1.0" 157 | 158 | ################################################################################ 159 | ## USE AQL ## 160 | ################################################################################ 161 | curl -H "Content-Type:application/json" -H "X-JFrog-Art-Api:" -X 
POST http://localhost:9091/artifactory/api/search/aql -T %SCRIPT_DIR%\aql.json 162 | -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/group.json: -------------------------------------------------------------------------------- 1 | { 2 | "description" : "The development team group", 3 | "autoJoin" : false 4 | } -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/jenkins-user.json: -------------------------------------------------------------------------------- 1 | { 2 | "email" : "", 3 | "password": "", 4 | "admin": false, 5 | "profileUpdatable": true, 6 | "groups" : [ "readers","dev-team" ] 7 | } -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/license..bat: -------------------------------------------------------------------------------- 1 | curl -o jfrog -H "X-JFrog-Art-Api:AKCp2UNgwajwpoz5PghBirpgR5xafggbFyCkSU4DTqK2fQPd9thVcar7wHqk2BNmYjg1km6vJ" http://192.168.1.139:8081/artifactory/jswampup-remote-cache/cli-1.2/mac/jfrog 2 | chmod 755 jfrog 3 | ./jfrog mc c --url=http://192.168.1.139:8080 --user=admin --password=password --interactive=false 4 | ./jfrog mc rti attach-lic $1 --bucket-id=1131805150 --bucket-key=6dfbac3d66d839e774809c1f369238cd615123def5b84e5f68617b24002f756e --license-path=$HOME/swampup/artifactory-pro-4.7.6/etc/artifactory.lic 5 | 6 | -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/license.bat: -------------------------------------------------------------------------------- 1 | curl -o jfrog.exe -H "X-JFrog-Art-Api:AKCp2UNgwajwpoz5PghBirpgR5xafggbFyCkSU4DTqK2fQPd9thVcar7wHqk2BNmYjg1km6vJ" http://192.168.1.139:8081/artifactory/jswampup-remote-cache/cli-1.2/win/jfrog.exe 2 | jfrog.exe mc c --url=http://192.168.1.139:8080 --user=admin --password=password --interactive=false 3 | jfrog.exe mc rti attach-lic %1 --bucket-id=1131805150 --bucket-key=6dfbac3d66d839e774809c1f369238cd615123def5b84e5f68617b24002f756e --license-path=%WORKSPACE_CLASS%\artifactory-pro-4.7.6\etc\artifactory.lic 4 | 5 | -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/license.sh: -------------------------------------------------------------------------------- 1 | #For Linux users: uncomment one of the commands below according to your distribution (and comment out the curl command which is for Mac users) 2 | #curl -o jfrog -H "X-JFrog-Art-Api:AKCp2UNgwajwpoz5PghBirpgR5xafggbFyCkSU4DTqK2fQPd9thVcar7wHqk2BNmYjg1km6vJ" http://192.168.1.139:8081/artifactory/jswampup-remote-cache/cli-1.2/linux-32/jfrog 3 | #curl -o jfrog -H "X-JFrog-Art-Api:AKCp2UNgwajwpoz5PghBirpgR5xafggbFyCkSU4DTqK2fQPd9thVcar7wHqk2BNmYjg1km6vJ" http://192.168.1.139:8081/artifactory/jswampup-remote-cache/cli-1.2/linux-64/jfrog 4 | curl -o jfrog -H "X-JFrog-Art-Api:AKCp2UNgwajwpoz5PghBirpgR5xafggbFyCkSU4DTqK2fQPd9thVcar7wHqk2BNmYjg1km6vJ" http://192.168.1.139:8081/artifactory/jswampup-remote-cache/cli-1.2/mac/jfrog 5 | chmod 755 jfrog 6 | ./jfrog mc c --url=http://192.168.1.139:8080 --user=admin --password=password --interactive=false 7 | ./jfrog mc rti attach-lic $1 --bucket-id=1131805150 --bucket-key=6dfbac3d66d839e774809c1f369238cd615123def5b84e5f68617b24002f756e --license-path=$WORKSPACE_CLASS/artifactory-pro-4.7.6/etc/artifactory.lic 8 | 9 | -------------------------------------------------------------------------------- 
/essentials_of_jfrog_artifactory/permission-target-jenkins.json: -------------------------------------------------------------------------------- 1 | { 2 | "repositories": ["my-mvn-snapshot-local","my-mvn-staging-local","ext-snapshot-local", "ext-release-local", "remote-repo"], 3 | "principals": { 4 | "users" : { 5 | "jenkins" : ["d","w","n", "r"] 6 | } 7 | } 8 | } -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/permission-target-read.json: -------------------------------------------------------------------------------- 1 | { 2 | "repositories": ["my-mvn-snapshot-local", "my-mvn-staging-local", "ext-snapshot-local", "ext-release-local"], 3 | "principals": { 4 | "groups" : { 5 | "dev-team" : ["r"] 6 | } 7 | } 8 | } -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/permission-target-release-eng.json: -------------------------------------------------------------------------------- 1 | { 2 | "repositories": ["my-mvn-prod-local","my-mvn-release-local"], 3 | "principals": { 4 | "users" : { 5 | "jenkins" : ["d","w","n", "r"] 6 | } 7 | } 8 | } -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/permission-target-write.json: -------------------------------------------------------------------------------- 1 | { 2 | "repositories": ["my-mvn-snapshot-local"], 3 | "principals": { 4 | "groups" : { 5 | "dev-team" : ["d","w","n", "r"] 6 | } 7 | } 8 | } -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/promote.json: -------------------------------------------------------------------------------- 1 | { 2 | "status": "production", 3 | "comment" : "passed all tests and ready for production.", 4 | "ciUser": "", 5 | "dryRun" : false, 6 | "targetRepo" : "my-mvn-prod-local", 7 | "copy": true, 8 | "artifacts" : true, 9 | "dependencies" : true, 10 | "properties": { 11 | "qa.approver": [""], 12 | "qa.stage": ["prod"], 13 | "product.version": ["1.0"], 14 | "product.name": ["PiedPiper"], 15 | "product.component": ["JavaPart"] 16 | }, 17 | "failFast": true 18 | } -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/repository-release-local-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "rclass" : "local", 3 | "packageType": "maven", 4 | "description": "My local repository for pre-production", 5 | "includesPattern": "**/*", 6 | "excludesPattern": "", 7 | "repoLayoutRef" : "maven-2-default", 8 | "checksumPolicyType": "client-checksums", 9 | "handleReleases": true, 10 | "handleSnapshots": false, 11 | "suppressPomConsistencyChecks": false, 12 | "blackedOut": false, 13 | "archiveBrowsingEnabled" : false 14 | } -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/repository-release-virtual-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "rclass" : "virtual", 3 | "packageType": "maven", 4 | "description": "My virtual repository for releases", 5 | "repositories": ["my-mvn-staging-local", "ext-release-local", "remote-repo"], 6 | "artifactoryRequestsCanRetrieveRemoteArtifacts": false, 7 | "defaultDeploymentRepo": "my-mvn-staging-local" 8 | } -------------------------------------------------------------------------------- 
/essentials_of_jfrog_artifactory/repository-release-virtual-config2.json: -------------------------------------------------------------------------------- 1 | { 2 | "rclass" : "virtual", 3 | "packageType": "maven", 4 | "description": "My virtual repository for releases", 5 | "repositories": ["my-mvn-release-local", "my-mvn-staging-local", "ext-release-local", "remote-repo"], 6 | "artifactoryRequestsCanRetrieveRemoteArtifacts": false, 7 | "defaultDeploymentRepo": "my-mvn-release-local" 8 | } -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/repository-remote-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "rclass" : "remote", 3 | "packageType": "maven", 4 | "description": "My remote cache for jcenter", 5 | "url" : "http://192.168.1.139:8081/artifactory/jcenter" 6 | } -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/repository-snapshot-local-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "rclass" : "local", 3 | "packageType": "maven", 4 | "description": "My local repository for snapshots", 5 | "includesPattern": "**/*", 6 | "excludesPattern": "", 7 | "repoLayoutRef" : "maven-2-default", 8 | "debianTrivialLayout" : false, 9 | "checksumPolicyType": "client-checksums", 10 | "handleReleases": false, 11 | "handleSnapshots": true, 12 | "maxUniqueSnapshots": 0, 13 | "snapshotVersionBehavior": "unique", 14 | "suppressPomConsistencyChecks": false, 15 | "blackedOut": false, 16 | "archiveBrowsingEnabled" : false 17 | } -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/repository-snapshot-virtual-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "rclass" : "virtual", 3 | "packageType": "maven", 4 | "description": "My virtual repository for snapshots", 5 | "repositories": ["my-mvn-snapshot-local", "ext-snapshot-local", "remote-repo"], 6 | "artifactoryRequestsCanRetrieveRemoteArtifacts": false, 7 | "defaultDeploymentRepo": "my-mvn-snapshot-local" 8 | } -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/repository-staging-local-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "rclass" : "local", 3 | "packageType": "maven", 4 | "description": "My local repository for releases", 5 | "includesPattern": "**/*", 6 | "excludesPattern": "", 7 | "repoLayoutRef" : "maven-2-default", 8 | "checksumPolicyType": "client-checksums", 9 | "handleReleases": true, 10 | "handleSnapshots": false, 11 | "suppressPomConsistencyChecks": false, 12 | "blackedOut": false, 13 | "archiveBrowsingEnabled" : false 14 | } -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/update-permission-target-jenkins.json: -------------------------------------------------------------------------------- 1 | { 2 | "repositories": ["my-mvn-snapshot-local","my-mvn-staging-local", "my-mvn-release-local", "ext-snapshot-local", "ext-release-local", "remote-repo"], 3 | "principals": { 4 | "users" : { 5 | "jenkins" : ["d","w","n", "r"] 6 | } 7 | } 8 | } -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/update-user.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "email" : "", 3 | "password": "", 4 | "admin": false, 5 | "profileUpdatable": true, 6 | "groups" : [ "readers","dev-team" ] 7 | } -------------------------------------------------------------------------------- /essentials_of_jfrog_artifactory/user.json: -------------------------------------------------------------------------------- 1 | { 2 | "email" : "", 3 | "password": "", 4 | "admin": false, 5 | "profileUpdatable": true, 6 | "groups" : [ "readers" ] 7 | } -------------------------------------------------------------------------------- /jfmc/repository/CreateGenericLocalRepo: -------------------------------------------------------------------------------- 1 | repokey = userInput ( 2 | type : "STRING", // "BOOLEAN", "INTEGER", "INSTANCE", "REPOSITORY" 3 | description : "Repository Key", 4 | validations : (["cron"]) 5 | ) 6 | 7 | localRepository(repokey) { 8 | description "Public Description" 9 | notes "Some internal notes" 10 | includesPattern "**/*" // default 11 | excludesPattern "" // default 12 | repoLayoutRef "maven-2-default" 13 | packageType "generic" // "maven" | "gradle" | "ivy" | "sbt" | "nuget" | "gems" | "npm" | "bower" | "debian" | "pypi" | "docker" | "vagrant" | "gitlfs" | "yum" | "generic" 14 | debianTrivialLayout false 15 | checksumPolicyType "client-checksums" // default | "server-generated-checksums" 16 | handleReleases true // default 17 | handleSnapshots true // default 18 | maxUniqueSnapshots 0 // default 19 | snapshotVersionBehavior "unique" // "non-unique" default | "deployer" 20 | blackedOut false // default 21 | archiveBrowsingEnabled true 22 | 23 | } 24 | -------------------------------------------------------------------------------- /jfmc/repository/CreateMultiPushReplicationPeers: -------------------------------------------------------------------------------- 1 | localRepository('repository-key') { 2 | multipushReplication(peers) { 3 | // URL will be calculated based on the repositories created in other instances 4 | cronExp "0 0/9 14 * * ?" 
5 | socketTimeoutMillis 15000 6 | username "admin" 7 | password "password" 8 | proxy //"proxy-ref" 9 | enableEventReplication true 10 | enabled true 11 | syncDeletes false 12 | syncProperties true 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /jfmc/repository/CreateVirtualRepo: -------------------------------------------------------------------------------- 1 | name = userInput ( 2 | type : "STRING", 3 | description : "Please provide a repository name" 4 | ) 5 | 6 | repoDescription = userInput ( 7 | type : "STRING", 8 | description : "Please provide a public description" 9 | ) 10 | 11 | repo = userInput ( 12 | type : "REPOSITORY", 13 | description : "Please provide repositories to aggregate ", 14 | multivalued : true 15 | ) 16 | DefaultRepo = userInput ( 17 | type : "REPOSITORY", 18 | description : "Please provide Default deployment", 19 | multivalued : false 20 | ) 21 | virtualRepository(name) { 22 | description "$repoDescription" 23 | repositories (repo*.key) 24 | notes "Created through JFrog Mission Control" 25 | defaultDeploymentRepo "$DefaultRepo.key" 26 | includesPattern "**/*" 27 | excludesPattern "" 28 | packageType "maven" 29 | } 30 | -------------------------------------------------------------------------------- /jfmc/repository/UpdateRepoPushReplication: -------------------------------------------------------------------------------- 1 | repositories = userInput ( 2 | name : "User Friendly Name", // Optional 3 | type : "REPOSITORY", // "BOOLEAN", "INTEGER", "INSTANCE", "REPOSITORY" 4 | description : "please provide a value", 5 | multivalued : true 6 | ) 7 | localRepository() { 8 | description "Public description" 9 | notes "Some internal notes" 10 | multipushReplication(repositories) { 11 | // URL will be calculated based on the repositories created in other instances 12 | cronExp "0 0/9 14 * * ?" 
13 | socketTimeoutMillis 15000 14 | username "admin" 15 | password "password" 16 | proxy //"proxy-ref" 17 | enableEventReplication true 18 | enabled true 19 | syncDeletes false 20 | syncProperties true 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /jfmc/repository/test_script: -------------------------------------------------------------------------------- 1 | repokey = userInput ( 2 | type : "STRING", // "BOOLEAN", "INTEGER", "INSTANCE", "REPOSITORY" 3 | description : "Repository Key", 4 | validations : (["cron"]) 5 | ) 6 | 7 | localRepository(repokey) { 8 | description "Public Description" 9 | notes "Some internal notes" 10 | includesPattern "**/*" // default 11 | excludesPattern "" // default 12 | repoLayoutRef "maven-2-default" 13 | packageType "generic" // "maven" | "gradle" | "ivy" | "sbt" | "nuget" | "gems" | "npm" | "bower" | "debian" | "pypi" | "docker" | "vagrant" | "gitlfs" | "yum" | "generic" 14 | debianTrivialLayout false 15 | checksumPolicyType "client-checksums" // default | "server-generated-checksums" 16 | handleReleases true // default 17 | handleSnapshots true // default 18 | maxUniqueSnapshots 0 // default 19 | snapshotVersionBehavior "unique" // "non-unique" default | "deployer" 20 | blackedOut false // default 21 | archiveBrowsingEnabled true 22 | 23 | } 24 | -------------------------------------------------------------------------------- /scale/buildSrc/src/main/resources/dind/list.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | expressions=( 3 | ubuntu 4 | mysql 5 | node 6 | redis 7 | elasticsearch 8 | alpine 9 | debian 10 | java 11 | golang 12 | mariadb 13 | rabbitmq 14 | ruby 15 | logstash 16 | swarm 17 | owncloud 18 | ghost 19 | django 20 | redmine 21 | consul 22 | maven 23 | sentry 24 | neo4j 25 | odoo 26 | buildpack-deps 27 | percona 28 | couchbase 29 | gcc 30 | jetty 31 | clojure 32 | perl 33 | elixir 34 | r-base 35 | traefik 36 | vault 37 | erlang 38 | couchdb 39 | pypy 40 | arangodb 41 | hipache 42 | nextcloud 43 | swift 44 | docker-dev 45 | julia 46 | thrift 47 | orientdb 48 | composer 49 | photon 50 | cirros 51 | chronograf 52 | storm 53 | jpetazzo/squid-in-a-can 54 | rakudo-star 55 | crux 56 | hylang 57 | fsharp 58 | lightstreamer 59 | spiped 60 | plone 61 | gradle 62 | adminer 63 | groovy 64 | panteras/paas-in-a-box 65 | geonetwork 66 | sourcemage 67 | haxe 68 | silverpeas 69 | known 70 | rapidoid 71 | marcelmaatkamp/flightradar24-feeder 72 | mdsakalu/rpi-raspbian-ffmpeg 73 | marcelmaatkamp/piaware 74 | mbodenhamer/goaccess 75 | dharmatecgeo/tilehut 76 | marcelmaatkamp/docker-gnuradio-dump1090 77 | altringres/demo-b 78 | ivs0/b-ros 79 | dnssecworkshop/dnssec-sldns-b 80 | ivs0/b-opencv 81 | thsfewer/docker-general-b 82 | ivs0/b-gcc 83 | mrandyp/dockercloud-hello-world-b 84 | ivs0/b-ros-gcc 85 | mikefaille/docker-ubuntu-asterisk13-b 86 | ivs0/b-base 87 | ivs0/b-arm-m 88 | whtsky/b-server 89 | stephpr/service-b 90 | renansdias/hello-b 91 | sash/microservice-b 92 | allingeek/es-b 93 | anpowell/service-b 94 | lancelet/centos6-lts-b 95 | dandekarabhay/process-b 96 | satoshi5/dev-b 97 | dkur/ef-b 98 | fangzx/py27vim-b 99 | matthiasblankenhaus/svc-b 100 | stratilat19/b-unit-api_postgres 101 | ovinogradov/attention-b 102 | pistun/b-box 103 | allingeek/ex-service-b 104 | ykandrirody/docker-netmvc-b 105 | stratilat19/b-unit-api 106 | hdb3/atrium-b 107 | adak/service-b 108 | mayankbairagi/service-b 109 | ffaxl/dump1090 110 | themgt/slugrunner-b 111 | 
renansdias/service-b 112 | cfgarden/image-b 113 | fangzx/py27dev-b 114 | luszczynski/mysql-b 115 | zincwombat/client-b 116 | marimysh/centos-b 117 | diceone/ultimatebot 118 | fhautomateddev/test-image-b 119 | andreaswittig/demo-b 120 | robinjmurphy/kubernetes-example-service-b 121 | sanadhisutandi/pms-b 122 | suyogbarve/redis-b-app 123 | jtmilan/apache-b 124 | ruo91/arcus 125 | vvbhalodia/test-b 126 | ofrxnz/test-b 127 | fangzx/py27-b 128 | jvcalise/apache-b 129 | rawlingsj/b-image 130 | waltplatform/rpi-b-minimal 131 | lavvy/celleterw 132 | bishalpd/sample-app_service-b 133 | klaushofrichter/gitlab-2017-03-26-b 134 | lavvy/celleterm 135 | jjteoh/tf012.root6.b 136 | hidetarou2013/centos7-apache-b 137 | dnssecworkshop/dnssec-tldns-b 138 | tyshashank/b-makeanote 139 | waltplatform/rpi-b-plus-minimal 140 | clever/ubuntu-redis-b 141 | pantc328/worker-management-client-normal-b 142 | settld/simple-docker-b 143 | dnssecworkshop/dnssec-rootns-b 144 | scottbcovert/sixpack 145 | joezuntz/nersc-consult-b 146 | bprashanth/nginx-ingress-b 147 | halvves/node-yarn-ncftp 148 | nishadi/wso2-b-tomcat 149 | tyshashank/b-challangeservice 150 | ivs0/b-clang 151 | stevvooe/fedora-test-1.7-b 152 | slowie/exzeo_bb 153 | alifar76/jnj-task-b 154 | ibmcom/db2express-c 155 | jrottenberg/ffmpeg 156 | ibmcom/informix-innovator-c 157 | nachinius/c-dev 158 | angoca/db2-install 159 | ekidd/rust-musl-builder 160 | homme/cesium-terrain-builder 161 | edgepro/c-modbus-slave 162 | alljoynsville/eclipse-cpp-x11 163 | pheasanthunters/db2express-c 164 | frankzhang/shadowsocks-c 165 | dmoj/judge-small 166 | madduci/docker-ubuntu-cpp 167 | nacyot/c-cc 168 | fmanco/cpp-build 169 | zbeekman/nightly-gcc-trunk-docker-image 170 | heliostech/jenkins-slave-gcc 171 | freenas/nzbget 172 | henriquemoreno/db2express-c 173 | bids/c-pac 174 | tomjamescn/ubuntu-dev-c 175 | dynomitedb/build-c 176 | bernddoser/docker-devel-cpp 177 | brendanrius/jupyter-c-kernel 178 | matttbe/docker-shadowsocks-c 179 | vimagick/audiowaveform 180 | ajsmith/carbon-c-relay 181 | zauberpony/rabbitmq-c-hash-ex 182 | qiwihui/shadowsocks-c 183 | avatao/web-ide-c 184 | lhorsley/hello-world-c 185 | eliezio/db2express-c 186 | tcgerlach/c-dev 187 | okuisatoshi/docker_frama-c 188 | markub/gitlab-ci-multi-runner-c-cpp 189 | jonschipp/islet-c 190 | dexec/base-c 191 | bogem/inginious-c-nodejs 192 | ingi/inginious-c-pythia0compat 193 | bogem/inginious-c-lisp 194 | t10471/c-language 195 | baliang/shadowsocks-c 196 | bogem/inginious-c-prolog 197 | venkykrish11/image1-c 198 | coduno/fingerprint-c 199 | jovobe/docker-c-build 200 | dexec/lang-c 201 | giovanniliboni/gitlab-c-cpp-dev 202 | ingi/inginious-c-math 203 | ingi/inginious-c-cpp 204 | ingi/inginious-c-base 205 | syakesaba/docker-c-icap 206 | bodsch/docker-carbon-c-relay 207 | frozeneye/common-api-c-poc 208 | gnuhub/c-docker 209 | bogem/inginious-c-processing 210 | rlincoln/protoc-c 211 | hrektts/gitlab-ci-multi-runner-c-cpp 212 | abegodong/a-c-wp 213 | senbazuru/docker-imagemagick-processing-c-lang 214 | bmanojlovic/zabbix-i-c 215 | ingi/inginious-c-verilog 216 | peterkutschera/crisma-indicators-c 217 | krys/ctrl-c-test 218 | tcgerlach/jenkins-c 219 | qumram/c-icap 220 | ossfuzz/c-ares 221 | abrahammouse/vim-c-ide 222 | amarella/c-dev 223 | devopsbq/leofs3fs-c 224 | inginform/c-dev-env 225 | ingi/inginious-c-php 226 | ingi/inginious-c-r 227 | ergw/ergw-gtp-c-node 228 | johnwlong/hello-c 229 | grounds/exec-c 230 | ingi/inginious-c-mono 231 | immobilienscout24/c-bastion 232 | ingi/inginious-c-java8scala 233 | avatao/webide-c 234 
| bearox/ubt-c 235 | kunstmaan/ethereum-eth 236 | ingi/inginious-c-oz 237 | cengizio/c-devel 238 | coursemology/evaluator-image-c_cpp 239 | ingi/inginious-c-java7 240 | ingi/inginious-c-python3 241 | ingi/inginious-c-pythia1compat 242 | csssuf/c-builder 243 | ingi/inginious-c-sekexe 244 | dmoj/judge-medium 245 | ingi/inginious-c-default 246 | mobagel/mobagel-cpp-sdk 247 | sango/c-clang 248 | mdtisdall/devenv_cpp 249 | marijn100/docker-c-dev 250 | sango/c-gcc 251 | blackyoup/httpserver-c 252 | frebib/c-webserver 253 | sleewoo/fm-c 254 | webdeskltd/owncloud 255 | dlanguage/dmd 256 | dlanguage/ldc 257 | bbytes/proxy-etc 258 | dlanguage/gdc 259 | hhucn/dbas-build 260 | justheuristic/agentnet 261 | drmurx/docker-d-note 262 | nacyot/d-gdc 263 | jacobcallahan/content-host-d 264 | davask/d-ubuntu 265 | davask/d-symfony 266 | davask/d-php 267 | davask/d-apache 268 | deibpolimi/d-vert-server 269 | davask/d-wordpress 270 | ppanyukov/vsts-agent-d 271 | giordan/d-essentials 272 | qnib/d-node 273 | devopsclan/d-ubuntu14-base 274 | davask/d-apache-openssl 275 | davask/d-apache-letsencrypt 276 | giordan/d-grunt 277 | davask/d-files-symfony2 278 | davask/d-apache-proxy-reverse 279 | qnib/d-ceph-fuse 280 | gi4nks/d-java7 281 | qnib/d-java6 282 | omihs/d-frag 283 | davask/d-apache-letsencrypt-proxy-reverse 284 | giordan/d-php5-fpm 285 | binfalse/d-java8 286 | dominicporteous/d-cs 287 | dominicporteous/d-scmd 288 | davask/d-apache-openssl-proxy-reverse 289 | qnib/d-hadoop 290 | wolfhesse/hck-socket-d 291 | ymattu/mecab-d 292 | gi4nks/d-java7-mvn 293 | dexec/lang-d 294 | gi4nks/d-base 295 | qnib/d-chromium-base 296 | rnbwd/d-wheezy 297 | kazutan/stan-d 298 | qnib/d-chromium-js 299 | binfalse/d-java8-texlive-full 300 | gi4nks/d-java7-mvn-jnkslv 301 | davask/d-vpn 302 | giordan/d-php7-fpm 303 | compbio/d-syslog 304 | giordan/d-nginx 305 | cslusher/d-build-tools 306 | aptalca/docker-amazon-echo-ha-bridge-d 307 | desktophero/d-cloudify-centos7 308 | jaesharp/orli-ubuntu-1204-chef-client-d 309 | baekjoon/onlinejudge-d 310 | axiatropicsemantics/pvc-d 311 | openoint/holmes-engine-d-standalone 312 | renansdias/hello-d 313 | drischool/d-gateway 314 | cyberdojofoundation/d-4.8.4 315 | qnib/d-java7 316 | drischool/d-tenants 317 | nathansparks/sim-recon 318 | gdoteof/d-vmxmiddle 319 | davask/d-files 320 | mobilecloudnetworking/d-icn-so 321 | lancelet/centos6-lts-d 322 | qnib/d-consul 323 | gdoteof/d-visionai 324 | eon01/d-ealer 325 | amoners/d-thinker 326 | mobilecloudnetworking/d-ims-so 327 | mobilecloudnetworking/d-andsf-so 328 | joseronierison/ruby-java7 329 | mobilecloudnetworking/d-maas-so 330 | qnib/d-terminal 331 | cell/ssh-over-docker 332 | mobilecloudnetworking/d-mobaas-so 333 | ruseinov/d-cleanup 334 | mobilecloudnetworking/d-dnsaas-so 335 | straitjacket/lang-d 336 | gdoteof/d-postgres 337 | drischool/d-eureka-and-config 338 | hamster21/d-constructed 339 | ramm/d-transmission 340 | diogorusso/jekyll-d 341 | schweizerischebundesbahnen/cmsg-d-rabbitmq-provider 342 | qnib/d-supervisor 343 | precon/content-consolidation 344 | mobilecloudnetworking/d-demo-e2e-so 345 | seshnus/docker-d 346 | qnib/d-syslog 347 | schweizerischebundesbahnen/cmsg-d-rabbitmq-consumer 348 | pinver/dmd_wne 349 | nemoz28/mailserver-docker 350 | davask/d-base 351 | igorrudyk1/curator 352 | kiis/docker-d 353 | zazatu/d-mysql 354 | prestashop/prestashop 355 | vimagick/opencart 356 | janeczku/calibre-web 357 | abevoelker/postgres 358 | tozd/mail 359 | edsondewes/postgres-ecidade 360 | suutari/shoop 361 | edsondewes/ecidade 362 | shoopio/shoop 363 
| weave/rest-smtp-sink 364 | chrisss404/opencart 365 | rafacianci/compufacil-frontend 366 | delermando/nginx-django 367 | eucm/storm 368 | m4uthiagopedroza/docker-behave-selenium-phantom 369 | smarthall/docker-wal-e-replica 370 | naito/ecell3-ipython 371 | manuelgfx/docker-postgres-wale 372 | imae/oracle12c-e 373 | imae/jasperserver-e 374 | eucm/storm-supervisor 375 | berrygoudswaard/mail 376 | eucm/storm-ui 377 | eucm/storm-nimbus 378 | hdnpt/geartrack-website 379 | danielgusmao/speedtest 380 | imae/sdc-e 381 | lukesmith/postgres-wale 382 | twodcube/poketrainer 383 | aasisvinayak/flymyshop 384 | matthewgall/count.re 385 | donmichael/e-pres 386 | britez/e-partner-api 387 | echizenya/e-yota 388 | robbertkl/magister 389 | erpmicroservices/e-commerce-admin-service 390 | fusionapp/postgres 391 | konradcernyerento/e-frontend 392 | annaksyta/e-business 393 | britez/e-partner-sso 394 | jannishuebl/stolon-wal-e 395 | malgorzataolak/e-business 396 | echizenya/e-yota_20170224_copy_image 397 | rsrpsinr/docker-gitlab-e 398 | borringrafael/e-sus-ab-treinamento 399 | hivesolutions/shopdesk 400 | britez/e-partner-ui 401 | erpmicroservices/e-commerce-admin-ui 402 | aslanbekirov/sharon-sharon-e 403 | erpmicroservices/e-commerce-user-service 404 | portworx/px-e 405 | hacklab/mapasculturais 406 | cfcloudops/cf-smoke-tests 407 | javiergarridomellado/dai 408 | edwinvandenbelt/e-spraak 409 | dashersw/cote-workshop 410 | sitchris/docker-wordpress 411 | fastwhitecat/duka-polska 412 | mmaguero/mii-cc16-17 413 | goliasz/raas-micro-e 414 | openknowledge/dockercloud-db-to-s3 415 | murara/tomcat-ibtech 416 | netlify/netlify-commerce 417 | aslanbekirov/sharon-wal-e 418 | maestrano/prestashop 419 | erpmicroservices/e-commerce-db 420 | mattma/test-e 421 | hivesolutions/budy 422 | intercom/dvara 423 | ismaelfm/wall-e 424 | 405102091/python_run 425 | prodomaines/postgresql 426 | charitee/pg-wal-e 427 | fallenpixel/poketrainer 428 | igorrudyk1/curator 429 | dmadk/embryo-couchdb 430 | biwhite/jaxx 431 | gprevost/e-stored 432 | aslanbekirov/wal-e 433 | hubananano/e-business 434 | xcarpentier/e-medicus-jenkins-slave-nodejs 435 | iron/hud-e 436 | incredibleea/e-business 437 | jonathantron/wal-e 438 | adhoc/docker-odoo-adhoc-e 439 | newfuture/elastic-logview 440 | somatorio/novosga 441 | hasura/postgres-wal-e 442 | devialab/docker-postgres-wale 443 | mateomorrison/fangoo 444 | ecell/ecell4-manylinux 445 | britez/e-partner-cloud-config 446 | talmai/wordpress-ecommerce 447 | thinkbot/e-meter-gom-daemon 448 | dtenenba/gene-e 449 | vinayv/vcdm 450 | codeyu/sportsstore 451 | joezuntz/nersc-consult-e 452 | williehao/e-learning-2 453 | fengyfei/elasticsearch 454 | fsharp 455 | gotsunami/fdroid 456 | eeacms/reportek.fcs 457 | fstarlang/fstar 458 | blacktop/fprot 459 | malice/fprot 460 | malice/fsecure 461 | fstarlang/fstar-emacs 462 | sergeidc/docker-celery-entrypoint-f 463 | elite174/db-f 464 | fstarlang/fstar-emacs-nox 465 | tdgp/f-score-cnn-chip-classifier 466 | raise874/nodejs-f-ssh 467 | nifuramu/f-dev-ubuntu 468 | werawoolf/f-doc 469 | mreferre/haproxy-f-101 470 | nifuramu/f-dev 471 | mreferre/go-frontend-f-101 472 | sebfia/fsharp 473 | bknk/srv-f 474 | yanns/f-spot 475 | headbanger84/docker-fdroid 476 | prolucid/storm-supervisor-fs 477 | geoder101/fsharp-onbuild 478 | cgeo/cgeo-fdroid 479 | fengyfei/fluentd-elasticsearch 480 | kerams/docker-fsinteractiveservice 481 | prolucid/fsshelter-samples 482 | prolucid/mono-fs-base 483 | alienblog/mama2-f 484 | neoeinstein/fsharp-alpine 485 | tandfgroup/docker-node 486 | 
vmonte/python3.5-psycopg2 487 | andreascederved/asp-net-core-f-sharp 488 | ernespn/fsharp-calculator 489 | danielfabian/fstar 490 | mariusfeteanu/hemphikid 491 | garethrobertlee/angular-on-tutum 492 | ozzyjohnson/cuda-fah 493 | lrakai/fsharpcorewebapi 494 | awenger/dockerbase-node-dbg 495 | geoder101/fsharp 496 | mbermu/httpd-php 497 | dangdangzone/dockertest 498 | montaque/staticweb 499 | vmakhaev/docker-cleanup 500 | bradh/fsappliedcafe 501 | jasonholloway/heroku-fsharp 502 | frolvlad/alpine-gcc 503 | codenvy/cpp_gcc 504 | flubba86/alpine-pipeline-node-gyp 505 | htfy96/build-image 506 | petrpulc/g-wan-minimal 507 | yrahal/dev-machine 508 | mradamczyk/grafana-dashboards 509 | anthonyzou/alpine-build-essentials 510 | dharmatecgeo/tilehut 511 | gnode/gca 512 | threadx/dev-tools 513 | dvilaverde/bpel-g 514 | isaacchapman/g-zp 515 | isaacchapman/g-hi 516 | isaacchapman/g-gc 517 | skunky/g-cent-6 518 | tomgruner/g-streaming 519 | cgars/gin-doi 520 | drewwells/alpine-build 521 | skunky/g-fed-01 522 | salted/check-g 523 | gorzechowski/gcc 524 | andyg303/g-whale 525 | sglim2/centos7 526 | chiangch/centos7-g-group 527 | onefun/rhel63-g-group 528 | chiangch/centos7-g-group-final 529 | gauril/g-whale 530 | eclipse/ubuntu_go 531 | eclipse/cpp_gcc 532 | nicolinux72/microservices-template-g-orm 533 | duruo850/base 534 | kartzum/g-generator 535 | pengweb/u-n-g 536 | mlewis03/elixir-g-plus-plus 537 | ngimenez/ngimenez_bower 538 | weboaks/node-alpine-build-base 539 | snapacs/g-node-ui 540 | wearelifetrading/gcc6 541 | hrektts/gitlab-ci-multi-runner-c-cpp 542 | gnode/gin-auth 543 | codenvy/ubuntu_go 544 | markub/gitlab-ci-multi-runner-c-cpp 545 | congying/ubuntu-dev 546 | retailify/docker-gwan 547 | wartech/plastun-g 548 | surinkim/centos_builder 549 | sherylynn/nodejs 550 | lol233/ansible 551 | yuyangzxw/g-a 552 | esummers/ubuntu_go 553 | mbermu/httpd 554 | cyppan/lein 555 | amsdard/grunt 556 | harmanpa/maven-gcc-python 557 | agi20dla/cpp-build 558 | eristoddle/alpine-gcc-gyp 559 | thinkbot/bundler-geminabox 560 | debitux/x11-dev 561 | tdjones879/alpine-cmake-compiler 562 | thinkbot/chef-ruby-lvm-attrib 563 | eristoddle/debian-gcc-libuv-gyp 564 | drmaas/golang-glide-alpine 565 | antibodyome/ngs-base 566 | splashblot/docker-postgis 567 | adriel/h265ize 568 | marcusmartins/test-h 569 | invisomichele/python-h-requirements 570 | jbellm/h-hg 571 | wz7465/gss-h 572 | biscarch/h-world 573 | tcheung4ncheung/h-jenkins-slave 574 | patrocinio/h-t-container-stable 575 | intent/h-mongo-explorer 576 | tcheung4ncheung/h-jenkins-master 577 | zabirauf/nlp-python-h 578 | tcheung4ncheung/h-jenkins-kube-slave 579 | hfranco/docker-h-whale 580 | dextercai/ubuntu-sshd-gcc-py 581 | record/hath 582 | ccav14/docker-crysadm-h-dockerhub 583 | cell/swarm-socat 584 | michaelmai2000/docker-hath 585 | alexisduque/atlas-docker 586 | historus/fakes3 587 | guillaumewuip/tpch-1go 588 | ozzyjohnson/cuda-fah 589 | pebcac/static_web 590 | parente/screen2web 591 | cpuguy83/nfs-server 592 | ioft/armhf-ubuntu 593 | peenuty/rails-passenger-nginx-docker-i 594 | ioft/armhf-debian 595 | parente/webdav 596 | tianon/mariadb 597 | ptman/alpine-s6 598 | geerlingguy/docker-image-solr 599 | evolinc/evolinc-i 600 | yurii2017/docker-images 601 | bobrik/mesos-where-am-i 602 | sunshineo/tutum-docker-wordpress-nosql 603 | gubaer/i-doit 604 | nullity/alpine_php-fpm 605 | tinchou/envc 606 | kgtech/php7-dev 607 | jpco/mariadb 608 | jpco/postfix 609 | jhouzard/docker-jdk7-mvn3 610 | jpco/couchdb 611 | albertalvarezbruned/web_simple 612 | telemark/minelev 613 | 
hairyhenderson/jiraprinter 614 | 0x4d4c/i-librarian 615 | resin/cubox-i-buildpack-deps 616 | resin/cubox-i-fedora-golang 617 | resin/cubox-i-alpine 618 | resin/cubox-i-debian 619 | resin/cubox-i-alpine-openjdk 620 | resin/cubox-i-fedora 621 | resin/cubox-i-fedora-python 622 | resin/cubox-i-fedora-node 623 | resin/cubox-i-fedora-openjdk 624 | resin/cubox-i-alpine-buildpack-deps 625 | resin/cubox-i-openjdk 626 | resin/cubox-i-fedora-buildpack-deps 627 | resin/cubox-i-alpine-python 628 | oddlid/arch-cli 629 | resin/cubox-i-alpine-golang 630 | telemark/tfk-saksbehandling-fara-import 631 | dowie/ubuntu-dev 632 | resin/cubox-i-alpine-node 633 | gabeochoa/who-am-i 634 | resin/cubox-i-python 635 | jpco/piwik 636 | swavomir/i-elkf 637 | resin/cubox-i-golang 638 | reliableembeddedsystems/ubuntu-base 639 | resin/cubox-i-node 640 | pitkley/jenkins-slave-texlive-personal 641 | totemteleko/i-0a3dd800a3d7fd2d773d8d2a53d5 642 | iscore/i-score-package-linux 643 | zhaochunqi/json-server 644 | yewton/i-hate-diskfull 645 | ajhaydock/alexhaydock.co.uk 646 | bmanojlovic/zabbix-i-c 647 | uafrica/here-i-am 648 | lgraebin/pln-i 649 | leftathome/gitlab-ee 650 | totemteleko/i-ef280f8f99a9815254c8156153d5 651 | bhale301/toolbox 652 | chrola/personal-web 653 | mostlygeek/i-print-envs 654 | potz/devenv 655 | marcellodesales/centos-i 656 | sahsu/docker-aws-cli 657 | jaehue/whoami 658 | jhou/hello-openshift-i 659 | keijokapp/i-tee 660 | chrisshort/ntp-alpine 661 | bomura/docker-knowledge 662 | lostindigital/i-come 663 | paulstaab/r-pkg-develop 664 | mrmadalex/mynodered-auto 665 | hemags/i-jenkins 666 | rizo928/alpine-plus 667 | telyn/docker-packer 668 | msh100/where-am-i 669 | pbelmann/binning-eval-i 670 | appcelerator/gotools2 671 | topiaruss/dreambase 672 | v1k0d3n/ubuntu-vivid 673 | v1k0d3n/ubuntu-trusty 674 | zaephor/nzbget 675 | sunshineo/tutum-schedule 676 | upendradevisetty/evolinc-i 677 | meobeoi/100concat 678 | grimy55/i-librarian 679 | sergeidc/django_in_containers 680 | fmeppo/utility 681 | aronahl/ubuntu 682 | cami/binning-eval-i 683 | zaherg/php-7.0-xdebug-alpine 684 | fabienfoerster/uzinamonstres 685 | bradleybossard/docker-node-tools 686 | loggerhead/docker-oryx 687 | cyverse/evolinc-i 688 | cgrima/i-librarian 689 | robsyme/basics 690 | pnovotnak/ut 691 | 7vk1/t-on-j 692 | twodcube/poketrainer 693 | nacyot/j-j 694 | sillelien/buddy-j-alpine-test 695 | patrinhani/ciandt-analytics-j-storing-handson 696 | remiii/remiii-dockerfile-ubuntu-app-j 697 | baekjoon/onlinejudge-j 698 | janakiramm/j-hello 699 | sillelien/buddy-j-alpine 700 | milliant/psql-j 701 | berrywira/j-slave 702 | beee/elasticsearch5-j 703 | gregcoleman/j-apache 704 | dinogun/ij 705 | xptech/node-j 706 | rubygem/j-enc 707 | alexanderpeev/j-dev-ubuntu 708 | unselected777/docker-h5ai 709 | fabioluciano/wildfly 710 | alexanderpeev/j-dev-cgroup 711 | arpitadixit/gumball-j-1 712 | jerko000/j-d-base 713 | alexanderpeev/j-dev-cgroup-ioalg 714 | fallenpixel/poketrainer 715 | lstoll/tjts 716 | stjude/mcm 717 | ahmadposten/bugs_reporter 718 | mbariani/c4.5 719 | markerichanson/docker-aws 720 | twodcube/poketrainer 721 | jpetazzo/consulfs 722 | jhipster/consul-config-loader 723 | agunin/k-client-demo 724 | kmcgill88/k-plex 725 | sunkay/k-auth 726 | innotech/k-means-tensorflow 727 | innotech/k-means-runner-tensorflow 728 | joshwyatt/k-test 729 | pokemon0121/k-means 730 | uberguru/pelias-api-golden-k 731 | s80275/nheq-test-k 732 | uberguru/digitransit-proxy-k 733 | kecondezo/k-ubuntoforhub 734 | circlearound/k-php7 735 | kaliberlabs/k-nginx 736 | 
ly2xing/k-httrack 737 | unselected777/docker-deluge 738 | lujuhu/k-database 739 | startingdreams/k-node 740 | lzhong/k-meancluster 741 | konishilee/k-shadowsock 742 | trueb2/k-framework 743 | dccrazyboy/k-on 744 | biodckrdev/jellyfish 745 | kadirayk/docker-whale-k 746 | yutopp/k-framework-env-image 747 | fallenpixel/poketrainer 748 | yutopp/k-framework-image 749 | kmm996/ss-with-net-speeder 750 | fengyfei/kibana 751 | lenlin/shad 752 | rranshous/cellularsource 753 | huahaiy/kgraph 754 | mhemberg/docker-test 755 | asteris/gestalt 756 | pbesson/consul-config-loader 757 | agunin/k-server-demo 758 | matsprea/mono-aspnetvnext 759 | geniousphp/consul-template-haproxy 760 | richardgill/centos-kdb 761 | propersoft/docker-l 762 | fiitpdt/postgres-l 763 | poojathote/aws-mysql-l 764 | seman/cwrbox2 765 | beet/box 766 | cell/ssh-over-docker 767 | orangain/enju_leaf 768 | gustav83/php 769 | richardhull/lindenmayer-systems 770 | logankimmel/jenkins-l-new 771 | k0st/kfh 772 | mhausenblas/m-shop-nginx 773 | zhaowh/centos 774 | 3apaxicom/generator-m-ionic 775 | davidharris/gpdb-m 776 | yaronr/openjdk-7-jre 777 | z3ntu/android-m-build-env 778 | mojo/m-docker-node 779 | xeor/nginx-proxy-m-forwarder 780 | xeor/nginx-proxy-m-watcher 781 | mojo/m-docker-ruby 782 | yellowiscool/m-utmost 783 | xeor/nginx-proxy-m-watcher-nossl 784 | mrales/m-tree 785 | powerping/p-m 786 | zigweb/m-shop-nginx 787 | zak2k/etcd-m 788 | duglin/m-sb 789 | kelvinlawson/atomthreads-cortex-m 790 | sindbach/ubuntu-mongo-m 791 | snapacs/m-node-ui 792 | mhausenblas/m-frontend 793 | rafkhan/sideboard-m 794 | oldwebtoday/chrome-m 795 | eljefederodeodeljefe/m-smap 796 | ravijain/a-m 797 | ivs0/b-arm-m 798 | takenmake/tm_load_balancer 799 | takenmake/tm_blog 800 | shuyuhey/m-box 801 | yaronr/zookeeper 802 | octonion/basketball-m 803 | jlavs/m-app 804 | jlavs/m-lb 805 | pkolloch/m-load-testing 806 | webrecorder/browser-chrome-m 807 | elite174/db-m 808 | gurugv/hbasedist-m 809 | zerograviti/m-shop-nginx 810 | mcreations/jenkins 811 | amarkwalder/cdk-java-jre 812 | inbartm/tm_backup 813 | stemoi/php-fpm-newrelic-m 814 | yaronr/tomcat7 815 | amarkwalder/cdk-java-jdk 816 | blind/asp-m-st 817 | lavvy/celleter 818 | eljefederodeodeljefe/m-r-d-server 819 | 3apaxi/docker-generator-m-ionic 820 | kmm996/ss-with-net-speeder 821 | bewest/c-r-m 822 | lafenice/m-shop-nginx 823 | lenlin/shad 824 | dmitriyles/m-ci 825 | ravijain/a-p-m 826 | jertremblay/aosp-m-build 827 | dsteffy510/m-shop-app 828 | chuckus/frontera-zeromq 829 | nicolaasvanermen/grafana 830 | amarkwalder/cdk-tomcat 831 | cherriges/m-shop-app 832 | alexcstanciu/ubuntu-32bit-m 833 | sunilthemaster/swarm-zk03-m 834 | daftclouds/m-shop-nginx 835 | kkrusedp/m-shop-nginx 836 | songhui/python-z3-m 837 | philoles/docker-pmail 838 | eljefederodeodeljefe/m-r-d-app 839 | amarkwalder/cdk-nginx 840 | kihaloul/docker-whale-m 841 | kpeiruza/apache-php-deployer 842 | madraziw/tm-base-notebook-ubuntu 843 | zoidbergwill/python-server-example 844 | andlaz/hadoop-mapred-historyserver 845 | prinsmike/govide 846 | fgimenez/emacs-n-go 847 | slepp/arduino 848 | c5244444/chaitrali-n 849 | guyton/yumrepo 850 | orbitable/bridge 851 | z3ntu/android-n-build-env 852 | apsops/scratch-n-cacerts 853 | unknownhero/ubuntu-node-n 854 | vitallan/goal-n-journal 855 | clintconklin/sen-n-docker 856 | troy0820/node-n 857 | alexchesters/nel-n-dory.com 858 | ravenxce/project-n-api 859 | w4beda/docker-n-subdomain-router 860 | lyapun/code-n-coffee2 861 | jacobmarshall/jenkins-slave-n 862 | globidocker/n-puzzle 863 | kartzum/n-generator 864 
| gad2103/centos7-n-nodejs 865 | bprashanth/n-way-http 866 | troy0820/node-n-fedora 867 | tleyden5iwx/emacs-n-go 868 | monstrodev/code-n-coffee 869 | hxfeng/emacs-n-go 870 | docstream/nix-base 871 | zifius/code-n-coffee 872 | lyapun/code-n-coffee 873 | guyschaos/emacs-n-go 874 | sujaisd/n-tier-app 875 | igoro/dock-n-ror 876 | lyapun/code-n-coffee-start 877 | dimkk/nodebb 878 | always3133/dotnetcorebenchmark 879 | pengweb/u-n-g 880 | jubr/curl-n-stuff 881 | lavvy/celleterx 882 | lavvy/celletera 883 | pgmahesh/fd 884 | apsops/darbaan 885 | larsks/fedora-rdo-n-c-base 886 | bkuhl/regular-orb-updates 887 | ssmehta/magma 888 | niccolomeloni/docker-nativescript-cli 889 | wongko/bijandocker 890 | makarlsso/wordpress 891 | znetstar/hping3 892 | reinout/nenskins14docker 893 | uynil/docker-cas_mysql 894 | stain/rdfsplit 895 | marvinwu/dockerbase-uwsgi-dev-10-log-collection-as-a-developer-i-n 896 | dtwardow/openldap 897 | marvinwu/fluentd-dev-10-log-collection-as-a-developer-i-n 898 | jonatasrenan/paf-env 899 | brenninc/last5 900 | diceone/boom 901 | ) 902 | 903 | RANDOM=$$$(date +%s) 904 | 905 | # Get a random expression... 906 | selectedexpression=${expressions[$RANDOM % ${#expressions[@]} ]} 907 | # Write it to the shell 908 | echo $selectedexpression 909 | 910 | 911 | 912 | 913 | -------------------------------------------------------------------------------- /security/commands-unix.list: -------------------------------------------------------------------------------- 1 | ############################################### 2 | # 3 | ############################################### 4 | #keep your git checkout directory in a variable for the commands below 5 | export SCRIPT_DIR=$(pwd) 6 | 7 | #here we assume your /etc/hosts file has been updated so that jfrog.local points to your Artifactory instance.
8 | #in order to validate this you can run (expected result: OK): 9 | 10 | curl http://jfrog.local/artifactory/api/system/ping 11 | 12 | #save your password 13 | 14 | export ADMIN_PASSWORD= 15 | 16 | ################################################ 17 | #Let's start with security 18 | ################################################ 19 | 20 | #list existing permission targets 21 | 22 | curl -uadmin:$ADMIN_PASSWORD http://jfrog.local/artifactory/api/security/permissions 23 | 24 | #delete the default permission targets 25 | 26 | curl -uadmin:$ADMIN_PASSWORD -X DELETE http://jfrog.local/artifactory/api/security/permissions/Anything 27 | curl -uadmin:$ADMIN_PASSWORD -X DELETE http://jfrog.local/artifactory/api/security/permissions/Any%20Remote 28 | 29 | 30 | #create a new group for the dev team 31 | 32 | curl -uadmin:$ADMIN_PASSWORD -X PUT http://jfrog.local/artifactory/api/security/groups/dev-team -H "content-type: application/vnd.org.jfrog.artifactory.security.Group+json" -T $SCRIPT_DIR/group.json 33 | 34 | #create a new user 35 | export USER_LOGIN= 36 | 37 | curl -uadmin:$ADMIN_PASSWORD -X PUT http://jfrog.local/artifactory/api/security/users/$USER_LOGIN -H "content-type: application/vnd.org.jfrog.artifactory.security.User+json" -T $SCRIPT_DIR/user.json 38 | 39 | ################################################ 40 | #LDAP Integration 41 | ################################################ 42 | ssh admin@jfrog.local 43 | 44 | sudo docker pull jfrogtraining-docker-dev.jfrog.io/soleng_openldap:1.1 45 | 46 | sudo docker run -itd --name openldap -p 389:389 --network root_default jfrogtraining-docker-dev.jfrog.io/soleng_openldap:1.1 47 | 48 | sudo docker exec openldap /init.sh 49 | 50 | ################################################ 51 | #Connect to LDAP 52 | ################################################ 53 | #connect to LDAP (UI) 54 | #import the swampadmin and swampuser groups (UI) 55 | 56 | #give admin privileges to the swampadmin group 57 | curl -uadmin:$ADMIN_PASSWORD -X POST http://jfrog.local/artifactory/api/security/groups/swampadmin -H "content-type: application/vnd.org.jfrog.artifactory.security.Group+json" -T $SCRIPT_DIR/group_admin.json 58 | 59 | #Connect with the frog_admin user (password = jfrog) (UI) 60 | 61 | #generate the frog_admin API key 62 | curl -ufrog_admin:jfrog -X POST http://jfrog.local:8081/artifactory/api/security/apiKey 63 | 64 | #save it as a local variable 65 | export ADMIN_KEY= 66 | 67 | 68 | ################################################ 69 | #Establish a circle of trust 70 | ################################################ 71 | #check the standalone 1 trusted store 72 | docker exec -it artifactory-standalone-1 ls -l "/var/opt/jfrog/artifactory/access/etc/keys/trusted/" 73 | 74 | #check the standalone 2 trusted store 75 | docker exec -it artifactory-standalone-2 ls -l "/var/opt/jfrog/artifactory/access/etc/keys/trusted/" 76 | 77 | #get the service_id of each Artifactory instance 78 | export id_main=$(curl -H "X-Jfrog-Art-API:$ADMIN_KEY" http://jfrog.local/artifactory/api/system/service_id) 79 | 80 | #the standalone instances are not connected to LDAP => use the default admin user 81 | export id_s1=$(curl -uadmin:$ADMIN_PASSWORD http://jfrog.local:8094/artifactory/api/system/service_id) 82 | export id_s2=$(curl -uadmin:$ADMIN_PASSWORD http://jfrog.local:8095/artifactory/api/system/service_id) 83 | 84 | #generate a refreshable token shared between the three instances 85 | curl -H "X-Jfrog-Art-API:$ADMIN_KEY" -X POST "http://jfrog.local/artifactory/api/security/token" -d "username=kermit" -d 
"scope=member-of-groups:swampdev" -d "audience=$id_main $id_s2 $id_s3" -d "refreshable=true" 86 | 87 | #Result example 88 | { 89 | "scope" : "member-of-groups:swampdev api:*", 90 | "access_token" :  "", 91 | "refresh_token" : "", 92 | "expires_in" : 3600, 93 | "token_type" : "Bearer" 94 | } 95 | 96 | export token= 97 | export refresh= 98 | 99 | #refresh token 100 | curl -XPOST "http://jfrog.local/artifactory/api/security/token" -d "grant_type=refresh_token" -d "refresh_token=$refresh" -d "access_token=$token" 101 | 102 | #revoke token 103 | curl -H "X-Jfrog-Art-API:$ADMIN_KEY" -XPOST "http://jfrog.local/artifactory/api/security/token/revoke" -d "$token" 104 | 105 | curl -ukermit:$token -T ./token.json http://jfrog.local/artifactory/example-repo-local/token.json 106 | 107 | curl -H"Authorization: Bearer $token" -T ./token.json http://jfrog.local:8095/artifactory/generic-local/token.json -------------------------------------------------------------------------------- /security/group.json: -------------------------------------------------------------------------------- 1 | { 2 | "description" : "The development team group", 3 | "autoJoin" : false 4 | } -------------------------------------------------------------------------------- /security/group_admin.json: -------------------------------------------------------------------------------- 1 | { 2 | "description" : "for swampup 2018", 3 | "autoJoin" : false, 4 | "realm" : "ldap", 5 | "realmAttributes" : "ldapGroupName=swampadmin;groupsStrategy=STATIC;groupDn=cn=swampadmin,ou=Groups,dc=jfrog,dc=com", 6 | "adminPrivileges" : true 7 | } -------------------------------------------------------------------------------- /security/token.json: -------------------------------------------------------------------------------- 1 | { 2 | "expires_in": 3600, 3 | "scope": "member-of-groups:readers", 4 | "token_type": "Bearer", 5 | "audience": "jfrt@*" 6 | } -------------------------------------------------------------------------------- /security/user.json: -------------------------------------------------------------------------------- 1 | { 2 | "email" : "", 3 | "password": "", 4 | "admin": false, 5 | "profileUpdatable": true, 6 | "groups" : [ "readers","dev-team" ] 7 | } -------------------------------------------------------------------------------- /src/org/jfrog/MyArtifactory.groovy: -------------------------------------------------------------------------------- 1 | package org.jfrog; 2 | 3 | class MyArtifactory implements Serializable { 4 | 5 | def server 6 | def buildInfo 7 | def rtMaven 8 | 9 | MyArtifactory (def artifactory) { 10 | this.server = artifactory.server 'artifactory-ha' 11 | this.rtMaven = artifactory.newMavenBuild() 12 | this.buildInfo = artifactory.newBuildInfo() 13 | this.buildInfo.env.capture = true 14 | this.buildInfo.retention maxBuilds: 10, maxDays: 7, deleteBuildArtifacts: 5 15 | 16 | this.rtMaven.tool = 'mvn' // Tool name from Jenkins configuration 17 | this.rtMaven.deployer releaseRepo:'automation-mvn-excercise-local', snapshotRepo:'automation-mvn-excercise-snapshot-local', server: server 18 | this.rtMaven.resolver releaseRepo:'libs-release', snapshotRepo:'libs-snapshot', server: server 19 | } 20 | 21 | def runMaven () { 22 | this.rtMaven.run pom: 'maven-example/pom.xml', goals: 'clean install', buildInfo: this.buildInfo 23 | } 24 | 25 | def upLoadToArtifactory (def buildNo) { 26 | def uploadSpec = """{ 27 | "files": [ 28 | { 29 | "pattern": "/var/lib/jenkins/workspace/HAP-935/(*).zip", 30 | "target": 
"hap-935a/tibco/${buildNo}/{1}.zip", 31 | "props": "hap-935=true", 32 | "flat": "false", 33 | "regexp":"false" 34 | }, 35 | { 36 | "pattern" : "(*)pom.xml", 37 | "target":"hap-935a/tibco/${buildNo}/{1}.xml", 38 | "flat":"false", 39 | "props":"type=pom;status=ready" 40 | } 41 | ] 42 | }""" 43 | 44 | this.server.upload(uploadSpec, buildInfo) 45 | this.server.publishBuildInfo(buildInfo) 46 | } 47 | } 48 | --------------------------------------------------------------------------------