├── .circleci
└── config.yml
├── .gitignore
├── LICENSE
├── Makefile
├── README.md
├── affiliation
├── handlers.go
└── service.go
├── apidb
└── service.go
├── cmd
├── serve.go
└── serve_aws.go
├── docker
├── Dockerfile
├── build_image.sh
└── run.sh
├── docs
├── doc.go
└── handlers.go
├── elastic
└── service.go
├── errs
└── errors.go
├── find.sh
├── for_each_go_file.sh
├── gen
└── restapi
│ └── configure_dev_analytics_affiliation.go
├── go.mod
├── go.sum
├── health
├── handlers.go
└── service.go
├── helm
├── da-affiliation
│ ├── .helmignore
│ ├── Chart.yaml
│ ├── secrets
│ │ ├── API_DB_ENDPOINT.secret.example
│ │ ├── AUTH0_CLIENT_ID.secret.example
│ │ ├── AUTH0_DOMAIN.secret.example
│ │ ├── AUTH0_USERNAME_CLAIM.secret.example
│ │ ├── CERT.secret.example
│ │ ├── CORS_ALLOWED_ORIGINS.secret.example
│ │ ├── ELASTIC_PASSWORD.secret.example
│ │ ├── ELASTIC_URL.secret.example
│ │ ├── ELASTIC_USERNAME.secret.example
│ │ ├── SH_DB_ENDPOINT.secret.example
│ │ └── SYNC_URL.secret.example
│ ├── templates
│ │ ├── NOTES.txt
│ │ ├── _helpers.tpl
│ │ ├── api.yaml
│ │ ├── namespace.yaml
│ │ └── secrets.yaml
│ └── values.yaml
├── delete.sh
└── setup.sh
├── logging
├── common.go
└── logger.go
├── main.go
├── main_test.go
├── map_org_names.yaml
├── platform
└── organization.go
├── serverless.yml
├── sh
├── api.sh
├── api_local_prod.sh
├── api_local_test.sh
├── api_logs.sh
├── check_affs_data.sh
├── check_finos_projs.sh
├── check_login.sh
├── check_unknowns.sh
├── compare_commits.sh
├── curl_delete_enrollment.sh
├── curl_delete_enrollments.sh
├── curl_delete_identity.sh
├── curl_delete_matching_blacklist.sh
├── curl_delete_org_domain.sh
├── curl_delete_organization.sh
├── curl_delete_profile.sh
├── curl_delete_slug_mapping.sh
├── curl_es_unaffiliated.sh
├── curl_get_affiliation_both.sh
├── curl_get_affiliation_multi.sh
├── curl_get_affiliation_single.sh
├── curl_get_all_yaml.sh
├── curl_get_find_organization_by_id.sh
├── curl_get_find_organization_by_name.sh
├── curl_get_identity.sh
├── curl_get_list_organizations.sh
├── curl_get_list_organizations_domains.sh
├── curl_get_list_profiles.sh
├── curl_get_list_projects.sh
├── curl_get_list_slug_mappings.sh
├── curl_get_matching_blacklist.sh
├── curl_get_profile.sh
├── curl_get_profile_by_username.sh
├── curl_get_profile_enrollments.sh
├── curl_get_profile_nested.sh
├── curl_get_slug_mapping.sh
├── curl_get_top_contributors.sh
├── curl_get_top_contributors_csv.sh
├── curl_get_top_contributors_query.sh
├── curl_get_unaffiliated.sh
├── curl_list_users.sh
├── curl_merge_unmerge.sh
├── curl_post_add_enrollment.sh
├── curl_post_add_identities.sh
├── curl_post_add_identity.sh
├── curl_post_add_organization.sh
├── curl_post_add_slug_mapping.sh
├── curl_post_add_unique_identity.sh
├── curl_post_bulk_update.sh
├── curl_post_matching_blacklist.sh
├── curl_put_cache_top_contributors.sh
├── curl_put_det_aff_range.sh
├── curl_put_edit_enrollment.sh
├── curl_put_edit_enrollment_by_id.sh
├── curl_put_edit_organization.sh
├── curl_put_edit_profile.sh
├── curl_put_edit_slug_mapping.sh
├── curl_put_hide_emails.sh
├── curl_put_map_org_names.sh
├── curl_put_merge_all.sh
├── curl_put_merge_enrollments.sh
├── curl_put_merge_unique_identities.sh
├── curl_put_move_identity.sh
├── curl_put_org_domain.sh
├── curl_put_sync_sf_profiles.sh
├── curl_unarchive_profile.sh
├── es.sh
├── es_blanks.sh
├── es_documents.sh
├── es_local_docker.sh
├── es_unknowns.sh
├── example_add_identities.json
├── example_bulk.json
├── example_bulk2.json
├── finos_blanks.sh
├── finos_fix_projects.sh
├── finos_rolls.sh
├── finos_unknowns.sh
├── fix_blank_orgs.sh
├── fix_dot_git_in_finos.sh
├── fix_es_docs.sh
├── fix_finos_git_unknowns.sh
├── fix_unknowns.sh
├── get_token.sh
├── local_api.sh
├── mariadb.sh
├── mariadb_drop.sh
├── mariadb_init.sh
├── mariadb_init_default.sh
├── mariadb_local_docker.sh
├── mariadb_readonly_shell.sh
├── mariadb_reinit.sh
├── mariadb_root_shell.sh
├── mariadb_sortinghat_shell.sh
├── merge_gits.sh
├── project_svc.sh
├── psql.sh
├── psql_api_shell.sh
├── psql_init.sh
├── psql_local_docker.sh
├── psql_shell.sh
├── restore_backup.sh
├── sh_structure.sql
├── shared.sh
├── top_contributors.json
├── top_contributors_response.json
├── update_finos_rolls.sh
├── update_mapping.sh
├── uuids.json
└── validate_token.sh
├── shared
└── service.go
├── shdb
└── service.go
├── sql
├── add_last_modified_by.sql
├── add_locked_by.sql
├── add_permissions.sql
├── check_results.sql
├── check_sh_db.sql
├── profiles_without_identities_and_enrollments.sql
├── structure_updates.sql
└── testing_api.sql
├── swagger
├── dev-analytics-affiliation.yaml
└── errors.go
└── usersvc
└── user.go
/.circleci/config.yml:
--------------------------------------------------------------------------------
# CircleCI pipeline for dev-analytics-affiliation:
# - build_da_affiliation compiles the lambda binary on every branch and tag,
# - deploy_test deploys master to the "test" stage,
# - deploy_prod deploys v* tags to "prod" after a manual approval gate.
version: 2.1
jobs:
  build_da_affiliation:
    docker:
      # NOTE(review): go.mod declares `go 1.15` but this image ships Go 1.12 —
      # confirm the build still succeeds on it, or bump the image version.
      - image: circleci/golang:1.12-stretch-node
    steps:
      - checkout
      - restore_cache:
          keys:
            - go-mod-v1-{{ checksum "go.sum" }}
      - run:
          name: Build
          command: make setup_dev && make build
      - save_cache:
          key: go-mod-v1-{{ checksum "go.sum" }}
          paths:
            - "/go/pkg"
      # Hand the compiled binary and serverless config over to the deploy jobs.
      # NOTE(review): project-vars.yml is not visible in the repo tree — verify
      # it is generated during the build, otherwise persisting it will fail.
      - persist_to_workspace:
          root: ./
          paths:
            - bin
            - serverless.yml
            - project-vars.yml

  # Shared deploy job body; stage-specific credentials and STAGE come from the
  # environment of the concrete jobs that reference this YAML anchor.
  deploy_da_affiliation: &deploy_da_affiliation_anchor
    docker:
      - image: circleci/node:10.13
    working_directory: /tmp/da_affiliation
    steps:
      - checkout
      - restore_cache:
          name: Restore Cache
          keys:
            - npm-packages-{{ checksum "package-lock.json" }}
      - run:
          name: Install Dependencies Root
          command: npm install --frozen-lockfile
      - save_cache:
          key: npm-packages-{{ checksum "package-lock.json" }}
          paths:
            - ~/usr/local/lib/node_modules
      # Indirection: AWS_ACCESS_KEY_ID_ENV_VAR holds the NAME of the variable
      # carrying the real key for this stage; eval resolves it into $BASH_ENV.
      - run:
          name: Setup AWS key
          command: |
            eval "echo 'export AWS_ACCESS_KEY_ID=\$$AWS_ACCESS_KEY_ID_ENV_VAR'" >> $BASH_ENV
            eval "echo 'export AWS_SECRET_ACCESS_KEY=\$$AWS_SECRET_ACCESS_KEY_ENV_VAR'" >> $BASH_ENV
      - attach_workspace:
          at: ./
      # - run:
      #     name: Create the custom domain in API Gateway
      #     command: yarn sls -s ${STAGE} -r us-west-2 create_domain
      - run:
          name: Deploy
          command: yarn sls deploy -s ${STAGE} -r us-west-2 --verbose
          no_output_timeout: 1.5h

  deploy_test:
    <<: *deploy_da_affiliation_anchor
    environment:
      AWS_ACCESS_KEY_ID_ENV_VAR: AWS_ACCESS_KEY_ID_TEST
      AWS_SECRET_ACCESS_KEY_ENV_VAR: AWS_SECRET_ACCESS_KEY_TEST
      STAGE: test
  deploy_prod:
    <<: *deploy_da_affiliation_anchor
    environment:
      AWS_ACCESS_KEY_ID_ENV_VAR: AWS_ACCESS_KEY_ID_PROD
      AWS_SECRET_ACCESS_KEY_ENV_VAR: AWS_SECRET_ACCESS_KEY_PROD
      STAGE: prod

workflows:
  da_affiliation:
    jobs:
      # Build runs for all branches and all tags.
      - build_da_affiliation:
          filters:
            tags:
              only: /.*/
      # Test deploys only from master, never from tags.
      - deploy_test:
          requires:
            - build_da_affiliation
          filters:
            branches:
              only: master
            tags:
              ignore: /.*/
      # Manual gate: prod is reachable only via semver-like v* tags.
      - approve_prod:
          type: approval
          requires:
            - build_da_affiliation
          filters:
            branches:
              ignore: /.*/
            tags:
              only: /^v\d+(\.\d+)?(\.\d+)?$/
      - deploy_prod:
          requires:
            - approve_prod
          filters:
            branches:
              ignore: /.*/
            tags:
              only: /^v\d+(\.\d+)?(\.\d+)?$/
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Binaries for programs and plugins
2 | *.exe
3 | *.exe~
4 | *.dll
5 | *.so
6 | *.dylib
7 |
8 | # Test binary, built with `go test -c`
9 | *.test
10 |
11 | # Output of the go coverage tool, specifically when used with LiteIDE
12 | *.out
13 |
14 | # Dependency directories (remove the comment below to include it)
15 | # vendor/
16 |
17 | # VIM
18 | *.swp
19 |
20 | # Go binary
21 | dev-analytics-affiliation
22 |
23 | # Others
24 | out
25 |
26 | # Serverless directories
27 | .serverless
28 |
29 | # golang output binary directory
30 | bin
31 |
32 | # golang vendor (dependencies) directory
33 | vendor
34 |
35 | # ignore all go swagger generated files except the configuration one
36 | gen/restapi/*
37 | gen/models/*
38 | !gen/restapi/configure_dev_analytics_affiliation.go
39 |
40 | node_modules
41 |
42 | .env
43 |
44 | # Binaries
45 | main
46 |
47 | # Sensitive files
48 | *.dump
49 | sh/*.sql
50 | !sh/sh_structure.sql
51 | *.token
52 | *.domain
53 | *.client_id
54 | *.username_claim
55 | *.secret
56 | data.json
57 | confluence.txt
58 | secret/
59 | dump.sql
60 | q*.json
61 | q*.txt
62 | .idea
63 | temp.sql
64 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
# Build/test/deploy targets for the dev-analytics-affiliation API.
# NOTE(review): "dev-analyics-affiliation" looks like a typo for
# "dev-analytics-affiliation" — confirm against serverless.yml/CI before
# renaming, since the deployed artifact path is bin/$(SERVICE).
SERVICE = dev-analyics-affiliation
# Build metadata baked into the binary via -X linker flags (main.* vars).
BUILD_TIME=`date -u '+%Y-%m-%d_%I:%M:%S%p'`
BUILD_COMMIT=`git rev-parse HEAD`
BUILD_HOSTNAME=`uname -a | sed "s/ /_/g"`
BUILD_GO_VERSION=`go version | sed "s/ /_/g"`
# Turn off race conditions detector
LDFLAGS=-ldflags "-s -w -extldflags '-static' -X main.BuildStamp=$(BUILD_TIME) -X main.GitHash=$(BUILD_COMMIT) -X main.BuildHostName=$(BUILD_HOSTNAME) -X main.BuildGoVersion=$(BUILD_GO_VERSION)"
# For detecting race conditions
# LDFLAGS=-ldflags "-s -w -extldflags '-static' -X main.BuildStamp=$(BUILD_TIME) -X main.GitHash=$(BUILD_COMMIT) -X main.BuildHostName=$(BUILD_HOSTNAME) -X main.BuildGoVersion=$(BUILD_GO_VERSION)" -race
GO_BIN_FILES=main.go
GO_FMT=gofmt -s -w
GO_VET=go vet
GO_LINT=golint -set_exit_status
GO_TEST=go test
GO_TEST_FILES=main_test.go
# CGO disabled => fully static binary, required for the bare alpine image.
GO_STATIC=CGO_ENABLED=0

.PHONY: build clean deploy

generate: swagger

# Regenerate the swagger server under ./gen from the API spec.
swagger: setup_dev clean
	swagger -q generate server -t gen -f swagger/dev-analytics-affiliation.yaml --exclude-main -A dev-analytics-affiliation

# Cross-compile the AWS Lambda binary (linux/amd64, aws_lambda build tag).
build: swagger deps
	env GOOS=linux GOARCH=amd64 go build -tags aws_lambda -o bin/$(SERVICE) -a $(LDFLAGS) .
	chmod +x bin/$(SERVICE)

# Static binary for the docker image (see docker/Dockerfile).
docker: fmt
	${GO_STATIC} go build -o ./main -a $(LDFLAGS)

buildmain: fmt
	go build -o ./main -a $(LDFLAGS)

run: vet lint buildmain
	./main

fastrun: buildmain
	./main

justrun:
	./main

# Wipe all generated code while preserving the hand-maintained
# configure_* file by moving it aside and back.
clean:
	cp gen/restapi/configure_dev_analytics_affiliation.go .
	rm -rf ./bin ./gen
	mkdir gen
	mkdir gen/restapi
	mv configure_dev_analytics_affiliation.go gen/restapi

setup: setup_dev setup_deploy

setup_dev:
	go install github.com/go-swagger/go-swagger/cmd/swagger

setup_deploy:
	npm install serverless

deps:
	go mod tidy

deploy: clean build
	npm install serverless-domain-manager --save-dev
	sls -s ${STAGE} -r ${REGION} create_domain
	sls deploy -s ${STAGE} -r ${REGION} --verbose

fmt: ${GO_BIN_FILES}
	./for_each_go_file.sh "${GO_FMT}"

# vet is advisory: ERROR_EXIT_CODE=0 makes for_each_go_file.sh ignore failures.
vet: ${GO_BIN_FILES}
	ERROR_EXIT_CODE=0 ./for_each_go_file.sh "${GO_VET}"

lint: ${GO_BIN_FILES}
	./for_each_go_file.sh "${GO_LINT}"

test:
	${GO_TEST} ${GO_TEST_FILES}
--------------------------------------------------------------------------------
/cmd/serve.go:
--------------------------------------------------------------------------------
1 | //go:build !aws_lambda
2 | // +build !aws_lambda
3 |
4 | package cmd
5 |
6 | import (
7 | "github.com/LF-Engineering/dev-analytics-affiliation/gen/restapi"
8 | "github.com/LF-Engineering/dev-analytics-affiliation/gen/restapi/operations"
9 | )
10 |
11 | // Start function starts local services
12 | func Start(api *operations.DevAnalyticsAffiliationAPI, portFlag int) error {
13 | server := restapi.NewServer(api)
14 | defer server.Shutdown() // nolint
15 | server.Port = portFlag
16 | server.ConfigureAPI()
17 |
18 | return server.Serve()
19 | }
20 |
--------------------------------------------------------------------------------
/cmd/serve_aws.go:
--------------------------------------------------------------------------------
1 | //go:build aws_lambda
2 | // +build aws_lambda
3 |
4 | package cmd
5 |
6 | import (
7 | "github.com/LF-Engineering/dev-analytics-affiliation/gen/restapi"
8 | "github.com/LF-Engineering/dev-analytics-affiliation/gen/restapi/operations"
9 | "github.com/aws/aws-lambda-go/lambda"
10 | "github.com/awslabs/aws-lambda-go-api-proxy/httpadapter"
11 | "github.com/sirupsen/logrus"
12 | )
13 |
14 | // Start - AWS lambda entry
15 | func Start(api *operations.DevAnalyticsAffiliationAPI, _ int) error {
16 | server := restapi.NewServer(api)
17 | server.ConfigureAPI()
18 | adapter := httpadapter.New(server.GetHandler())
19 |
20 | logrus.Debug("Starting Lambda")
21 | lambda.Start(adapter.Proxy)
22 | return nil
23 | }
24 |
--------------------------------------------------------------------------------
/docker/Dockerfile:
--------------------------------------------------------------------------------
# Minimal runtime image: static Go binary plus the org-name mapping config.
FROM alpine
COPY main /usr/bin/
COPY map_org_names.yaml /
# Exec form so the binary runs as PID 1 and receives signals (SIGTERM)
# directly; the previous shell form wrapped it in /bin/sh, which does not
# forward signals to the child process.
CMD ["main"]
--------------------------------------------------------------------------------
/docker/build_image.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Build the API docker image and push it to the registry.
# Usage: DOCKER_USER=username ./docker/build_image.sh [branch]
# When a branch is given it is checked out first and its name is
# appended to the image name.
if [ -z "${DOCKER_USER}" ]
then
  echo "$0: you need to set docker user via DOCKER_USER=username"
  exit 1
fi

image="${DOCKER_USER}/dev-analytics-affiliation-api"
if [ -n "${1}" ]
then
  image="${image}-${1}"
  git checkout "${1}" || exit 5
fi

make docker || exit 2
docker build -f ./docker/Dockerfile -t "${image}" . || exit 3
docker push "${image}" || exit 4
echo OK
--------------------------------------------------------------------------------
/docker/run.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Run the affiliation API docker image locally, wired to services exposed
# on the docker bridge address 172.17.0.1: postgres on 15432, mariadb on
# 13306, elasticsearch on 19200. The API is published on host port 18080.
# Requires DOCKER_USER (image owner) plus local secret files under
# helm/da-affiliation/secrets/ and secret/ (see .gitignore patterns).
if [ -z "${DOCKER_USER}" ]
then
  echo "$0: you need to set docker user via DOCKER_USER=username"
  exit 1
fi
# Default log level when the caller does not provide one.
if [ -z "${LOG_LEVEL}" ]
then
  LOG_LEVEL=info
fi
SYNC_URL="`cat helm/da-affiliation/secrets/SYNC_URL.prod.secret`"
API_DB_ENDPOINT='host=172.17.0.1 user=postgres password=postgrespwd dbname=dev_analytics port=15432 sslmode=disable'
SH_DB_ENDPOINT='sortinghat:pwd@tcp(172.17.0.1:13306)/sortinghat?charset=utf8'
# Read-only endpoint points at the same local DB in this dev setup.
SH_DB_RO_ENDPOINT='sortinghat:pwd@tcp(172.17.0.1:13306)/sortinghat?charset=utf8'
# Auth0 credentials come from git-ignored local secret files.
AUTH0_DOMAIN=`cat secret/auth0.domain`
AUTH0_CLIENT_ID=`cat secret/auth0.client_id`
AUTH0_USERNAME_CLAIM=`cat secret/auth0.username_claim`
ELASTIC_URL='http://172.17.0.1:19200'
ELASTIC_USERNAME=''
ELASTIC_PASSWORD=''
docker run -p 18080:8080 -e "USE_SEARCH_IN_MERGE=${USE_SEARCH_IN_MERGE}" -e "N_CPUS=${N_CPUS}" -e "LOG_LEVEL=${LOG_LEVEL}" -e "SYNC_URL=${SYNC_URL}" -e "API_DB_ENDPOINT=${API_DB_ENDPOINT}" -e "SH_DB_ENDPOINT=${SH_DB_ENDPOINT}" -e "SH_DB_RO_ENDPOINT=${SH_DB_RO_ENDPOINT}" -e "AUTH0_DOMAIN=${AUTH0_DOMAIN}" -e "AUTH0_CLIENT_ID=${AUTH0_CLIENT_ID}" -e "AUTH0_USERNAME_CLAIM=${AUTH0_USERNAME_CLAIM}" -e "ELASTIC_URL=${ELASTIC_URL}" -e "ELASTIC_USERNAME=${ELASTIC_USERNAME}" -e "ELASTIC_PASSWORD=${ELASTIC_PASSWORD}" -it "${DOCKER_USER}/dev-analytics-affiliation-api" "/usr/bin/main"
--------------------------------------------------------------------------------
/docs/doc.go:
--------------------------------------------------------------------------------
package docs

import (
	"net/http"

	"github.com/go-openapi/runtime"
)

// GetDocOK Success
// Responder that serves the static ReDoc documentation page.
type GetDocOK struct {
}

// NewGetDocOK creates GetDocOK with default headers values
func NewGetDocOK() *GetDocOK {
	return &GetDocOK{}
}

// WriteResponse to the client
// NOTE(review): the HTML template below appears truncated/garbled in this
// copy of the file (markup tags seem stripped and several lines are
// missing) — restore the full ReDoc page from version control; do not
// edit the literal based on this copy.
func (o *GetDocOK) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {
	html := `


Insights Affiliations API ReDoc











`

	rw.Header().Set("Content-Type", "text/html")
	// Panic on write failure: headers are already sent at this point, so
	// there is no way to return an error response to the client.
	_, err := rw.Write([]byte(html))
	if err != nil {
		panic(err)
	}
}
--------------------------------------------------------------------------------
/docs/handlers.go:
--------------------------------------------------------------------------------
1 | package docs
2 |
3 | import (
4 | "github.com/LF-Engineering/dev-analytics-affiliation/gen/restapi/operations"
5 | d "github.com/LF-Engineering/dev-analytics-affiliation/gen/restapi/operations/docs"
6 | "github.com/go-openapi/runtime/middleware"
7 | )
8 |
9 | // Configure configures the documentation service
10 | func Configure(api *operations.DevAnalyticsAffiliationAPI) {
11 |
12 | api.DocsGetDocHandler = d.GetDocHandlerFunc(func(params d.GetDocParams) middleware.Responder {
13 | return NewGetDocOK()
14 | })
15 | }
16 |
--------------------------------------------------------------------------------
/errs/errors.go:
--------------------------------------------------------------------------------
1 | package errs
2 |
3 | import (
4 | "github.com/pkg/errors"
5 | )
6 |
7 | // HTTP status codes
8 | const (
9 | OK = "200"
10 | Created = "201"
11 | ErrBadRequest = "400"
12 | ErrUnauthorized = "401"
13 | ErrForbidden = "403"
14 | ErrNotFound = "404"
15 | ErrServerError = "500"
16 | ErrConflict = "409"
17 | )
18 |
19 | // AffsError is an error type that also holds a status code
20 | type AffsError struct {
21 | Message string
22 | Status string
23 | }
24 |
25 | // CodedError represents an error with a code
26 | type CodedError interface {
27 | Code() string
28 | error
29 | }
30 |
31 | // Error implements the error interface
32 | func (m AffsError) Error() string {
33 | return m.Message
34 | }
35 |
36 | // Code implements coded response interface in swagger
37 | func (m AffsError) Code() string {
38 | return m.Status
39 | }
40 |
41 | // StatusText returns the status code in human text
42 | func StatusText(code string) string {
43 | switch code {
44 | case OK:
45 | return "OK"
46 | case Created:
47 | return "CREATED"
48 | case ErrBadRequest:
49 | return "BAD REQUEST"
50 | case ErrUnauthorized:
51 | return "UNAUTHORIZED"
52 | case ErrForbidden:
53 | return "FORBIDDEN"
54 | case ErrNotFound:
55 | return "NOT FOUND"
56 | case ErrServerError:
57 | return "INTERNAL SERVER ERROR"
58 | default:
59 | return code
60 | }
61 | }
62 |
63 | // New - returns a new AffsError based on error provided + given status code
64 | func New(err error, status string) error {
65 | return AffsError{
66 | Status: status,
67 | Message: err.Error(),
68 | }
69 | }
70 |
71 | // Wrap creates a new error with the same static code if present
72 | func Wrap(err error, message string) error {
73 | e, ok := err.(CodedError)
74 | if !ok {
75 | return errors.Wrap(err, message)
76 | }
77 | return AffsError{
78 | Status: e.Code(),
79 | Message: errors.Wrap(err, message).Error(),
80 | }
81 | }
82 |
--------------------------------------------------------------------------------
/find.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Recursive grep helper.
# $1 - root path, $2 - case-insensitive file-name pattern, $3 - extended regexp.
# Matches are printed and appended to ./out ("out" itself is excluded from
# the scan, as are .git directories).
if [ -z "$1" ]
then
  # Bug fix: error message previously said "arument".
  echo "You need to provide path as first argument"
  exit 1
fi
if [ -z "$2" ]
then
  echo "You need to provide file name pattern as a second argument"
  exit 1
fi
if [ -z "$3" ]
then
  echo "You need to provide regexp pattern to search for as a third argument"
  exit 1
fi
find "$1" -type f -iname "$2" -not -name "out" -not -path '*.git/*' -exec grep -EHIin "$3" "{}" \; | tee -a out
--------------------------------------------------------------------------------
/for_each_go_file.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Run the command given as $1 on every *.go file (up to 4 levels deep,
# skipping ./vendor). Set DEBUG to echo each file name.
# By default the script aborts on the first failing command with exit code
# ERROR_EXIT_CODE (1 unless overridden); ERROR_EXIT_CODE=0 means "run on
# all files and ignore failures" (used by `make vet`).
if [ -z "$ERROR_EXIT_CODE" ]
then
  ERROR_EXIT_CODE=1
fi
# NUL-delimited read + process substitution: robust against file names with
# whitespace (the previous `for f in $(find ...)` word-split them) and keeps
# the loop in the current shell so `exit` aborts the whole script.
while IFS= read -r -d '' f
do
  if [ ! -z "$DEBUG" ]
  then
    echo "$f"
  fi
  # $1 is intentionally unquoted: callers pass whole commands such as
  # "gofmt -s -w" that must undergo word splitting.
  if [ "$ERROR_EXIT_CODE" = "0" ]
  then
    $1 "$f"
  else
    $1 "$f" || exit "$ERROR_EXIT_CODE"
  fi
done < <(find . -maxdepth 4 -type f -iname "*.go" -not -path "./vendor/*" -print0)
exit 0
--------------------------------------------------------------------------------
/go.mod:
--------------------------------------------------------------------------------
1 | module github.com/LF-Engineering/dev-analytics-affiliation
2 |
3 | go 1.15
4 |
5 | require (
6 | github.com/LF-Engineering/dev-analytics-libraries v1.1.28
7 | github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d // indirect
8 | github.com/aws/aws-lambda-go v1.22.0
9 | github.com/awslabs/aws-lambda-go-api-proxy v0.9.0
10 | github.com/elastic/go-elasticsearch/v7 v7.10.0
11 | github.com/go-openapi/errors v0.20.1
12 | github.com/go-openapi/loads v0.20.3
13 | github.com/go-openapi/runtime v0.20.0
14 | github.com/go-openapi/spec v0.20.4
15 | github.com/go-openapi/strfmt v0.20.3
16 | github.com/go-openapi/swag v0.19.15
17 | github.com/go-openapi/validate v0.20.3
18 | github.com/go-sql-driver/mysql v1.5.0
19 | github.com/go-swagger/go-swagger v0.28.0 // indirect
20 | github.com/golang/protobuf v1.5.2 // indirect
21 | github.com/google/uuid v1.2.0
22 | github.com/jessevdk/go-flags v1.5.0
23 | github.com/jmoiron/sqlx v1.3.1
24 | github.com/joho/godotenv v1.3.0
25 | github.com/json-iterator/go v1.1.11
26 | github.com/labstack/gommon v0.3.0
27 | github.com/lib/pq v1.9.0
28 | github.com/mailru/easyjson v0.7.7 // indirect
29 | github.com/pkg/errors v0.9.1
30 | github.com/rs/cors v1.7.0
31 | github.com/sirupsen/logrus v1.7.0
32 | golang.org/x/net v0.0.0-20211008194852-3b03d305991f
33 | golang.org/x/text v0.3.7
34 | gopkg.in/yaml.v2 v2.4.0
35 | )
36 |
37 | // replace github.com/LF-Engineering/dev-analytics-libraries => /root/dev/go/src/github.com/LF-Engineering/dev-analytics-libraries
38 |
--------------------------------------------------------------------------------
/health/handlers.go:
--------------------------------------------------------------------------------
1 | package health
2 |
3 | import (
4 | "github.com/LF-Engineering/dev-analytics-affiliation/gen/restapi/operations"
5 | "github.com/LF-Engineering/dev-analytics-affiliation/gen/restapi/operations/health"
6 | log "github.com/LF-Engineering/dev-analytics-affiliation/logging"
7 | "github.com/LF-Engineering/dev-analytics-affiliation/swagger"
8 | "github.com/go-openapi/runtime/middleware"
9 | )
10 |
11 | // Configure setups handlers on api with Service
12 | func Configure(api *operations.DevAnalyticsAffiliationAPI, service Service) {
13 |
14 | api.HealthGetHealthHandler = health.GetHealthHandlerFunc(func(params health.GetHealthParams) middleware.Responder {
15 | log.Info("entered GetHealthHandler")
16 | var nilRequestID *string
17 | requestID := log.GetRequestID(nilRequestID)
18 | service.SetServiceRequestID(requestID)
19 | result, err := service.GetHealth(params.HTTPRequest.Context())
20 | if err != nil {
21 | return health.NewGetHealthBadRequest().WithPayload(swagger.ErrorResponse(err))
22 | }
23 | return health.NewGetHealthOK().WithXREQUESTID(requestID).WithPayload(result)
24 | })
25 | }
26 |
--------------------------------------------------------------------------------
/health/service.go:
--------------------------------------------------------------------------------
1 | package health
2 |
3 | import (
4 | "context"
5 | "fmt"
6 | "time"
7 |
8 | log "github.com/LF-Engineering/dev-analytics-affiliation/logging"
9 |
10 | "github.com/LF-Engineering/dev-analytics-affiliation/gen/models"
11 | )
12 |
13 | // Service handles async log of audit event
14 | type Service interface {
15 | GetHealth(ctx context.Context) (*models.Health, error)
16 | SetServiceRequestID(requestID string)
17 | GetServiceRequestID() string
18 | }
19 |
20 | func (s *service) SetServiceRequestID(requestID string) {
21 | s.requestID = requestID
22 | }
23 |
24 | func (s *service) GetServiceRequestID() string {
25 | return s.requestID
26 | }
27 |
28 | type service struct {
29 | requestID string
30 | }
31 |
32 | // New is a simple helper function to create a service instance
33 | func New() Service {
34 | return &service{}
35 | }
36 |
37 | func (s *service) GetHealth(ctx context.Context) (*models.Health, error) {
38 | log.WithField("X-REQUEST-ID", s.GetServiceRequestID()).Info("entered service GetHealth")
39 |
40 | t := time.Now()
41 | health := models.Health{
42 | DateTime: t.String(),
43 | DevAnalyticsAffiliationService: "Running",
44 | }
45 |
46 | log.WithField("X-REQUEST-ID", s.GetServiceRequestID()).Debug(fmt.Sprintf("%#v", health))
47 |
48 | return &health, nil
49 | }
50 |
--------------------------------------------------------------------------------
/helm/da-affiliation/.helmignore:
--------------------------------------------------------------------------------
1 | # Patterns to ignore when building packages.
2 | # This supports shell glob matching, relative path matching, and
3 | # negation (prefixed with !). Only one pattern per line.
4 | .DS_Store
5 | # Common VCS dirs
6 | .git/
7 | .gitignore
8 | .bzr/
9 | .bzrignore
10 | .hg/
11 | .hgignore
12 | .svn/
13 | # Common backup files
14 | *.swp
15 | *.bak
16 | *.tmp
17 | *~
18 | # Various IDEs
19 | .project
20 | .idea/
21 | *.tmproj
22 | .vscode/
23 |
--------------------------------------------------------------------------------
/helm/da-affiliation/Chart.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | appVersion: "1.0"
3 | description: dev-analytics-affiliation API Kubernetes deployment
4 | name: da-affiliation
5 | version: 0.1.0
6 |
--------------------------------------------------------------------------------
/helm/da-affiliation/secrets/API_DB_ENDPOINT.secret.example:
--------------------------------------------------------------------------------
1 | host=127.0.0.1 user=postgres password=postgrespwd dbname=dev_analytics port=15432 sslmode=disable
--------------------------------------------------------------------------------
/helm/da-affiliation/secrets/AUTH0_CLIENT_ID.secret.example:
--------------------------------------------------------------------------------
1 | jghjk435ghjg46k345fgk45k45f6783462
--------------------------------------------------------------------------------
/helm/da-affiliation/secrets/AUTH0_DOMAIN.secret.example:
--------------------------------------------------------------------------------
1 | your.auth0.domain.org
--------------------------------------------------------------------------------
/helm/da-affiliation/secrets/AUTH0_USERNAME_CLAIM.secret.example:
--------------------------------------------------------------------------------
1 | https://your.auth0.domain.org/claims/user
--------------------------------------------------------------------------------
/helm/da-affiliation/secrets/CERT.secret.example:
--------------------------------------------------------------------------------
1 | arn:aws:acm:us-xxxx-N:000000000000:certificate/00000000-0000-0000-0000-000000000000
--------------------------------------------------------------------------------
/helm/da-affiliation/secrets/CORS_ALLOWED_ORIGINS.secret.example:
--------------------------------------------------------------------------------
1 | http://domain1.org, https://domain1.org, https://*.domain1.org
--------------------------------------------------------------------------------
/helm/da-affiliation/secrets/ELASTIC_PASSWORD.secret.example:
--------------------------------------------------------------------------------
1 | pass
--------------------------------------------------------------------------------
/helm/da-affiliation/secrets/ELASTIC_URL.secret.example:
--------------------------------------------------------------------------------
1 | http://127.0.0.1:19200
--------------------------------------------------------------------------------
/helm/da-affiliation/secrets/ELASTIC_USERNAME.secret.example:
--------------------------------------------------------------------------------
1 | user
--------------------------------------------------------------------------------
/helm/da-affiliation/secrets/SH_DB_ENDPOINT.secret.example:
--------------------------------------------------------------------------------
1 | sortinghat:pwd@tcp(localhost:13306)/sortinghat?charset=utf8
--------------------------------------------------------------------------------
/helm/da-affiliation/secrets/SYNC_URL.secret.example:
--------------------------------------------------------------------------------
1 | http://my-deploy.io:6060
--------------------------------------------------------------------------------
/helm/da-affiliation/templates/NOTES.txt:
--------------------------------------------------------------------------------
1 | Please provide secret values for each file in `./secrets/*.secret.example` saving it as `./secrets/*.secret`
2 |
3 | Please note that `vim` automatically adds new line to all text files, to remove it run `truncate -s -1` on a saved file.
4 |
5 | To install:
6 | - `helm install da-affiliation ./da-affiliation --set deployEnv=test|prod,nodeNum=4`.
7 |
8 | Please note variables commented out in `./da-affiliation/values.yaml`. You can either uncomment them or pass their values via `--set variable=name`.
9 |
--------------------------------------------------------------------------------
/helm/da-affiliation/templates/_helpers.tpl:
--------------------------------------------------------------------------------
1 | {{/* vim: set filetype=mustache: */}}
2 | {{/*
3 | Expand the name of the chart.
4 | */}}
5 | {{- define "da-affiliation.name" -}}
6 | {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}}
7 | {{- end -}}
8 |
9 | {{/*
10 | Create a default fully qualified app name.
11 | We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
12 | If release name contains chart name it will be used as a full name.
13 | */}}
14 | {{- define "da-affiliation.fullname" -}}
15 | {{- if .Values.fullnameOverride -}}
16 | {{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}}
17 | {{- else -}}
18 | {{- $name := default .Chart.Name .Values.nameOverride -}}
19 | {{- if contains $name .Release.Name -}}
20 | {{- .Release.Name | trunc 63 | trimSuffix "-" -}}
21 | {{- else -}}
22 | {{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}}
23 | {{- end -}}
24 | {{- end -}}
25 | {{- end -}}
26 |
27 | {{/*
28 | Create chart name and version as used by the chart label.
29 | */}}
30 | {{- define "da-affiliation.chart" -}}
31 | {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}}
32 | {{- end -}}
33 |
--------------------------------------------------------------------------------
/helm/da-affiliation/templates/api.yaml:
--------------------------------------------------------------------------------
1 | {{- $skipAPI := .Values.skipAPI -}}
2 | {{ if not $skipAPI }}
3 | {{- $cert := .Files.Get (printf "secrets/CERT.%s.secret" .Values.deployEnv) -}}
4 | ---
5 | apiVersion: v1
6 | kind: Service
7 | metadata:
8 | namespace: '{{ .Values.namespace }}'
9 | name: '{{ .Values.apiServiceName }}'
10 | annotations:
11 | service.beta.kubernetes.io/aws-load-balancer-backend-protocol: http
12 | service.beta.kubernetes.io/aws-load-balancer-ssl-cert: {{ $cert }}
13 | service.beta.kubernetes.io/aws-load-balancer-ssl-ports: https
14 | labels:
15 | name: da-affiliation
16 | type: api
17 | spec:
18 | ports:
19 | - name: http
20 | port: 80
21 | targetPort: 8080
22 | #protocol: TCP
23 | - name: https
24 | port: 443
25 | targetPort: 8080
26 | selector:
27 | name: da-affiliation
28 | type: api
29 | type: LoadBalancer
30 | ---
31 | apiVersion: apps/v1
32 | kind: Deployment
33 | metadata:
34 | namespace: '{{ .Values.namespace }}'
35 | name: '{{ .Values.apiPodName }}'
36 | labels:
37 | name: da-affiliation
38 | type: api
39 | spec:
40 | replicas: {{ .Values.nodeNum }}
41 | strategy:
42 | type: RollingUpdate
43 | rollingUpdate:
44 | maxSurge: {{ .Values.apiMaxSurge }}
45 | maxUnavailable: {{ .Values.apiMaxUnavailable }}
46 | selector:
47 | matchLabels:
48 | name: da-affiliation
49 | type: api
50 | template:
51 | metadata:
52 | namespace: '{{ .Values.namespace }}'
53 | labels:
54 | name: da-affiliation
55 | type: api
56 | spec:
57 | {{ if gt (.Values.nodeNum|int) 1 }}
58 | affinity:
59 | podAntiAffinity:
60 | requiredDuringSchedulingIgnoredDuringExecution:
61 | - labelSelector:
62 | matchExpressions:
63 | - key: type
64 | operator: In
65 | values:
66 | - api
67 | topologyKey: kubernetes.io/hostname
68 | {{ end }}
69 | containers:
70 | - command:
71 | - {{ .Values.apiCommand }}
72 | name: '{{ .Values.apiPodName }}'
73 | image: {{ .Values.apiImage }}
74 | imagePullPolicy: {{ .Values.imagePullPolicy }}
75 | ports:
76 | - containerPort: 8080
77 | env:
78 | - name: LOG_LEVEL
79 | value: '{{ .Values.logLevel }}'
80 | - name: N_CPUS
81 | value: '{{ .Values.nCPUs }}'
82 | - name: DA_AFF_API_SQL_OUT
83 | value: '{{ .Values.sqlOut }}'
84 | - name: USE_SEARCH_IN_MERGE
85 | value: '{{ .Values.useSearchInMergeQueries }}'
86 | - name: SYNC_URL
87 | valueFrom:
88 | secretKeyRef:
89 | name: {{ .Values.apiSecret }}
90 | key: SYNC_URL.secret
91 | - name: API_DB_ENDPOINT
92 | valueFrom:
93 | secretKeyRef:
94 | name: {{ .Values.apiSecret }}
95 | key: API_DB_ENDPOINT.secret
96 | - name: SH_DB_ENDPOINT
97 | valueFrom:
98 | secretKeyRef:
99 | name: {{ .Values.apiSecret }}
100 | key: SH_DB_ENDPOINT.secret
101 | - name: SH_DB_RO_ENDPOINT
102 | valueFrom:
103 | secretKeyRef:
104 | name: {{ .Values.apiSecret }}
105 | key: SH_DB_RO_ENDPOINT.secret
106 | - name: ELASTIC_URL
107 | valueFrom:
108 | secretKeyRef:
109 | name: {{ .Values.apiSecret }}
110 | key: ELASTIC_URL.secret
111 | - name: ELASTIC_USERNAME
112 | valueFrom:
113 | secretKeyRef:
114 | name: {{ .Values.apiSecret }}
115 | key: ELASTIC_USERNAME.secret
116 | - name: ELASTIC_PASSWORD
117 | valueFrom:
118 | secretKeyRef:
119 | name: {{ .Values.apiSecret }}
120 | key: ELASTIC_PASSWORD.secret
121 | - name: AUTH0_CLIENT_ID
122 | valueFrom:
123 | secretKeyRef:
124 | name: {{ .Values.apiSecret }}
125 | key: AUTH0_CLIENT_ID.secret
126 | - name: AUTH0_DOMAIN
127 | valueFrom:
128 | secretKeyRef:
129 | name: {{ .Values.apiSecret }}
130 | key: AUTH0_DOMAIN.secret
131 | - name: AUTH0_USERNAME_CLAIM
132 | valueFrom:
133 | secretKeyRef:
134 | name: {{ .Values.apiSecret }}
135 | key: AUTH0_USERNAME_CLAIM.secret
136 | - name: CORS_ALLOWED_ORIGINS
137 | valueFrom:
138 | secretKeyRef:
139 | name: {{ .Values.apiSecret }}
140 | key: CORS_ALLOWED_ORIGINS.secret
141 | - name: ELASTIC_CACHE_URL
142 | valueFrom:
143 | secretKeyRef:
144 | name: {{ .Values.apiSecret }}
145 | key: ELASTIC_CACHE_URL.secret
146 | - name: ELASTIC_CACHE_USERNAME
147 | valueFrom:
148 | secretKeyRef:
149 | name: {{ .Values.apiSecret }}
150 | key: ELASTIC_CACHE_USERNAME.secret
151 | - name: ELASTIC_CACHE_PASSWORD
152 | valueFrom:
153 | secretKeyRef:
154 | name: {{ .Values.apiSecret }}
155 | key: ELASTIC_CACHE_PASSWORD.secret
156 | - name: ELASTIC_LOG_URL
157 | valueFrom:
158 | secretKeyRef:
159 | name: {{ .Values.apiSecret }}
160 | key: ELASTIC_LOG_URL.secret
161 | - name: ELASTIC_LOG_USERNAME
162 | valueFrom:
163 | secretKeyRef:
164 | name: {{ .Values.apiSecret }}
165 | key: ELASTIC_LOG_USERNAME.secret
166 | - name: ELASTIC_LOG_PASSWORD
167 | valueFrom:
168 | secretKeyRef:
169 | name: {{ .Values.apiSecret }}
170 | key: ELASTIC_LOG_PASSWORD.secret
171 | - name: STAGE
172 | valueFrom:
173 | secretKeyRef:
174 | name: {{ .Values.apiSecret }}
175 | key: STAGE.secret
176 | - name: AUTH0_GRANT_TYPE
177 | valueFrom:
178 | secretKeyRef:
179 | name: {{ .Values.apiSecret }}
180 | key: AUTH0_GRANT_TYPE.secret
181 | - name: AUTH0_CLIENT_SECRET
182 | valueFrom:
183 | secretKeyRef:
184 | name: {{ .Values.apiSecret }}
185 | key: AUTH0_CLIENT_SECRET.secret
186 | - name: AUTH0_AUDIENCE
187 | valueFrom:
188 | secretKeyRef:
189 | name: {{ .Values.apiSecret }}
190 | key: AUTH0_AUDIENCE.secret
191 | - name: AUTH0_TOKEN_ENDPOINT
192 | valueFrom:
193 | secretKeyRef:
194 | name: {{ .Values.apiSecret }}
195 | key: AUTH0_TOKEN_ENDPOINT.secret
196 | - name: PLATFORM_ORG_SERVICE_ENDPOINT
197 | valueFrom:
198 | secretKeyRef:
199 | name: {{ .Values.apiSecret }}
200 | key: PLATFORM_ORG_SERVICE_ENDPOINT.secret
201 | - name: SLACK_WEBHOOK_URL
202 | valueFrom:
203 | secretKeyRef:
204 | name: {{ .Values.apiSecret }}
205 | key: SLACK_WEBHOOK_URL.secret
206 | restartPolicy: {{ .Values.apiRestartPolicy }}
207 | nodeSelector:
208 | {{- with .Values.nodeSelector -}}
209 | {{ toYaml . | nindent 8 }}
210 | {{ end }}
211 | {{ end }}
212 |
--------------------------------------------------------------------------------
/helm/da-affiliation/templates/namespace.yaml:
--------------------------------------------------------------------------------
1 | {{- $skipNamespace := .Values.skipNamespace -}}
2 | {{ if not $skipNamespace }}
3 | ---
4 | kind: Namespace
5 | apiVersion: v1
6 | metadata:
7 | name: '{{ .Values.namespace }}'
8 | labels:
9 | name: '{{ .Values.namespace }}'
10 | {{ end }}
11 |
--------------------------------------------------------------------------------
/helm/da-affiliation/templates/secrets.yaml:
--------------------------------------------------------------------------------
1 | {{- $skipSecrets := .Values.skipSecrets -}}
2 | {{ if not $skipSecrets }}
3 | {{- $apiDBEndpoint := .Files.Get (printf "secrets/API_DB_ENDPOINT.%s.secret" .Values.deployEnv) -}}
4 | {{- $shDBEndpoint := .Files.Get (printf "secrets/SH_DB_ENDPOINT.%s.secret" .Values.deployEnv) -}}
5 | {{- $shDBROEndpoint := .Files.Get (printf "secrets/SH_DB_RO_ENDPOINT.%s.secret" .Values.deployEnv) -}}
6 | {{- $esURL := .Files.Get (printf "secrets/ELASTIC_URL.%s.secret" .Values.deployEnv) -}}
7 | {{- $esUser := .Files.Get (printf "secrets/ELASTIC_USERNAME.%s.secret" .Values.deployEnv) -}}
8 | {{- $esPass := .Files.Get (printf "secrets/ELASTIC_PASSWORD.%s.secret" .Values.deployEnv) -}}
9 | {{- $auth0Cid := .Files.Get (printf "secrets/AUTH0_CLIENT_ID.%s.secret" .Values.deployEnv) -}}
10 | {{- $auth0Dom := .Files.Get (printf "secrets/AUTH0_DOMAIN.%s.secret" .Values.deployEnv) -}}
11 | {{- $auth0Ucl := .Files.Get (printf "secrets/AUTH0_USERNAME_CLAIM.%s.secret" .Values.deployEnv) -}}
12 | {{- $cors := .Files.Get (printf "secrets/CORS_ALLOWED_ORIGINS.%s.secret" .Values.deployEnv) -}}
13 | {{- $syncUrl := .Files.Get (printf "secrets/SYNC_URL.%s.secret" .Values.deployEnv) -}}
14 | {{- $esCacheURL := .Files.Get (printf "secrets/ELASTIC_CACHE_URL.%s.secret" .Values.deployEnv) -}}
15 | {{- $esCacheUser := .Files.Get (printf "secrets/ELASTIC_CACHE_USERNAME.%s.secret" .Values.deployEnv) -}}
16 | {{- $esCachePass := .Files.Get (printf "secrets/ELASTIC_CACHE_PASSWORD.%s.secret" .Values.deployEnv) -}}
17 | {{- $esLogURL := .Files.Get (printf "secrets/ELASTIC_LOG_URL.%s.secret" .Values.deployEnv) -}}
18 | {{- $esLogUser := .Files.Get (printf "secrets/ELASTIC_LOG_USERNAME.%s.secret" .Values.deployEnv) -}}
19 | {{- $esLogPass := .Files.Get (printf "secrets/ELASTIC_LOG_PASSWORD.%s.secret" .Values.deployEnv) -}}
20 | {{- $stage := .Files.Get (printf "secrets/STAGE.%s.secret" .Values.deployEnv) -}}
21 | {{- $auth0Gt := .Files.Get (printf "secrets/AUTH0_GRANT_TYPE.%s.secret" .Values.deployEnv) -}}
22 | {{- $auth0Cs := .Files.Get (printf "secrets/AUTH0_CLIENT_SECRET.%s.secret" .Values.deployEnv) -}}
23 | {{- $auth0Aud := .Files.Get (printf "secrets/AUTH0_AUDIENCE.%s.secret" .Values.deployEnv) -}}
24 | {{- $auth0TokenURL := .Files.Get (printf "secrets/AUTH0_TOKEN_ENDPOINT.%s.secret" .Values.deployEnv) -}}
25 | {{- $platformOrgURL := .Files.Get (printf "secrets/PLATFORM_ORG_SERVICE_ENDPOINT.%s.secret" .Values.deployEnv) -}}
26 | {{- $slackWebhookURL := .Files.Get (printf "secrets/SLACK_WEBHOOK_URL.%s.secret" .Values.deployEnv) -}}
27 | ---
28 | apiVersion: v1
29 | data:
30 | API_DB_ENDPOINT.secret: {{ $apiDBEndpoint | b64enc }}
31 | SH_DB_ENDPOINT.secret: {{ $shDBEndpoint | b64enc }}
32 | SH_DB_RO_ENDPOINT.secret: {{ $shDBROEndpoint | b64enc }}
33 | ELASTIC_URL.secret: {{ $esURL | b64enc }}
34 | ELASTIC_USERNAME.secret: {{ $esUser | b64enc }}
35 | ELASTIC_PASSWORD.secret: {{ $esPass | b64enc }}
36 | AUTH0_CLIENT_ID.secret: {{ $auth0Cid | b64enc }}
37 | AUTH0_DOMAIN.secret: {{ $auth0Dom | b64enc }}
38 | AUTH0_USERNAME_CLAIM.secret: {{ $auth0Ucl | b64enc }}
39 | CORS_ALLOWED_ORIGINS.secret: {{ $cors | b64enc }}
40 | SYNC_URL.secret: {{ $syncUrl | b64enc }}
41 | ELASTIC_CACHE_URL.secret: {{ $esCacheURL | b64enc }}
42 | ELASTIC_CACHE_USERNAME.secret: {{ $esCacheUser | b64enc }}
43 | ELASTIC_CACHE_PASSWORD.secret: {{ $esCachePass | b64enc }}
44 | ELASTIC_LOG_URL.secret: {{ $esLogURL | b64enc }}
45 | ELASTIC_LOG_USERNAME.secret: {{ $esLogUser | b64enc }}
46 | ELASTIC_LOG_PASSWORD.secret: {{ $esLogPass | b64enc }}
47 | STAGE.secret: {{ $stage | b64enc }}
48 | AUTH0_GRANT_TYPE.secret: {{ $auth0Gt | b64enc }}
49 | AUTH0_CLIENT_SECRET.secret: {{ $auth0Cs | b64enc }}
50 | AUTH0_AUDIENCE.secret: {{ $auth0Aud | b64enc }}
51 | AUTH0_TOKEN_ENDPOINT.secret: {{ $auth0TokenURL | b64enc }}
52 | PLATFORM_ORG_SERVICE_ENDPOINT.secret: {{ $platformOrgURL | b64enc }}
53 | SLACK_WEBHOOK_URL.secret: {{ $slackWebhookURL | b64enc }}
54 | kind: Secret
55 | metadata:
56 | namespace: '{{ .Values.namespace }}'
57 | name: {{ .Values.apiSecret }}
58 | labels:
59 | type: 'secret'
60 | app.kubernetes.io/name: {{ include "da-affiliation.name" . }}
61 | helm.sh/chart: {{ include "da-affiliation.chart" . }}
62 | app.kubernetes.io/instance: {{ .Release.Name }}
63 | app.kubernetes.io/managed-by: {{ .Release.Service }}
64 | type: {{ .Values.secretType }}
65 | {{ end }}
66 |
--------------------------------------------------------------------------------
/helm/da-affiliation/values.yaml:
--------------------------------------------------------------------------------
1 | # skipSecrets (will not attempt to create secrets, if you want to create new secrets delete current secrets first and do not set this flag)
2 | # skipNamespace (will skip da-affiliation namespace creation)
# skipAPI (will skip deploying api)
4 | # skipSecrets: 1
5 | # skipNamespace: 1
# skipAPI: 1
7 | # nodeNum: 3
8 | # dryRun: '1'
9 | # nodeSelector:
10 | # lfda: grimoire
11 |
12 | # Standard values:
13 | deployEnv: 'set-me'
14 | dryRun: ''
15 | logLevel: 'info'
16 | sqlOut: ''
17 | nCPUs: ''
18 | useSearchInMergeQueries: ''
19 | concurrencyPolicy: Forbid
20 | fullnameOverride: da-affiliation
21 | imagePullPolicy: Always
22 | apiCommand: '/usr/bin/main'
23 | apiImage: 'dajohn/dev-analytics-affiliation-api'
24 | apiServiceName: api-service
25 | apiPodName: api
26 | apiRestartPolicy: Always
27 | apiMaxSurge: 0
28 | apiMaxUnavailable: 1
29 | name: da-affiliation
30 | namespace: da-affiliation
31 | nameOverride: da-affiliation
32 | secretType: Opaque
33 | apiSecret: api-secret
34 | nodeNum: 1
35 |
--------------------------------------------------------------------------------
/helm/delete.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Tears down the da-affiliation deployment: deletes the main helm release
# from the target namespace, then the separate namespace-only release.
# $1 - deploy env (test, prod); selects the "<env>h.sh" helm wrapper.
#      When omitted, plain "helm" against the current context is used.
# NS=da-affiliation - set namespace name, default da-affiliation
helm=helm
denv=test
if [ -z "$1" ]
then
  echo "$0: you should specify env: test, prod, using default helm"
else
  # per-env wrapper scripts, e.g. "testh.sh", "prodh.sh"
  helm="${1}h.sh"
  denv="${1}"
fi
if [ -z "$NS" ]
then
  NS=da-affiliation
fi
# switch kubectl/helm context into the app namespace, delete the release,
# then switch back to default and delete the namespace-creating release
change_namespace.sh $1 "$NS"
$helm delete "$NS"
change_namespace.sh $1 default
$helm delete "${NS}-namespace"
--------------------------------------------------------------------------------
/helm/setup.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Installs the da-affiliation helm chart: first a namespace-only release,
# then the main API release inside that namespace (or a --dry-run preview).
# $1 - deploy env (test, prod); selects the "<env>h.sh" helm wrapper
# NODES=4 - set number of nodes
# DRY=1 - dry run mode
# NS=da-affiliation - set namespace name, default da-affiliation
# LOG_LEVEL=debug - set log level, default 'info'
helm=helm
denv=test
if [ -z "$1" ]
then
  # fixed: message was missing the word "specify" (cf. delete.sh)
  echo "$0: you should specify env: test, prod, using default helm"
else
  helm="${1}h.sh"
  denv="${1}"
fi
if [ -z "$NODES" ]
then
  export NODES=1
fi
if [ -z "$NS" ]
then
  NS=da-affiliation
fi
if [ -z "$LOG_LEVEL" ]
then
  LOG_LEVEL=info
fi
if [ -z "$DRY" ]
then
  # real install: namespace release first, then the app release in it
  $helm install "${NS}-namespace" ./da-affiliation --set "namespace=$NS,skipSecrets=1,skipAPI=1,nodeNum=$NODES"
  change_namespace.sh $1 "$NS"
  $helm install "$NS" ./da-affiliation --set "namespace=$NS,deployEnv=$denv,skipNamespace=1,nodeNum=$NODES,logLevel=$LOG_LEVEL"
  change_namespace.sh $1 default
else
  echo "Dry run mode"
  change_namespace.sh $1 "$NS"
  $helm install --debug --dry-run --generate-name ./da-affiliation --set "namespace=$NS,deployEnv=$denv,nodeNum=$NODES,logLevel=$LOG_LEVEL,dryRun=1"
  change_namespace.sh $1 default
fi
39 |
--------------------------------------------------------------------------------
/logging/common.go:
--------------------------------------------------------------------------------
1 | package logging
2 |
3 | import log "github.com/sirupsen/logrus"
4 |
// UTCFormatter wraps another logrus Formatter and normalizes every
// entry's timestamp to UTC before delegating the actual formatting.
type UTCFormatter struct {
	log.Formatter
}

// Format converts the entry time to UTC, then hands off to the wrapped
// formatter - usage: log.SetFormatter(UTCFormatter{&log.JSONFormatter{}})
func (u UTCFormatter) Format(e *log.Entry) ([]byte, error) {
	e.Time = e.Time.UTC()
	return u.Formatter.Format(e)
}
15 |
--------------------------------------------------------------------------------
/logging/logger.go:
--------------------------------------------------------------------------------
1 | package logging
2 |
3 | import (
4 | "crypto/rand"
5 | "fmt"
6 | "os"
7 | "runtime"
8 | "strings"
9 |
10 | "github.com/google/uuid"
11 | "github.com/labstack/gommon/log"
12 | "github.com/sirupsen/logrus"
13 | )
14 |
15 | var logger = logrus.New()
16 |
17 | // init initializes the logger
18 | func init() {
19 | //logger.SetFormatter(&logrus.JSONFormatter{})
20 | logger.SetFormatter(UTCFormatter{&logrus.JSONFormatter{}})
21 | //logger.SetFormatter(UTCFormatter{&logrus.TextFormatter{}})
22 | //logger.SetReportCaller(true)
23 |
24 | // Default log level
25 | logger.SetLevel(logrus.InfoLevel)
26 |
27 | EnvLogLevel := os.Getenv("LOG_LEVEL")
28 | if EnvLogLevel == "trace" {
29 | logger.SetLevel(logrus.TraceLevel)
30 | } else if EnvLogLevel == "debug" {
31 | logger.SetLevel(logrus.DebugLevel)
32 | } else if EnvLogLevel == "info" {
33 | logger.SetLevel(logrus.InfoLevel)
34 | } else if EnvLogLevel == "warn" {
35 | logger.SetLevel(logrus.WarnLevel)
36 | } else if EnvLogLevel == "error" {
37 | logger.SetLevel(logrus.ErrorLevel)
38 | } else if EnvLogLevel == "panic" {
39 | logger.SetLevel(logrus.PanicLevel)
40 | }
41 | }
42 |
// WithField returns a log entry carrying a single key/value field.
func WithField(key string, value interface{}) *logrus.Entry {
	return logger.WithField(key, value)
}

// Warn logs msg at warning level.
func Warn(msg string) {
	logger.Warn(msg)
}

// Warnf logs a printf-style formatted message at warning level.
func Warnf(msg string, args ...interface{}) {
	logger.Warnf(msg, args...)
}

// Info logs msg at info level.
func Info(msg string) {
	logger.Info(msg)
}

// Infof logs a printf-style formatted message at info level.
func Infof(msg string, args ...interface{}) {
	logger.Infof(msg, args...)
}

// Debug logs msg at debug level.
func Debug(msg string) {
	logger.Debug(msg)
}

// Debugf logs a printf-style formatted message at debug level.
func Debugf(msg string, args ...interface{}) {
	logger.Debugf(msg, args...)
}

// Error logs err at error level, attaching the caller location (as
// produced by Trace) under the "line" field.
func Error(trace string, err error) {
	logger.WithFields(logrus.Fields{
		"line": trace,
	}).Error(err)
}

// Fatal logs its arguments at fatal level; logrus then terminates the
// process (os.Exit(1)).
func Fatal(args ...interface{}) {
	logger.Fatal(args...)
}

// Fatalf logs a formatted message at fatal level, then terminates the
// process.
func Fatalf(msg string, args ...interface{}) {
	logger.Fatalf(msg, args...)
}

// Panic logs its arguments at panic level, then panics.
func Panic(args ...interface{}) {
	logger.Panic(args...)
}

// Panicf logs a formatted message at panic level, then panics.
func Panicf(msg string, args ...interface{}) {
	logger.Panicf(msg, args...)
}

// Println logs its arguments at info level, fmt.Println style.
func Println(args ...interface{}) {
	logger.Println(args...)
}

// Printf logs a formatted message at info level, fmt.Printf style.
func Printf(msg string, args ...interface{}) {
	logger.Printf(msg, args...)
}

// WithFields returns a log entry carrying the given structured fields.
func WithFields(fields logrus.Fields) *logrus.Entry {
	return logger.WithFields(fields)
}

// WithError returns a log entry with err attached under the "error" field.
func WithError(err error) *logrus.Entry {
	return logger.WithField("error", err)
}
124 |
// Trace returns the source file, line number and function name of the
// calling function, formatted as "file:line function".
func Trace() (line string) {
	pc := make([]uintptr, 15)
	// skip 2 frames: runtime.Callers itself and Trace, so the first
	// reported frame is our caller
	n := runtime.Callers(2, pc)
	frames := runtime.CallersFrames(pc[:n])
	frame, _ := frames.Next()

	// was "%s,:%d %s\n": the stray comma and trailing newline leaked
	// into structured log fields (see Error)
	return fmt.Sprintf("%s:%d %s", frame.File, frame.Line, frame.Function)
}
134 |
// StripSpecialChars returns s with every tab and newline replaced by a
// single space; all other characters are left untouched.
func StripSpecialChars(s string) string {
	replacer := strings.NewReplacer("\t", " ", "\n", " ")
	return replacer.Replace(s)
}
146 |
147 | // GenerateUUID is function to generate our own uuid if the google uuid throws error
148 | func GenerateUUID() string {
149 | log.Info("entering func generateUUID")
150 | b := make([]byte, 16)
151 | _, err := rand.Read(b)
152 | if err != nil {
153 | log.Error(Trace(), err)
154 | return ""
155 | }
156 | theUUID := fmt.Sprintf("%x-%x-%x-%x-%x",
157 | b[0:4], b[4:6], b[6:8], b[8:10], b[10:])
158 | return theUUID
159 | }
160 |
161 | // GetRequestID is function to generate uuid as request id if client doesn't pass X-REQUEST-ID request header
162 | func GetRequestID(requestIDParams *string) string {
163 | log.Debug("entering func getRequestID")
164 | //generate UUID as request ID if it doesn't exist in request header
165 | if requestIDParams == nil || *requestIDParams == "" {
166 | theUUID, err := uuid.NewUUID()
167 | newUUID := ""
168 | if err == nil {
169 | newUUID = theUUID.String()
170 | } else {
171 | newUUID = GenerateUUID()
172 | }
173 | requestIDParams = &newUUID
174 | }
175 | return *requestIDParams
176 | }
177 |
--------------------------------------------------------------------------------
/platform/organization.go:
--------------------------------------------------------------------------------
1 | package platform
2 |
3 | import (
4 | "fmt"
5 | "strconv"
6 |
7 | "github.com/LF-Engineering/dev-analytics-affiliation/gen/models"
8 |
9 | log "github.com/LF-Engineering/dev-analytics-affiliation/logging"
10 | "github.com/LF-Engineering/dev-analytics-libraries/orgs"
11 | )
12 |
// Service - access platform org services
type Service interface {
	GetListOrganizations(string, int64, int64) (*models.GetListOrganizationsServiceOutput, error)
	LookupOrganization(name string) (*models.OrganizationServiceDataOutput, error)
}

// service implements Service on top of the platform org client.
type service struct {
	client *orgs.Org
}

// New returns a platform organization Service backed by the given org
// client.
func New(client *orgs.Org) Service {
	return &service{
		client: client,
	}
}
29 |
30 | // GetListOrganizations ...
31 | func (s *service) GetListOrganizations(q string, rows, page int64) (*models.GetListOrganizationsServiceOutput, error) {
32 | getListOrganizations := &models.GetListOrganizationsServiceOutput{}
33 | nRows := int64(0)
34 | var orgs []*models.OrganizationServiceDataOutput
35 |
36 | // lookup for exact org name match first
37 | sfdcOrg, err := s.client.LookupOrganization(q)
38 | if err != nil {
39 | return nil, err
40 | }
41 |
42 | // append if found in sfdc
43 | if sfdcOrg.Name != "" && sfdcOrg.ID != "" {
44 | orgs = append(orgs, &models.OrganizationServiceDataOutput{ID: (sfdcOrg.ID), Name: sfdcOrg.Name, Domains: []*models.DomainDataOutput{}})
45 | }
46 |
47 | // next, search for org name match.
48 | response, err := s.client.SearchOrganization(q, strconv.FormatInt(rows, 10), strconv.FormatInt(page-1, 10))
49 | if err != nil {
50 | return nil, err
51 | }
52 |
53 | for _, org := range response.Data {
54 | if sfdcOrg.Name != org.Name {
55 | orgs = append(orgs, &models.OrganizationServiceDataOutput{ID: (org.ID), Name: org.Name, Domains: []*models.DomainDataOutput{}})
56 | }
57 | }
58 |
59 | log.Info(fmt.Sprintf("GetListOrganizations: q:%s rows:%d page:%d", q, rows, page))
60 |
61 | getListOrganizations.Organizations = orgs
62 | getListOrganizations.NRecords = nRows
63 | getListOrganizations.Rows = int64(len(orgs))
64 |
65 | if rows == 0 {
66 | getListOrganizations.NPages = 1
67 | } else {
68 | pages := nRows / rows
69 | if nRows%rows != 0 {
70 | pages++
71 | }
72 | getListOrganizations.NPages = pages
73 | }
74 |
75 | getListOrganizations.Page = page
76 | if q != "" {
77 | getListOrganizations.Search = "q=" + q
78 | }
79 |
80 | return getListOrganizations, nil
81 | }
82 |
83 | // LookupOrganization ...
84 | func (s *service) LookupOrganization(name string) (*models.OrganizationServiceDataOutput, error) {
85 | org, err := s.client.LookupOrganization(name)
86 | if err != nil {
87 | return nil, err
88 | }
89 |
90 | return &models.OrganizationServiceDataOutput{ID: org.ID, Name: org.Name,
91 | Domains: []*models.DomainDataOutput{{Name: org.Link, OrganizationName: org.Name}}},
92 | nil
93 | }
94 |
--------------------------------------------------------------------------------
/serverless.yml:
--------------------------------------------------------------------------------
1 | service: dev-analytics-affiliation-service
2 | frameworkVersion: '>=1.28.0 <2.0.0'
3 |
4 | plugins:
5 | - serverless-domain-manager
6 |
7 | provider:
8 | name: aws
9 | runtime: go1.x
10 | stage: ${opt:stage, 'dev'} # dev, test, staging, or prod
11 | region: ${opt:region, 'us-west-2'}
12 | timeout: 60 # optional, in seconds, default is 6
13 |
14 | environment:
15 | CORS_ALLOWED_ORIGINS: https://insights.test.platform.linuxfoundation.org,https://lfanalytics.io
16 | LOG_LEVEL: info
17 | N_CPUS: ''
18 | USE_SEARCH_IN_MERGE: ''
19 | ELASTIC_URL: ${ssm:/da_elastic_endpoint~true}
20 | ELASTIC_USERNAME: ${ssm:/da_elastic_username~true}
21 | ELASTIC_PASSWORD: ${ssm:/da_elastic_password~true}
22 | API_DB_ENDPOINT: host=${ssm:/da_api_db_endpoint~true} user=${ssm:/da_api_db_username~true} password=${ssm:/da_api_db_password~true} dbname=${self:custom.apiDB.${self:provider.stage}} sslmode=require
23 | SH_DB_ENDPOINT: ${ssm:/da_sh_db_username~true}:${ssm:/da_sh_db_password~true}@tcp(${ssm:/da_sh_db_endpoint~true}:${ssm:/da_sh_db_port~true})/${self:custom.shDB.${self:provider.stage}}?charset=utf8
24 | SH_DB_RO_ENDPOINT: ${ssm:/da_sh_db_ro_username~true}:${ssm:/da_sh_db_ro_password~true}@tcp(${ssm:/da_sh_db_ro_endpoint~true}:${ssm:/da_sh_db_port~true})/${self:custom.shRODB.${self:provider.stage}}?charset=utf8
25 | AUTH0_DOMAIN: ${ssm:/da_auth0_domain~true}
26 | AUTH0_CLIENT_ID: ${ssm:/da_auth0_client_id~true}
27 | AUTH0_USERNAME_CLAIM: ${ssm:/da_auth0_username_claim~true}
28 |
29 | custom:
30 | version: v1
31 | project: dev-analytics-affiliation
32 | apiDB:
33 | prod: dev_analytics
34 | test: dev_analytics_test
35 | shDB:
36 | prod: sortinghat
37 | test: sortinghat
38 | legacyApiEndpoint:
39 | test: https://api.insights.test.platform.linuxfoundation.org
40 | prod: https://api.lfanalytics.io
41 | other: https://api.insights.test.platform.linuxfoundation.org
42 | DAdomain:
43 | test: insights.test.platform.linuxfoundation.org
44 | prod: lfanalytics.io
45 | other: insights.test.platform.linuxfoundation.org
46 | customDomain:
47 | domainName: affiliation.${self:custom.DAdomain.${self:provider.stage}, self:custom.DAdomain.other}
48 | basePath: ""
49 | stage: ${self:provider.stage}
50 | createRoute53Record: false
51 |
52 | package:
53 | exclude:
54 | - ./**
55 | include:
56 | - ./bin/**
57 |
58 | functions:
59 | metrics:
60 | name: dev-analytics-affiliation-service-handler
61 | handler: bin/dev-analyics-affiliation
62 | events:
63 | - http:
64 | path: /{proxy+}
65 | method: any
66 | cors: true
67 |
--------------------------------------------------------------------------------
/sh/api.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Runs the affiliation API locally, filling in sane local defaults for any
# connection/env variable that the caller did not already export.
# N_CPUS - set number of CPUS, N_CPUS=1 enables singlethreaded mode
# DA_AFF_API_SQL_OUT=1 - output SQL queries
# USE_SEARCH_IN_MERGE - special flag to use search pattern in merge queries performed after the main search
# ONLYRUN=1 - skip swagger generation and build; NOCHECKS=1 - skip checks; JUSTRUN=1 - fastest run target
if [ -z "${LOG_LEVEL}" ]
then
  LOG_LEVEL=info
fi
# local postgres API DB default
if [ -z "$API_DB_ENDPOINT" ]
then
  export API_DB_ENDPOINT='host=127.0.0.1 user=postgres password=postgrespwd dbname=dev_analytics port=15432 sslmode=disable'
fi
# local sortinghat MariaDB defaults (read-write and read-only)
if [ -z "$SH_DB_ENDPOINT" ]
then
  export SH_DB_ENDPOINT='sortinghat:pwd@tcp(localhost:13306)/sortinghat?charset=utf8'
fi
if [ -z "$SH_DB_RO_ENDPOINT" ]
then
  export SH_DB_RO_ENDPOINT='ro_user:pwd@tcp(localhost:13306)/sortinghat?charset=utf8'
fi
# these two come from the prod secret files when not provided
if [ -z "$SYNC_URL" ]
then
  export SYNC_URL="`cat helm/da-affiliation/secrets/SYNC_URL.prod.secret`"
fi
if [ -z "$AUTH0_AUDIENCE" ]
then
  export AUTH0_AUDIENCE=`cat helm/da-affiliation/secrets/AUTH0_AUDIENCE.prod.secret`
fi
if [ -z "$ELASTIC_URL" ]
then
  export ELASTIC_URL='http://127.0.0.1:19200'
fi
# local ES runs without auth
export ELASTIC_USERNAME=''
export ELASTIC_PASSWORD=''
if [ -z "$CORS_ALLOWED_ORIGINS" ]
then
  export CORS_ALLOWED_ORIGINS='https://insights.test.platform.linuxfoundation.org, https://lfanalytics.io, http://127.0.0.1'
fi
# pick the make target based on how much of the pipeline should run
if [ -z "$ONLYRUN" ]
then
  make swagger && make build && make run
else
  if [ -z "$NOCHECKS" ]
  then
    make run
  else
    if [ -z "$JUSTRUN" ]
    then
      make fastrun
    else
      make justrun
    fi
  fi
fi
--------------------------------------------------------------------------------
/sh/api_local_prod.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Runs the API locally against the *prod* environment: loads every required
# env var from the prod secret files, then delegates to sh/api.sh.
export LOG_LEVEL=debug
export STAGE=prod
export ONLYRUN=1
export NOCHECKS=1
export AUTH0_DOMAIN="`cat helm/da-affiliation/secrets/AUTH0_DOMAIN.prod.secret`"
export ELASTIC_URL="`cat helm/da-affiliation/secrets/ELASTIC_URL.prod.secret`"
export SH_DB_ENDPOINT="`cat helm/da-affiliation/secrets/SH_DB_ENDPOINT.prod.secret`"
# fixed: was reading SH_DB_ENDPOINT.prod.secret (copy-paste), which pointed
# the read-only connection at the read-write credentials; the dedicated RO
# secret exists (see templates/secrets.yaml)
export SH_DB_RO_ENDPOINT="`cat helm/da-affiliation/secrets/SH_DB_RO_ENDPOINT.prod.secret`"
export API_DB_ENDPOINT="`cat helm/da-affiliation/secrets/API_DB_ENDPOINT.prod.secret`"
export PLATFORM_USER_SERVICE_ENDPOINT="`cat helm/da-affiliation/secrets/PLATFORM_USER_SERVICE_ENDPOINT.prod.secret`"
export PLATFORM_ORG_SERVICE_ENDPOINT="`cat helm/da-affiliation/secrets/PLATFORM_ORG_SERVICE_ENDPOINT.prod.secret`"
export ELASTIC_CACHE_URL="`cat helm/da-affiliation/secrets/ELASTIC_CACHE_URL.prod.secret`"
export ELASTIC_CACHE_USERNAME="`cat helm/da-affiliation/secrets/ELASTIC_CACHE_USERNAME.prod.secret`"
export ELASTIC_CACHE_PASSWORD="`cat helm/da-affiliation/secrets/ELASTIC_CACHE_PASSWORD.prod.secret`"
export ELASTIC_LOG_URL="`cat helm/da-affiliation/secrets/ELASTIC_LOG_URL.prod.secret`"
export ELASTIC_LOG_USERNAME="`cat helm/da-affiliation/secrets/ELASTIC_LOG_USERNAME.prod.secret`"
export ELASTIC_LOG_PASSWORD="`cat helm/da-affiliation/secrets/ELASTIC_LOG_PASSWORD.prod.secret`"
export AUTH0_GRANT_TYPE="`cat helm/da-affiliation/secrets/AUTH0_GRANT_TYPE.prod.secret`"
export AUTH0_CLIENT_ID="`cat helm/da-affiliation/secrets/AUTH0_CLIENT_ID.prod.secret`"
export AUTH0_CLIENT_SECRET="`cat helm/da-affiliation/secrets/AUTH0_CLIENT_SECRET.prod.secret`"
export AUTH0_AUDIENCE="`cat helm/da-affiliation/secrets/AUTH0_AUDIENCE.prod.secret`"
export AUTH0_TOKEN_ENDPOINT="`cat helm/da-affiliation/secrets/AUTH0_TOKEN_ENDPOINT.prod.secret`"
export SLACK_WEBHOOK_URL="`cat helm/da-affiliation/secrets/SLACK_WEBHOOK_URL.prod.secret`"
./sh/api.sh
26 |
--------------------------------------------------------------------------------
/sh/api_local_test.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Runs the API locally against the *test* environment: loads every required
# env var from the test secret files, then delegates to sh/api.sh.
export LOG_LEVEL=info
export STAGE=test
export ONLYRUN=1
export NOCHECKS=1
export AUTH0_DOMAIN="`cat helm/da-affiliation/secrets/AUTH0_DOMAIN.test.secret`"
export ELASTIC_URL="`cat helm/da-affiliation/secrets/ELASTIC_URL.test.secret`"
export SH_DB_ENDPOINT="`cat helm/da-affiliation/secrets/SH_DB_ENDPOINT.test.secret`"
# fixed: was reading SH_DB_ENDPOINT.test.secret (copy-paste), which pointed
# the read-only connection at the read-write credentials; the dedicated RO
# secret exists (see templates/secrets.yaml)
export SH_DB_RO_ENDPOINT="`cat helm/da-affiliation/secrets/SH_DB_RO_ENDPOINT.test.secret`"
export API_DB_ENDPOINT="`cat helm/da-affiliation/secrets/API_DB_ENDPOINT.test.secret`"
export PLATFORM_USER_SERVICE_ENDPOINT="`cat helm/da-affiliation/secrets/PLATFORM_USER_SERVICE_ENDPOINT.test.secret`"
export PLATFORM_ORG_SERVICE_ENDPOINT="`cat helm/da-affiliation/secrets/PLATFORM_ORG_SERVICE_ENDPOINT.test.secret`"
export ELASTIC_CACHE_URL="`cat helm/da-affiliation/secrets/ELASTIC_CACHE_URL.test.secret`"
export ELASTIC_CACHE_USERNAME="`cat helm/da-affiliation/secrets/ELASTIC_CACHE_USERNAME.test.secret`"
export ELASTIC_CACHE_PASSWORD="`cat helm/da-affiliation/secrets/ELASTIC_CACHE_PASSWORD.test.secret`"
export ELASTIC_LOG_URL="`cat helm/da-affiliation/secrets/ELASTIC_LOG_URL.test.secret`"
export ELASTIC_LOG_USERNAME="`cat helm/da-affiliation/secrets/ELASTIC_LOG_USERNAME.test.secret`"
export ELASTIC_LOG_PASSWORD="`cat helm/da-affiliation/secrets/ELASTIC_LOG_PASSWORD.test.secret`"
export AUTH0_GRANT_TYPE="`cat helm/da-affiliation/secrets/AUTH0_GRANT_TYPE.test.secret`"
export AUTH0_CLIENT_ID="`cat helm/da-affiliation/secrets/AUTH0_CLIENT_ID.test.secret`"
export AUTH0_CLIENT_SECRET="`cat helm/da-affiliation/secrets/AUTH0_CLIENT_SECRET.test.secret`"
export AUTH0_AUDIENCE="`cat helm/da-affiliation/secrets/AUTH0_AUDIENCE.test.secret`"
export AUTH0_TOKEN_ENDPOINT="`cat helm/da-affiliation/secrets/AUTH0_TOKEN_ENDPOINT.test.secret`"
export SLACK_WEBHOOK_URL="`cat helm/da-affiliation/secrets/SLACK_WEBHOOK_URL.test.secret`"
./sh/api.sh
26 |
--------------------------------------------------------------------------------
/sh/api_logs.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Collects logs from every pod in the da-affiliation namespace into
# api_logs.txt.
# k=testk.sh - kubectl wrapper to use, default prodk.sh
# fixed: the guard tested uppercase "$K" while setting and using lowercase
# "k" - with K set, ${k} expanded empty and the loop command broke
if [ -z "$k" ]
then
  k="prodk.sh"
fi
> api_logs.txt
for po in `${k} -n da-affiliation get po -o json | jq -r '.items[].metadata.name'`
do
  echo $po
  ${k} -n da-affiliation logs "${po}" >> api_logs.txt
done
12 |
--------------------------------------------------------------------------------
/sh/check_affs_data.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Cross-check affiliation data for one or more person names given as arguments:
# for every SortingHat uuid matching each name it prints the profile rows,
# identity rows and enrollments from MySQL, then queries ElasticSearch for
# documents authored under that uuid/id and finally under the raw author name.
# Fixes vs. the original:
# - the example comment referenced the wrong script (check_login.sh) and put
#   ES=... after the command, where it would be passed as a positional arg;
# - removed dead code copied from check_login.sh: the logins/emails/eemails
#   arrays and the conds/lcond/econd building loops were populated but never
#   consumed anywhere in this script.
# Example:
# PATTERN='sds-lfn-opnfv-*,-*-raw,-*-for-merge' MYSQL='mysql -hhost -uuser -ppass db' ES='https://elastic:user@url.us-west-1.aws.found.io:port' ./sh/check_affs_data.sh 'lukasz Gryglicki'
if [ -z "$MYSQL" ]
then
  echo "Please specify full mysql connect command, something like MYSQL='mysql -hdburl -uuser -ppassword dbname'"
  exit 1
fi
if [ -z "$ES" ]
then
  echo "Please specify full ElasticSearch URL, something like ES=elastic.host.com"
  exit 2
fi
if [ -z "${PATTERN}" ]
then
  PATTERN='sds-*-git*,-*-raw,-*-for-merge'
fi
# -NBAe: no column headers, batch (tab-separated) output, no auto-rehash, execute.
MYSQL="${MYSQL} -NBAe "
for name in "$@"
do
  echo "${name}:"
  echo "SortingHat:"
  # All uuids whose profile or identity name matches exactly.
  cmd="$MYSQL \"select uuid from profiles where name = '${name}' union select uuid from identities where name = '${name}'\""
  uuids=$(eval "${cmd}")
  if [ -z "${uuids}" ]
  then
    echo "No profiles/identities found for ${name}"
    continue
  fi
  i="1"
  for uuid in ${uuids}
  do
    echo "#${i} uuid: ${uuid}"
    echo "Profiles:"
    # The query rewrites emails as "local!domain" and replaces whitespace in
    # names with "---" so the whole result can be word-split into a flat array.
    cmd="$MYSQL \"select if(email='','NULL',email), regexp_replace(if(email='','NULL',email), '([^\\s@]+)@([^\\s@]+)', '\\\\\\\\1\\!\\\\\\\\2'), regexp_replace(if(name='','NULL',name), '\\\\\\\\s', '---') from profiles where uuid = '${uuid}' order by email\""
    data=$(eval "${cmd}")
    if [ -z "${data}" ]
    then
      echo "Profile ${uuid} not found"
    else
      # Walk the flat array 3 fields at a time: email, escaped email, name.
      ary=(${data})
      j="0"
      while true
      do
        email="${ary[${j}]}"
        pname="${ary[((j+2))]//---/ }"
        if [ -z "${email}" ]
        then
          break
        fi
        echo -e "${email}\t${pname}"
        ((j=j+3))
      done
    fi
    echo "Identities:"
    cmd="$MYSQL \"select if(source='','NULL',source), if(username='','NULL',username), if(email='','NULL',email), regexp_replace(if(email='','NULL',email), '([^\\s@]+)@([^\\s@]+)', '\\\\\\\\1\\!\\\\\\\\2'), regexp_replace(if(name='','NULL',name), '\\\\\\\\s', '---') from identities where uuid = '${uuid}' order by source\""
    data=$(eval "${cmd}")
    if [ -z "${data}" ]
    then
      echo "Identity ${uuid} not found"
    else
      # Walk the flat array 5 fields at a time:
      # source, username, email, escaped email, name.
      ary=(${data})
      j="0"
      while true
      do
        src="${ary[${j}]}"
        login="${ary[((j+1))]}"
        email="${ary[((j+2))]}"
        iname="${ary[((j+4))]//---/ }"
        if [ -z "${src}" ]
        then
          break
        fi
        echo -e "${src}\t${login}\t${email}\t${iname}"
        ((j=j+5))
      done
    fi
    echo "Enrollments:"
    cmd="$MYSQL \"select e.project_slug, date(e.start), date(e.end), o.name, e.role from enrollments e, organizations o where e.organization_id = o.id and e.uuid = '${uuid}' order by e.project_slug, e.start\""
    data=$(eval "${cmd}")
    if [ -z "${data}" ]
    then
      echo "No enrollments for ${uuid}"
    else
      echo "${data}"
    fi
    echo "ElasticSearch ${PATTERN}: author_uuid/author_id: ${uuid}"
    es=`curl -s -XPOST -H 'Content-type: application/json' "${ES}/_sql?format=tsv" -d"{\"query\":\"select metadata__gelk_backend_name, origin, author_id, author_uuid, author_org_name, count(*) as cnt, min(grimoire_creation_date), max(grimoire_creation_date) from \\\\\"${PATTERN}\\\\\" where author_uuid in ('${uuid}') or author_id in ('${uuid}') group by metadata__gelk_backend_name, author_id, author_uuid, origin, author_org_name order by cnt desc\"}"`
    echo "${es}"
    ((i=i+1))
  done
  echo "ElasticSearch ${PATTERN}: author_name: ${name}"
  es=`curl -s -XPOST -H 'Content-type: application/json' "${ES}/_sql?format=tsv" -d"{\"query\":\"select metadata__gelk_backend_name, origin, author_id, author_uuid, author_org_name, count(*) as cnt, min(grimoire_creation_date), max(grimoire_creation_date) from \\\\\"${PATTERN}\\\\\" where author_name in ('${name}') group by metadata__gelk_backend_name, origin, author_id, author_uuid, author_org_name order by cnt desc\"}"`
  echo "${es}"
done
--------------------------------------------------------------------------------
/sh/check_finos_projs.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Report FINOS project titles (sh/finos_prjs.secret, one per line) that have no
# matching project title in the GitLab projects dump (projects.json.secret).
# Fixes: useless `cat | jq` replaced by a direct jq invocation; `read -r` so
# backslashes in titles are kept literal; "GiLab" comment typo corrected.
declare -A glprojects
# Collect every project title present in the GitLab JSON dump.
lines=$(jq -rS '. | to_entries | .[].value.meta.title' projects.json.secret)
while read -r prj
do
  # Strip stray CR/LF characters.
  prj=${prj//$'\n'/}
  prj=${prj//$'\r'/}
  # jq emits "null" for entries without a title - skip them.
  if [ "$prj" = "null" ]
  then
    continue
  fi
  # echo "GitLab project: '$prj'"
  glprojects[$prj]=1
done <<< "$lines"
lines=$(cat sh/finos_prjs.secret)
while read -r prj
do
  prj=${prj//$'\n'/}
  prj=${prj//$'\r'/}
  found="${glprojects[$prj]}"
  if [ -z "$found" ]
  then
    echo "Project '$prj' not found in GitLab"
  fi
done <<< "$lines"
--------------------------------------------------------------------------------
/sh/check_login.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Cross-check a person's affiliation data across four sources: SortingHat
# (MySQL), the CNCF github_users.json file, ElasticSearch and the DevStats
# PostgreSQL database. Each positional argument is an exact person name.
# Example:
# PSQL='sudo -u postgres psql db' MYSQL='mysql -hhost -uuser -ppass db' ES='https://elastic:user@url.us-west-1.aws.found.io:port' ./sh/check_login.sh 'lukasz Gryglicki'
if [ -z "$MYSQL" ]
then
  echo "Please specify full mysql connect command, something like MYSQL='mysql -hdburl -uuser -ppassword dbname'"
  exit 1
fi
if [ -z "$PSQL" ]
then
  echo "Please specify full postgresql connect command, something like PSQL='sudo -u postgres psql dbname'"
  exit 2
fi
if [ -z "$ES" ]
then
  echo "Please specify full ElasticSearch URL, something like ES=elastic.host.com"
  exit 3
fi
if [ -z "${JSON}" ]
then
  JSON="${HOME}/dev/go/src/github.com/cncf/devstats/github_users.json"
fi
if [ -z "${PATTERN}" ]
then
  PATTERN='sds-cncf-*-git*,-*-raw,-*-for-merge'
fi
# -NBAe: no column headers, batch (tab-separated) output, no auto-rehash, execute.
MYSQL="${MYSQL} -NBAe "
# -F$'\t' -tAc: tab field separator, tuples-only unaligned output, run command.
PSQL="${PSQL} -F$'\t' -tAc "
for name in "$@"
do
  echo "${name}:"
  echo "SortingHat:"
  # All SortingHat uuids whose profile or identity name matches exactly.
  cmd="$MYSQL \"select uuid from profiles where name = '${name}' union select uuid from identities where name = '${name}'\""
  uuids=$(eval "${cmd}")
  if [ -z "${uuids}" ]
  then
    echo "No profiles/identities found for ${name}"
    continue
  fi
  i="1"
  # Accumulated across all of this person's uuids:
  # logins  - GitHub usernames, emails - raw emails,
  # eemails - emails with "@" replaced by "!" (used for the jq match below;
  # presumably github_users.json stores emails in that form - verify).
  declare -A logins=()
  declare -A emails=()
  declare -A eemails=()
  for uuid in ${uuids}
  do
    echo "#${i} uuid: ${uuid}"
    echo "Profiles:"
    # The query rewrites emails as "local!domain" and replaces whitespace in
    # names with "---" so the full result can be word-split into a flat array.
    cmd="$MYSQL \"select if(email='','NULL',email), regexp_replace(if(email='','NULL',email), '([^\\s@]+)@([^\\s@]+)', '\\\\\\\\1\\!\\\\\\\\2'), regexp_replace(if(name='','NULL',name), '\\\\\\\\s', '---') from profiles where uuid = '${uuid}' order by email\""
    data=$(eval "${cmd}")
    if [ -z "${data}" ]
    then
      echo "Profile ${uuid} not found"
    else
      # Walk the flat array 3 fields at a time: email, escaped email, name.
      ary=(${data})
      j="0"
      while true
      do
        email="${ary[${j}]}"
        eemail="${ary[((j+1))]}"
        pname="${ary[((j+2))]//---/ }"
        if [ -z "${email}" ]
        then
          break
        fi
        echo -e "${email}\t${pname}"
        if [ ! "${email}" = "NULL" ]
        then
          emails["${email}"]="1"
          eemails["${eemail}"]="1"
        fi
        ((j=j+3))
      done
    fi
    echo "Identities:"
    cmd="$MYSQL \"select if(source='','NULL',source), if(username='','NULL',username), if(email='','NULL',email), regexp_replace(if(email='','NULL',email), '([^\\s@]+)@([^\\s@]+)', '\\\\\\\\1\\!\\\\\\\\2'), regexp_replace(if(name='','NULL',name), '\\\\\\\\s', '---') from identities where uuid = '${uuid}' order by source\""
    data=$(eval "${cmd}")
    if [ -z "${data}" ]
    then
      echo "Identity ${uuid} not found"
    else
      # Walk the flat array 5 fields at a time:
      # source, username, email, escaped email, name.
      ary=(${data})
      j="0"
      while true
      do
        src="${ary[${j}]}"
        login="${ary[((j+1))]}"
        email="${ary[((j+2))]}"
        eemail="${ary[((j+3))]}"
        iname="${ary[((j+4))]//---/ }"
        if [ -z "${src}" ]
        then
          break
        fi
        echo -e "${src}\t${login}\t${email}\t${iname}"
        # Only GitHub usernames are collected as logins.
        if ( [ "${src}" = "github" ] && [ ! "${login}" = "NULL" ] )
        then
          logins["${login}"]="1"
        fi
        if [ ! "${email}" = "NULL" ]
        then
          emails["${email}"]="1"
          eemails["${eemail}"]="1"
        fi
        ((j=j+5))
      done
    fi
    echo "Enrollments:"
    cmd="$MYSQL \"select e.project_slug, date(e.start), date(e.end), o.name, e.role from enrollments e, organizations o where e.organization_id = o.id and e.uuid = '${uuid}' order by e.project_slug, e.start\""
    data=$(eval "${cmd}")
    if [ -z "${data}" ]
    then
      echo "No enrollments for ${uuid}"
    else
      echo "${data}"
    fi
    echo "ElasticSearch ${PATTERN}:"
    # tail -n +2 strips the TSV header row from the SQL result.
    es=`curl -s -XPOST -H 'Content-type: application/json' "${ES}/_sql?format=tsv" -d"{\"query\":\"select metadata__gelk_backend_name, origin, author_org_name, count(*) as cnt, min(grimoire_creation_date), max(grimoire_creation_date) from \\\\\"${PATTERN}\\\\\" where author_uuid in ('${uuid}') group by metadata__gelk_backend_name, origin, author_org_name order by cnt desc\"}" | tail -n +2`
    echo "${es}"
    ((i=i+1))
  done
  # NOTE(review): i is reset here but never read again - looks like a leftover.
  i="1"
  # Build the lookup conditions from the collected data:
  # conds - jq filter matching any login or any escaped email,
  # lcond - SQL IN-list of logins, econd - SQL IN-list of raw emails.
  conds=""
  lcond=""
  econd=""
  for login in "${!logins[@]}"
  do
    cond=".login==\"${login}\""
    if [ -z "${conds}" ]
    then
      conds="${cond}"
    else
      conds="${conds} or ${cond}"
    fi
    if [ -z "${lcond}" ]
    then
      lcond="'${login}'"
    else
      lcond="${lcond},'${login}'"
    fi
  done
  for email in "${!emails[@]}"
  do
    if [ -z "${econd}" ]
    then
      econd="'${email}'"
    else
      econd="${econd},'${email}'"
    fi
  done
  for eemail in "${!eemails[@]}"
  do
    cond=".email==\"${eemail}\""
    if [ -z "${conds}" ]
    then
      conds="${cond}"
    else
      conds="${conds} or ${cond}"
    fi
  done
  echo "CNCF JSON:"
  js=`jq -r ".[] | select(${conds}) | .login + \"/\" + .email + \": \" + (.affiliation // \"-\")" "${JSON}"`
  if [ -z "${js}" ]
  then
    echo "Nothing found for: ${conds}"
  else
    echo "${conds}:"
    echo "${js}"
  fi
  # pcond is a subquery selecting DevStats actor ids by login and/or email.
  pcond=''
  if [ ! -z "${lcond}" ]
  then
    pcond="select id from gha_actors where login in (${lcond})"
  fi
  if [ ! -z "${econd}" ]
  then
    if [ -z "${pcond}" ]
    then
      pcond="select actor_id from gha_actors_emails where email in (${econd})"
    else
      pcond="${pcond} union select actor_id from gha_actors_emails where email in (${econd})"
    fi
  fi
  echo "DevStats DB:"
  if [ -z "${pcond}" ]
  then
    echo "No email or login to search data found"
    continue
  fi
  echo 'Affiliations:'
  cmd="$PSQL \"select distinct date(dt_from) as dt_from, date(dt_to) as dt_to, company_name, source from gha_actors_affiliations where actor_id in (${pcond}) order by dt_from, dt_to\""
  data=$(eval "${cmd}")
  if [ -z "${data}" ]
  then
    echo "No affiliations found for ${pcond}"
  else
    echo "${pcond}:"
    echo "${data}"
  fi
  echo 'Commits:'
  cmd="$PSQL \"select count(distinct sha) as cnt, date(min(dup_created_at)), date(max(dup_created_at)) from gha_commits where committer_id in (${pcond}) or author_id in (${pcond}) order by cnt desc\""
  data=$(eval "${cmd}")
  echo "${data}"
  # GitHub event types counted as "contributions" below.
  contrib="'IssuesEvent', 'PullRequestEvent', 'PushEvent', 'CommitCommentEvent', 'IssueCommentEvent', 'PullRequestReviewCommentEvent'"
  echo 'Contributions:'
  cmd="$PSQL \"select count(distinct id) as cnt, date(min(created_at)), date(max(created_at)) from gha_events where type in (${contrib}) and actor_id in (${pcond})\""
  data=$(eval "${cmd}")
  echo "${data}"
  echo 'Contribution types:'
  cmd="$PSQL \"select type, count(distinct id) as cnt, date(min(created_at)), date(max(created_at)) from gha_events where type in (${contrib}) and actor_id in (${pcond}) group by type order by cnt desc\""
  data=$(eval "${cmd}")
  echo "${data}"
done
--------------------------------------------------------------------------------
/sh/check_unknowns.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# For each "uuid,count" row in ~/unknowns_es.txt (exported from ElasticSearch,
# see the sample query below) report uuids that have git* identities in
# SortingHat but no cncf/* enrollments.
# curl -s -XPOST -H 'Content-type: application/json' "${ES_URL}/_sql?format=csv "-d"{\"query\":\"select author_uuid, count(*) as cnt from \\\"sds-cncf-*,*-raw,*-for-merge\\\" where author_org_name = 'Unknown' and not (author_bot = true) and author_uuid is not null and author_uuid != '' group by author_uuid having cnt > 20 order by cnt desc limit 10000\"}"
if [ -z "$MYSQL" ]
then
  echo "Please specify full mysql connect command, something like MYSQL='mysql -hdburl -uuser -ppassword dbname' $*"
  exit 1
fi
# -NBAe: no column headers, batch output, no auto-rehash, execute query.
MYSQL="${MYSQL} -NBAe "
for row in $(cat ~/unknowns_es.txt)
do
  # Each row is "uuid,count" - split on the comma.
  fields=(${row//,/ })
  uuid=${fields[0]}
  cnt=${fields[1]}
  # Sanity check: the uuid must exist as a SortingHat profile.
  cmd="$MYSQL \"select uuid from profiles where uuid = '${uuid}'\""
  uuid2=$(eval "${cmd}")
  if [ -z "$uuid2" ]
  then
    echo "cannot find $uuid profile"
    continue
  fi
  if [ ! "$uuid" = "$uuid2" ]
  then
    echo "should not happen $uuid != $uuid2"
    continue
  fi
  cmd="$MYSQL \"select source, username, email from identities where uuid = '${uuid}' and source like 'git%'\""
  data=$(eval "${cmd}")
  cmd="$MYSQL \"select * from enrollments where uuid = '${uuid}' and project_slug like 'cncf/%'\""
  rols=$(eval "${cmd}")
  # Identity data present but no enrollments: this is what we report.
  if [ -z "${rols}" ] && [ -n "${data}" ]
  then
    echo "For $uuid having $cnt docs we have identity data but no rols:"
    echo "${data}"
  fi
done
--------------------------------------------------------------------------------
/sh/compare_commits.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Compare the distinct commit count for one origin repository between the test
# and prod ElasticSearch clusters (sds-cloud-foundry-cloud-foundry-git index).
# Arg 1: origin (repository URL as stored in the `origin` field).
# Fix: added a guard for a missing first argument - an empty origin silently
# produced two meaningless zero counts.
if [ -z "$1" ]
then
  echo "$0: please provide origin (repository URL) as a first arg"
  exit 1
fi
TEST_ES_URL=$(cat "./helm/da-affiliation/secrets/ELASTIC_URL.test.secret")
PROD_ES_URL=$(cat "./helm/da-affiliation/secrets/ELASTIC_URL.prod.secret")
t=$(curl -s -XPOST -H 'Content-Type: application/json' "${TEST_ES_URL}/_sql?format=json" -d"{\"query\":\"select count(distinct hash) as commits from \\\"sds-cloud-foundry-cloud-foundry-git\\\" where origin = '$1'\",\"fetch_size\":10000}" | jq '.rows[0][0]')
p=$(curl -s -XPOST -H 'Content-Type: application/json' "${PROD_ES_URL}/_sql?format=json" -d"{\"query\":\"select count(distinct hash) as commits from \\\"sds-cloud-foundry-cloud-foundry-git\\\" where origin = '$1'\",\"fetch_size\":10000}" | jq '.rows[0][0]')
echo "test $1 commits: $t"
echo "prod $1 commits: $p"
--------------------------------------------------------------------------------
/sh/curl_delete_enrollment.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Delete a single enrollment by its id via the affiliation API.
# Arg 1: project (consumed by shared.sh), arg 2: enrollment id.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify enrollment id as a 2nd arg"
  exit 2
fi
id=$(rawurlencode "${2}")

# Common request pieces.
auth=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
url="${API_URL}/v1/affiliation/${project}/delete_enrollment/${id}"

if [ -n "$DEBUG" ]
then
  # Debug: print the command, then run it with response headers included (-i).
  echo curl -i -s "${auth[@]}" -XDELETE "${url}"
  curl -i -s "${auth[@]}" -XDELETE "${url}"
else
  curl -s "${auth[@]}" -XDELETE "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_delete_enrollments.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Delete all enrollments matching a profile uuid + organization name via the
# affiliation API. Optional filters come from environment variables:
# start, end, is_project_specific, role.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify profile uuid as a 2nd arg"
  exit 2
fi
if [ -z "$3" ]
then
  echo "$0: please specify organization name (must exist) as a 3rd arg"
  exit 3
fi
uuid=$(rawurlencode "${2}")
orgName=$(rawurlencode "${3}")
extra=''

# Build the optional query string from any supported env variables that are set.
for prop in start end is_project_specific role
do
  val="${!prop}"
  if [ -n "${val}" ]
  then
    encoded=$(rawurlencode "${val}")
    if [ -z "$extra" ]
    then
      extra="?$prop=${encoded}"
    else
      extra="${extra}&$prop=${encoded}"
    fi
  fi
done

auth=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
url="${API_URL}/v1/affiliation/${project}/delete_enrollments/${uuid}/${orgName}${extra}"

if [ -n "$DEBUG" ]
then
  # Debug: print the command, then run it with response headers included (-i).
  echo curl -i -s "${auth[@]}" -XDELETE "${url}"
  curl -i -s "${auth[@]}" -XDELETE "${url}"
else
  curl -s "${auth[@]}" -XDELETE "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_delete_identity.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Delete a single identity by its id via the affiliation API.
# Arg 1: project (consumed by shared.sh), arg 2: identity id.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify identity id as a 2nd arg"
  exit 2
fi
id=$(rawurlencode "${2}")

auth=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
url="${API_URL}/v1/affiliation/${project}/delete_identity/${id}"

if [ -n "$DEBUG" ]
then
  # Debug: print the command, then run it with response headers included (-i).
  echo curl -i -s "${auth[@]}" -XDELETE "${url}"
  curl -i -s "${auth[@]}" -XDELETE "${url}"
else
  curl -s "${auth[@]}" -XDELETE "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_delete_matching_blacklist.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Remove an email from the matching blacklist via the affiliation API.
# Arg 1: project (consumed by shared.sh), arg 2: email.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify email as a 2nd arg"
  exit 2
fi
email=$(rawurlencode "${2}")

auth=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
url="${API_URL}/v1/affiliation/${project}/matching_blacklist/${email}"

if [ -n "$DEBUG" ]
then
  # Debug: print the command, then run it with response headers included (-i).
  echo curl -i -s "${auth[@]}" -XDELETE "${url}"
  curl -i -s "${auth[@]}" -XDELETE "${url}"
else
  curl -s "${auth[@]}" -XDELETE "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_delete_org_domain.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Remove a domain from an organization via the affiliation API.
# Arg 1: project (consumed by shared.sh), arg 2: organization name, arg 3: domain.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify organization name as a 2nd arg"
  exit 3
fi
if [ -z "$3" ]
then
  echo "$0: please specify domain as a 3rd arg"
  exit 4
fi
org=$(rawurlencode "${2}")
dom=$(rawurlencode "${3}")

if [ ! -z "$DEBUG" ]
then
  # Fix: the debug echo also printed "$ov" and "$top", variables that are
  # never set anywhere in this script (copy-paste leftover).
  echo "$project $org $dom"
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XDELETE "${API_URL}/v1/affiliation/${project}/remove_domain/${org}/${dom}"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XDELETE "${API_URL}/v1/affiliation/${project}/remove_domain/${org}/${dom}"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XDELETE "${API_URL}/v1/affiliation/${project}/remove_domain/${org}/${dom}"
fi
--------------------------------------------------------------------------------
/sh/curl_delete_organization.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Delete an organization by its numeric ID via the affiliation API.
# Arg 1: project (consumed by shared.sh), arg 2: organization ID.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify organization ID as a 2nd arg"
  exit 2
fi
orgID=$(rawurlencode "${2}")

auth=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
url="${API_URL}/v1/affiliation/${project}/delete_organization_by_id/${orgID}"

if [ -n "$DEBUG" ]
then
  # Debug: print the command, then run it with response headers included (-i).
  echo curl -i -s "${auth[@]}" -XDELETE "${url}"
  curl -i -s "${auth[@]}" -XDELETE "${url}"
else
  curl -s "${auth[@]}" -XDELETE "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_delete_profile.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Delete (or archive) a profile by uuid via the affiliation API.
# Arg 1: project (consumed by shared.sh), arg 2: profile uuid,
# arg 3: pass "1" to archive instead of hard-deleting.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify profile uuid as a 2nd arg"
  exit 2
fi
uuid=$(rawurlencode "${2}")
archive="false"
if [ "$3" = "1" ]
then
  archive="true"
fi

auth=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
url="${API_URL}/v1/affiliation/${project}/delete_profile/${uuid}?archive=${archive}"

if [ -n "$DEBUG" ]
then
  # Debug: print the command, then run it with response headers included (-i).
  echo curl -i -s "${auth[@]}" -XDELETE "${url}"
  curl -i -s "${auth[@]}" -XDELETE "${url}"
else
  curl -s "${auth[@]}" -XDELETE "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_delete_slug_mapping.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Delete a slug mapping via the affiliation API; selectors come from the
# environment: da_name, sf_name, sf_id. No project argument needed.
export SKIP_PROJECT=1
. ./sh/shared.sh
# Fix: initialize extra so a stray exported "extra" variable inherited from the
# caller's environment cannot leak into the query string (the sibling
# curl_delete_enrollments.sh already initializes it).
extra=''
for prop in da_name sf_name sf_id
do
  if [ ! -z "${!prop}" ]
  then
    encoded=$(rawurlencode "${!prop}")
    if [ -z "$extra" ]
    then
      extra="?$prop=${encoded}"
    else
      extra="${extra}&$prop=${encoded}"
    fi
  fi
done
if [ ! -z "$DEBUG" ]
then
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XDELETE "${API_URL}/v1/affiliation/delete_slug_mapping${extra}"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XDELETE "${API_URL}/v1/affiliation/delete_slug_mapping${extra}"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XDELETE "${API_URL}/v1/affiliation/delete_slug_mapping${extra}"
fi
--------------------------------------------------------------------------------
/sh/curl_es_unaffiliated.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Show unaffiliated-author aggregations from ElasticSearch for a project slug.
# Arg 1: project slug ("a/b" form). RAW=1 switches to a single large
# (size 10000) aggregation without pretty-printing.
if [ -z "$1" ]
then
  echo "$0: please provide project slug as a first arg"
  exit 1
fi
if [ -z "$ES_URL" ]
then
  export ES_URL="http://127.0.0.1:19200"
fi
# Map the "a/b" slug to the sds-a-b index naming convention.
idx="sds-${1//\//-}"
if [ -z "$RAW" ]
then
  curl -H 'Content-Type: application/json' "${ES_URL}/${idx}-*,-${idx}-*-raw/_search" -d'{"size":10,"aggs":{"unaffiliated":{"terms":{"field":"author_org_name","missing":"null","size":10}}}}' 2>/dev/null | jq
  curl -H 'Content-Type: application/json' "${ES_URL}/${idx}-*,-${idx}-*-raw/_search" -d'{"size":10, "aggs":{"unaffiliated":{"filter":{"terms":{"author_org_name":["Unknown","NotFound","","-","?"]}},"aggs":{"unaffiliated":{"terms":{"field":"author_uuid","missing":"","size": 10}}}}}}' 2>/dev/null | jq
else
  # Fix: stderr was redirected to the bogus path "/dev/devll" (a typo), which
  # created a junk file instead of discarding the output; now /dev/null.
  curl -H 'Content-Type: application/json' "${ES_URL}/${idx}-*,-${idx}-*-raw/_search" -d'{"size":0, "aggs":{"unaffiliated":{"filter":{"terms":{"author_org_name":["Unknown","NotFound","","-","?"]}},"aggs":{"unaffiliated":{"terms":{"field":"author_uuid","missing":"","size":10000}}}}}}' 2>/dev/null
fi
--------------------------------------------------------------------------------
/sh/curl_get_affiliation_both.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Query the affiliation API's .../both/{uuid}/{dt} endpoint.
# Arg 1: project (consumed by shared.sh), arg 2: profile UUID,
# arg 3: date-time, format 2015-05-05T15:15[:05Z].
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify UUID as a 2nd arg"
  exit 2
fi
if [ -z "$3" ]
then
  echo "$0: please specify dt as a 3rd arg (format 2015-05-05T15:15[:05Z])"
  exit 3
fi
uuid=$(rawurlencode "${2}")
dt=$(rawurlencode "${3}")

auth=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
url="${API_URL}/v1/affiliation/${project}/both/${uuid}/${dt}"

if [ -n "$DEBUG" ]
then
  # Debug: print the command, then run it with response headers included (-i).
  echo curl -i -s "${auth[@]}" -XGET "${url}"
  curl -i -s "${auth[@]}" -XGET "${url}"
else
  curl -s "${auth[@]}" -XGET "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_get_affiliation_multi.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Query the affiliation API's .../multi/{uuid}/{dt} endpoint.
# Arg 1: project (consumed by shared.sh), arg 2: profile UUID,
# arg 3: date-time, format 2015-05-05T15:15[:05Z].
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify UUID as a 2nd arg"
  exit 2
fi
if [ -z "$3" ]
then
  echo "$0: please specify dt as a 3rd arg (format 2015-05-05T15:15[:05Z])"
  exit 3
fi
uuid=$(rawurlencode "${2}")
dt=$(rawurlencode "${3}")

auth=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
url="${API_URL}/v1/affiliation/${project}/multi/${uuid}/${dt}"

if [ -n "$DEBUG" ]
then
  # Debug: print the command, then run it with response headers included (-i).
  echo curl -i -s "${auth[@]}" -XGET "${url}"
  curl -i -s "${auth[@]}" -XGET "${url}"
else
  curl -s "${auth[@]}" -XGET "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_get_affiliation_single.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Query the affiliation API's .../single/{uuid}/{dt} endpoint.
# Arg 1: project (consumed by shared.sh), arg 2: profile UUID,
# arg 3: date-time, format 2015-05-05T15:15[:05Z].
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify UUID as a 2nd arg"
  exit 2
fi
if [ -z "$3" ]
then
  echo "$0: please specify dt as a 3rd arg (format 2015-05-05T15:15[:05Z])"
  exit 3
fi
uuid=$(rawurlencode "${2}")
dt=$(rawurlencode "${3}")

auth=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
url="${API_URL}/v1/affiliation/${project}/single/${uuid}/${dt}"

if [ -n "$DEBUG" ]
then
  # Debug: print the command, then run it with response headers included (-i).
  echo curl -i -s "${auth[@]}" -XGET "${url}"
  curl -i -s "${auth[@]}" -XGET "${url}"
else
  curl -s "${auth[@]}" -XGET "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_get_all_yaml.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Fetch the full affiliations dataset as YAML from the affiliation API.
# Requires JWT_TOKEN; API_URL and ORIGIN fall back to local/test defaults.
if [ -z "$JWT_TOKEN" ]
then
  # Fix: corrected the "sepcify" typo in the user-facing error message.
  echo "$0: please specify your JWT token via JWT_TOKEN=..."
  exit 1
fi

if [ -z "$API_URL" ]
then
  export API_URL="http://127.0.0.1:8080"
fi

if [ -z "$ORIGIN" ]
then
  export ORIGIN="https://insights.test.platform.linuxfoundation.org"
fi

if [ ! -z "$DEBUG" ]
then
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -H 'Accept: application/yaml' -XGET "${API_URL}/v1/affiliation/all"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -H 'Accept: application/yaml' -XGET "${API_URL}/v1/affiliation/all"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -H 'Accept: application/yaml' -XGET "${API_URL}/v1/affiliation/all"
fi
--------------------------------------------------------------------------------
/sh/curl_get_find_organization_by_id.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Look up an organization by its numeric ID via the affiliation API.
# Arg 1: project (consumed by shared.sh), arg 2: organization ID.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify organization ID as a 2nd arg"
  exit 2
fi
orgID=$(rawurlencode "${2}")

auth=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
url="${API_URL}/v1/affiliation/${project}/find_organization_by_id/${orgID}"

if [ -n "$DEBUG" ]
then
  # Debug: print the command, then run it with response headers included (-i).
  echo curl -i -s "${auth[@]}" -XGET "${url}"
  curl -i -s "${auth[@]}" -XGET "${url}"
else
  curl -s "${auth[@]}" -XGET "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_get_find_organization_by_name.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Look up an organization by name via the affiliation API.
# Arg 1: project (consumed by shared.sh), arg 2: organization name.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify organization name as a 2nd arg"
  exit 2
fi
orgName=$(rawurlencode "${2}")

auth=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
url="${API_URL}/v1/affiliation/${project}/find_organization_by_name/${orgName}"

if [ -n "$DEBUG" ]
then
  # Debug: print the command, then run it with response headers included (-i).
  echo curl -i -s "${auth[@]}" -XGET "${url}"
  curl -i -s "${auth[@]}" -XGET "${url}"
else
  curl -s "${auth[@]}" -XGET "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_get_identity.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Fetch a single identity by ID via the affiliation API (no project required).
# Arg 1: identity ID.
export SKIP_PROJECT=1
. ./sh/shared.sh
if [ -z "$1" ]
then
  # Fix: the message claimed "2nd arg" although the ID is read from $1.
  echo "$0: please specify identity ID as a 1st arg"
  exit 2
fi
id=$(rawurlencode "${1}")

if [ ! -z "$DEBUG" ]
then
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XGET "${API_URL}/v1/affiliation/get_identity/${id}"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XGET "${API_URL}/v1/affiliation/get_identity/${id}"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XGET "${API_URL}/v1/affiliation/get_identity/${id}"
fi
--------------------------------------------------------------------------------
/sh/curl_get_list_organizations.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Lists organizations for a project with optional search query and paging.
# Args: $1 - project slug, $2 - search query, $3 - rows per page, $4 - page.
. ./sh/shared.sh
q=$(rawurlencode "${2}")
rows=$(rawurlencode "${3}")
page=$(rawurlencode "${4}")

if [ ! -z "$DEBUG" ]
then
  # Bug fix: the debug echo hard-coded the test Origin instead of ${ORIGIN},
  # so the printed command could differ from the one actually executed.
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XGET "${API_URL}/v1/affiliation/${project}/list_organizations?q=${q}&rows=${rows}&page=${page}"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XGET "${API_URL}/v1/affiliation/${project}/list_organizations?q=${q}&rows=${rows}&page=${page}"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XGET "${API_URL}/v1/affiliation/${project}/list_organizations?q=${q}&rows=${rows}&page=${page}"
fi
14 |
15 |
--------------------------------------------------------------------------------
/sh/curl_get_list_organizations_domains.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Lists domains, optionally filtered by organization ID and search query.
# Args: $1 - project slug, $2 - orgID, $3 - query, $4 - rows, $5 - page.
. ./sh/shared.sh
orgID=$(rawurlencode "${2}")
q=$(rawurlencode "${3}")
rows=$(rawurlencode "${4}")
page=$(rawurlencode "${5}")

url="${API_URL}/v1/affiliation/${project}/list_domains?orgID=${orgID}&q=${q}&rows=${rows}&page=${page}"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XGET "${url}"
  curl -i -s "${hdrs[@]}" -XGET "${url}"
else
  curl -s "${hdrs[@]}" -XGET "${url}"
fi
15 |
--------------------------------------------------------------------------------
/sh/curl_get_list_profiles.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Lists profiles for a project with optional search query and paging.
# Args: $1 - project slug, $2 - query, $3 - rows, $4 - page.
. ./sh/shared.sh
q=$(rawurlencode "${2}")
rows=$(rawurlencode "${3}")
page=$(rawurlencode "${4}")

url="${API_URL}/v1/affiliation/${project}/list_profiles?q=${q}&rows=${rows}&page=${page}"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XGET "${url}"
  curl -i -s "${hdrs[@]}" -XGET "${url}"
else
  curl -s "${hdrs[@]}" -XGET "${url}"
fi
14 |
--------------------------------------------------------------------------------
/sh/curl_get_list_projects.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Lists all projects visible to the authenticated user (no project arg needed).
export SKIP_PROJECT=1
. ./sh/shared.sh
url="${API_URL}/v1/affiliation/list_projects"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XGET "${url}"
  curl -i -s "${hdrs[@]}" -XGET "${url}"
else
  curl -s "${hdrs[@]}" -XGET "${url}"
fi
11 |
--------------------------------------------------------------------------------
/sh/curl_get_list_slug_mappings.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Lists all DA/SF slug mappings (public endpoint: no project, no token).
export SKIP_PROJECT=1
export SKIP_TOKEN=1
. ./sh/shared.sh

if [ ! -z "$DEBUG" ]
then
  # Bug fix: the debug echo hard-coded the test Origin instead of ${ORIGIN},
  # so the printed command could differ from the one actually executed.
  echo curl -i -s -H "Origin: ${ORIGIN}" -XGET "${API_URL}/v1/affiliation/list_slug_mappings"
  curl -i -s -H "Origin: ${ORIGIN}" -XGET "${API_URL}/v1/affiliation/list_slug_mappings"
else
  curl -s -H "Origin: ${ORIGIN}" -XGET "${API_URL}/v1/affiliation/list_slug_mappings"
fi
13 |
14 |
--------------------------------------------------------------------------------
/sh/curl_get_matching_blacklist.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Lists matching-blacklist entries for a project, with search and paging.
# Args: $1 - project slug, $2 - query, $3 - rows, $4 - page.
. ./sh/shared.sh
q=$(rawurlencode "${2}")
rows=$(rawurlencode "${3}")
page=$(rawurlencode "${4}")

url="${API_URL}/v1/affiliation/${project}/matching_blacklist?q=${q}&rows=${rows}&page=${page}"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XGET "${url}"
  curl -i -s "${hdrs[@]}" -XGET "${url}"
else
  curl -s "${hdrs[@]}" -XGET "${url}"
fi
14 |
--------------------------------------------------------------------------------
/sh/curl_get_profile.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Fetches a single profile by its UUID within a project.
# Args: $1 - project slug, $2 - profile UUID.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify profile UUID as a 2nd arg"
  exit 2
fi
uuid=$(rawurlencode "${2}")

url="${API_URL}/v1/affiliation/${project}/get_profile/${uuid}"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XGET "${url}"
  curl -i -s "${hdrs[@]}" -XGET "${url}"
else
  curl -s "${hdrs[@]}" -XGET "${url}"
fi
17 |
--------------------------------------------------------------------------------
/sh/curl_get_profile_by_username.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Fetches a profile via one of its identities' usernames.
# Args: $1 - project slug, $2 - identity username.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify profile's identity username as a 2nd arg"
  exit 2
fi
username=$(rawurlencode "${2}")

url="${API_URL}/v1/affiliation/${project}/get_profile_by_username/${username}"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XGET "${url}"
  curl -i -s "${hdrs[@]}" -XGET "${url}"
else
  curl -s "${hdrs[@]}" -XGET "${url}"
fi
17 |
--------------------------------------------------------------------------------
/sh/curl_get_profile_enrollments.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Lists all enrollments of a profile within a project.
# Args: $1 - project slug, $2 - profile UUID.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify profile UUID as a 2nd arg"
  exit 2
fi
uuid=$(rawurlencode "${2}")

url="${API_URL}/v1/affiliation/${project}/enrollments/${uuid}"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XGET "${url}"
  curl -i -s "${hdrs[@]}" -XGET "${url}"
else
  curl -s "${hdrs[@]}" -XGET "${url}"
fi
17 |
--------------------------------------------------------------------------------
/sh/curl_get_profile_nested.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Fetches a nested profile by UUID (project-independent endpoint).
# Args: $1 - profile UUID.
export SKIP_PROJECT=1
. ./sh/shared.sh
if [ -z "$1" ]
then
  echo "$0: please specify profile UUID as a 1st arg"
  exit 2
fi
uuid=$(rawurlencode "${1}")

url="${API_URL}/v1/affiliation/get_profile/${uuid}"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XGET "${url}"
  curl -i -s "${hdrs[@]}" -XGET "${url}"
else
  curl -s "${hdrs[@]}" -XGET "${url}"
fi
18 |
--------------------------------------------------------------------------------
/sh/curl_get_slug_mapping.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Fetches a single slug mapping, filtered by any of the env vars
# da_name / sf_name / sf_id (each optional, passed as query params).
export SKIP_PROJECT=1
export SKIP_TOKEN=1
. ./sh/shared.sh
extra=''
sep='?'
for prop in da_name sf_name sf_id
do
  val="${!prop}"
  if [ -n "${val}" ]
  then
    # First param gets '?', subsequent ones '&'.
    extra="${extra}${sep}${prop}=$(rawurlencode "${val}")"
    sep='&'
  fi
done
url="${API_URL}/v1/affiliation/get_slug_mapping${extra}"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XGET "${url}"
  curl -i -s "${hdrs[@]}" -XGET "${url}"
else
  curl -s "${hdrs[@]}" -XGET "${url}"
fi
26 |
--------------------------------------------------------------------------------
/sh/curl_get_top_contributors.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Fetches top contributors for a project; works with or without a JWT token.
# Args: $1 - project slug, $2 - from, $3 - to, $4 - limit (default 10),
#       $5 - offset (default 0), $6 - search, $7 - sort field,
#       $8 - sort order, $9 - data source.
export SKIP_TOKEN=1
. ./sh/shared.sh
from=''
if [ -n "$2" ]
then
  from=$(rawurlencode "${2}")
fi
to=''
if [ -n "$3" ]
then
  to=$(rawurlencode "${3}")
fi
limit=10
if [ -n "$4" ]
then
  limit=$(rawurlencode "${4}")
fi
offset=0
if [ -n "$5" ]
then
  offset=$(rawurlencode "${5}")
fi
search=''
if [ -n "$6" ]
then
  search=$(rawurlencode "${6}")
fi
sortField=''
if [ -n "$7" ]
then
  sortField=$(rawurlencode "${7}")
fi
sortOrder=''
if [ -n "$8" ]
then
  sortOrder=$(rawurlencode "${8}")
fi
dataSource=''
if [ -n "$9" ]
then
  dataSource=$(rawurlencode "${9}")
fi

url="${API_URL}/v1/affiliation/${project}/top_contributors?from=${from}&to=${to}&limit=${limit}&offset=${offset}&search=${search}&sort_field=${sortField}&sort_order=${sortOrder}&data_source=${dataSource}"
hdrs=(-H "Origin: ${ORIGIN}" -H 'Content-Type: application/json')
# The endpoint is usable anonymously; attach the token only when present.
if [ -n "${JWT_TOKEN}" ]
then
  hdrs+=(-H "Authorization: Bearer ${JWT_TOKEN}")
fi
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XGET "${url}"
  curl -i -s "${hdrs[@]}" -XGET "${url}"
else
  curl -s "${hdrs[@]}" -XGET "${url}"
fi
63 |
--------------------------------------------------------------------------------
/sh/curl_get_top_contributors_csv.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Downloads top contributors as CSV; works with or without a JWT token.
# Args: $1 - project slug, $2 - from, $3 - to, $4 - limit (default 10),
#       $5 - offset (default 0), $6 - search, $7 - sort field,
#       $8 - sort order, $9 - data source.
export SKIP_TOKEN=1
. ./sh/shared.sh
from=''
if [ ! -z "$2" ]
then
  from=$(rawurlencode "${2}")
fi
to=''
if [ ! -z "$3" ]
then
  to=$(rawurlencode "${3}")
fi
limit=10
if [ ! -z "$4" ]
then
  limit=$(rawurlencode "${4}")
fi
offset=0
if [ ! -z "$5" ]
then
  offset=$(rawurlencode "${5}")
fi
search=''
if [ ! -z "$6" ]
then
  search=$(rawurlencode "${6}")
fi
sortField=''
if [ ! -z "$7" ]
then
  sortField=$(rawurlencode "${7}")
fi
sortOrder=''
if [ ! -z "$8" ]
then
  sortOrder=$(rawurlencode "${8}")
fi
dataSource=''
if [ ! -z "$9" ]
then
  dataSource=$(rawurlencode "${9}")
fi

# Bug fix: the executed curl commands sent the invalid MIME type
# 'application/octet-streams' (trailing "s") while the debug echo showed the
# correct 'application/octet-stream'; all requests now use the valid type.
if [ -z "${JWT_TOKEN}" ]
then
  if [ ! -z "$DEBUG" ]
  then
    echo curl -s -i -H "Origin: ${ORIGIN}" -H 'Content-Type: application/octet-stream' -XGET "${API_URL}/v1/affiliation/${project}/top_contributors_csv?from=${from}&to=${to}&limit=${limit}&offset=${offset}&search=${search}&sort_field=${sortField}&sort_order=${sortOrder}&data_source=${dataSource}"
    curl -s -i -H "Origin: ${ORIGIN}" -H 'Content-Type: application/octet-stream' -XGET "${API_URL}/v1/affiliation/${project}/top_contributors_csv?from=${from}&to=${to}&limit=${limit}&offset=${offset}&search=${search}&sort_field=${sortField}&sort_order=${sortOrder}&data_source=${dataSource}"
  else
    curl -s -H "Origin: ${ORIGIN}" -H 'Content-Type: application/octet-stream' -XGET "${API_URL}/v1/affiliation/${project}/top_contributors_csv?from=${from}&to=${to}&limit=${limit}&offset=${offset}&search=${search}&sort_field=${sortField}&sort_order=${sortOrder}&data_source=${dataSource}"
  fi
else
  if [ ! -z "$DEBUG" ]
  then
    echo curl -s -i -H "Origin: ${ORIGIN}" -H 'Content-Type: application/octet-stream' -H "Authorization: Bearer ${JWT_TOKEN}" -XGET "${API_URL}/v1/affiliation/${project}/top_contributors_csv?from=${from}&to=${to}&limit=${limit}&offset=${offset}&search=${search}&sort_field=${sortField}&sort_order=${sortOrder}&data_source=${dataSource}"
    curl -s -i -H "Origin: ${ORIGIN}" -H 'Content-Type: application/octet-stream' -H "Authorization: Bearer ${JWT_TOKEN}" -XGET "${API_URL}/v1/affiliation/${project}/top_contributors_csv?from=${from}&to=${to}&limit=${limit}&offset=${offset}&search=${search}&sort_field=${sortField}&sort_order=${sortOrder}&data_source=${dataSource}"
  else
    curl -s -H "Origin: ${ORIGIN}" -H 'Content-Type: application/octet-stream' -H "Authorization: Bearer ${JWT_TOKEN}" -XGET "${API_URL}/v1/affiliation/${project}/top_contributors_csv?from=${from}&to=${to}&limit=${limit}&offset=${offset}&search=${search}&sort_field=${sortField}&sort_order=${sortOrder}&data_source=${dataSource}"
  fi
fi
63 |
--------------------------------------------------------------------------------
/sh/curl_get_top_contributors_query.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Runs the "top contributors" query directly against Elasticsearch, bypassing
# the affiliation API, by filling in the sh/top_contributors.json template.
# Arguments:
#   $1 - project slug, e.g. 'lfn-onap' or 'lfn' (required).
# Environment:
#   ES_URL     - Elasticsearch endpoint; defaults to the prod secret file.
#   FROM, TO   - time range bounds (defaults 0 and 2552790984700).
#   SIZE       - result size (default 10).
#   SORT_FIELD - field to sort on (default git_commits).
#   SORT_ORDER - sort direction (default asc).
#   SEARCH     - optional search term, wrapped into a query_string clause.
#   RAW        - when set, print raw curl output instead of piping to jq.
if [ -z "${ES_URL}" ]
then
  ES_URL="`cat ./helm/da-affiliation/secrets/ELASTIC_URL.prod.secret`"
fi
if [ -z "${1}" ]
then
  echo "$0: please specify project slug like 'lfn-onap' or 'lfn'"
  exit 1
fi
if [ -z "${FROM}" ]
then
  FROM=0
fi
if [ -z "${TO}" ]
then
  TO=2552790984700
fi
if [ -z "${SIZE}" ]
then
  SIZE=10
fi
if [ -z "${SORT_FIELD}" ]
then
  SORT_FIELD=git_commits
fi
if [ -z "${SORT_ORDER}" ]
then
  SORT_ORDER=asc
fi
if [ -z "${SEARCH}" ]
then
  SEARCH=''
else
  # Expand SEARCH into a query_string clause over these fields:
  # status, *name, *domain, author*, *login, *org_name
  SEARCH=",{\"query_string\":{\"fields\":[\"status\",\"\*name\",\"\*domain\",\"author\*\",\"\*login\",\"\*org_name\"],\"query\":\"\*${SEARCH}\*\"}}"
  #SEARCH=",{\"query_string\":{\"query\":\"${SEARCH}\"}}"
fi
# Work on a temp copy of the query template; remove it on exit.
fn=/tmp/top_contributors.json
function on_exit {
  rm -f "${fn}"
}
cp sh/top_contributors.json /tmp/
trap on_exit EXIT
# Substitute the param_* placeholders in the template in place via vim
# (handles the JSON-escaped SEARCH clause built above).
vim --not-a-term -c "%s/param_from/${FROM}/g" -c "%s/param_to/${TO}/g" -c "%s/param_size/${SIZE}/g" -c "%s/param_sort_field/${SORT_FIELD}/g" -c "%s/param_sort_order/${SORT_ORDER}/g" -c "%s/param_search/${SEARCH}/g" -c 'wq!' "$fn"
cat "${fn}"
# Query all sds-<slug>-* indices, excluding raw and for-merge ones.
if [ -z "${RAW}" ]
then
  curl -s -H "Content-Type: application/json" "${ES_URL}/sds-${1}-*,-*-raw,-*-for-merge/_search" -d "@${fn}" | jq
else
  curl -H "Content-Type: application/json" "${ES_URL}/sds-${1}-*,-*-raw,-*-for-merge/_search" -d "@${fn}"
fi
53 |
--------------------------------------------------------------------------------
/sh/curl_get_unaffiliated.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Lists unaffiliated contributors for a project, with paging.
# Args: $1 - project slug, $2 - rows, $3 - page.
. ./sh/shared.sh
rows=$(rawurlencode "${2}")
page=$(rawurlencode "${3}")

url="${API_URL}/v1/affiliation/${project}/unaffiliated?rows=${rows}&page=${page}"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XGET "${url}"
  curl -i -s "${hdrs[@]}" -XGET "${url}"
else
  curl -s "${hdrs[@]}" -XGET "${url}"
fi
13 |
--------------------------------------------------------------------------------
/sh/curl_list_users.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Dumps platform users: pretty JSON into ./out, and sorted unique email
# addresses to stdout.
#curl -i -s -XGET -H "Authorization: Bearer `cat secret/lgryglicki.prod.token`" -s "`cat helm/da-affiliation/secrets/PLATFORM_USER_SERVICE_ENDPOINT.prod.secret`/users?pageSize=5000&offset=0" | jq '.'
# Bug fix: the endpoint was fetched twice (two identical API calls) and curl
# was given a duplicate -s flag; fetch once and post-process the saved body.
data=$(curl -s -XGET -H "Authorization: Bearer `cat secret/lgryglicki.prod.token`" "`cat helm/da-affiliation/secrets/PLATFORM_USER_SERVICE_ENDPOINT.prod.secret`/users?pageSize=5000&offset=0")
echo "${data}" | jq '.' > out
echo "${data}" | jq -r '.Data[].Emails[].EmailAddress' | sort | uniq
5 |
--------------------------------------------------------------------------------
/sh/curl_merge_unmerge.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Exercises merge/move identity scenarios 1-4 against the odpi/egeria project.
# Args: $1 - scenario number, $2/$3 - optional args forwarded to the helpers.
export JWT_TOKEN="`cat secret/lgryglicki.token`"
#export TESTING_API=1
./sh/mariadb.sh
ar1="1"
if [ -n "$2" ]
then
  ar1="$2"
fi
ar2="1"
if [ -n "$3" ]
then
  ar2="$3"
fi
uuid1=16fe424acecf8d614d102fc0ece919a22200481d
uuid2=aaa8024197795de9b90676592772633c5cfcb35a
case "$1" in
  1)
    ./sh/curl_put_merge_unique_identities.sh 'odpi/egeria' "${uuid1}" "${uuid2}" "$ar1"
    ./sh/curl_put_move_identity.sh 'odpi/egeria' "${uuid1}" "${uuid1}" "$ar2"
    ;;
  2)
    ./sh/curl_put_merge_unique_identities.sh 'odpi/egeria' "${uuid2}" "${uuid1}" "$ar1"
    ./sh/curl_put_move_identity.sh 'odpi/egeria' "${uuid2}" "${uuid2}" "$ar2"
    ;;
  3)
    ./sh/curl_put_merge_unique_identities.sh 'odpi/egeria' "${uuid1}" "${uuid2}" "$ar1"
    ./sh/curl_put_move_identity.sh 'odpi/egeria' "${uuid2}" "${uuid2}" "$ar2"
    ;;
  4)
    ./sh/curl_put_merge_unique_identities.sh 'odpi/egeria' "${uuid2}" "${uuid1}" "$ar1"
    ./sh/curl_put_move_identity.sh 'odpi/egeria' "${uuid1}" "${uuid1}" "$ar2"
    ;;
esac
35 |
--------------------------------------------------------------------------------
/sh/curl_post_add_enrollment.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Adds an enrollment for a profile in an existing organization.
# Args: $1 - project slug, $2 - profile uuid, $3 - organization name.
# Optional env query params: start, end, merge, is_project_specific, role.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify profile uuid as a 2nd arg"
  exit 2
fi
if [ -z "$3" ]
then
  echo "$0: please specify organization name (must exist) as a 3rd arg"
  exit 3
fi
uuid=$(rawurlencode "${2}")
orgName=$(rawurlencode "${3}")
extra=''
sep='?'
for prop in start end merge is_project_specific role
do
  val="${!prop}"
  if [ -n "${val}" ]
  then
    # First param gets '?', subsequent ones '&'.
    extra="${extra}${sep}${prop}=$(rawurlencode "${val}")"
    sep='&'
  fi
done

url="${API_URL}/v1/affiliation/${project}/add_enrollment/${uuid}/${orgName}${extra}"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XPOST "${url}"
  curl -i -s "${hdrs[@]}" -XPOST "${url}"
else
  curl -s "${hdrs[@]}" -XPOST "${url}"
fi
38 |
--------------------------------------------------------------------------------
/sh/curl_post_add_identities.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Bulk-adds identities from sh/example_add_identities.json via POST.
. ./sh/shared.sh
url="${API_URL}/v1/affiliation/${project}/add_identities"
payload=@sh/example_add_identities.json
if [ -n "$DEBUG" ]
then
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' -XPOST "${url}" -d "${payload}"
  curl -i -s -H 'Accept: application/json' -H "Origin: ${ORIGIN}" -H 'Content-Type: application/json' -H "Authorization: Bearer ${JWT_TOKEN}" -XPOST "${url}" -d "${payload}"
else
  curl -s -H 'Accept: application/json' -H "Origin: ${ORIGIN}" -H 'Content-Type: application/json' -H "Authorization: Bearer ${JWT_TOKEN}" -XPOST "${url}" -d "${payload}"
fi
10 |
--------------------------------------------------------------------------------
/sh/curl_post_add_identity.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Adds a single identity for a given data source.
# Args: $1 - project slug, $2 - identity source.
# Optional env query params: name, email, username, uuid.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify identity source as a 2nd arg"
  exit 2
fi
ssource=$(rawurlencode "${2}")
extra=''
sep='?'
for prop in name email username uuid
do
  val="${!prop}"
  if [ -n "${val}" ]
  then
    # First param gets '?', subsequent ones '&'.
    extra="${extra}${sep}${prop}=$(rawurlencode "${val}")"
    sep='&'
  fi
done

url="${API_URL}/v1/affiliation/${project}/add_identity/${ssource}${extra}"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XPOST "${url}"
  curl -i -s "${hdrs[@]}" -XPOST "${url}"
else
  curl -s "${hdrs[@]}" -XPOST "${url}"
fi
32 |
--------------------------------------------------------------------------------
/sh/curl_post_add_organization.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Creates a new organization in a project.
# Args: $1 - project slug, $2 - organization name.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify organization name as a 2nd arg"
  exit 2
fi
orgName=$(rawurlencode "${2}")

url="${API_URL}/v1/affiliation/${project}/add_organization/${orgName}"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XPOST "${url}"
  curl -i -s "${hdrs[@]}" -XPOST "${url}"
else
  curl -s "${hdrs[@]}" -XPOST "${url}"
fi
17 |
--------------------------------------------------------------------------------
/sh/curl_post_add_slug_mapping.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Creates a DA/SF slug mapping from the env vars da_name / sf_name / sf_id
# (each optional, passed as query params).
export SKIP_PROJECT=1
. ./sh/shared.sh
extra=''
sep='?'
for prop in da_name sf_name sf_id
do
  val="${!prop}"
  if [ -n "${val}" ]
  then
    # First param gets '?', subsequent ones '&'.
    extra="${extra}${sep}${prop}=$(rawurlencode "${val}")"
    sep='&'
  fi
done
url="${API_URL}/v1/affiliation/add_slug_mapping${extra}"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XPOST "${url}"
  curl -i -s "${hdrs[@]}" -XPOST "${url}"
else
  curl -s "${hdrs[@]}" -XPOST "${url}"
fi
25 |
--------------------------------------------------------------------------------
/sh/curl_post_add_unique_identity.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Creates a new unique identity with the given UUID.
# Args: $1 - project slug, $2 - unique identity uuid.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify unique identity uuid as a 2nd arg"
  exit 2
fi
uuid=$(rawurlencode "${2}")

url="${API_URL}/v1/affiliation/${project}/add_unique_identity/${uuid}"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XPOST "${url}"
  curl -i -s "${hdrs[@]}" -XPOST "${url}"
else
  curl -s "${hdrs[@]}" -XPOST "${url}"
fi
17 |
--------------------------------------------------------------------------------
/sh/curl_post_bulk_update.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Posts a bulk update payload (sh/example_bulk.json) to the affiliation API.
# Requires JWT_TOKEN; API_URL and ORIGIN default to local/test values.
if [ -z "$JWT_TOKEN" ]
then
  # Bug fix: typo "sepcify" -> "specify" in the user-facing error message.
  echo "$0: please specify your JWT token via JWT_TOKEN=..."
  exit 1
fi

if [ -z "$API_URL" ]
then
  export API_URL="http://127.0.0.1:8080"
fi

if [ -z "$ORIGIN" ]
then
  export ORIGIN="https://insights.test.platform.linuxfoundation.org"
fi

if [ ! -z "$DEBUG" ]
then
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' -XPOST "${API_URL}/v1/affiliation/bulk_update" -d @sh/example_bulk.json
  curl -i -s -H 'Accept: application/json' -H "Origin: ${ORIGIN}" -H 'Content-Type: application/json' -H "Authorization: Bearer ${JWT_TOKEN}" -XPOST "${API_URL}/v1/affiliation/bulk_update" -d @sh/example_bulk.json
else
  curl -s -H 'Accept: application/json' -H "Origin: ${ORIGIN}" -H 'Content-Type: application/json' -H "Authorization: Bearer ${JWT_TOKEN}" -XPOST "${API_URL}/v1/affiliation/bulk_update" -d @sh/example_bulk.json
fi
25 |
--------------------------------------------------------------------------------
/sh/curl_post_matching_blacklist.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Adds an email to a project's matching blacklist.
# Args: $1 - project slug, $2 - email address.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify email as a 2nd arg"
  exit 2
fi
email=$(rawurlencode "${2}")

url="${API_URL}/v1/affiliation/${project}/matching_blacklist/${email}"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XPOST "${url}"
  curl -i -s "${hdrs[@]}" -XPOST "${url}"
else
  curl -s "${hdrs[@]}" -XPOST "${url}"
fi
17 |
--------------------------------------------------------------------------------
/sh/curl_put_cache_top_contributors.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Triggers (re)caching of top contributors (project-independent endpoint).
export SKIP_PROJECT=1
. ./sh/shared.sh
url="${API_URL}/v1/affiliation/cache_top_contributors"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XPUT "${url}"
  curl -i -s "${hdrs[@]}" -XPUT "${url}"
else
  curl -s "${hdrs[@]}" -XPUT "${url}"
fi
11 |
--------------------------------------------------------------------------------
/sh/curl_put_det_aff_range.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Triggers affiliation date-range detection (project-independent endpoint).
export SKIP_PROJECT=1
. ./sh/shared.sh
url="${API_URL}/v1/affiliation/det_aff_range"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XPUT "${url}"
  curl -i -s "${hdrs[@]}" -XPUT "${url}"
else
  curl -s "${hdrs[@]}" -XPUT "${url}"
fi
11 |
--------------------------------------------------------------------------------
/sh/curl_put_edit_enrollment.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Edits an existing enrollment identified by profile uuid + organization name.
# Args: $1 - project slug, $2 - profile uuid, $3 - organization name.
# Optional env query params: start, end, merge, is_project_specific, role,
# new_start, new_end, new_is_project_specific, new_role.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify profile uuid as a 2nd arg"
  exit 2
fi
if [ -z "$3" ]
then
  echo "$0: please specify organization name (must exist) as a 3rd arg"
  exit 3
fi
uuid=$(rawurlencode "${2}")
orgName=$(rawurlencode "${3}")
extra=''
sep='?'
for prop in start end merge is_project_specific role new_start new_end new_is_project_specific new_role
do
  val="${!prop}"
  if [ -n "${val}" ]
  then
    # First param gets '?', subsequent ones '&'.
    extra="${extra}${sep}${prop}=$(rawurlencode "${val}")"
    sep='&'
  fi
done

url="${API_URL}/v1/affiliation/${project}/edit_enrollment/${uuid}/${orgName}${extra}"
hdrs=(-H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}")
if [ -n "$DEBUG" ]
then
  echo curl -i -s "${hdrs[@]}" -XPUT "${url}"
  curl -i -s "${hdrs[@]}" -XPUT "${url}"
else
  curl -s "${hdrs[@]}" -XPUT "${url}"
fi
38 |
--------------------------------------------------------------------------------
/sh/curl_put_edit_enrollment_by_id.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Edit an enrollment addressed by its numeric ID: $2 = enrollment ID.
# Optional query parameters are taken from same-named environment variables.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify enrollment ID as a 2nd arg"
  exit 2
fi
enrollment_id=$(rawurlencode "${2}")

# Build the query string; first parameter gets '?', the rest get '&'.
extra=''
sep='?'
for prop in merge new_start new_end new_is_project_specific new_role new_org
do
  val="${!prop}"
  if [ -n "$val" ]
  then
    extra="${extra}${sep}${prop}=$(rawurlencode "${val}")"
    sep='&'
  fi
done

url="${API_URL}/v1/affiliation/${project}/edit_enrollment_by_id/${enrollment_id}${extra}"
if [ -n "$DEBUG" ]
then
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_put_edit_organization.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Rename an organization: $2 = organization ID, $3 = new organization name.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify organization ID as a 2nd arg"
  exit 3
fi
if [ -z "$3" ]
then
  echo "$0: please specify organization name as a 3rd arg"
  exit 4
fi
orgID=$(rawurlencode "${2}")
orgName=$(rawurlencode "${3}")

url="${API_URL}/v1/affiliation/${project}/edit_organization/${orgID}/${orgName}"
if [ -n "$DEBUG" ]
then
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_put_edit_profile.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Edit profile fields for $2 = profile UUID.
# Optional fields come from same-named environment variables.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify profile UUID as a 2nd arg"
  exit 2
fi
uuid=$(rawurlencode "${2}")

# Build the query string; first parameter gets '?', the rest get '&'.
# NOTE: gender/gender_acc used to be supported and were dropped from this list.
extra=''
sep='?'
for prop in name email is_bot country_code
do
  val="${!prop}"
  if [ -n "$val" ]
  then
    extra="${extra}${sep}${prop}=$(rawurlencode "${val}")"
    sep='&'
  fi
done

url="${API_URL}/v1/affiliation/${project}/edit_profile/${uuid}${extra}"
if [ -n "$DEBUG" ]
then
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_put_edit_slug_mapping.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Edit a DA/SF slug mapping; selector and replacement values come from
# same-named environment variables (da_name/sf_name/sf_id and new_* variants).
export SKIP_PROJECT=1
. ./sh/shared.sh

# Build the query string; first parameter gets '?', the rest get '&'.
extra=''
sep='?'
for prop in da_name sf_name sf_id new_da_name new_sf_name new_sf_id
do
  val="${!prop}"
  if [ -n "$val" ]
  then
    extra="${extra}${sep}${prop}=$(rawurlencode "${val}")"
    sep='&'
  fi
done

url="${API_URL}/v1/affiliation/edit_slug_mapping${extra}"
if [ -n "$DEBUG" ]
then
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_put_hide_emails.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Trigger the hide_emails affiliation API endpoint (no project slug needed).
export SKIP_PROJECT=1
. ./sh/shared.sh
url="${API_URL}/v1/affiliation/hide_emails"
if [ -n "$DEBUG" ]
then
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_put_map_org_names.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Trigger the map_org_names affiliation API endpoint (no project slug needed).
export SKIP_PROJECT=1
. ./sh/shared.sh
url="${API_URL}/v1/affiliation/map_org_names"
if [ -n "$DEBUG" ]
then
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_put_merge_all.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Trigger merge_all: $1 -> debug flag, $2 -> dry-run flag (both URL-encoded).
export SKIP_PROJECT=1
. ./sh/shared.sh
debug=$(rawurlencode "${1}")
dry=$(rawurlencode "${2}")
url="${API_URL}/v1/affiliation/merge_all?debug=${debug}&dry=${dry}"
if [ -n "$DEBUG" ]
then
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_put_merge_enrollments.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Merge enrollments: $2 = profile uuid, $3 = organization name (must exist).
# Optional flags come from is_project_specific / all_projects env variables.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify profile uuid as a 2nd arg"
  exit 2
fi
if [ -z "$3" ]
then
  echo "$0: please specify organization name (must exist) as a 3rd arg"
  exit 3
fi
uuid=$(rawurlencode "${2}")
orgName=$(rawurlencode "${3}")

# Build the query string; first parameter gets '?', the rest get '&'.
extra=''
sep='?'
for prop in is_project_specific all_projects
do
  val="${!prop}"
  if [ -n "$val" ]
  then
    extra="${extra}${sep}${prop}=$(rawurlencode "${val}")"
    sep='&'
  fi
done

url="${API_URL}/v1/affiliation/${project}/merge_enrollments/${uuid}/${orgName}${extra}"
if [ -n "$DEBUG" ]
then
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_put_merge_unique_identities.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Merge one unique identity into another: $2 = from uuid, $3 = to uuid.
# Pass "0" as $4 to skip archiving (archiving is on by default).
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify from uidentity uuid as a 2nd arg"
  exit 3
fi
if [ -z "$3" ]
then
  echo "$0: please specify to uidentity uuid as a 3rd arg"
  exit 4
fi
from_uuid=$(rawurlencode "${2}")
to_uuid=$(rawurlencode "${3}")
if [ "$4" = "0" ]
then
  ar="false"
else
  ar="true"
fi

url="${API_URL}/v1/affiliation/${project}/merge_unique_identities/${from_uuid}/${to_uuid}?archive=${ar}"
if [ -n "$DEBUG" ]
then
  echo "$project $from_uuid $to_uuid"
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_put_move_identity.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Move an identity to another unique identity: $2 = identity id, $3 = to uuid.
# Pass "0" as $4 to skip archiving (archiving is on by default).
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify from identity id as a 2nd arg"
  exit 3
fi
if [ -z "$3" ]
then
  echo "$0: please specify to uidentity uuid as a 3rd arg"
  exit 4
fi
from_id=$(rawurlencode "${2}")
to_uuid=$(rawurlencode "${3}")
if [ "$4" = "0" ]
then
  ar="false"
else
  ar="true"
fi

url="${API_URL}/v1/affiliation/${project}/move_identity/${from_id}/${to_uuid}?archive=${ar}"
if [ -n "$DEBUG" ]
then
  echo "$project $from_id $to_uuid"
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_put_org_domain.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Add a domain to an organization: $2 = org name, $3 = domain.
# Boolean flags (pass "1" to enable): $4 = overwrite, $5 = is_top_domain,
# $6 = skip_enrollments.
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify organization name as a 2nd arg"
  exit 3
fi
if [ -z "$3" ]
then
  echo "$0: please specify domain as a 3rd arg"
  exit 4
fi
org=$(rawurlencode "${2}")
dom=$(rawurlencode "${3}")

if [ "$4" = "1" ]; then ov="true"; else ov="false"; fi
if [ "$5" = "1" ]; then top="true"; else top="false"; fi
if [ "$6" = "1" ]; then skip_enrollments="true"; else skip_enrollments="false"; fi

url="${API_URL}/v1/affiliation/${project}/add_domain/${org}/${dom}?overwrite=${ov}&is_top_domain=${top}&skip_enrollments=${skip_enrollments}"
if [ -n "$DEBUG" ]
then
  echo "$project $org $dom $ov $top $skip_enrollments"
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${url}"
fi
--------------------------------------------------------------------------------
/sh/curl_put_sync_sf_profiles.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Trigger the sync_sf_profiles affiliation API endpoint (no project slug needed).
export SKIP_PROJECT=1
. ./sh/shared.sh
# Fix: the first positional argument used to be URL-encoded into a `debug`
# variable that was never referenced afterwards; the request takes no query
# parameters, so the dead assignment has been removed.
if [ ! -z "$DEBUG" ]
then
echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${API_URL}/v1/affiliation/sync_sf_profiles"
curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${API_URL}/v1/affiliation/sync_sf_profiles"
else
curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPUT "${API_URL}/v1/affiliation/sync_sf_profiles"
fi
--------------------------------------------------------------------------------
/sh/curl_unarchive_profile.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Restore an archived profile: $2 = profile uuid (POSTs to unarchive_profile).
. ./sh/shared.sh
if [ -z "$2" ]
then
  echo "$0: please specify profile uuid as a 2nd arg"
  exit 2
fi
uuid=$(rawurlencode "${2}")

url="${API_URL}/v1/affiliation/${project}/unarchive_profile/${uuid}"
if [ -n "$DEBUG" ]
then
  echo curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPOST "${url}"
  curl -i -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPOST "${url}"
else
  curl -s -H "Origin: ${ORIGIN}" -H "Authorization: Bearer ${JWT_TOKEN}" -XPOST "${url}"
fi
--------------------------------------------------------------------------------
/sh/es.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Convenience wrapper: starts a local Elasticsearch instance in Docker
# by delegating to sh/es_local_docker.sh.
./sh/es_local_docker.sh
--------------------------------------------------------------------------------
/sh/es_blanks.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Scan ES indices matching $PROJ for documents whose author_org_name is blank
# or null and log per-index aggregates to unknowns.log.secret.
# Requires: ESURL (ES endpoint), PROJ (project slug); optional CONDITION
# (extra SQL predicate), e.g.:
#   ESURL="https://..." PROJ="finos" CONDITION="author_name in ('x', 'y')" ./sh/es_blanks.sh
if [ -z "$ESURL" ]
then
  echo "$0: you need to specify ESURL=..."
  exit 1
fi
if [ -z "$PROJ" ]
then
  echo "$0: you need to specify PROJ=..."
  exit 2
fi
# `query` includes project columns; `querymin` is the fallback for indices
# lacking them (the first query then returns null rows).
if [ -z "$CONDITION" ]
then
  query="{\"query\":\"select origin, project, project_slug, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where author_org_name is null or author_org_name in ('') group by origin, project, project_slug, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
  querymin="{\"query\":\"select origin, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where author_org_name is null or author_org_name in ('') group by origin, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
else
  query="{\"query\":\"select origin, project, project_slug, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where ${CONDITION} and (author_org_name is null or author_org_name in ('')) group by origin, project, project_slug, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
  querymin="{\"query\":\"select origin, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where ${CONDITION} and (author_org_name is null or author_org_name in ('')) group by origin, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
fi
# Fix: quote the expansions — the SQL contains '*' (count(*)), which an
# unquoted echo would subject to pathname expansion and word splitting.
echo "$query" > unknowns-query.json.secret
echo "$querymin" > unknowns-querymin.json.secret
> unknowns.log.secret
for idx in $(curl -s "${ESURL}/_cat/indices?format=json" | jq -rS '.[].index' | grep -E "^(bitergia.+|sds-.*)${PROJ}" | grep -Ev '(-repository|-raw|-googlegroups|-slack|-dockerhub|-jenkins|-last-action-date-cache|-social_media|-earned_media|finosmeetings)(-for-merge)?$' | grep -Ev '\-onion_')
do
  data=$(cat unknowns-query.json.secret)
  data=${data/IDXNAME/$idx}
  echo "$data" > q.json.secret
  res=$(curl -s -XPOST -H 'Content-Type: application/json' "${ESURL}/_sql?format=json" -d@q.json.secret | jq -r '.rows')
  if [ ! "$res" = "[]" ]
  then
    if [ "$res" = "null" ]
    then
      # Retry with the minimal query for indices without project columns.
      data=$(cat unknowns-querymin.json.secret)
      data=${data/IDXNAME/$idx}
      echo "$data" > q.json.secret
      res=$(curl -s -XPOST -H 'Content-Type: application/json' "${ESURL}/_sql?format=json" -d@q.json.secret | jq -r '.rows')
      # Fix: also skip null here (as es_documents.sh does) so we never log
      # a useless "special <idx>: null" line when the fallback fails too.
      if [ ! "$res" = "[]" ] && [ ! "$res" = "null" ]
      then
        echo "special $idx: $res" | tee -a unknowns.log.secret
      fi
    else
      echo "$idx: $res" | tee -a unknowns.log.secret
    fi
  fi
done
--------------------------------------------------------------------------------
/sh/es_documents.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Aggregate document counts per author/org across ES indices and log the
# results to documents.log.secret.
# Requires: ESURL (ES endpoint); optional CONDITION (extra SQL predicate), e.g.:
#   ESURL="https://..." CONDITION="author_name in ('x', 'y')" ./sh/es_documents.sh
if [ -z "$ESURL" ]
then
  echo "$0: you need to specify ESURL=..."
  exit 1
fi
# `query` includes project columns; `querymin` is the fallback for indices
# lacking them (the first query then returns null rows).
if [ -z "$CONDITION" ]
then
  query="{\"query\":\"select origin, project, project_slug, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" group by origin, project, project_slug, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
  querymin="{\"query\":\"select origin, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" group by origin, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
else
  query="{\"query\":\"select origin, project, project_slug, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where ${CONDITION} group by origin, project, project_slug, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
  querymin="{\"query\":\"select origin, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where ${CONDITION} group by origin, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
fi
# Fix: quote the expansions — the SQL contains '*' (count(*)), which an
# unquoted echo would subject to pathname expansion and word splitting.
echo "$query" > documents-query.json.secret
echo "$querymin" > documents-querymin.json.secret
# Fix: truncate the file the loop actually appends to; it used to truncate
# ".log.secret", leaving stale data in documents.log.secret between runs.
> documents.log.secret
#for idx in $(curl -s "${ESURL}/_cat/indices?format=json" | jq -rS '.[].index' | grep -E "^(bitergia.+|sds-.*)" | grep -Ev '(-repository(-for-merge)?|-raw|-googlegroups|-slack|-dockerhub|-last-action-date-cache|-social_media|finosmeetings)$' | grep -Ev '\-onion_')
for idx in $(curl -s "${ESURL}/_cat/indices?format=json" | jq -rS '.[].index' | grep -E "^(bitergia.+|sds-.*)" | grep -Ev '(-repository(-for-merge)?|-raw|-last-action-date-cache|-social_media)$' | grep -Ev '\-onion_')
do
  data=$(cat documents-query.json.secret)
  data=${data/IDXNAME/$idx}
  echo "$data" > q.json.secret
  res=$(curl -s -XPOST -H 'Content-Type: application/json' "${ESURL}/_sql?format=json" -d@q.json.secret | jq -r '.rows')
  if [ ! "$res" = "[]" ]
  then
    if [ "$res" = "null" ]
    then
      # Retry with the minimal query for indices without project columns.
      data=$(cat documents-querymin.json.secret)
      data=${data/IDXNAME/$idx}
      echo "$data" > q.json.secret
      res=$(curl -s -XPOST -H 'Content-Type: application/json' "${ESURL}/_sql?format=json" -d@q.json.secret | jq -r '.rows')
      if [ ! "$res" = "[]" ] && [ ! "$res" = "null" ]
      then
        echo "special $idx: $res" | tee -a documents.log.secret
      fi
    else
      echo "$idx: $res" | tee -a documents.log.secret
    fi
  fi
done
--------------------------------------------------------------------------------
/sh/es_local_docker.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Start a single-node Elasticsearch 7.5.2 in Docker for local development,
# mapping HTTP to localhost:19200 and the transport port to localhost:19300.
docker run -p 19200:9200 -p 19300:9300 -e "discovery.type=single-node" docker.elastic.co/elasticsearch/elasticsearch:7.5.2
--------------------------------------------------------------------------------
/sh/es_unknowns.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Scan ES indices matching $PROJ for documents with author_org_name 'Unknown'
# and log per-index aggregates to unknowns.log.secret.
# Requires: ESURL (ES endpoint); PROJ is recommended but optional; optional
# CONDITION (extra SQL predicate), e.g.:
#   ESURL="https://..." PROJ="finos" CONDITION="author_name in ('x', 'y')" ./sh/es_unknowns.sh
if [ -z "$ESURL" ]
then
  echo "$0: you need to specify ESURL=..."
  exit 1
fi
if [ -z "$PROJ" ]
then
  echo "$0: you should specify PROJ=..."
# exit 2
fi
# `query` includes project columns; `querymin` is the fallback for indices
# lacking them (the first query then returns null rows).
if [ -z "$CONDITION" ]
then
  query="{\"query\":\"select origin, project, project_slug, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where author_org_name in ('Unknown') group by origin, project, project_slug, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
  querymin="{\"query\":\"select origin, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where author_org_name in ('Unknown') group by origin, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
else
  query="{\"query\":\"select origin, project, project_slug, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where ${CONDITION} and author_org_name in ('Unknown') group by origin, project, project_slug, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
  querymin="{\"query\":\"select origin, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where ${CONDITION} and author_org_name in ('Unknown') group by origin, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
fi
# Fix: quote the expansions — the SQL contains '*' (count(*)), which an
# unquoted echo would subject to pathname expansion and word splitting.
echo "$query" > unknowns-query.json.secret
echo "$querymin" > unknowns-querymin.json.secret
> unknowns.log.secret
for idx in $(curl -s "${ESURL}/_cat/indices?format=json" | jq -rS '.[].index' | grep -E "^(bitergia.+|sds-.*)${PROJ}" | grep -Ev '(-repository|-raw|-googlegroups|-slack|-dockerhub|-jenkins|-last-action-date-cache|-social_media|-earned_media|finosmeetings)(-for-merge)?$' | grep -Ev '\-onion_')
do
  if [ ! -z "$DBG" ]
  then
    echo "$idx"
  fi
  data=$(cat unknowns-query.json.secret)
  data=${data/IDXNAME/$idx}
  echo "$data" > q.json.secret
  res=$(curl -s -XPOST -H 'Content-Type: application/json' "${ESURL}/_sql?format=json" -d@q.json.secret | jq -r '.rows')
  if [ ! "$res" = "[]" ]
  then
    if [ "$res" = "null" ]
    then
      # Retry with the minimal query for indices without project columns.
      data=$(cat unknowns-querymin.json.secret)
      data=${data/IDXNAME/$idx}
      echo "$data" > q.json.secret
      res=$(curl -s -XPOST -H 'Content-Type: application/json' "${ESURL}/_sql?format=json" -d@q.json.secret | jq -r '.rows')
      # Fix: also skip null (as es_documents.sh does) so we never log a
      # useless "special <idx>: null" line when the fallback fails too.
      if [ ! "$res" = "[]" ] && [ ! "$res" = "null" ]
      then
        echo "special $idx: $res" | tee -a unknowns.log.secret
      fi
    else
      echo "$idx: $res" | tee -a unknowns.log.secret
    fi
  fi
done
--------------------------------------------------------------------------------
/sh/example_add_identities.json:
--------------------------------------------------------------------------------
1 | {
2 | "identities": [
3 | {
4 | "id": "",
5 | "uuid": "",
6 | "source": "LG",
7 | "name": "LG Lukasz",
8 | "username": "LG lukaszgryglicki",
9 | "email": "LG lgryglicki@cncf.io"
10 | },
11 | {
12 | "source": "LG",
13 | "name": "LG2 Lukasz",
14 | "username": "LG2 lukaszgryglicki",
15 | "email": "LG2 lgryglicki@cncf.io"
16 | },
17 | {
18 | "id": "ABC",
19 | "uuid": "DEF",
20 | "source": "LG",
21 | "name": "LG3 Lukasz",
22 | "username": "LG3 lukaszgryglicki",
23 | "email": "LG3 lgryglicki@cncf.io"
24 | }
25 | ]
26 | }
27 |
--------------------------------------------------------------------------------
/sh/example_bulk.json:
--------------------------------------------------------------------------------
1 | {
2 | "add": [
3 | {
4 | "country_code": "pl ",
5 | "email": " Lukaszgryglicki!o2.pl",
6 | "name": " Łukasz Gryglicki",
7 | "is_bot": 0,
8 | "enrollments": [
9 | {
10 | "start": "1900-01-01",
11 | "end": "2006-03-01",
12 | "organization": "Independent",
13 | "role" : "M"
14 | },
15 | {
16 | "project_slug": "pl",
17 | "start": "2006-03-01",
18 | "end": "2012-08-01",
19 | "role" : "M",
20 | "organization": "IGF PAN Hornsund Expedition"
21 | },
22 | {
23 | "project_slug": "cs",
24 | "start": "2012-08-01",
25 | "end": "2013-10-15",
26 | "role" : "C",
27 | "organization": "Cleverstep"
28 | },
29 | {
30 | "project_slug": "sg",
31 | "start": " 2013-10-15 ",
32 | "end": " 2014-04-15 ",
33 | "role" : "M",
34 | "organization": "Societe Generale "
35 | },
36 | {
37 | "project_slug": "cs",
38 | "start": "2014-04-15",
39 | "end": "2015-11-20",
40 | "role" : "C",
41 | "organization": " Cleverstep"
42 | },
43 | {
44 | "project_slug": "lf",
45 | "start": "2015-11-20",
46 | "end": "2017-03-15",
47 | "role" : "M",
48 | "organization": " Google "
49 | },
50 | {
51 | "project_slug": "cncf",
52 | "start": "2017-03-15",
53 | "end": "2020-05-01",
54 | "role" : "C",
55 | "organization": "CNCF "
56 | },
57 | {
58 | "project_slug": "lf",
59 | "start": "2020-05-01",
60 | "end": "2100-01-01",
61 | "role" : "M",
62 | "organization": " The Linux foundation "
63 | }
64 | ],
65 | "identities": [
66 | {
67 | "source": "git",
68 | "email": "lukaszgryglicki!o2.pl",
69 | "name": "Lukasz Gryglicki"
70 | },
71 | {
72 | "source": "git",
73 | "email": "lukaszgryglicki!o2.pl",
74 | "name": "Łukasz Gryglicki"
75 | },
76 | {
77 | "source": "git",
78 | "email": "lukaszgryglicki!o2.pl",
79 | "name": "lukaszgryglicki"
80 | },
81 | {
82 | "source": "jira",
83 | "email": "lgryglicki!cncf.io",
84 | "name": "Łukasz Gryglicki",
85 | "username": "lgryglicki"
86 | }
87 | ]
88 | }
89 | ],
90 | "del": [
91 | {
92 | "country_code": "PL",
93 | "email": "lukaszgryglicki!o2.pl",
94 | "name": "Łukasz Gryglicki",
95 | "is_bot": 0,
96 | "enrollments": [
97 | {
98 | "start": "1900-01-01",
99 | "end": "2006-03-01",
100 | "organization": "Independent",
101 | "role" : "M"
102 | },
103 | {
104 | "start": "1970-01-01",
105 | "end": "2006-03-01",
106 | "project_slug": "cncf",
107 | "role" : "C",
108 | "organization": "Independent"
109 | },
110 | {
111 | "start": "2006-03-01",
112 | "end": "2011-06-15",
113 | "role" : "C",
114 | "organization": "Innsoft"
115 | },
116 | {
117 | "start": "2011-06-15",
118 | "end": "2012-08-01",
119 | "role" : "C",
120 | "organization": "IGF PAN Hornsund Expedition"
121 | },
122 | {
123 | "start": "2012-08-01",
124 | "end": "2013-10-15",
125 | "project_slug": "cncf",
126 | "role" : "C",
127 | "organization": "Cleverstep"
128 | },
129 | {
130 | "start": "2013-10-15",
131 | "end": "2014-04-15",
132 | "role" : "C",
133 | "organization": "Societe Generale"
134 | },
135 | {
136 | "project_slug": "sg",
137 | "start": " 2013-10-15 ",
138 | "end": " 2014-04-15 ",
139 | "role" : "C",
140 | "organization": "Societe Generale "
141 | },
142 | {
143 | "start": "2014-04-15",
144 | "end": "2015-11-20",
145 | "role" : "C",
146 | "organization": "Cleverstep"
147 | },
148 | {
149 | "start": "2015-11-20",
150 | "end": "2017-03-15",
151 | "project_slug": "cncf",
152 | "role" : "C",
153 | "organization": "Spreemo"
154 | },
155 | {
156 | "start": "2017-03-15",
157 | "end": "2099-01-01",
158 | "role" : "C",
159 | "organization": "CNCF"
160 | }
161 | ],
162 | "identities": [
163 | {
164 | "source": "github",
165 | "email": "lukaszgryglicki!o2.pl",
166 | "name": "Łukasz Gryglicki",
167 | "username": "lukaszgryglicki"
168 | },
169 | {
170 | "source": "git",
171 | "email": "lukaszgryglicki!o2.pl",
172 | "name": "Lukasz Gryglicki"
173 | },
174 | {
175 | "source": "git",
176 | "email": "lukaszgryglicki!o2.pl",
177 | "name": "Łukasz Gryglicki"
178 | },
179 | {
180 | "source": "git",
181 | "email": "lukaszgryglicki!o2.pl",
182 | "name": "lukaszgryglicki"
183 | }
184 | ]
185 | }
186 | ]
187 | }
188 |
--------------------------------------------------------------------------------
/sh/example_bulk2.json:
--------------------------------------------------------------------------------
1 | {
2 | "add": [
3 | {
4 | "country_code": "RU",
5 | "email": "da-jeff!linuxfoundation.com",
6 | "name": "DAJeff",
7 | "is_bot": 0,
8 | "enrollments": [
9 | {
10 | "start": "1900-01-01",
11 | "end": "2100-01-01",
12 | "organization": "The Linux Foundation"
13 | }
14 | ],
15 | "identities": [
16 | {
17 | "source": "github",
18 | "email": "da-jeff!linuxfoundation.com",
19 | "name": "DA Jeff",
20 | "username": "da-jeff"
21 | }
22 | ]
23 | },
24 | {
25 | "name": "Julian Zucker",
26 | "email": "julian.zucker!gmail.com",
27 | "is_bot": 0,
28 | "country_code": "US"
29 | },
30 | {
31 | "name": "Tammer Saleh",
32 | "email": "hello!superorbit.al",
33 | "is_bot": 0,
34 | "country_code": "US"
35 | }
36 | ],
37 | "del": [
38 | {
39 | "country_code": "PL",
40 | "email": "da-jeff!linuxfoundation.com",
41 | "name": "DAJeff",
42 | "is_bot": 0,
43 | "enrollments": [
44 | {
45 | "start": "1900-01-01",
46 | "end": "2100-01-01",
47 | "organization": "The Linux Foundation"
48 | }
49 | ],
50 | "identities": [
51 | {
52 | "source": "github",
53 | "email": "da-jeff!linuxfoundation.com",
54 | "name": "DA Jeff",
55 | "username": "da-jeff"
56 | }
57 | ]
58 | },
59 | {
60 | "name": "Tammer Saleh",
61 | "email": "hello!superorbit.al",
62 | "is_bot": 0,
63 | "country_code": "US",
64 | "identities": [
65 | {
66 | "source": "github",
"email": "hello!superorbit.al",
68 | "name": "Tammer Saleh",
69 | "username": "tammersaleh"
70 | }
71 | ]
72 | }
73 | ]
74 | }
75 |
--------------------------------------------------------------------------------
/sh/finos_blanks.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Report ES documents whose author_org_name is blank or missing across FINOS-related indices.
# Example run: [NO_BITERGIA=1] ESURL="https://[redacted]" CONDITION="author_name in ('...')" ./sh/finos_blanks.sh
# ESURL       - ElasticSearch base URL (required).
# CONDITION   - optional SQL predicate ANDed with the blank-org filter.
# NO_BITERGIA - when set, skip legacy bitergia-* indices.
if [ -z "$ESURL" ]
then
  echo "$0: you need to specify ESURL=..."
  exit 1
fi
if [ -z "$CONDITION" ]
then
  query="{\"query\":\"select origin, project, project_slug, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where author_org_name is null or author_org_name in ('') group by origin, project, project_slug, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
  querymin="{\"query\":\"select origin, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where author_org_name is null or author_org_name in ('') group by origin, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
else
  query="{\"query\":\"select origin, project, project_slug, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where ${CONDITION} and (author_org_name is null or author_org_name in ('')) group by origin, project, project_slug, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
  querymin="{\"query\":\"select origin, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where ${CONDITION} and (author_org_name is null or author_org_name in ('')) group by origin, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
fi
# Quote the expansions: the SQL contains 'count(*)', which an unquoted echo
# would subject to word splitting and pathname expansion.
echo "$query" > finos-query.json.secret
echo "$querymin" > finos-querymin.json.secret
> finos.log.secret
for idx in $(curl -s "${ESURL}/_cat/indices?format=json" | jq -rS '.[].index' | grep -E '^(bitergia.+(finos|symphonyoss)|sds-finos-)' | grep -Ev '(-repository(-for-merge)?|-raw|-googlegroups|-slack|-dockerhub|-last-action-date-cache|-social_media|finosmeetings)$' | grep -Ev '\-onion_')
do
  if ( [ ! -z "${NO_BITERGIA}" ] && [[ $idx == *"bitergia"* ]] )
  then
    continue
  fi
  data=$(cat finos-query.json.secret)
  data=${data/IDXNAME/$idx}
  echo "$data" > q.json.secret
  res=$(curl -s -XPOST -H 'Content-Type: application/json' "${ESURL}/_sql?format=json" -d@q.json.secret | jq -r '.rows')
  if [ ! "$res" = "[]" ]
  then
    if [ "$res" = "null" ]
    then
      # Index lacks project/project_slug columns - retry with the minimal query.
      data=$(cat finos-querymin.json.secret)
      data=${data/IDXNAME/$idx}
      echo "$data" > q.json.secret
      res=$(curl -s -XPOST -H 'Content-Type: application/json' "${ESURL}/_sql?format=json" -d@q.json.secret | jq -r '.rows')
      if [ ! "$res" = "[]" ]
      then
        echo "special $idx: $res" | tee -a finos.log.secret
      fi
    else
      echo "$idx: $res" | tee -a finos.log.secret
    fi
  fi
done
--------------------------------------------------------------------------------
/sh/finos_fix_projects.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Rename mislabelled project names inside FINOS ES documents using update_by_query
# with a painless script. ES must point at the ElasticSearch base URL.
if [ -z "${ES}" ]
then
  echo "$0: ES env variable must be set"
  exit 1
fi
rename_project () {
  # $1 - index pattern, $2 - current project name, $3 - new project name
  echo -n "${2} -> ${3}: " && curl -s -XPOST -H 'Content-type: application/json' "${ES}/${1}/_update_by_query?conflicts=proceed" -d"{\"query\":{\"term\":{\"project\":\"${2}\"}},\"script\":\"ctx._source.project='${3}'\"}" | jq -rS '.updated'
}
rename_project 'sds-finos-*,-*-raw' 'Devops Mutualization' 'DevOps Mutualization'
rename_project 'sds-finos-*,-*-raw' 'InnerSource SIG' 'InnerSource Special Interest Group'
rename_project 'sds-finos-*,-*-raw' 'kdbplus' 'kdb+'
rename_project 'sds-finos-*,-*-raw' 'ODP Project' 'Open Developer Platform'
rename_project 'sds-finos-*,-*-raw' 'openmama' 'OpenMAMA'
rename_project 'sds-finos-*,-*-raw' 'Cloud Service Certification' 'Compliant Financial Infrastructure'
rename_project 'sds-finos-*,-*-raw' 'Alloy' 'Legend'
rename_project 'sds-finos-*,-*-raw' 'Decentralized Ecosystem Growth' 'FINOS Community'
rename_project 'sds-finos-finos-community-slack' 'FINOS' 'FINOS Community'
--------------------------------------------------------------------------------
/sh/finos_rolls.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Set per-person enrollment (affiliation) histories for FINOS profiles via
# ./sh/update_finos_rolls.sh. Helper invocation format:
#   update_finos_rolls.sh <env> <profile-uuid> 'Org;from;to' ['Org;from;to' ...]
# Each 'Org;from;to' triple is one enrollment period; an empty from/to means
# open-ended on that side. The comment above each call names the person.
# CHECK=1 - only check data
# James McLeod
./sh/update_finos_rolls.sh prod d89e39413399a38f1780456e37dfb4ee16810e13 'Individual - No Account;;2013-11-01' 'Publicis Sapient;2013-11-01;2017-06-01' 'Lloyds Bank plc;2017-06-01;2019-08-31' 'Finos;2019-08-31;2020-04-08' 'The Linux Foundation;2020-04-08;'
# Maurizio Pillitu
./sh/update_finos_rolls.sh prod 7d25f805bcc1886acd05a50efdacd1c95dff8912 'Individual - No Account;;2007-09-01' 'Sourcesense;2007-09-01;2012-01-31' 'Individual - No Account;2012-01-31;2012-04-01' 'Alfresco;2012-04-01;2016-04-30' 'Finos;2016-04-30;2020-04-08' 'The Linux Foundation;2020-04-08;'
# Gabriele Columbro
./sh/update_finos_rolls.sh prod 054aecbdae617c4687cd2e7129800231875ea96a 'Individual - No Account;;2006-01-01' 'Sourcesense;2006-01-01;2009-04-01' 'Alfresco;2009-04-01;2015-12-01' 'Finos;2009-04-01;2020-04-01' 'Individual - No Account;2020-04-01;'
# Aitana Myohl
./sh/update_finos_rolls.sh prod 23a161cf1252c045e6e265fd37cacb4dbf283a6b 'Individual - No Account;;2018-08-01' 'Columbia University;2018-08-01;2019-05-01' 'Finos;2019-05-01;2020-04-08' 'The Linux Foundation;2020-04-08;'
# Nicholas Kolba
./sh/update_finos_rolls.sh prod 864b7c8c91b9bc2c6bcd67bf90dd0bf5ce5da31f 'Individual - No Account;;2000-05-01' 'Thomson Reuters Markets LLC;2000-05-01;2012-01-01' 'SitePen, Inc;2012-01-01;2012-07-01' 'Individual - No Account;2012-07-01;2014-05-01' 'Thomson Reuters Markets LLC;2014-05-01;2016-07-01' 'OpenFin;2016-07-01;2019-11-01' 'Individual - No Account;2019-11-01;2020-05-01' 'Genesis Global Technology limited;2020-05-01;2020-12-01' 'Individual - No Account;2020-12-01;'
# Aaron Griswold
./sh/update_finos_rolls.sh prod 0013bb8cdbd3e57b1ecc7c0ecfe2025e2a176c92 'Individual - No Account;;2018-05-01' 'Finos;2018-05-01;2020-04-08' 'The Linux Foundation;2020-04-08;'
# ColinEberhardt
./sh/update_finos_rolls.sh prod cbdf896ace2068132f2dc0423254065c5d031bbd 'Individual - No Account;;2006-01-01' 'Scott Logic Ltd;2006-01-01;'
# Rob Underwood
./sh/update_finos_rolls.sh prod 9277421f819d24c825be9e11b1bfc975526538f1 'Individual - No Account;;2000-10-01' 'KPMG;2000-10-01;2006-09-01' 'Deloitte Consulting LLP;2006-09-01;2013-01-31' 'Finos;2013-01-31;2020-04-08' 'The Linux Foundation;2020-04-08;2020-12-31' 'Individual - No Account;2020-12-31;'
# Tosha Ellison
./sh/update_finos_rolls.sh prod 01cff3f0f12ce79b16172684bce225ebd846f446 'Individual - No Account;;2003-01-01' 'Credit Suisse;2003-01-01;2013-12-31' 'Individual - No account;2013-12-31;2018-06-01' 'Finos;2018-06-01;2020-04-08' 'The Linux Foundation;2020-04-08;'
# Aaron Williamson
./sh/update_finos_rolls.sh prod 1ed2874f8be955e9f18332a4a77574d300ee6d39 'Individual - No Account;;2016-10-01' 'Finos;2016-10-01;2019-07-01' 'Individual - No Account;2019-07-01;'
# J Lovejoy
./sh/update_finos_rolls.sh prod 35ff79f0395d36302f18fc42d939550c07f31479 'Individual - No Account;;2014-01-01' 'Arm;2014-01-01;2018-09-30' 'Individual - No Account;2018-09-30;2020-01-01' 'Canonical Ltd.;2020-01-01;2020-12-01' 'Individual - No Account;2020-12-01;'
# Peter Monks
./sh/update_finos_rolls.sh prod 023ea3e3a304a7cbc725cf08ad750fd64f065125 'Individual - No Account;;2007-04-01' 'Alfresco;2007-04-01;2015-08-01' 'Inkling;2015-08-01;2016-06-01' 'Finos;2016-06-01;2018-07-01' 'Individual - No Account;2018-07-01;'
# Nikolay Ganev
./sh/update_finos_rolls.sh prod 308aa6b9bf53a63781a5e6a8fc3197532512cf96 'Individual - No Account;;2017-07-01' 'Finos;2017-07-01;2019-06-01' 'Boehringer Ingelheim;2019-06-01;2021-01-01' 'Hillrom, Inc.;2021-01-01;'
# Beth Hall
./sh/update_finos_rolls.sh prod 31df9be40e635af0df89050717c598869b9a2c57 'Individual - No Account;;2011-09-01' 'Capital One Financial Corporation;2011-09-01;2012-08-01' 'Individual - No Account;2012-08-01;'
# Alexandra Stratigos
./sh/update_finos_rolls.sh prod 10842675fe4aa10408fe0351f29e71d4b5abd03d 'Individual - No Account;;2016-04-01' 'Finos;2016-04-01;2020-04-08' 'The Linux Foundation;2020-04-08;'
# rikoe
./sh/update_finos_rolls.sh prod 8454b86202c6ab1333ea1df67414b4d20036a925 'Individual - No Account;;2012-04-01' 'Barclays;2012-04-01;2013-06-30' 'Individual - No Account;2013-06-30;2014-11-01' 'Deutsche Bank AG;2014-11-01;2016-09-30' 'Adaptive Financial Consulting Limited;2016-09-30;2018-08-01' 'Finos;2018-08-01;2020-04-08' 'The Linux Foundation;2020-04-08;'
--------------------------------------------------------------------------------
/sh/finos_unknowns.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Report ES documents whose author_org_name is 'Unknown' across FINOS-related indices.
# Example run: [NO_BITERGIA=1] ESURL="https://[redacted]" CONDITION="author_name in ('...')" ./sh/finos_unknowns.sh
# ESURL       - ElasticSearch base URL (required).
# CONDITION   - optional SQL predicate ANDed with the 'Unknown' org filter.
# NO_BITERGIA - when set, skip legacy bitergia-* indices.
if [ -z "$ESURL" ]
then
  echo "$0: you need to specify ESURL=..."
  exit 1
fi
if [ -z "$CONDITION" ]
then
  query="{\"query\":\"select origin, project, project_slug, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where author_org_name in ('Unknown') group by origin, project, project_slug, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
  querymin="{\"query\":\"select origin, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where author_org_name in ('Unknown') group by origin, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
else
  query="{\"query\":\"select origin, project, project_slug, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where ${CONDITION} and author_org_name in ('Unknown') group by origin, project, project_slug, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
  querymin="{\"query\":\"select origin, author_uuid, author_id, author_name, author_org_name as org, min(metadata__updated_on) as dt_from, max(metadata__updated_on) as dt_to, count(*) as cnt from \\\"IDXNAME\\\" where ${CONDITION} and author_org_name in ('Unknown') group by origin, author_uuid, author_id, author_name, author_org_name\",\"fetch_size\":10000}"
fi
# Quote the expansions: the SQL contains 'count(*)', which an unquoted echo
# would subject to word splitting and pathname expansion.
echo "$query" > finos-query.json.secret
echo "$querymin" > finos-querymin.json.secret
> finos.log.secret
for idx in $(curl -s "${ESURL}/_cat/indices?format=json" | jq -rS '.[].index' | grep -E '^(bitergia.+(finos|symphonyoss)|sds-finos-)' | grep -Ev '(-repository(-for-merge)?|-raw|-googlegroups|-slack|-dockerhub|-last-action-date-cache|-social_media|finosmeetings)$' | grep -Ev '\-onion_')
do
  if ( [ ! -z "${NO_BITERGIA}" ] && [[ $idx == *"bitergia"* ]] )
  then
    continue
  fi
  data=$(cat finos-query.json.secret)
  data=${data/IDXNAME/$idx}
  echo "$data" > q.json.secret
  res=$(curl -s -XPOST -H 'Content-Type: application/json' "${ESURL}/_sql?format=json" -d@q.json.secret | jq -r '.rows')
  if [ ! "$res" = "[]" ]
  then
    if [ "$res" = "null" ]
    then
      # Index lacks project/project_slug columns - retry with the minimal query.
      data=$(cat finos-querymin.json.secret)
      data=${data/IDXNAME/$idx}
      echo "$data" > q.json.secret
      res=$(curl -s -XPOST -H 'Content-Type: application/json' "${ESURL}/_sql?format=json" -d@q.json.secret | jq -r '.rows')
      if [ ! "$res" = "[]" ]
      then
        echo "special $idx: $res" | tee -a finos.log.secret
      fi
    else
      echo "$idx: $res" | tee -a finos.log.secret
    fi
  fi
done
--------------------------------------------------------------------------------
/sh/fix_blank_orgs.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Find documents in INDEX with an empty author_org_name, resolve the correct
# affiliation per (uuid, date) via the affiliation API, and patch each document.
# Required environment (examples):
# ES_URL=...
# INDEX=sds-cncf-k8s-github-issue
# API_URL=prod
# PSLUG='cncf/k8s'
if [ -z "${ES_URL}" ]
then
echo "$0: you need to specify ES_URL=..."
exit 1
fi
if [ -z "${INDEX}" ]
then
echo "$0: you need to specify INDEX=..."
exit 2
fi
if [ -z "${API_URL}" ]
then
echo "$0: you need to specify API_URL=..."
exit 3
fi
if [ -z "${PSLUG}" ]
then
echo "$0: you need to specify PSLUG=..."
exit 4
fi
updates=0
fails=0
# Distinct author UUIDs having at least one blank-org document (first 10k hits only).
uuids=$(curl -s -XPOST -H 'Content-type: application/json' "${ES_URL}/${INDEX}/_search?size=10000" -d'{"query":{"term":{"author_org_name":""}}}' | jq -rS '.hits.hits[]._source.author_uuid' | sort | uniq)
for uuid in $uuids
do
echo "UUID: $uuid"
# All blank-org docs for this uuid; ids and dates stay index-aligned below.
data=$(curl -s -XPOST -H 'Content-type: application/json' "${ES_URL}/${INDEX}/_search?size=10000" -d"{\"query\":{\"bool\":{\"must\":[{\"term\":{\"author_org_name\":\"\"}},{\"term\":{\"author_uuid\":\"${uuid}\"}}]}}}" | jq --compact-output -rS '.hits.hits')
ids=$(echo $data | jq -rS '.[]._id')
dates=($(echo $data | jq -rS '.[]._source.metadata__updated_on'))
i=0
for id in $ids
do
date=${dates[$i]}
echo "uuid: $uuid, date: $date, doc_id=$id, i=$i"
i=$((i+1))
# Ask the affiliation API which org this uuid belonged to at the doc's date.
res=$(JWT_TOKEN=`cat secret/lgryglicki.${API_URL}.token` ./sh/curl_get_affiliation_single.sh "$PSLUG" $uuid $date | jq '.')
org=$(echo $res | jq -r '.org')
if ( [ "$org" = "null" ] || [ "$org" = "" ] )
then
echo "Cannot get org for uuid: $uuid, date: $date: $res"
fails=$((fails+1))
else
echo "uuid: $uuid, date: $date --> $org"
# Patch the single document (matched by _id) with the resolved org name.
res=$(curl -s -XPOST -H 'Content-type: application/json' "${ES_URL}/${INDEX}/_update_by_query?conflicts=proceed" -d"{\"query\":{\"term\":{\"_id\":\"$id\"}},\"script\":\"ctx._source.author_org_name='${org}'\"}" | jq -rS '.')
upd=$(echo $res | jq -r '.updated')
if ( [ "$upd" = "null" ] || [ "$upd" = "" ] || [ "$upd" = "0" ] )
then
echo "Failed updating uuid: $uuid, date: $date, doc_id=$id: $res"
else
echo "uuid: $uuid, date: $date, doc_id=$id updated org to: $org"
updates=$((updates+1))
fi
fi
done
done
echo "Done $updates updates"
if [ ! "$fails" = "0" ]
then
echo "$fails failed"
fi
--------------------------------------------------------------------------------
/sh/fix_dot_git_in_finos.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Strip a trailing '.git' from origin/repo_name/tag field values in FINOS ES documents.
# ES - ElasticSearch base URL (required).
if [ -z "${ES}" ]
then
  echo "$0: ES env variable must be set"
  exit 1
fi
for field in origin repo_name tag
do
  echo "cleanup $field"
  # CSV rows of '<value>,1' for every distinct value ending in '.git'.
  values=$(curl -s -XPOST -H 'Content-Type: application/json' "${ES}/_sql?format=csv" -d"{\"query\":\"select $field,1 from \\\"sds-finos-*,-*-raw\\\" where $field like '%.git' group by $field\",\"fetch_size\":10000}")
  for data in $values
  do
    arr=(${data//,/ })
    value=${arr[0]}
    # Skip the CSV header row (first column equals the field name).
    if [ "$value" = "$field" ]
    then
      continue
    fi
    # Drop the trailing 4 characters ('.git').
    new_value=${value::-4}
    echo "$field: $value -> $new_value"
    curl -s -XPOST -H 'Content-type: application/json' "${ES}/sds-finos-*,-*-raw/_update_by_query?conflicts=proceed" -d"{\"query\":{\"term\":{\"${field}\":\"${value}\"}},\"script\":\"ctx._source.${field}='${new_value}'\"}" | jq -rS '.'
  done
done
--------------------------------------------------------------------------------
/sh/fix_es_docs.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Bulk-set author_org_name=ORG on documents matching the given author UUIDs,
# optionally restricted to a [FROM, TO) metadata__updated_on range.
# Example run: INDICES='idx1 idx2 ... idxN' UUIDS='uuid1 uuid2 ... uuidN' ORG='OrgName' [FROM=2011-01-01 TO=2012-07-01] [DBG=1] ESURL='https://...' ./sh/fix_es_docs.sh
if [ -z "$ESURL" ]
then
echo "$0: you need to specify ESURL=..."
exit 1
fi
if [ -z "$INDICES" ]
then
echo "$0: you need to specify INDICES='idx1 idx2 ... idxN'"
exit 2
fi
if [ -z "$UUIDS" ]
then
echo "$0: you need to specify UUIDS='uuid1 uuid2 ... uuidN'"
exit 3
fi
if [ -z "$ORG" ]
then
echo "$0: you need to specify ORG='Org Name'"
exit 4
fi
for idx in $INDICES
do
# Build the update_by_query payload: a painless script plus a terms query on author_uuid.
q="{\"script\":\"ctx._source.author_org_name='${ORG}'\","
if [ -z "${FROM}" ]
then
q="${q}\"query\":{\"terms\":{\"author_uuid\":["
for uuid in $UUIDS
do
q="${q}\"${uuid}\","
done
# ${q::-1} drops the trailing comma before closing the JSON structures.
q="${q::-1}]}}}"
else
# With FROM set, AND the terms query with a metadata__updated_on range filter.
# NOTE(review): TO is assumed to be set whenever FROM is - confirm with callers.
q="${q}\"query\":{\"bool\":{\"must\":[{\"terms\":{\"author_uuid\":["
for uuid in $UUIDS
do
q="${q}\"${uuid}\","
done
q="${q::-1}]}},{\"range\":{\"metadata__updated_on\":{\"gte\":\"${FROM}\",\"lt\":\"${TO}\"}}}]}}}"
fi
echo $q > q.json.secret
# DBG=1 - dump the payload both raw and pretty-printed before sending.
if [ ! -z "${DBG}" ]
then
cat q.json.secret
cat q.json.secret | jq -rS .
fi
echo -n "${idx}: "
curl -s -XPOST -H 'Content-Type: application/json' "${ESURL}/${idx}/_update_by_query?conflicts=proceed" -d@q.json.secret | jq -rS .
done
--------------------------------------------------------------------------------
/sh/fix_finos_git_unknowns.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# For every project slug in the FINOS git indices, find documents whose
# author_org_name is 'Unknown', resolve the correct affiliation per (uuid, date)
# via the affiliation API, and patch each document. Per-project and global
# counters track updates, still-unknowns and failures.
# Required environment:
# ES_URL=...
# API_URL=prod
export INDEX='sds-finos-*-git,-*-raw'
if [ -z "${ES_URL}" ]
then
echo "$0: you need to specify ES_URL=..."
exit 1
fi
if [ -z "${API_URL}" ]
then
echo "$0: you need to specify API_URL=..."
exit 2
fi
# Global counters across all project slugs.
gupdates=0
gunknowns=0
gfails=0
pslugs=$(curl -s -XPOST -H 'Content-Type: application/json' "${ES_URL}/_sql?format=json" -d"{\"query\":\"select project_slug from \\\"${INDEX}\\\" group by project_slug\",\"fetch_size\":10000}" | jq -rS '.rows[][0]')
for pslug in $pslugs
do
# Per-project counters.
updates=0
unknowns=0
fails=0
# echo "Project: $pslug"
# Distinct author UUIDs with 'Unknown' org in this project (first 10k hits only).
uuids=$(curl -s -XPOST -H 'Content-type: application/json' "${ES_URL}/${INDEX}/_search?size=10000" -d"{\"query\":{\"bool\":{\"must\":[{\"term\":{\"author_org_name\":\"Unknown\"}},{\"term\":{\"project_slug\":\"${pslug}\"}}]}}}" | jq -rS '.hits.hits[]._source.author_uuid' | sort | uniq)
for uuid in $uuids
do
# echo "UUID: $uuid"
# All 'Unknown' docs for this uuid; ids and dates stay index-aligned below.
data=$(curl -s -XPOST -H 'Content-type: application/json' "${ES_URL}/${INDEX}/_search?size=10000" -d"{\"query\":{\"bool\":{\"must\":[{\"term\":{\"author_org_name\":\"Unknown\"}},{\"term\":{\"project_slug\":\"${pslug}\"}},{\"term\":{\"author_uuid\":\"${uuid}\"}}]}}}" | jq --compact-output -rS '.hits.hits')
ids=$(echo $data | jq -rS '.[]._id')
dates=($(echo $data | jq -rS '.[]._source.metadata__updated_on'))
i=0
for id in $ids
do
date=${dates[$i]}
echo "uuid: $uuid, date: $date, doc_id=$id, i=$i"
i=$((i+1))
# Ask the affiliation API which org this uuid belonged to at the doc's date.
res=$(JWT_TOKEN=`cat secret/lgryglicki.${API_URL}.token` ./sh/curl_get_affiliation_single.sh "$pslug" $uuid $date | jq '.')
org=$(echo $res | jq -r '.org')
if ( [ "$org" = "null" ] || [ "$org" = "" ] )
then
echo "Cannot get org for uuid: $uuid, date: $date: $res"
fails=$((fails+1))
elif [ "$org" = "Unknown" ]
then
echo "Still Unknown for uuid: $uuid, date: $date"
unknowns=$((unknowns+1))
else
echo "uuid: $uuid, date: $date --> $org"
# Patch the single document (matched by _id) with the resolved org name.
res=$(curl -s -XPOST -H 'Content-type: application/json' "${ES_URL}/${INDEX}/_update_by_query?conflicts=proceed" -d"{\"query\":{\"term\":{\"_id\":\"$id\"}},\"script\":\"ctx._source.author_org_name='${org}'\"}" | jq -rS '.')
upd=$(echo $res | jq -r '.updated')
if ( [ "$upd" = "null" ] || [ "$upd" = "" ] || [ "$upd" = "0" ] )
then
echo "Failed updating uuid: $uuid, date: $date, doc_id=$id: $res"
else
echo "uuid: $uuid, date: $date, doc_id=$id updated org to: $org"
updates=$((updates+1))
fi
fi
done
done
# Per-project summary; accumulate into global counters.
if [ ! "$updates" = "0" ]
then
echo "$pslug: done $updates updates"
gupdates=$((gupdates+updates))
fi
if [ ! "$unknowns" = "0" ]
then
echo "$pslug: $unknowns still unknown"
gunknowns=$((gunknowns+unknowns))
fi
if [ ! "$fails" = "0" ]
then
echo "$pslug: $fails failed"
gfails=$((gfails+fails))
fi
done
# Global summary.
if [ ! "$gupdates" = "0" ]
then
echo "done $gupdates updates"
fi
if [ ! "$gunknowns" = "0" ]
then
echo "$gunknowns still unknown"
fi
if [ ! "$gfails" = "0" ]
then
echo "$gfails failed"
fi
--------------------------------------------------------------------------------
/sh/fix_unknowns.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Generic variant of the 'Unknown' org fixer: for every project slug in INDEX,
# find documents whose author_org_name is 'Unknown', resolve the correct
# affiliation per (uuid, date) via the affiliation API, and patch each document.
# Required/optional environment:
# ES_URL=...
# API_URL=prod
# INDEX='sds-finos-*-git,-*-raw'
# DBG=1 - verbose progress output
# DRY=1 - report what would change without writing to ES
if [ -z "${ES_URL}" ]
then
echo "$0: you need to specify ES_URL=..."
exit 1
fi
if [ -z "${API_URL}" ]
then
echo "$0: you need to specify API_URL=..."
exit 2
fi
if [ -z "$INDEX" ]
then
echo "$0: you need to specify INDEX=..., example INDEX='sds-finos-*-git,-*-raw'"
exit 3
fi
# Global counters across all project slugs.
gupdates=0
gunknowns=0
gfails=0
pslugs=$(curl -s -XPOST -H 'Content-Type: application/json' "${ES_URL}/_sql?format=json" -d"{\"query\":\"select project_slug from \\\"${INDEX}\\\" group by project_slug\",\"fetch_size\":10000}" | jq -rS '.rows[][0]')
for pslug in $pslugs
do
# Per-project counters.
updates=0
unknowns=0
fails=0
if [ ! -z "$DBG" ]
then
echo "Project: $pslug"
fi
# Distinct author UUIDs with 'Unknown' org in this project (first 10k hits only).
uuids=$(curl -s -XPOST -H 'Content-type: application/json' "${ES_URL}/${INDEX}/_search?size=10000" -d"{\"query\":{\"bool\":{\"must\":[{\"term\":{\"author_org_name\":\"Unknown\"}},{\"term\":{\"project_slug\":\"${pslug}\"}}]}}}" | jq -rS '.hits.hits[]._source.author_uuid' | sort | uniq)
for uuid in $uuids
do
if [ ! -z "$DBG" ]
then
echo "UUID: $uuid"
fi
# All 'Unknown' docs for this uuid; ids and dates stay index-aligned below.
data=$(curl -s -XPOST -H 'Content-type: application/json' "${ES_URL}/${INDEX}/_search?size=10000" -d"{\"query\":{\"bool\":{\"must\":[{\"term\":{\"author_org_name\":\"Unknown\"}},{\"term\":{\"project_slug\":\"${pslug}\"}},{\"term\":{\"author_uuid\":\"${uuid}\"}}]}}}" | jq --compact-output -rS '.hits.hits')
ids=$(echo $data | jq -rS '.[]._id')
dates=($(echo $data | jq -rS '.[]._source.metadata__updated_on'))
i=0
for id in $ids
do
date=${dates[$i]}
echo "uuid: $uuid, date: $date, doc_id=$id, i=$i"
i=$((i+1))
# Ask the affiliation API which org this uuid belonged to at the doc's date.
res=$(JWT_TOKEN=`cat secret/lgryglicki.${API_URL}.token` ./sh/curl_get_affiliation_single.sh "$pslug" $uuid $date | jq '.')
org=$(echo $res | jq -r '.org')
if ( [ "$org" = "null" ] || [ "$org" = "" ] )
then
echo "Cannot get org for uuid: $uuid, date: $date: $res"
fails=$((fails+1))
elif [ "$org" = "Unknown" ]
then
echo "Still Unknown for uuid: $uuid, date: $date"
unknowns=$((unknowns+1))
else
if [ ! -z "$DRY" ]
then
echo "would update: uuid: $uuid, date: $date --> $org"
else
echo "uuid: $uuid, date: $date --> $org"
# Patch the single document (matched by _id) with the resolved org name.
res=$(curl -s -XPOST -H 'Content-type: application/json' "${ES_URL}/${INDEX}/_update_by_query?conflicts=proceed" -d"{\"query\":{\"term\":{\"_id\":\"$id\"}},\"script\":\"ctx._source.author_org_name='${org}'\"}" | jq -rS '.')
upd=$(echo $res | jq -r '.updated')
if ( [ "$upd" = "null" ] || [ "$upd" = "" ] || [ "$upd" = "0" ] )
then
echo "Failed updating uuid: $uuid, date: $date, doc_id=$id: $res"
else
echo "uuid: $uuid, date: $date, doc_id=$id updated org to: $org"
updates=$((updates+1))
fi
fi
fi
done
done
# Per-project summary; accumulate into global counters.
if [ ! "$updates" = "0" ]
then
echo "$pslug: done $updates updates"
gupdates=$((gupdates+updates))
fi
if [ ! "$unknowns" = "0" ]
then
echo "$pslug: $unknowns still unknown"
gunknowns=$((gunknowns+unknowns))
fi
if [ ! "$fails" = "0" ]
then
echo "$pslug: $fails failed"
gfails=$((gfails+fails))
fi
done
# Global summary.
if [ ! "$gupdates" = "0" ]
then
echo "done $gupdates updates"
fi
if [ ! "$gunknowns" = "0" ]
then
echo "$gunknowns still unknown"
fi
if [ ! "$gfails" = "0" ]
then
echo "$gfails failed"
fi
--------------------------------------------------------------------------------
/sh/get_token.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Obtain an Auth0 client-credentials JWT for the given environment and save it to a file.
# $1      - environment: test|prod
# $2      - optional output file (default: secret/lgryglicki.<env>.token)
# DEBUG=1 - print the token request (note: includes the client secret).
if [ -z "${1}" ]
then
  echo "$0: you need to specify env: test|prod"
  exit 1
fi
if [ -z "${2}" ]
then
  fn="secret/lgryglicki.${1}.token"
else
  fn="${2}"
fi
domain="`cat helm/da-affiliation/secrets/AUTH0_DOMAIN.${1}.secret`"
if [ -z "$domain" ]
then
  echo "$0: cannot read file helm/da-affiliation/secrets/AUTH0_DOMAIN.${1}.secret"
  exit 2
fi
audience="`cat helm/da-affiliation/secrets/AUTH0_AUDIENCE.${1}.secret`"
if [ -z "$audience" ]
then
  echo "$0: cannot read file helm/da-affiliation/secrets/AUTH0_AUDIENCE.${1}.secret"
  exit 3
fi
clientid="`cat helm/da-affiliation/secrets/AUTH0_CLIENT_ID.${1}.secret`"
if [ -z "$clientid" ]
then
  echo "$0: cannot read file helm/da-affiliation/secrets/AUTH0_CLIENT_ID.${1}.secret"
  exit 4
fi
clientsecret="`cat helm/da-affiliation/secrets/AUTH0_CLIENT_SECRET.${1}.secret`"
if [ -z "$clientsecret" ]
then
  echo "$0: cannot read file helm/da-affiliation/secrets/AUTH0_CLIENT_SECRET.${1}.secret"
  exit 5
fi
payload="{\"grant_type\":\"client_credentials\",\"client_id\":\"${clientid}\",\"client_secret\":\"${clientsecret}\",\"audience\":\"${audience}\",\"scope\":\"access:api\"}"
if [ ! -z "$DEBUG" ]
then
  echo "curl -XPOST -H 'Content-Type: application/json' ${domain}/oauth/token -d'${payload}'"
  #curl -s -XPOST -H 'Content-Type: application/json' "${domain}/oauth/token" -d"${payload}"
fi
token=`curl -s -XPOST -H 'Content-Type: application/json' "${domain}/oauth/token" -d"${payload}" | jq -r '.access_token'`
# jq -r emits the literal string "null" when .access_token is absent (e.g. Auth0
# returned an error) - don't overwrite the token file with a bogus value then.
if ( [ -z "$token" ] || [ "$token" = "null" ] )
then
  echo "$0: failed to obtain access token from ${domain}/oauth/token"
  exit 6
fi
echo "${token}" > "$fn"
--------------------------------------------------------------------------------
/sh/local_api.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Run the affiliation API locally, loading all endpoints from *.local.secret files.
# Set ONLYRUN='' NOCHECKS='' to run the full pre-start checks instead of skipping them.
secrets='helm/da-affiliation/secrets'
auth0_domain="`cat ${secrets}/AUTH0_DOMAIN.local.secret`"
elastic_url="`cat ${secrets}/ELASTIC_URL.local.secret`"
sh_db_endpoint="`cat ${secrets}/SH_DB_ENDPOINT.local.secret`"
sh_db_ro_endpoint="`cat ${secrets}/SH_DB_RO_ENDPOINT.local.secret`"
api_db_endpoint="`cat ${secrets}/API_DB_ENDPOINT.local.secret`"
LOG_LEVEL=debug ONLYRUN=1 NOCHECKS=1 AUTH0_DOMAIN="${auth0_domain}" ELASTIC_URL="${elastic_url}" SH_DB_ENDPOINT="${sh_db_endpoint}" SH_DB_RO_ENDPOINT="${sh_db_ro_endpoint}" API_DB_ENDPOINT="${api_db_endpoint}" ./sh/api.sh
--------------------------------------------------------------------------------
/sh/mariadb.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Bootstrap a local MariaDB for SortingHat: either via docker or by reinitializing
# a locally running server with default credentials.
# UPDATE_STRUCTURE=1 - run update structure SQL
# TESTING_API=1 - add data useful for testing merge/unmerge API
case "$1" in
  docker)
    PASS=rootpwd ./sh/mariadb_local_docker.sh
    ;;
  *)
    USR=root PASS=rootpwd SH_USR=sortinghat SH_RO_USR=ro_user SH_PASS=pwd SH_DB=sortinghat ./sh/mariadb_reinit.sh
    ;;
esac
--------------------------------------------------------------------------------
/sh/mariadb_drop.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Drops the Sorting Hat database and both of its users from the local MariaDB
3 | # instance on 127.0.0.1:13306 (root credentials required).
4 | if ( [ -z "$PASS" ] || [ -z "$USR" ] || [ -z "$SH_USR" ] || [ -z "$SH_RO_USR" ] || [ -z "$SH_DB" ] )
5 | then
6 | echo "$0: please specify MariaDB root user via USR=..."
7 | echo "$0: please specify MariaDB root password via PASS=..."
8 | echo "$0: please specify MariaDB Sorting Hat user via SH_USR=..."
9 | echo "$0: please specify MariaDB Sorting Hat database via SH_DB=..."
10 | echo "$0: please specify MariaDB Read Only user via SH_RO_USR=..."
11 | exit 1
12 | fi
13 | 
14 | # Fixed: backticks are now escaped so MySQL receives a backtick-quoted identifier;
15 | # an unescaped `` pair inside double quotes is an (empty) bash command substitution.
16 | echo "drop database \`${SH_DB}\`;" | mysql -h127.0.0.1 -P13306 -p"${PASS}" -u"${USR}"
17 | echo "drop user '$SH_USR';" | mysql -h127.0.0.1 -P13306 -p"${PASS}" -u"${USR}"
18 | echo "drop user '$SH_RO_USR';" | mysql -h127.0.0.1 -P13306 -p"${PASS}" -u"${USR}"
19 | 
--------------------------------------------------------------------------------
/sh/mariadb_init.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Creates the Sorting Hat database, its read/write and read only users, and loads
3 | # the schema (sh/sh_structure.sql) or a full dump (sh/sh_full.sql when FULL=1)
4 | # into the local MariaDB instance on 127.0.0.1:13306.
5 | # UPDATE_STRUCTURE=1 - also apply sql/structure_updates.sql
6 | # TESTING_API=1 - also apply sql/testing_api.sql
7 | if ( [ -z "$PASS" ] || [ -z "$USR" ] || [ -z "$SH_PASS" ] || [ -z "$SH_USR" ] || [ -z "$SH_RO_USR" ] || [ -z "$SH_DB" ] )
8 | then
9 | echo "$0: please specify MariaDB root user via USR=..."
10 | echo "$0: please specify MariaDB root password via PASS=..."
11 | echo "$0: please specify MariaDB Sorting Hat user via SH_USR=..."
12 | echo "$0: please specify MariaDB Sorting Hat user via SH_PASS=..."
13 | echo "$0: please specify MariaDB Sorting Hat database via SH_DB=..."
14 | echo "$0: please specify MariaDB Read Only user via SH_RO_USR=..."
15 | exit 1
16 | fi
17 | 
18 | #echo "create user '$SH_USR'@localhost identified by '$SH_PASS';" | mysql -h127.0.0.1 -P13306 -p"${PASS}" -u"${USR}"
19 | echo "create user '$SH_USR'@'%' identified by '$SH_PASS';" | mysql -h127.0.0.1 -P13306 -p"${PASS}" -u"${USR}"
20 | echo "create user '$SH_RO_USR'@'%' identified by '$SH_PASS';" | mysql -h127.0.0.1 -P13306 -p"${PASS}" -u"${USR}"
21 | # Fixed: backticks are now escaped so MySQL receives a backtick-quoted identifier;
22 | # an unescaped `` pair inside double quotes is an (empty) bash command substitution.
23 | echo "create database \`${SH_DB}\`;" | mysql -h127.0.0.1 -P13306 -p"${PASS}" -u"${USR}"
24 | echo "grant all privileges on \`${SH_DB}\`.* to '$SH_USR'@'%';" | mysql -h127.0.0.1 -P13306 -p"${PASS}" -u"${USR}"
25 | echo "grant all privileges on \`${SH_DB}\`.* to '$SH_USR'@localhost;" | mysql -h127.0.0.1 -P13306 -p"${PASS}" -u"${USR}"
26 | echo "grant select on \`${SH_DB}\`.* to '$SH_RO_USR'@'%';" | mysql -h127.0.0.1 -P13306 -p"${PASS}" -u"${USR}"
27 | echo "grant select on \`${SH_DB}\`.* to '$SH_RO_USR'@localhost;" | mysql -h127.0.0.1 -P13306 -p"${PASS}" -u"${USR}"
28 | echo "flush privileges;" | mysql -h127.0.0.1 -P13306 -p"${PASS}" -u"${USR}"
29 | if [ -z "$FULL" ]
30 | then
31 | mysql -h127.0.0.1 -P13306 -p"${PASS}" -u"${USR}" "${SH_DB}" < sh/sh_structure.sql
32 | else
33 | mysql -h127.0.0.1 -P13306 -p"${PASS}" -u"${USR}" "${SH_DB}" < sh/sh_full.sql
34 | fi
35 | if [ ! -z "$UPDATE_STRUCTURE" ]
36 | then
37 | # Cleanup: removed a stray redundant '< sh/sh_full.sql' redirection (with two
38 | # input redirections the last one wins, so it was dead but misleading).
39 | mysql -h127.0.0.1 -P13306 -p"${SH_PASS}" -u"${SH_USR}" "${SH_DB}" < sql/structure_updates.sql
40 | fi
41 | if [ ! -z "$TESTING_API" ]
42 | then
43 | mysql -h127.0.0.1 -P13306 -p"${SH_PASS}" -u"${SH_USR}" "${SH_DB}" < sql/testing_api.sql
44 | fi
45 | echo "show databases;" | mysql -h127.0.0.1 -P13306 -p"${SH_PASS}" -u"${SH_USR}" "${SH_DB}"
46 | 
--------------------------------------------------------------------------------
/sh/mariadb_init_default.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Same as mariadb_init.sh but targets the default MariaDB port 3306: creates the
3 | # Sorting Hat database and users, then loads the schema or a full dump (FULL=1).
4 | # UPDATE_STRUCTURE=1 - also apply sql/structure_updates.sql
5 | # TESTING_API=1 - also apply sql/testing_api.sql
6 | if ( [ -z "$PASS" ] || [ -z "$USR" ] || [ -z "$SH_PASS" ] || [ -z "$SH_USR" ] || [ -z "$SH_RO_USR" ] || [ -z "$SH_DB" ] )
7 | then
8 | echo "$0: please specify MariaDB root user via USR=..."
9 | echo "$0: please specify MariaDB root password via PASS=..."
10 | echo "$0: please specify MariaDB Sorting Hat user via SH_USR=..."
11 | echo "$0: please specify MariaDB Sorting Hat user via SH_PASS=..."
12 | echo "$0: please specify MariaDB Sorting Hat database via SH_DB=..."
13 | echo "$0: please specify MariaDB Read Only user via SH_RO_USR=..."
14 | exit 1
15 | fi
16 | 
17 | #echo "create user '$SH_USR'@localhost identified by '$SH_PASS';" | mysql -h127.0.0.1 -P3306 -p"${PASS}" -u"${USR}"
18 | echo "create user '$SH_USR'@'%' identified by '$SH_PASS';" | mysql -h127.0.0.1 -P3306 -p"${PASS}" -u"${USR}"
19 | echo "create user '$SH_RO_USR'@'%' identified by '$SH_PASS';" | mysql -h127.0.0.1 -P3306 -p"${PASS}" -u"${USR}"
20 | # Fixed: backticks are now escaped so MySQL receives a backtick-quoted identifier;
21 | # an unescaped `` pair inside double quotes is an (empty) bash command substitution.
22 | echo "create database \`${SH_DB}\`;" | mysql -h127.0.0.1 -P3306 -p"${PASS}" -u"${USR}"
23 | echo "grant all privileges on \`${SH_DB}\`.* to '$SH_USR'@'%';" | mysql -h127.0.0.1 -P3306 -p"${PASS}" -u"${USR}"
24 | echo "grant all privileges on \`${SH_DB}\`.* to '$SH_USR'@localhost;" | mysql -h127.0.0.1 -P3306 -p"${PASS}" -u"${USR}"
25 | echo "grant select on \`${SH_DB}\`.* to '$SH_RO_USR'@'%';" | mysql -h127.0.0.1 -P3306 -p"${PASS}" -u"${USR}"
26 | echo "grant select on \`${SH_DB}\`.* to '$SH_RO_USR'@localhost;" | mysql -h127.0.0.1 -P3306 -p"${PASS}" -u"${USR}"
27 | echo "flush privileges;" | mysql -h127.0.0.1 -P3306 -p"${PASS}" -u"${USR}"
28 | if [ -z "$FULL" ]
29 | then
30 | mysql -h127.0.0.1 -P3306 -p"${PASS}" -u"${USR}" "${SH_DB}" < sh/sh_structure.sql
31 | else
32 | mysql -h127.0.0.1 -P3306 -p"${PASS}" -u"${USR}" "${SH_DB}" < sh/sh_full.sql
33 | fi
34 | if [ ! -z "$UPDATE_STRUCTURE" ]
35 | then
36 | # Cleanup: removed a stray redundant '< sh/sh_full.sql' redirection (with two
37 | # input redirections the last one wins, so it was dead but misleading).
38 | mysql -h127.0.0.1 -P3306 -p"${SH_PASS}" -u"${SH_USR}" "${SH_DB}" < sql/structure_updates.sql
39 | fi
40 | if [ ! -z "$TESTING_API" ]
41 | then
42 | mysql -h127.0.0.1 -P3306 -p"${SH_PASS}" -u"${SH_USR}" "${SH_DB}" < sql/testing_api.sql
43 | fi
44 | echo "show databases;" | mysql -h127.0.0.1 -P3306 -p"${SH_PASS}" -u"${SH_USR}" "${SH_DB}"
45 | 
--------------------------------------------------------------------------------
/sh/mariadb_local_docker.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Starts a MariaDB container with root password $PASS, exposed on host port 13306.
3 | if [ -z "$PASS" ]
4 | then
5 | echo "$0: please specify MariaDB password via PASS=..."
6 | exit 1
7 | fi
8 | docker run -p 13306:3306 -e MYSQL_ROOT_PASSWORD="${PASS}" mariadb
9 | 
--------------------------------------------------------------------------------
/sh/mariadb_readonly_shell.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Opens a MariaDB shell as the read only user against the local instance (port 13306).
3 | if ( [ -z "$SH_PASS" ] || [ -z "$SH_RO_USR" ] || [ -z "$SH_DB" ] )
4 | then
5 | echo "$0: please specify MariaDB read only user via SH_RO_USR=..."
6 | echo "$0: please specify MariaDB read only password via SH_PASS=..."
7 | echo "$0: please specify MariaDB read only database via SH_DB=..."
8 | exit 1
9 | fi
10 | echo mysql -h127.0.0.1 -P13306 -p"${SH_PASS}" -u"${SH_RO_USR}" "${SH_DB}" # NOTE: prints the password to the terminal
11 | mysql -h127.0.0.1 -P13306 -p"${SH_PASS}" -u"${SH_RO_USR}" "${SH_DB}"
12 | 
--------------------------------------------------------------------------------
/sh/mariadb_reinit.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Drops and re-creates the Sorting Hat database and users; the same environment
3 | # variables are re-validated by the called drop/init scripts, which read them.
4 | if ( [ -z "$PASS" ] || [ -z "$USR" ] || [ -z "$SH_PASS" ] || [ -z "$SH_USR" ] || [ -z "$SH_RO_USR" ] || [ -z "$SH_DB" ] )
5 | then
6 | echo "$0: please specify MariaDB root user via USR=..."
7 | echo "$0: please specify MariaDB root password via PASS=..."
8 | echo "$0: please specify MariaDB Sorting Hat user via SH_USR=..."
9 | echo "$0: please specify MariaDB Sorting Hat user via SH_PASS=..."
10 | echo "$0: please specify MariaDB Sorting Hat database via SH_DB=..."
11 | echo "$0: please specify MariaDB Read Only user via SH_RO_USR=..."
12 | exit 1
13 | fi
14 | ./sh/mariadb_drop.sh
15 | ./sh/mariadb_init.sh
16 | 
--------------------------------------------------------------------------------
/sh/mariadb_root_shell.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Opens a MariaDB shell as root against the local instance on port 13306.
3 | if ( [ -z "$PASS" ] || [ -z "$USR" ] )
4 | then
5 | echo "$0: please specify MariaDB root user via USR=..."
6 | echo "$0: please specify MariaDB password via PASS=..."
7 | exit 1
8 | fi
9 | mysql -h127.0.0.1 -P13306 -p"${PASS}" -u"${USR}"
10 | 
--------------------------------------------------------------------------------
/sh/mariadb_sortinghat_shell.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Opens a MariaDB shell as the Sorting Hat (read/write) user on 127.0.0.1:13306.
3 | # Fixed: the script validated SH_USR/SH_PASS/SH_DB but then connected as
4 | # SH_RO_USR (possibly unset), and its error messages described the read only
5 | # variables instead of the ones actually checked.
6 | if ( [ -z "$SH_PASS" ] || [ -z "$SH_USR" ] || [ -z "$SH_DB" ] )
7 | then
8 | echo "$0: please specify MariaDB Sorting Hat user via SH_USR=..."
9 | echo "$0: please specify MariaDB Sorting Hat password via SH_PASS=..."
10 | echo "$0: please specify MariaDB Sorting Hat database via SH_DB=..."
11 | exit 1
12 | fi
13 | echo mysql -h127.0.0.1 -P13306 -p"${SH_PASS}" -u"${SH_USR}" "${SH_DB}"
14 | mysql -h127.0.0.1 -P13306 -p"${SH_PASS}" -u"${SH_USR}" "${SH_DB}"
15 | 
--------------------------------------------------------------------------------
/sh/merge_gits.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Repairs stale author_uuid values in the FINOS git indices and asks the
3 | # affiliation API to merge identities that appear to be duplicates (same name).
4 | # Required env: ES - Elasticsearch base URL; SH - command prefix that runs an
5 | # SQL query against the SortingHat DB and prints the bare result.
6 | export INDEX='sds-finos-*-git,-*-raw'
7 | # All (author_id, author_uuid, doc-count) tuples still affiliated as 'Unknown'.
8 | uuids=$(curl -s -XPOST -H 'Content-Type: application/json' "${ES}/_sql?format=csv" -d"{\"query\":\"select author_id, author_uuid, count(*) as cnt, 1 from \\\"${INDEX}\\\" where author_org_name = 'Unknown' group by author_id, author_uuid order by cnt desc\",\"fetch_size\":10000}")
9 | for data in $uuids
10 | do
11 | # Split the CSV row into: author_id, author_uuid, count.
12 | arr=(${data//,/ })
13 | id=${arr[0]}
14 | uuid=${arr[1]}
15 | cnt=${arr[2]}
16 | if [ "$id" = "author_id" ]
17 | then
18 | # Skip the CSV header row.
19 | continue
20 | fi
21 | echo "$id $uuid $cnt"
22 | # The uuid SortingHat currently holds for this identity id.
23 | cmd="${SH} \"select uuid from identities where id = '$id'\""
24 | uuid2=$(eval "${cmd}")
25 | if [ ! "$uuid" = "$uuid2" ]
26 | then
27 | echo "$id uuid mismatch $uuid != $uuid2"
28 | # continue
29 | # ES is stale: rewrite author_uuid on all docs carrying this author_id.
30 | res=$(curl -s -XPOST -H 'Content-type: application/json' "${ES}/${INDEX}/_update_by_query?conflicts=proceed" -d"{\"query\":{\"term\":{\"author_id\":\"${id}\"}},\"script\":\"ctx._source.author_uuid='${uuid2}'\"}" | jq -rS '.')
31 | upd=$(echo $res | jq -r '.updated')
32 | if ( [ "$upd" = "null" ] || [ "$upd" = "" ] || [ "$upd" = "0" ] )
33 | then
34 | echo "failed updating author_id $id uuid $uuid -> $uuid2,$res"
35 | else
36 | echo "updated author_id: $id uuid $uuid -> $uuid2"
37 | fi
38 | fi
39 | cmd="${SH} \"select name from identities where id = '$id'\""
40 | name=$(eval "${cmd}")
41 | echo "$id $uuid $cnt -> '$name'"
42 | cmd="${SH} \"select email from identities where id = '$id'\""
43 | email=$(eval "${cmd}")
44 | echo "$id $uuid $cnt '$name' -> $email"
45 | # Pick the enrolled uuid used most often by other git identities with the same name.
46 | # cmd="${SH} \"select e.uuid, count(*) as cnt from identities i, enrollments e where i.uuid = e.uuid and i.name is not null and trim(i.name) != '' and i.name = '$name' and i.id != '$id' and i.email != '$email' group by e.uuid order by cnt desc limit 1\""
47 | cmd="${SH} \"select e.uuid, count(*) as cnt from identities i, enrollments e where i.uuid = e.uuid and i.name is not null and trim(i.name) != '' and i.name = '$name' and i.id != '$id' and i.source = 'git' group by e.uuid order by cnt desc limit 1\""
48 | res=$(eval "${cmd}")
49 | # First whitespace-separated field of the query result is the candidate uuid.
50 | new_uuid=(${res[0]})
51 | echo "$id $uuid/$uuid2 $cnt '$name' $email -> $new_uuid"
52 | if ( [ ! -z "$new_uuid" ] && [ ! "$uuid2" = "$new_uuid" ] )
53 | then
54 | echo "need to merge $uuid2 into $new_uuid"
55 | API_URL=prod JWT_TOKEN=`cat secret/lgryglicki.prod.token` ./sh/curl_put_merge_unique_identities.sh 'finos-f' "$uuid2" "$new_uuid"
56 | fi
57 | done
58 | 
--------------------------------------------------------------------------------
/sh/project_svc.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Fetches a project record from the LF platform project-service.
3 | # args: 1 - env (test|prod), 2 - project id/slug.
4 | # Requires a token file created via ./sh/get_token.sh <env>.
5 | if [ -z "$1" ]
6 | then
7 | echo "$0: you need to specify env as a 1st arg: test|prod"
8 | exit 1
9 | fi
10 | if [ -z "$2" ]
11 | then
12 | echo "$0: you need to specify project as a 2nd arg"
13 | exit 2
14 | fi
15 | if [ ! -f "secret/lgryglicki.$1.token" ]
16 | then
17 | echo "$0: missing secret/lgryglicki.$1.token file, use ./sh/get_token.sh $1 to get it"
18 | exit 3
19 | fi
20 | token="`cat secret/lgryglicki.$1.token`"
21 | curl -s -H "Authorization: Bearer ${token}" "https://api-gw.platform.linuxfoundation.org/project-service/v1/projects/${2}" | jq '.'
22 | 
--------------------------------------------------------------------------------
/sh/psql.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # With "docker" as the 1st arg: start a local dockerized Postgres; otherwise
3 | # reinitialize the dev_analytics DB from a SQL dump using default local passwords.
4 | if [ "$1" = "docker" ]
5 | then
6 | PASS=postgrespwd ./sh/psql_local_docker.sh
7 | else
8 | PASS=postgrespwd APIPASS=apipwd SQL=1 ./sh/psql_init.sh
9 | fi
10 | 
--------------------------------------------------------------------------------
/sh/psql_api_shell.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Opens a psql shell to the local dev_analytics DB (127.0.0.1:15432) as $USR.
3 | if ( [ -z "$USR" ] || [ -z "$PASS" ] )
4 | then
5 | echo "$0: please specify user via USR=..."
6 | echo "$0: please specify password via PASS=..."
7 | exit 1
8 | fi
9 | PGPASSWORD="${PASS}" psql -U "${USR}" -h 127.0.0.1 -p 15432 dev_analytics
10 | 
--------------------------------------------------------------------------------
/sh/psql_init.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Recreates the local dev_analytics Postgres DB (127.0.0.1:15432) from a dump and
3 | # grants the 'lgryglicki' user identity-manage permission on all projects.
4 | # SQL=1 - restore from plain SQL (sh/dev_analytics_prod.sql) instead of pg_restore.
5 | if ( [ -z "$PASS" ] || [ -z "$APIPASS" ] )
6 | then
7 | echo "$0: please specify Postgres root password via PASS=..."
8 | echo "$0: please specify Postgres API password via APIPASS=..."
9 | exit 1
10 | fi
11 | fn=/tmp/query.sql
12 | # Always remove the temporary SQL file on exit (it contains APIPASS).
13 | function finish {
14 | rm -f "$fn" 2>/dev/null
15 | }
16 | trap finish EXIT
17 | cp sh/api_init.sql "$fn"
18 | # Substitute the PWD placeholder in the copied api_init.sql with the real API password.
19 | vim --not-a-term -c "%s/PWD/${APIPASS}/g" -c 'wq!' "$fn"
20 | export PGPASSWORD="${PASS}"
21 | psql -U postgres -h 127.0.0.1 -p 15432 < sh/api_drop.sql
22 | # Terminate open connections so the database can be dropped.
23 | psql -U postgres -h 127.0.0.1 -p 15432 -c "select pg_terminate_backend(pid) from pg_stat_activity where datname = 'dev_analytics'"
24 | psql -U postgres -h 127.0.0.1 -p 15432 -c 'drop database if exists "dev_analytics"'
25 | psql -U postgres -h 127.0.0.1 -p 15432 < "${fn}"
26 | # createdb -U postgres -h 127.0.0.1 -p 15432 dev_analytics
27 | if [ -z "$SQL" ]
28 | then
29 | pg_restore -U postgres -h 127.0.0.1 -p 15432 -d dev_analytics sh/dev_analytics_prod.dump
30 | else
31 | psql -U postgres -h 127.0.0.1 -p 15432 dev_analytics < sh/dev_analytics_prod.sql
32 | fi
33 | # psql -U postgres -h 127.0.0.1 -p 15432 dev_analytics -c "insert into access_control_entries(scope, subject, resource, action, effect, extra) select 'odpi/egeria', 'lgryglicki', 'identity', 'manage', 0, '{}'"
34 | psql -U postgres -h 127.0.0.1 -p 15432 dev_analytics -c "insert into access_control_entries(scope, subject, resource, action, effect) select distinct slug, 'lgryglicki', 'identity', 'manage', 0 from projects"
35 | psql -U postgres -h 127.0.0.1 -p 15432 dev_analytics -c "insert into access_control_entries(scope, subject, resource, action, effect) select distinct '/projects/' || slug, 'lgryglicki', 'identity', 'manage', 0 from projects"
36 | psql -U postgres -h 127.0.0.1 -p 15432 dev_analytics -c '\d'
37 | 
--------------------------------------------------------------------------------
/sh/psql_local_docker.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Starts a Postgres container with the given superuser password on host port 15432.
3 | if [ -z "$PASS" ]
4 | then
5 | echo "$0: please specify Postgres password via PASS=..."
6 | exit 1
7 | fi
8 | docker run -p 15432:5432 -e POSTGRES_PASSWORD="${PASS}" postgres
9 | 
--------------------------------------------------------------------------------
/sh/psql_shell.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Opens a psql shell to the local dev_analytics DB (127.0.0.1:15432) as postgres.
3 | if [ -z "$PASS" ]
4 | then
5 | echo "$0: please specify Postgres root password via PASS=..."
6 | exit 1
7 | fi
8 | PGPASSWORD="${PASS}" psql -U postgres -h 127.0.0.1 -p 15432 dev_analytics
9 | 
--------------------------------------------------------------------------------
/sh/restore_backup.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Downloads the SortingHat backup for day $1 (01-31) from the backups service at
3 | # $ELB, unpacks it into sh/sh_full.sql, then reinitializes the local MariaDB.
4 | if [ -z "$1" ]
5 | then
6 | echo "$0: please provide day value 01-31"
7 | exit 1
8 | fi
9 | if [ -z "$ELB" ]
10 | then
11 | echo "$0: please provide SH backups AWS ELB service URL via ELB=..."
12 | exit 2
13 | fi
14 | rm -f out.bz2 out && wget "${ELB}/backups/sortinghat-${1}.sql.bz2" -O out.bz2 && bzip2 -d out.bz2 && mv out sh/sh_full.sql && ./sh/mariadb.sh
15 | 
--------------------------------------------------------------------------------
/sh/shared.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Shared prelude for the curl_* API scripts: validates JWT_TOKEN and the project
3 | # slug argument, then resolves API_URL / ORIGIN aliases further below.
4 | if ( [ -z "${SKIP_TOKEN}" ] && [ -z "$JWT_TOKEN" ] )
5 | then
6 | # Fixed typo in the user-facing message: "sepcify" -> "specify".
7 | echo "$0: please specify your JWT token via JWT_TOKEN=..."
8 | exit 1
9 | fi
7 |
8 | if ( [ -z "${SKIP_PROJECT}" ] && [ -z "$1" ] )
9 | then
10 | echo "$0: please specify project slug(s) as a 1st arg, example 'onap,opnfv,burrow,aries'"
11 | exit 2
12 | fi
13 |
14 | if [ -z "$API_URL" ]
15 | then
16 | export API_URL="http://127.0.0.1:8080"
17 | fi
18 | if [ "$API_URL" = "prod" ]
19 | then
20 | export API_URL="`cat helm/da-affiliation/secrets/API_URL.prod.secret`"
21 | fi
22 | if [ "$API_URL" = "test" ]
23 | then
24 | export API_URL="`cat helm/da-affiliation/secrets/API_URL.test.secret`"
25 | fi
26 |
27 | if [ -z "$ORIGIN" ]
28 | then
29 | export ORIGIN="http://127.0.0.1"
30 | fi
31 | if [ "$ORIGIN" = "prod" ]
32 | then
33 | export ORIGIN='https://lfanalytics.io'
34 | fi
35 | if [ "$ORIGIN" = "test" ]
36 | then
37 | export ORIGIN='https://insights.test.platform.linuxfoundation.org'
38 | fi
39 |
40 |
41 | rawurlencode() {
42 | local string="${1}"
43 | local strlen=${#string}
44 | local encoded=""
45 | local pos c o
46 | for (( pos=0 ; pos= '${efrom}' and metadata__updated_on < '${eto}' group by author_uuid, author_org_name limit 10000\\\"}\""
120 | echo "$cid: ${arr[0]}: $efrom - $eto"
121 | eval "$cmd"
122 | else
123 | echo "$cid: ${arr[0]}: $from - $to"
124 | fi
125 | done
126 | if [ "$pass" = "1" ]
127 | then
128 | echo "Final SH enrollments:"
129 | cmd="$shacc \"select e.project_slug, o.name, e.start, e.end from enrollments e, organizations o where e.organization_id = o.id and e.uuid = '${uuid}' order by e.project_slug, e.start\""
130 | eval "${cmd}"
131 | echo "Final ES data:"
132 | cmd="curl -s -XPOST -H 'Content-Type: application/json' \"${esacc}/_sql?format=txt\" -d\"{\\\"query\\\":\\\"select author_uuid, author_org_name, count(*) as cnt, min(metadata__updated_on) as first, max(metadata__updated_on) as last from \\\\\\\"sds-finos-*,-*-raw,-*-temp\\\\\\\" where author_uuid = '${uuid}' group by author_uuid, author_org_name limit 10000\\\"}\""
133 | eval "$cmd"
134 | fi
135 | done
136 |
--------------------------------------------------------------------------------
/sh/update_mapping.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # args: env index-name mapping.json
3 | # Recreates an ES index with a new mapping by round-tripping its documents
4 | # through a temporary index: index -> tmp, recreate index, tmp -> index.
5 | if [ -z "$1" ]
6 | then
7 | echo "$0: you need to specify env: test|prod"
8 | exit 1
9 | fi
10 | ESURL="`cat ../sync-data-sources/helm-charts/sds-helm/sds-helm/secrets/ES_URL.${1}.secret`"
11 | if [ -z "$ESURL" ]
12 | then
13 | echo "$0: cannot get ES URL value"
14 | exit 2
15 | fi
16 | if [ -z "$2" ]
17 | then
18 | echo "$0: you need to provide the index name as a 2nd arg"
19 | exit 3
20 | fi
21 | if [ -z "$3" ]
22 | then
23 | echo "$0: you need to provide mapping file as a 3rd arg"
24 | exit 4
25 | fi
26 | index="${2}"
27 | mapping=$(cat "${3}" | jq --compact-output -rS '.')
28 | if [ -z "$mapping" ]
29 | then
30 | echo "$0: cannot read mapping from '$3'"
31 | exit 6
32 | fi
33 | # NOTE(review): the next three lines were reconstructed from a garbled source
34 | # line - confirm the random-suffix generation matches the original intent.
35 | suff=`tr -dc a-z0-9 < /dev/urandom | head -c 8`
36 | tmpindex="${index}-${suff}"
37 | curl -s -XDELETE "${ESURL}/${tmpindex}" 1>/dev/null 2>/dev/null
38 | com="curl -s -XPUT -H 'Content-Type: application/json' \"${ESURL}/${tmpindex}\" -d'${mapping}' | jq -rS '.'"
39 | #echo $com
40 | eval $com || exit 7
41 | # Fixed: dest index was the literal string "tmpindex" instead of "${tmpindex}".
42 | curl -s -XPOST -H 'Content-Type: application/json' "${ESURL}/_reindex?refresh=true&wait_for_completion=true" -d"{\"conflicts\":\"proceed\",\"source\":{\"index\":\"${index}\"},\"dest\":{\"index\":\"${tmpindex}\"}}" | jq -rS '.' || exit 8
43 | curl -s -XDELETE "${ESURL}/${index}" | jq -rS '.' || exit 9
44 | com="curl -s -XPUT -H 'Content-Type: application/json' \"${ESURL}/${index}\" -d'${mapping}' | jq -rS '.'"
45 | #echo $com
46 | eval $com || exit 9
47 | # Fixed: dest index was the literal string "index" instead of "${index}", so the
48 | # data was never copied back into the recreated index.
49 | curl -s -XPOST -H 'Content-Type: application/json' "${ESURL}/_reindex?refresh=true&wait_for_completion=true" -d"{\"conflicts\":\"proceed\",\"source\":{\"index\":\"${tmpindex}\"},\"dest\":{\"index\":\"${index}\"}}" | jq -rS '.' || exit 10
50 | curl -s -XDELETE "${ESURL}/${tmpindex}" | jq -rS '.'
51 | echo "All OK"
52 | 
--------------------------------------------------------------------------------
/sh/uuids.json:
--------------------------------------------------------------------------------
1 | {
2 | "size": 0,
3 | "query": {
4 | "bool": {
5 | "must": [
6 | {
7 | "exists": {
8 | "field": "author_uuid"
9 | }
10 | }
11 | ],
12 | "must_not": [
13 | {
14 | "match_phrase": {
15 | "author_bot": true
16 | }
17 | }
18 | ]
19 | }
20 | },
21 | "aggs": {
22 | "contributions": {
23 | "terms": {
24 | "field": "author_uuid",
25 | "missing": "",
26 | "size": 10
27 | }
28 | }
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/sh/validate_token.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Checks that a JWT token is accepted by the API's authping endpoint.
3 | # args: 1 - env (test|prod), 2 - optional token file path
4 | #       (defaults to secret/lgryglicki.<env>.token).
5 | if [ -z "${1}" ]
6 | then
7 | echo "$0: you need to specify env: test|prod"
8 | exit 1
9 | fi
10 | if [ -z "${2}" ]
11 | then
12 | fn="secret/lgryglicki.${1}.token"
13 | else
14 | fn="${2}"
15 | fi
16 | token="`cat ${fn}`"
17 | if [ -z "${token}" ]
18 | then
19 | echo "$0: cannot read file ${fn}"
20 | exit 2
21 | fi
22 | audience="`cat helm/da-affiliation/secrets/AUTH0_AUDIENCE.${1}.secret`"
23 | if [ -z "$audience" ]
24 | then
25 | echo "$0: cannot read file helm/da-affiliation/secrets/AUTH0_AUDIENCE.${1}.secret"
26 | exit 3
27 | fi
28 | if [ ! -z "$DEBUG" ]
29 | then
30 | echo "curl -s -XGET -H 'Content-Type: application/json' -H 'Authorization: Bearer ${token}' '${audience}authping'"
31 | curl -s -XGET -H 'Content-Type: application/json' -H "Authorization: Bearer ${token}" "${audience}authping"
32 | fi
33 | # A non-null JSON .Message field in the response means the token was rejected.
34 | result=`curl -s -XGET -H 'Content-Type: application/json' -H "Authorization: Bearer ${token}" "${audience}authping"`
35 | msg=`echo "${result}" | jq '.Message'`
36 | if [ ! "${msg}" = "null" ]
37 | then
38 | echo "Token invalid: ${msg}"
39 | else
40 | sub=`echo "${result}" | jq '.sub'`
41 | echo "Valid: ${sub}"
42 | fi
43 | 
--------------------------------------------------------------------------------
/sql/add_last_modified_by.sql:
--------------------------------------------------------------------------------
1 | -- Adds `last_modified_by` column (presumably the username of the last editor -
2 | -- verify against the API code) to all SortingHat tables, including archives.
3 | alter table domains_organizations add last_modified_by varchar(128);
4 | alter table enrollments add last_modified_by varchar(128);
5 | alter table enrollments_archive add last_modified_by varchar(128);
6 | alter table identities add last_modified_by varchar(128);
7 | alter table identities_archive add last_modified_by varchar(128);
8 | alter table matching_blacklist add last_modified_by varchar(128);
9 | alter table organizations add last_modified_by varchar(128);
10 | alter table profiles add last_modified_by varchar(128);
11 | alter table profiles_archive add last_modified_by varchar(128);
12 | alter table slug_mapping add last_modified_by varchar(128);
13 | alter table uidentities add last_modified_by varchar(128);
14 | alter table uidentities_archive add last_modified_by varchar(128);
15 | 
--------------------------------------------------------------------------------
/sql/add_locked_by.sql:
--------------------------------------------------------------------------------
1 | -- Adds `locked_by` column to the editable SortingHat tables, with an index on
2 | -- each for fast lookups by the locking value.
3 | alter table enrollments add locked_by varchar(128);
4 | alter table identities add locked_by varchar(128);
5 | alter table profiles add locked_by varchar(128);
6 | alter table uidentities add locked_by varchar(128);
7 | -- Indices
8 | create index enrollments_locked_by_idx on enrollments(locked_by);
9 | create index identities_locked_by_idx on identities(locked_by);
10 | create index profiles_locked_by_idx on profiles(locked_by);
11 | create index uidentities_locked_by_idx on uidentities(locked_by);
12 | 
--------------------------------------------------------------------------------
/sql/add_permissions.sql:
--------------------------------------------------------------------------------
1 | -- Grants the 'lgryglicki' user identity-manage permission on every project,
2 | -- under both the bare slug scope and the '/projects/<slug>' scope.
3 | insert into access_control_entries(scope, subject, resource, action, effect) select distinct slug, 'lgryglicki', 'identity', 'manage', 0 from projects;
4 | insert into access_control_entries(scope, subject, resource, action, effect) select distinct '/projects/' || slug, 'lgryglicki', 'identity', 'manage', 0 from projects;
5 | 
--------------------------------------------------------------------------------
/sql/check_results.sql:
--------------------------------------------------------------------------------
1 | -- Dumps all live and archived rows for the two fixed test uuids (the same ones
2 | -- used by sql/testing_api.sql), to inspect DB state after API operations.
3 | select * from uidentities where uuid in ('16fe424acecf8d614d102fc0ece919a22200481d', 'aaa8024197795de9b90676592772633c5cfcb35a');
4 | select * from identities where uuid in ('16fe424acecf8d614d102fc0ece919a22200481d', 'aaa8024197795de9b90676592772633c5cfcb35a');
5 | select * from profiles where uuid in ('16fe424acecf8d614d102fc0ece919a22200481d', 'aaa8024197795de9b90676592772633c5cfcb35a');
6 | select * from enrollments where uuid in ('16fe424acecf8d614d102fc0ece919a22200481d', 'aaa8024197795de9b90676592772633c5cfcb35a');
7 | select * from uidentities_archive where uuid in ('16fe424acecf8d614d102fc0ece919a22200481d', 'aaa8024197795de9b90676592772633c5cfcb35a');
8 | select * from identities_archive where uuid in ('16fe424acecf8d614d102fc0ece919a22200481d', 'aaa8024197795de9b90676592772633c5cfcb35a');
9 | select * from profiles_archive where uuid in ('16fe424acecf8d614d102fc0ece919a22200481d', 'aaa8024197795de9b90676592772633c5cfcb35a');
10 | select * from enrollments_archive where uuid in ('16fe424acecf8d614d102fc0ece919a22200481d', 'aaa8024197795de9b90676592772633c5cfcb35a');
11 | 
--------------------------------------------------------------------------------
/sql/check_sh_db.sql:
--------------------------------------------------------------------------------
1 | -- Inspects (selects) and then cleans up duplicate enrollments that differ only
2 | -- by the two "unbounded date" conventions in use ('1900-01-01'/'2100-01-01'
3 | -- vs '1970-01-01'/'2099-01-01'), finally normalizing rows to the 1900/2100 form.
4 | select e1.uuid, e1.id, e2.id, e1.start, e2.start, e1.end, e2.end, o1.name, o2.name from enrollments e1, enrollments e2, organizations o1, organizations o2 where e1.uuid = e2.uuid and e1.start = '1900-01-01' and e2.start = '1970-01-01' and e1.organization_id = o1.id and e2.organization_id = o2.id;
5 | select e1.uuid, e1.id, e2.id, e1.start, e2.start, e1.end, e2.end, o1.name, o2.name from enrollments e1, enrollments e2, organizations o1, organizations o2 where e1.uuid = e2.uuid and e1.end = '2100-01-01' and e2.end = '2099-01-01' and e1.organization_id = o1.id and e2.organization_id = o2.id;
6 | select e1.uuid, e1.id, e2.id, e1.start, e2.start, e1.end, e2.end, o1.name, o2.name from enrollments e1, enrollments e2, organizations o1, organizations o2 where e1.uuid = e2.uuid and (e1.start = '1900-01-01' or e1.end = '2100-01-01') and (e2.start = '1970-01-01' or e2.end = '2099-01-01') and e1.organization_id = o1.id and e2.organization_id = o2.id;
7 | delete from enrollments where id in (select e1.id from enrollments e1, enrollments e2 where e1.uuid = e2.uuid and e1.start = '1900-01-01' and e1.end = '2100-01-01' and e2.start = '1970-01-01' and e2.end = '2099-01-01' and e1.organization_id = e2.organization_id);
8 | delete from enrollments where id in (select e1.id from enrollments e1, enrollments e2 where e1.uuid = e2.uuid and e1.start = '1900-01-01' and e1.end = '2100-01-01' and e2.start = '1970-01-01' and e2.end = '2099-01-01');
9 | delete from enrollments where id in (select e1.id from enrollments e1, enrollments e2 where e1.uuid = e2.uuid and e1.start = '1900-01-01' and e2.start = '1970-01-01');
10 | delete from enrollments where id in (select e2.id from enrollments e1, enrollments e2 where e1.uuid = e2.uuid and e1.end = '2100-01-01' and e2.end = '2099-01-01');
11 | update enrollments set start = '1900-01-01' where start = '1970-01-01';
12 | update enrollments set end = '2100-01-01' where end = '2099-01-01';
13 | delete from enrollments where start = '2099-01-01';
14 | 
--------------------------------------------------------------------------------
/sql/profiles_without_identities_and_enrollments.sql:
--------------------------------------------------------------------------------
1 | -- Lists profiles that have neither identities nor enrollments attached.
2 | select i.* from (select p.* from profiles p left join identities i on p.uuid = i.uuid where i.uuid is null) i left join enrollments e on i.uuid = e.uuid where e.uuid is null;
3 | 
--------------------------------------------------------------------------------
/sql/structure_updates.sql:
--------------------------------------------------------------------------------
1 | DROP TABLE IF EXISTS `profiles_archive`; -- this file (re)creates *_archive tables mirroring the live tables plus an archived_at timestamp
2 | /*!40101 SET @saved_cs_client = @@character_set_client */;
3 | /*!40101 SET character_set_client = utf8 */;
4 | CREATE TABLE `profiles_archive` (
5 | `archived_at` datetime(6) NOT NULL DEFAULT now(),
6 | `uuid` varchar(128) COLLATE utf8mb4_unicode_520_ci NOT NULL,
7 | `name` varchar(128) COLLATE utf8mb4_unicode_520_ci DEFAULT NULL,
8 | `email` varchar(128) COLLATE utf8mb4_unicode_520_ci DEFAULT NULL,
9 | `gender` varchar(32) COLLATE utf8mb4_unicode_520_ci DEFAULT NULL,
10 | `gender_acc` int(11) DEFAULT NULL,
11 | `is_bot` tinyint(1) DEFAULT NULL,
12 | `country_code` varchar(2) COLLATE utf8mb4_unicode_520_ci DEFAULT NULL
13 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;
14 | /*!40101 SET character_set_client = @saved_cs_client */;
15 |
16 | DROP TABLE IF EXISTS `uidentities_archive`;
17 | /*!40101 SET @saved_cs_client = @@character_set_client */;
18 | /*!40101 SET character_set_client = utf8 */;
19 | CREATE TABLE `uidentities_archive` (
20 | `archived_at` datetime(6) NOT NULL DEFAULT now(),
21 | `uuid` varchar(128) COLLATE utf8mb4_unicode_520_ci NOT NULL,
22 | `last_modified` datetime(6) DEFAULT NULL
23 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;
24 |
25 | DROP TABLE IF EXISTS `enrollments_archive`;
26 | /*!40101 SET @saved_cs_client = @@character_set_client */;
27 | /*!40101 SET character_set_client = utf8 */;
28 | CREATE TABLE `enrollments_archive` (
29 | `archived_at` datetime(6) NOT NULL DEFAULT now(),
30 | `id` int(11) NOT NULL,
31 | `start` datetime NOT NULL,
32 | `end` datetime NOT NULL,
33 | `uuid` varchar(128) COLLATE utf8mb4_unicode_520_ci NOT NULL,
34 | `organization_id` int(11) NOT NULL
35 | ) ENGINE=InnoDB AUTO_INCREMENT=5347 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;
36 | /*!40101 SET character_set_client = @saved_cs_client */;
37 |
38 | DROP TABLE IF EXISTS `identities_archive`;
39 | /*!40101 SET @saved_cs_client = @@character_set_client */;
40 | /*!40101 SET character_set_client = utf8 */;
41 | CREATE TABLE `identities_archive` (
42 | `archived_at` datetime(6) NOT NULL DEFAULT now(),
43 | `id` varchar(128) COLLATE utf8mb4_unicode_520_ci NOT NULL,
44 | `name` varchar(128) COLLATE utf8mb4_unicode_520_ci DEFAULT NULL,
45 | `email` varchar(128) COLLATE utf8mb4_unicode_520_ci DEFAULT NULL,
46 | `username` varchar(128) COLLATE utf8mb4_unicode_520_ci DEFAULT NULL,
47 | `source` varchar(32) COLLATE utf8mb4_unicode_520_ci NOT NULL,
48 | `uuid` varchar(128) COLLATE utf8mb4_unicode_520_ci DEFAULT NULL,
49 | `last_modified` datetime(6) DEFAULT NULL
50 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;
51 | /*!40101 SET character_set_client = @saved_cs_client */;
52 |
--------------------------------------------------------------------------------
/sql/testing_api.sql:
--------------------------------------------------------------------------------
1 | -- Blanks out selected fields on two fixed test profiles so merge/unmerge API
2 | -- testing (see TESTING_API=1 in sh/mariadb.sh) has missing data to work with.
3 | update profiles set name = null, gender = null, gender_acc = null where uuid = '16fe424acecf8d614d102fc0ece919a22200481d';
4 | update profiles set email = null, country_code = null, is_bot = 1 where uuid = 'aaa8024197795de9b90676592772633c5cfcb35a';
5 | 
--------------------------------------------------------------------------------
/swagger/errors.go:
--------------------------------------------------------------------------------
1 | package swagger
2 |
3 | import (
4 | "strings"
5 |
6 | "github.com/LF-Engineering/dev-analytics-affiliation/errs"
7 | "github.com/LF-Engineering/dev-analytics-affiliation/gen/models"
8 | "github.com/LF-Engineering/dev-analytics-affiliation/gen/restapi/operations/health"
9 | "github.com/LF-Engineering/dev-analytics-affiliation/shared"
10 | "github.com/go-openapi/runtime/middleware"
11 | "github.com/sirupsen/logrus"
12 | )
13 |
14 | // ErrorResponse wraps the error in the api standard models.ErrorResponse object
15 | func ErrorResponse(err error) *models.ErrorResponse {
16 | cd := ""
17 | if e, ok := err.(errs.CodedError); ok {
18 | cd = e.Code()
19 | }
20 | errMsg := err.Error()
21 | for redacted := range shared.GRedacted {
22 | if len(redacted) > 3 {
23 | errMsg = strings.Replace(errMsg, redacted, "[redacted]", -1)
24 | }
25 | }
26 | return &models.ErrorResponse{
27 | Code: cd,
28 | Message: errMsg,
29 | }
30 | }
31 |
32 | // ErrorHandler is a convenience method for returning the appropriate response based on the error
33 | func ErrorHandler(label string, err error) middleware.Responder {
34 | logrus.WithError(err).Error(label)
35 | e, ok := err.(errs.CodedError)
36 | if !ok {
37 | return health.NewGetHealthBadRequest().WithPayload(ErrorResponse(err))
38 | }
39 |
40 | switch e.Code() {
41 | case errs.ErrBadRequest:
42 | return health.NewGetHealthBadRequest().WithPayload(ErrorResponse(err))
43 | case errs.ErrUnauthorized:
44 | return health.NewGetHealthUnauthorized().WithPayload(ErrorResponse(err))
45 | case errs.ErrForbidden:
46 | return health.NewGetHealthForbidden().WithPayload(ErrorResponse(err))
47 | case errs.ErrNotFound:
48 | return health.NewGetHealthNotFound().WithPayload(ErrorResponse(err))
49 | case errs.ErrConflict:
50 | return health.NewGetHealthConflict().WithPayload(ErrorResponse(err))
51 | default:
52 | return health.NewGetHealthInternalServerError().WithPayload(ErrorResponse(err))
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/usersvc/user.go:
--------------------------------------------------------------------------------
1 | package usersvc
2 |
3 | import (
4 | "fmt"
5 | "strconv"
6 | "strings"
7 | "time"
8 |
9 | "github.com/LF-Engineering/dev-analytics-affiliation/gen/models"
10 | "github.com/LF-Engineering/dev-analytics-affiliation/shared"
11 |
12 | log "github.com/LF-Engineering/dev-analytics-affiliation/logging"
13 | "github.com/LF-Engineering/dev-analytics-libraries/users"
14 | )
15 |
// Service - access platform user services
type Service interface {
	// GetList returns one page of users matching the query string
	// (args: query, rows per page, 1-based page number).
	GetList(string, int64, int64) (*models.UserDataArray, error)
	// GetListAll returns every user, paging through the backend internally.
	GetListAll() (*models.UserDataArray, error)
}

// service implements Service on top of the dev-analytics-libraries users client.
type service struct {
	shared.ServiceStruct
	// usr is the platform user-service client used for all lookups.
	usr *users.Client
}

// New returns a user Service backed by the given platform users client.
func New(usr *users.Client) Service {
	return &service{
		usr: usr,
	}
}
33 |
34 | // GetList ...
35 | func (s *service) GetList(q string, rows, page int64) (*models.UserDataArray, error) {
36 | getList := &models.UserDataArray{}
37 | var users []*models.UserData
38 | response, err := s.usr.List(q, strconv.FormatInt(rows, 10), strconv.FormatInt(page-1, 10))
39 | if err != nil {
40 | return nil, err
41 | }
42 | for _, usr := range response.Data {
43 | users = append(users, &models.UserData{ID: usr.ID, Name: usr.Name, Email: usr.Email, Username: usr.Username})
44 | }
45 | log.Info(fmt.Sprintf("GetList: q:%s rows:%d page:%d", q, rows, page))
46 | getList.Users = users
47 | return getList, nil
48 | }
49 |
50 | // GetListAll ...
51 | func (s *service) GetListAll() (*models.UserDataArray, error) {
52 | getList := &models.UserDataArray{}
53 | var users []*models.UserData
54 | pageSize := 5000
55 | offset := 0
56 | total := -1
57 | for {
58 | response, err := s.usr.List("", strconv.Itoa(pageSize), strconv.Itoa(offset))
59 | if err != nil {
60 | if strings.Contains(err.Error(), "502 Bad Gateway") {
61 | time.Sleep(3 * time.Second)
62 | continue
63 | }
64 | return nil, err
65 | }
66 | for _, usr := range response.Data {
67 | users = append(users, &models.UserData{ID: usr.ID, Name: usr.Name, Email: usr.Email, Username: usr.Username})
68 | }
69 | if total < 0 {
70 | total = response.Metadata.TotalSize
71 | }
72 | if offset+pageSize < total {
73 | offset += pageSize
74 | //if offset > 3*pageSize {
75 | // break
76 | //}
77 | } else {
78 | break
79 | }
80 | log.Info(fmt.Sprintf("GetListAll: got %d users so far, page size: %d, offset: %d", len(users), pageSize, offset))
81 | //log.Info(fmt.Sprintf("Metadata: %+v\n", response.Metadata))
82 | }
83 | getList.Users = users
84 | return getList, nil
85 | }
86 |
--------------------------------------------------------------------------------