├── .dockerignore ├── .editorconfig ├── .envrc ├── .gitignore ├── .gitlab-ci.yml ├── .pylintrc ├── Dockerfile ├── LICENSE ├── Makefile ├── README.md ├── app.py ├── automig.Dockerfile ├── deploy ├── .gitignore ├── Makefile ├── README.md ├── arbout-gcloud.tf └── kube │ ├── .gitignore │ ├── deployment.yml │ ├── kustomization.yml │ ├── migrate.yml │ └── service.yml ├── dev-requirements.txt ├── lib ├── __init__.py ├── core_blueprint.py ├── diff_summary.py ├── search.py ├── states.json └── util.py ├── mypy.ini ├── prod.Dockerfile ├── requirements.txt ├── schema └── base.sql ├── static └── .gitkeep ├── templates ├── after_submit.jinja.htm ├── base.jinja.htm ├── dispute.jinja.htm ├── invalid.jinja.htm ├── macros.jinja.htm ├── search.jinja.htm ├── serp.jinja.htm ├── splash.jinja.htm ├── submit.jinja.htm └── terms.jinja.htm └── test ├── __init__.py └── test_diff_summary.py /.dockerignore: -------------------------------------------------------------------------------- 1 | .direnv/ 2 | __pycache__/ 3 | *.zip 4 | *.ods 5 | .mypy_cache/ 6 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*.{py,htm,js,yml,go,tf,sql}] 4 | indent_style = space 5 | indent_size = 2 6 | -------------------------------------------------------------------------------- /.envrc: -------------------------------------------------------------------------------- 1 | layout_python3 2 | export AUTOMIG_CON=postgresql://postgres@$(make db-host) 3 | # note: these are for local testing only and MUST not be used in prod 4 | export ARB_SALT=307a1fb84d6349bf 5 | # 24-byte AES key 6 | export ARB_CRYPT=ce1b03bc681ecbdb5aa2ad35af297cbb17114cdc30ba0099 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.ods 2 | .direnv/ 3 | __pycache__/ 4 | *.zip 5 | static/bootstrap.min.css 6 | .mypy_cache/ 7 | -------------------------------------------------------------------------------- /.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | build: 2 | variables: 3 | IMAGE_TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_SHORT_SHA 4 | script: 5 | - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY 6 | - docker build -t $IMAGE_TAG --build-arg build_slug=$CI_COMMIT_SHORT_SHA -f prod.Dockerfile . 7 | - docker push $IMAGE_TAG 8 | 9 | build_migrate: 10 | variables: 11 | IMAGE_TAG: $CI_REGISTRY_IMAGE/migrate:$CI_COMMIT_SHORT_SHA 12 | script: 13 | - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY 14 | - docker build -f automig.Dockerfile --build-arg build_slug=$CI_COMMIT_SHORT_SHA -t $IMAGE_TAG . 15 | - docker push $IMAGE_TAG 16 | 17 | test: 18 | variables: 19 | IMAGE_TAG: $CI_REGISTRY_IMAGE/test:$CI_COMMIT_SHORT_SHA 20 | script: 21 | - docker build -t $IMAGE_TAG . 
22 | - docker run --rm $IMAGE_TAG pylint lib 23 | - docker run --rm $IMAGE_TAG mypy lib 24 | - docker run --rm $IMAGE_TAG pytest 25 | 26 | deploy: 27 | environment: local 28 | variables: 29 | IMAGE_TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_SHORT_SHA 30 | when: manual 31 | script: 32 | - docker rm -fv arbout || echo rm fail 33 | - docker run -d --name arbout -e ARB_SALT=$ARB_SALT -e SCRIPT_NAME=/arbout -p $ARB_PORT:8000 --restart unless-stopped $IMAGE_TAG 34 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MESSAGES CONTROL] 2 | 3 | disable= 4 | line-too-long, 5 | multiple-imports, 6 | fixme, 7 | missing-function-docstring, 8 | multiple-statements, 9 | too-many-locals, 10 | trailing-comma-tuple, 11 | no-else-return, 12 | missing-class-docstring, 13 | no-else-raise, 14 | too-few-public-methods, 15 | global-statement, 16 | too-many-instance-attributes, 17 | 18 | [FORMAT] 19 | 20 | indent-string = " " 21 | indent-after-paren=2 22 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.7.6 2 | 3 | WORKDIR /arbout 4 | 5 | # deps 6 | COPY requirements.txt . 7 | RUN pip install -r requirements.txt 8 | COPY dev-requirements.txt . 9 | RUN pip install -r dev-requirements.txt 10 | 11 | # files 12 | COPY lib lib/ 13 | COPY test test/ 14 | COPY mypy.ini . 15 | COPY .pylintrc . 16 | COPY app.py . 17 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Modified MIT License 2 | 3 | MIT License: 4 | 5 | Copyright (c) 2020 Abe Winter 6 | 7 | Permission is hereby granted, free of charge, to any person obtaining a copy 8 | of this software and associated documentation files (the "Software"), to deal 9 | in the Software without restriction, including without limitation the rights 10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | copies of the Software, and to permit persons to whom the Software is 12 | furnished to do so, subject to the following conditions: 13 | 14 | The above copyright notice and this permission notice shall be included in all 15 | copies or substantial portions of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | SOFTWARE. 24 | 25 | Extra conditions: 26 | 27 | No license of marks: This license does not grant you permission to use the Arbout 28 | name or other marks, including logos. 29 | 30 | No legal advice: Nothing in this project shall be construed as legal advice. 31 | 32 | No retaliation: If you run an instance of this project or a substantial clone, 33 | you must not retaliate against, punish, or otherwise harm submitters to your 34 | instance, or submitters to any other instance or clone of this project. 
35 | 36 | Operator responsible for terms: If you run an instance of this project or 37 | substantial clone, you must review the terms of service and either comply with 38 | them or modify them. 39 | 40 | Transparent build numbers: If you run an instance or clone and set the VERSION 41 | param or use other means to display a build number, you must really be running 42 | that build. 43 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | DBNAME=arbout-db 2 | 3 | start-db: 4 | docker run --name $(DBNAME) -d postgres:11 5 | 6 | db-host: 7 | @docker inspect -f '{{.NetworkSettings.IPAddress}}' $(DBNAME) 8 | 9 | psql: 10 | docker exec -it $(DBNAME) psql -U postgres 11 | 12 | BOOTSTRAP_VERSION := 4.4.1 13 | BOOTSTRAP_ZIP := bootstrap-$(BOOTSTRAP_VERSION)-dist.zip 14 | $(BOOTSTRAP_ZIP): 15 | wget -q https://github.com/twbs/bootstrap/releases/download/v$(BOOTSTRAP_VERSION)/$(BOOTSTRAP_ZIP) 16 | 17 | static/bootstrap.min.css: $(BOOTSTRAP_ZIP) 18 | unzip -j $(BOOTSTRAP_ZIP) bootstrap-$(BOOTSTRAP_VERSION)-dist/css/bootstrap.min.css -d static 19 | 20 | lint: 21 | pylint lib 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Arbout 2 | 3 | Arbout is a database & aggregate search engine for arbitration outcomes. 4 | 5 | There's a live server hosted at [arbout.org](https://arbout.org). 6 | 7 | ## Why aggregate search 8 | 9 | The arbout search engine doesn't return individual cases -- it only returns summaries when many cases against a single party have the same details. 10 | 11 | Individual arbitration outcomes are often protected from disclosure by confidentiality rules, but there's a public policy interest in revealing these outcomes in aggregate. 
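As a rough sketch of how that rollup works (the real logic lives in `lib/diff_summary.py` and `lib/util.py`; the function names below are simplified for illustration): a field value is only reported when at least `GROUP_THRESHOLD` cases share it, and every count is rounded into a coarse bracket instead of being shown exactly.

```python
# simplified sketch of the aggregate-only rollup -- see lib/diff_summary.py for the real version
from collections import Counter

GROUP_THRESHOLD = 10  # values shared by fewer cases than this are dropped entirely

def bracket(count: int) -> str:
    "round an exact count to a coarse range so exact tallies are never revealed"
    bucket = 10 if count < 100 else 100
    bottom = count - count % bucket
    return f"{bottom or 1} - {bottom + bucket - 1}"

def summarize_field(values: list) -> dict:
    "report a value only when it appears in at least GROUP_THRESHOLD cases"
    counts = Counter(value for value in values if value is not None)
    return {value: bracket(n) for value, n in sorted(counts.items()) if n >= GROUP_THRESHOLD}
```

Anything below the threshold simply never appears in a result, so a single case (or a small handful of cases) can't be reconstructed from search output.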
12 | -------------------------------------------------------------------------------- /app.py: -------------------------------------------------------------------------------- 1 | "app.py -- flask entry-point" 2 | 3 | import flask 4 | from lib import core_blueprint 5 | from lib.util import ssl_middleware 6 | 7 | APP = flask.Flask(__name__) 8 | APP.before_request(ssl_middleware) 9 | APP.register_blueprint(core_blueprint.CORE) 10 | -------------------------------------------------------------------------------- /automig.Dockerfile: -------------------------------------------------------------------------------- 1 | # This image runs migration jobs 2 | 3 | FROM python:3.7.6-slim 4 | 5 | # deps 6 | RUN apt-get update 7 | RUN apt-get install git -qqy 8 | # note: update this to the latest version if it looks old 9 | RUN pip install automig[postgres]==0.0.20 10 | 11 | # files 12 | WORKDIR /migrate 13 | # note: schema is the location of your schema -- should match the AUTOMIG_GLOB you pass to the automig tool 14 | COPY schema schema/ 15 | # note: automig relies on a git repo, normally dockerfiles don't include a git repo 16 | COPY .git .git/ 17 | 18 | ARG build_slug 19 | ENV TARGET $build_slug 20 | -------------------------------------------------------------------------------- /deploy/.gitignore: -------------------------------------------------------------------------------- 1 | .terraform/ 2 | terraform.tfstate* 3 | .envrc 4 | kubeconfig.yml 5 | -------------------------------------------------------------------------------- /deploy/Makefile: -------------------------------------------------------------------------------- 1 | kustomize: 2 | kubectl apply -k kube/ 3 | 4 | # this creates a migration job. update the migrate newTag in kustomization.yml or this will probably be a no-op 5 | migrate: 6 | kubectl create job mig-$(shell whoami)-$(shell date -u +%Y%m%d-%H%M) --from=cronjob/arbout-migrate 7 | -------------------------------------------------------------------------------- /deploy/README.md: -------------------------------------------------------------------------------- 1 | # Arbout deployment 2 | 3 | Example terraform & kube configs. This is used to host the live arbout at https://arbout.org. 4 | 5 | ## Setup steps 6 | 7 | ### Cloud basics 8 | 9 | * deps: kubectl and terraform binaries 10 | * set up a google cloud account 11 | * look at the terraform variables at the top of arbout-gcloud.tf and populate them in an .envrc or something 12 | * run the terraform 13 | 14 | ### Kube basics 15 | 16 | * when the terraform is clean (may initially error because resources take a while to create), use something like `gcloud container clusters get-credentials $CLUSTERNAME` to get kubeconfig credentials, set KUBECONFIG=kubeconfig.yml or something 17 | * you'll need to build the kube images and push them to a docker registry somewhere. Check out ../.gitlab-ci.yml for an example of how to do this 18 | * in kustomization.yml, set `newName` to your hosted location and set `newTag` to an up-to-date tag 19 | * create kube/secrets.env with vars `db_url`, `global_salt`, `global_crypt` (todo: instructions on key sizes) 20 | * create kube/.dockerconfigjson with creds to your repo 21 | * update `image` everywhere to point to your docker image (todo: build and host this publicly) 22 | * override the host in the Ingress in service.yml 23 | * run `make kustomize` to apply the kube 24 | * it takes a really long time (10+ minutes) for the certs and ingress to be correct; expect a bunch of 502s in the meantime.
you can `kubectl describe managedcertificate` to watch provisioning status 25 | * (todo: describe how to run the initial migration by modifying migrate.yml and running `make migrate`) 26 | 27 | ## Deploying a new version 28 | 29 | * update `newTag` and run `make kustomize` 30 | * (todo: migration instructions) 31 | -------------------------------------------------------------------------------- /deploy/arbout-gcloud.tf: -------------------------------------------------------------------------------- 1 | # arbout-gcloud.tf -- cloud resources for hosting arbout on google cloud 2 | 3 | variable google_project {} 4 | variable google_zone { default = "us-central1-a" } 5 | variable sql_readwrite_password {} 6 | variable sql_readwrite_username {} 7 | variable dns_name { default = "arbout.org." } 8 | 9 | provider google { 10 | project = var.google_project 11 | zone = var.google_zone 12 | } 13 | 14 | data google_compute_network default { 15 | name = "default" 16 | } 17 | 18 | resource google_compute_global_address db-private-ip { 19 | name = "arbout-db-private-ip" 20 | purpose = "VPC_PEERING" 21 | address_type = "INTERNAL" 22 | prefix_length = 16 23 | network = data.google_compute_network.default.self_link 24 | } 25 | 26 | resource google_service_networking_connection cx { 27 | network = data.google_compute_network.default.self_link 28 | service = "servicenetworking.googleapis.com" 29 | reserved_peering_ranges = [google_compute_global_address.db-private-ip.name] 30 | } 31 | 32 | resource google_sql_database_instance arbout-master { 33 | name = "arbout-master" 34 | database_version = "POSTGRES_11" 35 | depends_on = [google_service_networking_connection.cx] 36 | settings { 37 | tier = "db-g1-small" 38 | ip_configuration { 39 | ipv4_enabled = false 40 | private_network = data.google_compute_network.default.self_link 41 | } 42 | } 43 | } 44 | 45 | resource google_sql_user readwrite { 46 | name = var.sql_readwrite_username 47 | instance = google_sql_database_instance.arbout-master.name 48 | password = var.sql_readwrite_password 49 | } 50 | 51 | resource google_dns_managed_zone arbout { 52 | name = "arbout" 53 | dns_name = var.dns_name 54 | } 55 | 56 | resource google_compute_global_address arbout-ingress-ip { 57 | name = "arbout-ingress-ip" 58 | } 59 | 60 | resource google_dns_record_set naked { 61 | name = google_dns_managed_zone.arbout.dns_name 62 | managed_zone = google_dns_managed_zone.arbout.name 63 | type = "A" 64 | ttl = 300 65 | rrdatas = [google_compute_global_address.arbout-ingress-ip.address] 66 | } 67 | 68 | output sql_ip { value = google_sql_database_instance.arbout-master.private_ip_address } 69 | output sql_user { value = google_sql_user.readwrite.name } 70 | output dns_ns { value = google_dns_managed_zone.arbout.name_servers } 71 | output ingress_ip { value = google_compute_global_address.arbout-ingress-ip.address } 72 | -------------------------------------------------------------------------------- /deploy/kube/.gitignore: -------------------------------------------------------------------------------- 1 | secrets.env 2 | .dockerconfigjson 3 | -------------------------------------------------------------------------------- /deploy/kube/deployment.yml: -------------------------------------------------------------------------------- 1 | # deployment.yml -- deployment for the API 2 | apiVersion: apps/v1 3 | kind: Deployment 4 | metadata: 5 | name: arbout 6 | labels: &labels 7 | app: arbout 8 | spec: 9 | replicas: 1 10 | selector: 11 | matchLabels: *labels 12 | template: 13 | metadata: 14 |
labels: *labels 15 | spec: 16 | containers: 17 | - name: arbout 18 | image: arbout 19 | ports: 20 | - containerPort: 8000 21 | resources: 22 | requests: 23 | cpu: 0.05 24 | memory: 200M 25 | env: 26 | - name: AUTOMIG_CON 27 | valueFrom: {secretKeyRef: {name: arbout-secrets, key: db_url}} 28 | - name: ARB_SALT 29 | valueFrom: {secretKeyRef: {name: arbout-secrets, key: global_salt}} 30 | - name: ARB_CRYPT 31 | valueFrom: {secretKeyRef: {name: arbout-secrets, key: global_crypt}} 32 | readinessProbe: 33 | httpGet: 34 | path: /health 35 | port: 8000 36 | initialDelaySeconds: 1 37 | periodSeconds: 10 38 | imagePullSecrets: 39 | - name: regcred 40 | -------------------------------------------------------------------------------- /deploy/kube/kustomization.yml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | images: 4 | - name: arbout 5 | newName: repo/name/arbout 6 | newTag: a4af7973 7 | - name: arbout-migrate 8 | newName: repo/name/arbout/migrate 9 | newTag: a4af7973 10 | secretGenerator: 11 | - name: regcred 12 | files: 13 | - .dockerconfigjson 14 | type: kubernetes.io/dockerconfigjson 15 | - name: arbout-secrets 16 | env: secrets.env 17 | resources: 18 | - deployment.yml 19 | - service.yml 20 | - migrate.yml 21 | -------------------------------------------------------------------------------- /deploy/kube/migrate.yml: -------------------------------------------------------------------------------- 1 | apiVersion: batch/v1beta1 2 | kind: CronJob 3 | metadata: 4 | name: arbout-migrate 5 | spec: 6 | schedule: "0 0 1 * 0" 7 | suspend: true # this is only used as a --from= for jobs, hence doesn't run auto 8 | # backoffLimit: 1 # is this not supported in v1beta1? 
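# (backoffLimit is a Job-spec field, so if it's wanted it belongs under jobTemplate.spec.backoffLimit rather than at this level)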
9 | jobTemplate: 10 | spec: 11 | template: 12 | spec: 13 | restartPolicy: Never 14 | containers: 15 | - name: migrate 16 | image: arbout-migrate 17 | command: [automig_pg, update] 18 | env: 19 | - name: AUTOMIG_GLOB 20 | value: schema/*.sql 21 | - name: AUTOMIG_CON 22 | valueFrom: {secretKeyRef: {name: arbout-secrets, key: db_url}} 23 | - name: PGCONNECT_TIMEOUT 24 | value: "10" 25 | imagePullSecrets: 26 | - name: regcred 27 | -------------------------------------------------------------------------------- /deploy/kube/service.yml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: arbout-api 5 | labels: &labels 6 | app: arbout 7 | spec: 8 | type: NodePort 9 | ports: 10 | - port: 8000 11 | targetPort: 8000 12 | selector: *labels 13 | --- 14 | apiVersion: networking.gke.io/v1beta1 15 | kind: ManagedCertificate 16 | metadata: 17 | name: arbout 18 | spec: 19 | domains: 20 | - arbout.org 21 | --- 22 | apiVersion: extensions/v1beta1 23 | kind: Ingress 24 | metadata: 25 | name: arbout 26 | annotations: 27 | kubernetes.io/ingress.global-static-ip-name: arbout-ingress-ip 28 | networking.gke.io/managed-certificates: arbout 29 | spec: 30 | tls: 31 | - hosts: 32 | - arbout.org 33 | secretName: arbout 34 | rules: 35 | - host: arbout.org 36 | http: 37 | paths: 38 | - path: /* 39 | backend: 40 | serviceName: arbout-api 41 | servicePort: 8000 42 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | pytest 2 | pylint 3 | automig==0.0.19 4 | mypy==0.761 5 | -------------------------------------------------------------------------------- /lib/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abe-winter/arbout/935a5628bb7a0170a3e412be5baa2c21e69a3d9b/lib/__init__.py -------------------------------------------------------------------------------- /lib/core_blueprint.py: -------------------------------------------------------------------------------- 1 | "core_blueprint.py -- flask Blueprint for core functionality" 2 | 3 | import binascii, json, os 4 | from dataclasses import dataclass 5 | import flask, scrypt 6 | from Crypto.Cipher import AES 7 | from Crypto.Util import Padding 8 | from marshmallow import Schema, fields, validate, ValidationError 9 | from .util import init_pool, insert_stmt, strip_empty, withcon 10 | from . import diff_summary, search 11 | 12 | CORE = flask.Blueprint('core', __name__) 13 | STATES = json.load(open(os.path.join(os.path.split(__file__)[0], 'states.json')))['states'] 14 | GLOBAL_SALT = binascii.unhexlify(os.environ['ARB_SALT']) 15 | ARB_CRYPT = binascii.unhexlify(os.environ['ARB_CRYPT']) 16 | 17 | @CORE.context_processor 18 | def inject_version(): 19 | return {'version': os.environ.get('VERSION')} 20 | 21 | # todo: would rather this happened at app startup vs registration i.e. import. 22 | # But before_first_request is too late. 
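# record() runs init_pool(setup_state) when the blueprint is registered on the app, which creates the shared psycopg2 connection pool (see lib/util.init_pool).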
23 | CORE.record(init_pool) 24 | 25 | @CORE.route('/') 26 | def splash(): 27 | return flask.render_template('splash.jinja.htm') 28 | 29 | @dataclass 30 | class CatPair: 31 | key: str 32 | label: str 33 | 34 | CATEGORIES = [ 35 | CatPair('nadvert', 'Not as advertised'), 36 | CatPair('ndeliv', 'Never delivered'), 37 | CatPair('nwork', "Doesn't work"), 38 | CatPair('price', "Overcharged, undercharged, or bad price"), 39 | CatPair('navail', 'Outage or intermittent availability'), 40 | CatPair('ncompat', 'Not compatible with other stuff'), 41 | CatPair('mistreat', 'Mistreatment by company'), 42 | CatPair('woreout', 'Wore out quickly'), 43 | CatPair('danger', 'Dangerous or caused an injury'), 44 | CatPair('late', 'Arrived late'), 45 | CatPair('npay', 'Underpaid or paid late'), 46 | CatPair('other', 'Other'), 47 | ] 48 | CAT_LOOKUP = {pair.key: pair.label for pair in CATEGORIES} 49 | 50 | AFFIRMATION = """I affirm, on penalty of perjury, that I believe my submission to be (1) accurate and (2) not a duplicate submission. 51 | 52 | I understand that submitting information to this database could expose me to legal risks.""" 53 | 54 | @CORE.route('/submit') 55 | def get_submit(): 56 | return flask.render_template('submit.jinja.htm', categories=CATEGORIES, affirmation=AFFIRMATION, states=STATES) 57 | 58 | def yesno(required=False): 59 | return fields.Str(validate=validate.OneOf(['yes', 'no']), required=required) 60 | 61 | class SubmissionSchema(Schema): 62 | counterparty = fields.Str() 63 | counterparty_domain = fields.Str() 64 | claimant = yesno(True) 65 | issue_cat = fields.Str(validate=validate.OneOf([cat.key for cat in CATEGORIES]), required=True) 66 | issue_det = fields.Str() 67 | terms_link = fields.Str() 68 | you_negotiate = yesno() 69 | sought_dollars = fields.Int() 70 | settlement_dollars = fields.Int() 71 | favor = yesno() 72 | fair = yesno() 73 | incident_date = fields.DateTime(format='%Y-%m') 74 | dispute_date = fields.DateTime(format='%Y-%m') 75 | file_date = fields.DateTime(format='%Y-%m') 76 | arb_date = fields.DateTime(format='%Y-%m', required=True) 77 | agency = fields.Str() 78 | state = fields.Str(validate=validate.OneOf([row[0] for row in STATES]), required=True) 79 | chose = fields.Str(validate=validate.OneOf(['yes', 'yes_list', 'no'])) 80 | case_real_id = fields.Str() 81 | email = fields.Email() 82 | password = fields.Str() 83 | affirm = fields.Str(required=True) 84 | 85 | @staticmethod 86 | def valid_affirm(value): 87 | if value.replace('\r\n', '\n') != AFFIRMATION: 88 | raise ValidationError('invalid affirmation') 89 | 90 | @CORE.errorhandler(ValidationError) 91 | def handle_validation_error(err): 92 | return flask.render_template('invalid.jinja.htm', messages=err.messages) 93 | 94 | def yesno_null(value): 95 | "cast non-null yesno to bool" 96 | # todo: do this with marshmallow 97 | return {'yes': True, 'no': False, None: None}[value] 98 | 99 | def date_null(value): 100 | return value and value.date() 101 | 102 | @CORE.route('/submit', methods=['POST']) 103 | def post_submit(): 104 | # ValidationError here gets caught by middleware 105 | parsed = SubmissionSchema().load(strip_empty(flask.request.form)) 106 | db_fields = { 107 | 'counterparty': parsed.get('counterparty'), 108 | 'counterparty_domain': parsed.get('counterparty_domain'), 109 | 'submitter_initiated': yesno_null(parsed.get('claimant')), 110 | 'issue_category': parsed.get('issue_cat'), 111 | 'issue': parsed.get('issue_det'), 112 | 'terms_link': parsed.get('terms_link'), 113 | 'draft_contract': 
yesno_null(parsed.get('you_negotiate')), 114 | 'sought_dollars': parsed.get('sought_dollars'), 115 | 'settlement_dollars': parsed.get('settlement_dollars'), 116 | 'subjective_inmyfavor': yesno_null(parsed.get('favor')), 117 | 'subjective_fair': yesno_null(parsed.get('fair')), 118 | 'incident_date': date_null(parsed.get('incident_date')), 119 | 'dispute_date': date_null(parsed.get('dispute_date')), 120 | 'file_date': date_null(parsed.get('file_date')), 121 | 'arbitration_date': date_null(parsed.get('arb_date')), 122 | 'arbitration_agency': parsed.get('agency'), 123 | # todo: agency domain 124 | 'arbitration_state': parsed.get('state'), 125 | 'submitter_choose_agency': parsed.get('chose'), 126 | 'affirm': parsed.get('affirm'), 127 | } 128 | if 'email' in parsed: 129 | block_size = 16 130 | ivec = os.urandom(block_size) 131 | db_fields['email_iv'] = ivec 132 | padded = Padding.pad(parsed['email'].encode('utf8'), block_size) 133 | db_fields['email_cipher'] = AES.new(ARB_CRYPT, AES.MODE_CBC, ivec).encrypt(padded) 134 | if 'case_real_id' in parsed: 135 | db_fields['real_id_hash'] = scrypt.hash(parsed['case_real_id'], GLOBAL_SALT) 136 | if 'password' in parsed: 137 | salt = os.urandom(12) 138 | db_fields['password_salt'] = salt 139 | db_fields['password_hash'] = scrypt.hash(parsed['password'], salt) 140 | with withcon() as con, con.cursor() as cur: 141 | cur.execute(insert_stmt('cases', 'caseid', db_fields), db_fields) 142 | con.commit() 143 | return flask.render_template('after_submit.jinja.htm') 144 | 145 | @CORE.route('/search') 146 | def get_search(): 147 | return flask.render_template('search.jinja.htm', categories=CATEGORIES, states=STATES) 148 | 149 | @CORE.route('/search', methods=['POST']) 150 | def post_search(): 151 | # todo: look into marshmallow-dataclass so we're not relying on psycopg2 / libpq for sanitization 152 | params = search.Search(**strip_empty(flask.request.form)) 153 | if params.empty(): 154 | return 'Error: set at least one search term' 155 | rows = search.search(params) 156 | groups = diff_summary.diff_group_counterparty(rows) 157 | summaries = [diff_summary.summarize(key, rows) for key, rows in groups.items()] 158 | # note: sorting by party key so stable sort doesn't reveal relative sizes 159 | summaries.sort(key=lambda summary: (summary.total, summary.key)) 160 | # todo: paging 161 | summaries = summaries[:20] 162 | for summary in summaries: 163 | for approx in summary.issue_cats: 164 | approx.label = CAT_LOOKUP.get(approx.label, approx.label) 165 | for approx in summary.settlement_dollars: 166 | approx.label = f"${approx.label.lower} - ${approx.label.upper}" 167 | return flask.render_template('serp.jinja.htm', summaries=summaries) 168 | 169 | @CORE.route('/disputes') 170 | def disputes(): 171 | return flask.render_template('dispute.jinja.htm') 172 | 173 | @CORE.route('/terms') 174 | def terms(): 175 | return flask.render_template('terms.jinja.htm') 176 | 177 | @CORE.route('/health') 178 | def get_health(): 179 | return flask.jsonify({"ok": True}) 180 | 181 | class IntentionalCrash(Exception): 182 | "intentional crash to test error reporting" 183 | 184 | @CORE.route('/crash') 185 | def get_crash(): 186 | raise IntentionalCrash 187 | -------------------------------------------------------------------------------- /lib/diff_summary.py: -------------------------------------------------------------------------------- 1 | "diff_summary.py -- differential-aware search results summarization" 2 | 3 | from __future__ import annotations # for classmethod return type 4 | import 
collections, itertools 5 | from dataclasses import dataclass 6 | from typing import List, Dict, Generator, Optional, Union 7 | from .search import CaseRow 8 | from .util import Bracket 9 | 10 | GROUP_THRESHOLD = 10 11 | 12 | @dataclass 13 | class CounterpartyLabel: 14 | "groupable domain / name" 15 | kind: str 16 | value: str 17 | 18 | def __lt__(self, other): 19 | return (self.kind, self.value) < (other.kind, other.value) 20 | 21 | def __hash__(self): 22 | return hash((self.kind, self.value)) 23 | 24 | @classmethod 25 | def key(cls, row: CaseRow) -> CounterpartyLabel: 26 | return cls('domain', row.counterparty_domain) if row.counterparty_domain else cls('name', row.counterparty) 27 | 28 | def diff_group_counterparty(rows: List[CaseRow]) -> Dict[CounterpartyLabel, List[CaseRow]]: 29 | """Grouping key is (counterparty_domain or counterparty). 30 | Respects global GROUP_THRESHOLD by stripping smaller groups. 31 | """ 32 | sorted_ = sorted(rows, key=CounterpartyLabel.key) 33 | ret = { 34 | key: list(inner_rows) 35 | for key, inner_rows in itertools.groupby(sorted_, key=CounterpartyLabel.key) 36 | } 37 | too_small = [] 38 | for key, val in ret.items(): 39 | if len(val) < GROUP_THRESHOLD: 40 | too_small.append(key) 41 | for key in too_small: 42 | del ret[key] 43 | return ret 44 | 45 | @dataclass 46 | class ApproxLabel: 47 | label: Union[str, int, Bracket] 48 | bracket: Bracket 49 | 50 | @classmethod 51 | def make(cls, values: Union[List, Generator]) -> List[ApproxLabel]: 52 | "summarize attr from rows according to group_threshold" 53 | ret = [ 54 | cls(value, Bracket.round(count)) 55 | # note: sorted() below instead of most_common() so we don't leak non-rounded counts 56 | for value, count in sorted(collections.Counter(value for value in values if value is not None).items()) 57 | if count >= GROUP_THRESHOLD 58 | ] 59 | return sorted(ret, reverse=True, key=lambda approx: approx.bracket) 60 | 61 | @dataclass 62 | class Summary: 63 | key: CounterpartyLabel 64 | total: Bracket 65 | removed: Optional[Bracket] # 'yes' means records were removed after a correctness or other dispute 66 | agencies: List[ApproxLabel] 67 | issue_cats: List[ApproxLabel] 68 | arbitration_years: List[ApproxLabel] 69 | settlement_dollars: List[ApproxLabel] 70 | fair: List[ApproxLabel] 71 | drafted: List[ApproxLabel] 72 | chose_agency: List[ApproxLabel] 73 | states: List[ApproxLabel] 74 | 75 | def summarize(key: CounterpartyLabel, rows: List[CaseRow]) -> Summary: 76 | "rolls up a list of cases, giving more details when the threshold is met" 77 | removed = sum(row.removed for row in rows) 78 | return Summary( 79 | key=key, 80 | total=Bracket.round(len(rows)), 81 | removed=Bracket.round(removed) if removed else None, 82 | agencies=ApproxLabel.make(row.arbitration_agency for row in rows), 83 | issue_cats=ApproxLabel.make(row.issue_category for row in rows), 84 | arbitration_years=ApproxLabel.make(row.arbitration_date.year for row in rows if row.arbitration_date), 85 | settlement_dollars=ApproxLabel.make(Bracket.round(row.settlement_dollars) for row in rows if row.settlement_dollars is not None), 86 | fair=ApproxLabel.make(row.subjective_fair for row in rows), 87 | chose_agency=ApproxLabel.make(row.submitter_choose_agency for row in rows), 88 | drafted=ApproxLabel.make(row.draft_contract for row in rows), 89 | states=ApproxLabel.make(row.arbitration_state for row in rows), 90 | ) 91 | -------------------------------------------------------------------------------- /lib/search.py: 
-------------------------------------------------------------------------------- 1 | "search.py -- convert searches to queries" 2 | 3 | from datetime import date 4 | from dataclasses import dataclass, fields 5 | from typing import List, Optional 6 | import psycopg2.extras 7 | from .util import withcon 8 | 9 | class WhereClause: 10 | "builder for whereclause" 11 | 12 | def __init__(self): 13 | self.terms = [] 14 | self.params = {} 15 | 16 | def add(self, sql_name, value, operator='=', transform=None, ignore_null=True): # pylint: disable=too-many-arguments 17 | "transform is when you want to use a column expression that won't work with %()s" 18 | if value is None: 19 | if ignore_null: 20 | return 21 | else: 22 | raise NotImplementedError("todo 'is null'") 23 | self.terms.append(f'{transform or sql_name} {operator} %({sql_name})s') 24 | self.params[sql_name] = value 25 | 26 | def clause(self): 27 | return ' and '.join(self.terms) 28 | 29 | @dataclass 30 | class Search: 31 | "constraints for a search" 32 | party: Optional[str] = None 33 | party_domain: Optional[str] = None 34 | start_year: Optional[str] = None 35 | end_year: Optional[str] = None 36 | state: Optional[str] = None 37 | 38 | def empty(self): 39 | return all(getattr(self, field.name, None) is None for field in fields(Search)) 40 | 41 | def whereclause(self): 42 | "render the search to a sql query" 43 | where = WhereClause() 44 | where.add('counterparty', self.party) 45 | where.add('counterparty_domain', self.party_domain) 46 | where.add('start_year', self.start_year and int(self.start_year), operator='>=', transform='extract(year from arbitration_date)') 47 | where.add('end_year', self.end_year and int(self.end_year), operator='<=', transform='extract(year from arbitration_date)') 48 | where.add('arbitration_state', self.state) 49 | return where 50 | 51 | @dataclass 52 | class CaseRow: 53 | "representation of database row" 54 | counterparty: str 55 | counterparty_domain: Optional[str] = None 56 | issue_category: Optional[str] = None 57 | incident_date: Optional[date] = None 58 | dispute_date: Optional[date] = None 59 | file_date: Optional[date] = None 60 | arbitration_date: Optional[date] = None 61 | sought_dollars: Optional[int] = None 62 | settlement_dollars: Optional[int] = None 63 | subjective_fair: Optional[bool] = None 64 | subjective_inmyfavor: Optional[bool] = None 65 | submitter_initiated: Optional[bool] = None 66 | arbitration_agency: Optional[str] = None 67 | submitter_choose_agency: Optional[str] = None 68 | arbitration_state: Optional[str] = None 69 | draft_contract: Optional[bool] = None 70 | terms_link: Optional[str] = None 71 | removed: bool = False 72 | 73 | def search(terms: Search) -> List[CaseRow]: 74 | if terms.empty(): 75 | raise ValueError("empty search") 76 | where = terms.whereclause() 77 | names = [field.name for field in fields(CaseRow)] 78 | query = f"select {', '.join(names)} from cases where {where.clause()}" 79 | with withcon() as con, con.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur: 80 | cur.execute(query, where.params) 81 | return [CaseRow(**row) for row in cur.fetchall()] 82 | -------------------------------------------------------------------------------- /lib/states.json: -------------------------------------------------------------------------------- 1 | { 2 | "source": "https://www.iso.org/obp/ui/#iso:code:3166:US", 3 | "states": [ 4 | ["AL", "Alabama"], 5 | ["AK", "Alaska"], 6 | ["AS", "American Samoa"], 7 | ["AZ", "Arizona"], 8 | ["AR", "Arkansas"], 9 | ["CA", "California"], 10 | 
["CO", "Colorado"], 11 | ["CT", "Connecticut"], 12 | ["DE", "Delaware"], 13 | ["DC", "District of Columbia"], 14 | ["FL", "Florida"], 15 | ["GA", "Georgia"], 16 | ["GU", "Guam"], 17 | ["HI", "Hawaii"], 18 | ["ID", "Idaho"], 19 | ["IL", "Illinois"], 20 | ["IN", "Indiana"], 21 | ["IA", "Iowa"], 22 | ["KS", "Kansas"], 23 | ["KY", "Kentucky"], 24 | ["LA", "Louisiana"], 25 | ["ME", "Maine"], 26 | ["MD", "Maryland"], 27 | ["MA", "Massachusetts"], 28 | ["MI", "Michigan"], 29 | ["MN", "Minnesota"], 30 | ["MS", "Mississippi"], 31 | ["MO", "Missouri"], 32 | ["MT", "Montana"], 33 | ["NE", "Nebraska"], 34 | ["NV", "Nevada"], 35 | ["NH", "New Hampshire"], 36 | ["NJ", "New Jersey"], 37 | ["NM", "New Mexico"], 38 | ["NY", "New York"], 39 | ["NC", "North Carolina"], 40 | ["ND", "North Dakota"], 41 | ["MP", "Northern Mariana Islands"], 42 | ["OH", "Ohio"], 43 | ["OK", "Oklahoma"], 44 | ["OR", "Oregon"], 45 | ["PA", "Pennsylvania"], 46 | ["PR", "Puerto Rico"], 47 | ["RI", "Rhode Island"], 48 | ["SC", "South Carolina"], 49 | ["SD", "South Dakota"], 50 | ["TN", "Tennessee"], 51 | ["TX", "Texas"], 52 | ["UM", "United States Minor Outlying Islands"], 53 | ["UT", "Utah"], 54 | ["VT", "Vermont"], 55 | ["VI", "Virgin Islands, U.S."], 56 | ["VA", "Virginia"], 57 | ["WA", "Washington"], 58 | ["WV", "West Virginia"], 59 | ["WI", "Wisconsin"], 60 | ["WY", "Wyoming"] 61 | ] 62 | } 63 | -------------------------------------------------------------------------------- /lib/util.py: -------------------------------------------------------------------------------- 1 | "common & util" 2 | 3 | from __future__ import annotations # for classmethod return type 4 | import contextlib, os, socket 5 | from dataclasses import dataclass 6 | import flask, psycopg2.pool, werkzeug 7 | 8 | POOL = None 9 | 10 | def init_pool(_setup_state): 11 | global POOL 12 | if POOL is None: 13 | POOL = psycopg2.pool.ThreadedConnectionPool(0, 4, os.environ['AUTOMIG_CON']) 14 | 15 | @contextlib.contextmanager 16 | def withcon(): 17 | "helper to get / return a DB pool connection" 18 | # todo: move to util 19 | con = POOL.getconn() 20 | try: yield con 21 | finally: 22 | POOL.putconn(con) 23 | 24 | def strip_empty(form): 25 | "return copy of form with empty fields removed" 26 | return {key: val for key, val in form.items() if val} 27 | 28 | def insert_stmt(table, returning, db_fields): 29 | "helper to generate an insert stmt from db_fields dict" 30 | keys = ', '.join(db_fields) 31 | subs = ', '.join(f"%({key})s" for key in db_fields) 32 | stmt = f"insert into {table} ({keys}) values ({subs})" 33 | if returning: 34 | stmt += f" returning {returning}" 35 | return stmt 36 | 37 | @dataclass 38 | class Bracket: 39 | "rounding helper" 40 | lower: int 41 | upper: int 42 | 43 | def __lt__(self, other): 44 | return (self.lower, self.upper) < (other.lower, other.upper) 45 | 46 | def __hash__(self): 47 | return hash((self.lower, self.upper)) 48 | 49 | @classmethod 50 | def round(cls, count: int) -> Bracket: 51 | "round a count to a bracket" 52 | if count < 1: 53 | raise ValueError("round() takes values >= 1, you passed", count) 54 | bucket = 10 if count < 100 else 100 55 | bottom = count - (count % bucket) 56 | return cls(bottom or 1, bottom + bucket - 1) 57 | 58 | def render(self): 59 | return f"{self.lower} - {self.upper}" 60 | 61 | def host_is_ip(forwarded_host): 62 | "or localhost" 63 | sans_port = forwarded_host.split(':')[0] 64 | if sans_port == 'localhost': 65 | return True 66 | try: 67 | socket.inet_aton(sans_port) 68 | return True 69 | except socket.error: 70 
| return False 71 | 72 | # pylint: disable=inconsistent-return-statements 73 | def ssl_middleware(): 74 | """Janky HSTS that doesn't engage when host is an IP address, i.e. health checks. 75 | Using this instead of flask-talisman because GKE ingress has wrong health check path. 76 | https://cloud.google.com/kubernetes-engine/docs/concepts/ingress#health_checks 77 | """ 78 | req = flask.request 79 | if not host_is_ip(werkzeug.wsgi.get_host(req.environ)) and not (req.is_secure or req.headers.get('X-Forwarded-Proto') == 'https'): 80 | return flask.redirect(req.url.replace('http://', 'https://', 1)) 81 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | python_version = 3.7 3 | 4 | [mypy-psycopg2] 5 | ignore_missing_imports = True 6 | 7 | [mypy-psycopg2.extras] 8 | ignore_missing_imports = True 9 | 10 | [mypy-psycopg2.pool] 11 | ignore_missing_imports = True 12 | 13 | [mypy-scrypt] 14 | ignore_missing_imports = True 15 | -------------------------------------------------------------------------------- /prod.Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.7.6 2 | 3 | WORKDIR /arbout 4 | 5 | # deps 6 | COPY requirements.txt . 7 | RUN pip install -r requirements.txt 8 | 9 | # static css 10 | COPY Makefile . 11 | RUN mkdir static 12 | RUN make static/bootstrap.min.css && rm *.zip 13 | 14 | # files 15 | COPY lib lib/ 16 | COPY static static/ 17 | COPY templates templates/ 18 | COPY app.py . 19 | 20 | ENV AUTOMIG_CON postgres://postgres@arbout-db 21 | ARG build_slug 22 | ENV VERSION $build_slug 23 | EXPOSE 8000 24 | CMD gunicorn -w 2 -b 0.0.0.0 --access-logfile - --error-logfile - app:APP 25 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | flask==1.1.1 2 | marshmallow==3.3.0 3 | psycopg2==2.8.4 4 | scrypt==0.8.13 5 | gunicorn==20.0.4 6 | pycryptodome==3.9.6 7 | -------------------------------------------------------------------------------- /schema/base.sql: -------------------------------------------------------------------------------- 1 | create extension if not exists "uuid-ossp"; 2 | 3 | create table cases ( 4 | caseid uuid primary key default uuid_generate_v4(), 5 | email_cipher bytea, -- using global ARB_CRYPT so this can be decrypted in case of dispute 6 | email_iv bytea, 7 | password_salt bytea, 8 | password_hash bytea, 9 | counterparty text, 10 | counterparty_domain text, 11 | issue_category text, -- broad category of the dispute 12 | issue text, 13 | real_id_hash bytea, -- using global salt so it can be crossed in the escrow DB 14 | incident_date date, 15 | dispute_date date, 16 | file_date date, 17 | arbitration_date date, 18 | sought_dollars int, 19 | settlement_dollars int, 20 | subjective_fair boolean, 21 | subjective_inmyfavor boolean, 22 | submitter_initiated boolean, 23 | arbitration_agency text, 24 | arbitration_agency_domain text, 25 | submitter_choose_agency text, -- (yes, yes_list, no) 26 | arbitration_state text, -- two-digit state 27 | draft_contract boolean, -- did submitter have a role in drafting the contract 28 | terms_link text, -- url to terms of service / contract 29 | affirm text, -- affirmation text uploaded by user 30 | removed boolean not null default false, 31 | created timestamp not null default now(), 32 | modified timestamp not 
null default now() 33 | ); 34 | 35 | create index cases_real_id_hash on cases (real_id_hash); 36 | -------------------------------------------------------------------------------- /static/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abe-winter/arbout/935a5628bb7a0170a3e412be5baa2c21e69a3d9b/static/.gitkeep -------------------------------------------------------------------------------- /templates/after_submit.jinja.htm: -------------------------------------------------------------------------------- 1 | {% from 'macros.jinja.htm' import home_link %} 2 | {% extends "base.jinja.htm" %} 3 | {% block title %}submission received{% endblock %} 4 | {% block container %} 5 | {{ home_link() }} 6 |
Information about index inclusion
8 |Todo: edit link if you posted a password
9 | {% endblock container %} 10 | -------------------------------------------------------------------------------- /templates/base.jinja.htm: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 |Server version {{version}}
{% endif %} 14 |If you believe the information in this database to be in error, please email the maintainer.
8 |If you believe that our database contains false or mis-reported cases:
10 |We will hold your case list in strict confidence and, as long as we believe it to be correct, we will only use it to remove false information from our site.
15 |If we discover your list to be substantially in error, we may restore the cases we removed because of it. We may pursue legal action for fraud or other harms. If a court grants us relief, we may publish the list.
16 |If you're an agency or a party to many arbitrations and you want to pre-upload your case list for dispute matching, contact us. Our technology doesn't currently support this but we're open to building the extension once we have a partner.
19 |The following fields had problems:
7 |{{field}} | 11 | | {{', '.join(errs)}} | 12 | |
Note: this version of the software only shows the first 20 results of a search
13 | {% if not summaries %} 14 |Total cases | {{summary.total.render()}} |
Arbout is a crowd-sourced public database of arbitration outcomes. It aggregates search results to preserve the confidentiality of individual cases.
8 | 9 | 10 |Arbitration is an alternative to suing in court.
14 |Unlike the court system, the arbitration system is run by private companies and doesn't enter its outcomes into the public record. Furthermore, some arbitration is confidential by design, which can shield the parties from the reputational risk of disputes, at the cost of leaving lawmakers and the public in the dark.
15 |Arbitration is increasingly relevant because many online services include binding arbitration clauses. These clauses can force customers to use arbitration rather than going to court.
16 |Arbout doesn't reveal the individual contents of any arbitration, and tries to make it difficult to prove that the database contains any individual case. It uses aggregation over search results to only reveal information when many cases have similar outcomes.
21 |For example, if a company paid a $1000-$1999 settlement in 50 cases, Arbout would reveal that in aggregate. But we wouldn't show the individual amounts or cases.
22 |We have a dispute process. We're committed to working with arbitration parties and agencies to correct errors.
27 |Arbout is open source and you can learn about the project's internals and authorship in the Arbout github repo.
32 |©️ 2020 Arbout development team
35 | {% endblock container %} 36 | -------------------------------------------------------------------------------- /templates/submit.jinja.htm: -------------------------------------------------------------------------------- 1 | {% from 'macros.jinja.htm' import yesno, option_warn, home_link, select_state, select_issue_cat %} 2 | {% extends "base.jinja.htm" %} 3 | {% block title %}submit case{% endblock %} 4 | {% block container %} 5 | {{ home_link() }} 6 |Nothing you see on this website or the associated open source accounts or message boards should be construed as legal advice.
9 |It is your responsibility to determine whether the laws and contractual obligations in your particular situation allow you to use this site, whether to submit a case or to search.
10 |As a user, you assume all risk from submitting to this site and from using the search results you find here.
13 |If you think our information is wrong, see disputes.
16 |We won't intentionally reveal submissions to a third party unless we are compelled to do so.
19 |If you've given an email address, you may be contacted in case of a dispute of your case. However, we will attempt to place the burden of proof on the disputing party before we contact you. We won't reveal email addresses or other PII to the counterparty unless compelled.
20 |A non-exhaustive list of circumstances that could lead to your data being revealed:
21 |We don't intentionally set cookies, and to our knowledge, no site functionality depends on or benefits from cookies. It's possible that our various open source libraries or frameworks automatically set some cookies. It's possible that our cloud vendor or your ISP are setting cookies on the connection.
33 |It's our policy to report cyberattacks and data leaks whenever practical. A non-exhaustive list of reasons we may not report an attack immediately or at all:
36 | Most pages on this site have a version tag at the bottom. When this tag corresponds to a version in our public github repository, we guarantee on a best-effort basis that the version of the software serving the site is built from that git version.
45 |
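The version tag in the footer comes from the `VERSION` environment variable: .gitlab-ci.yml passes `$CI_COMMIT_SHORT_SHA` as the `build_slug` build arg, prod.Dockerfile exports it as `VERSION`, and `inject_version` in lib/core_blueprint.py hands it to the templates. A minimal sketch of how a third party could spot-check the claim (the URL and the footer wording are assumptions based on templates/base.jinja.htm; this only confirms the tag names a commit that exists in the public repo, not that the running build came from it):

```python
# sketch: compare the footer's version tag against commits in a local clone of the public repo
import re, subprocess, urllib.request

def deployed_version(url="https://arbout.org"):
    "scrape the short sha out of the 'Server version ...' footer"
    html = urllib.request.urlopen(url).read().decode()
    match = re.search(r"Server version ([0-9a-f]+)", html)
    return match.group(1) if match else None

def commit_exists(sha):
    "true if the sha resolves to a commit in the current checkout"
    return subprocess.run(["git", "cat-file", "-e", f"{sha}^{{commit}}"], capture_output=True, check=False).returncode == 0

if __name__ == "__main__":
    sha = deployed_version()
    print(sha, commit_exists(sha) if sha else "(no version tag found)")
```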