├── .dockerignore ├── .envrc ├── .gitignore ├── MANIFEST.in ├── Makefile ├── README.md ├── alembic.ini ├── alembic ├── README ├── env.py ├── script.py.mako └── versions │ └── 21c058faa1d9_initialise_database.py ├── build-requirements.txt ├── doc ├── design.md ├── privileged_scans.md ├── requirements.md └── scans.md ├── docker-compose.yml ├── docker ├── coordinator.Dockerfile ├── coordinator.sh ├── testssl.Dockerfile ├── web.Dockerfile └── xmpp.Dockerfile ├── setup.py └── testxmpp ├── __init__.py ├── api ├── __init__.py ├── common.py └── coordinator.py ├── blackbox └── __init__.py ├── certutil.py ├── cli ├── __init__.py └── __main__.py ├── common.py ├── coordinator ├── __init__.py ├── __main__.py ├── auth.py ├── cli.py ├── common.py ├── daemon.py ├── endpoints.py ├── tasks.py ├── test_auth.py ├── testssl.py └── xmpp.py ├── dns.py ├── model.py ├── testssl ├── __init__.py ├── __main__.py ├── cli.py └── daemon.py ├── web ├── __init__.py ├── infra.py ├── main.py ├── scss │ ├── _baseline.scss │ ├── _theme.scss │ └── app.scss └── templates │ ├── _base.html │ ├── _library.html │ ├── _page.html │ ├── index.html │ └── scan_result.html └── xmpp ├── __init__.py ├── __main__.py ├── cli.py └── daemon.py /.dockerignore: -------------------------------------------------------------------------------- 1 | /.direnv 2 | -------------------------------------------------------------------------------- /.envrc: -------------------------------------------------------------------------------- 1 | layout python python3 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | /.local 3 | /.direnv 4 | /test.sqlite 5 | /*.egg-info 6 | /dist 7 | /testxmpp/web/static/css 8 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | 
recursive-include testxmpp/web/templates *.html 2 | recursive-include testxmpp/web/static/css *.css 3 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | scss_files = $(filter-out testxmpp/web/scss/_%.scss,$(wildcard testxmpp/web/scss/*.scss)) 2 | scss_includes = $(filter testxmpp/web/scss/_%.scss,$(wildcard testxmpp/web/scss/*.scss)) 3 | generated_css_files = $(patsubst testxmpp/web/scss/%.scss,testxmpp/web/static/css/%.css,$(scss_files)) 4 | images = $(wildcard docker/*.Dockerfile) 5 | image_targets = $(patsubst docker/%.Dockerfile,%-image,$(images)) 6 | repository = testxmpp 7 | 8 | PYTHON3 ?= python3 9 | DOCKER ?= docker 10 | SCSSC ?= $(PYTHON3) -m scss --load-path testxmpp/web/scss/ 11 | 12 | all: build_css images 13 | 14 | images: $(image_targets) 15 | 16 | $(image_targets): %-image: docker/%.Dockerfile 17 | $(DOCKER) build -t $(repository)/$(patsubst %-image,%,$@):latest -f docker/$(patsubst %-image,%,$@).Dockerfile . 18 | 19 | build_css: $(generated_css_files) 20 | 21 | $(generated_css_files): testxmpp/web/static/css/%.css: testxmpp/web/scss/%.scss $(scss_includes) 22 | mkdir -p testxmpp/web/static/css/ 23 | $(SCSSC) -o "$@" "$<" 24 | 25 | clean: 26 | rm -f $(generated_css_files) 27 | 28 | .PHONY: build_css clean images testssl-image coordinator-image web-image 29 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # testxmpp 2 | 3 | A re-implementation of the original [xmppoke](https://github.com/xmpp-observatory/xmppoke/)-based XMPP observatory in Python, based on [testssl.sh](https://testssl.sh). 4 | 5 | Currently, this isn't quite ready for the stage yet, but if you want to play, you should be able to get quite far with `docker-compose up` and then navigating to `http://localhost:8000`. 
If your user ID is not 1000, you might have to change the docker-compose file so that it can read/write the testing SQLite database. 6 | -------------------------------------------------------------------------------- /alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # path to migration scripts 5 | script_location = alembic 6 | 7 | # template used to generate migration files 8 | # file_template = %%(rev)s_%%(slug)s 9 | 10 | # timezone to use when rendering the date 11 | # within the migration file as well as the filename. 12 | # string value is passed to dateutil.tz.gettz() 13 | # leave blank for localtime 14 | # timezone = 15 | 16 | # max length of characters to apply to the 17 | # "slug" field 18 | # truncate_slug_length = 40 19 | 20 | # set to 'true' to run the environment during 21 | # the 'revision' command, regardless of autogenerate 22 | # revision_environment = false 23 | 24 | # set to 'true' to allow .pyc and .pyo files without 25 | # a source .py file to be detected as revisions in the 26 | # versions/ directory 27 | # sourceless = false 28 | 29 | # version location specification; this defaults 30 | # to alembic/versions. When using multiple version 31 | # directories, initial revisions must be specified with --version-path 32 | # version_locations = %(here)s/bar %(here)s/bat alembic/versions 33 | 34 | # the output encoding used when revision files 35 | # are written from script.py.mako 36 | # output_encoding = utf-8 37 | 38 | # Set via environment variable from env.py 39 | #sqlalchemy.url = sqlite:///%(here)s/test.sqlite 40 | 41 | 42 | [post_write_hooks] 43 | # post_write_hooks defines scripts or Python functions that are run 44 | # on newly generated revision scripts. 
See the documentation for further 45 | # detail and examples 46 | 47 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 48 | # hooks=black 49 | # black.type=console_scripts 50 | # black.entrypoint=black 51 | # black.options=-l 79 52 | 53 | # Logging configuration 54 | [loggers] 55 | keys = root,sqlalchemy,alembic 56 | 57 | [handlers] 58 | keys = console 59 | 60 | [formatters] 61 | keys = generic 62 | 63 | [logger_root] 64 | level = WARN 65 | handlers = console 66 | qualname = 67 | 68 | [logger_sqlalchemy] 69 | level = WARN 70 | handlers = 71 | qualname = sqlalchemy.engine 72 | 73 | [logger_alembic] 74 | level = INFO 75 | handlers = 76 | qualname = alembic 77 | 78 | [handler_console] 79 | class = StreamHandler 80 | args = (sys.stderr,) 81 | level = NOTSET 82 | formatter = generic 83 | 84 | [formatter_generic] 85 | format = %(levelname)-5.5s [%(name)s] %(message)s 86 | datefmt = %H:%M:%S 87 | -------------------------------------------------------------------------------- /alembic/README: -------------------------------------------------------------------------------- 1 | Generic single-database configuration. -------------------------------------------------------------------------------- /alembic/env.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from logging.config import fileConfig 4 | 5 | from sqlalchemy import engine_from_config 6 | from sqlalchemy import pool 7 | 8 | from alembic import context 9 | 10 | # this is the Alembic Config object, which provides 11 | # access to the values within the .ini file in use. 12 | config = context.config 13 | try: 14 | db_uri = os.environ["TESTXMPP_DB_URI"] 15 | except KeyError: 16 | raise ValueError("TESTXMPP_DB_URI must be set") 17 | else: 18 | config.set_main_option('sqlalchemy.url', db_uri) 19 | 20 | # Interpret the config file for Python logging. 21 | # This line sets up loggers basically. 
22 | fileConfig(config.config_file_name) 23 | 24 | # add your model's MetaData object here 25 | # for 'autogenerate' support 26 | # from myapp import mymodel 27 | # target_metadata = mymodel.Base.metadata 28 | target_metadata = None 29 | 30 | # other values from the config, defined by the needs of env.py, 31 | # can be acquired: 32 | # my_important_option = config.get_main_option("my_important_option") 33 | # ... etc. 34 | 35 | 36 | def run_migrations_offline(): 37 | """Run migrations in 'offline' mode. 38 | 39 | This configures the context with just a URL 40 | and not an Engine, though an Engine is acceptable 41 | here as well. By skipping the Engine creation 42 | we don't even need a DBAPI to be available. 43 | 44 | Calls to context.execute() here emit the given string to the 45 | script output. 46 | 47 | """ 48 | url = config.get_main_option("sqlalchemy.url") 49 | context.configure( 50 | url=url, 51 | target_metadata=target_metadata, 52 | literal_binds=True, 53 | dialect_opts={"paramstyle": "named"}, 54 | ) 55 | 56 | with context.begin_transaction(): 57 | context.run_migrations() 58 | 59 | 60 | def run_migrations_online(): 61 | """Run migrations in 'online' mode. 62 | 63 | In this scenario we need to create an Engine 64 | and associate a connection with the context. 
65 | 66 | """ 67 | connectable = engine_from_config( 68 | config.get_section(config.config_ini_section), 69 | prefix="sqlalchemy.", 70 | poolclass=pool.NullPool, 71 | ) 72 | 73 | with connectable.connect() as connection: 74 | context.configure( 75 | connection=connection, target_metadata=target_metadata 76 | ) 77 | 78 | with context.begin_transaction(): 79 | context.run_migrations() 80 | 81 | 82 | if context.is_offline_mode(): 83 | run_migrations_offline() 84 | else: 85 | run_migrations_online() 86 | -------------------------------------------------------------------------------- /alembic/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade(): 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade(): 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /alembic/versions/21c058faa1d9_initialise_database.py: -------------------------------------------------------------------------------- 1 | """initialise database 2 | 3 | Revision ID: 21c058faa1d9 4 | Revises: 5 | Create Date: 2020-09-23 21:59:12.160103 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 
# revision identifiers, used by Alembic.
revision = '21c058faa1d9'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    """Create the initial schema.

    The schema falls into two groups: global lookup tables shared by all
    scans (cipher metadata, SASL mechanisms, SAN types) and per-scan data
    (the scan itself, DNS discovery results, endpoints, TLS/certificate/
    cipher results, and the scan task queue).
    """
    # GLOBAL DATA
    op.create_table(
        "cipher_metadata",
        sa.Column("id", sa.Integer(),
                  primary_key=True,
                  nullable=False),
        sa.Column("openssl_name", sa.Unicode(255),
                  nullable=True),
        sa.Column("iana_name", sa.Unicode(255),
                  nullable=True),
    )
    op.create_index(
        "cipher_metadata_ix_openssl_name",
        "cipher_metadata",
        ["openssl_name"],
        unique=True,
    )
    op.create_index(
        "cipher_metadata_ix_iana_name",
        "cipher_metadata",
        ["iana_name"],
        unique=True,
    )

    op.create_table(
        "sasl_mechanism",
        sa.Column("id", sa.Integer(),
                  primary_key=True,
                  autoincrement=True,
                  nullable=False),
        sa.Column("name", sa.Unicode(20),
                  nullable=False),
    )
    op.create_index(
        "sasl_mechanism_ix_name",
        "sasl_mechanism",
        ["name"],
        unique=True,
    )

    op.create_table(
        "san_type",
        sa.Column("id", sa.Integer,
                  primary_key=True,
                  nullable=False,
                  autoincrement=True),
        sa.Column("asn1_name", sa.Unicode(128),
                  nullable=False),
    )
    op.create_index(
        "san_type_ix_asn1_name",
        "san_type",
        ["asn1_name"],
        unique=True,
    )

    # PER-SCAN DATA

    op.create_table(
        "scan",
        sa.Column("id", sa.Integer,
                  primary_key=True,
                  nullable=False,
                  autoincrement=True),
        sa.Column("domain", sa.types.VARCHAR(1023),
                  nullable=False),
        sa.Column("protocol", sa.Unicode(32),
                  nullable=False),
        sa.Column("created_at", sa.DateTime(),
                  nullable=False),
        sa.Column("state", sa.Unicode(32),
                  nullable=False),
        sa.Column("certificate_score", sa.Integer(),
                  nullable=True),
        sa.Column("kex_score", sa.Integer(),
                  nullable=True),
        sa.Column("protocol_score", sa.Integer(),
                  nullable=True),
        sa.Column("cipher_score", sa.Integer(),
                  nullable=True),
        sa.Column("privileged", sa.Boolean(),
                  nullable=False),
    )
    # Supports "most recent scan(s) for a domain" lookups.
    op.create_index(
        "scan_ix_recent",
        "scan",
        ["domain", "created_at"],
    )

    op.create_table(
        "srv_record",
        sa.Column("id", sa.Integer,
                  primary_key=True,
                  nullable=False,
                  autoincrement=True),
        sa.Column("scan_id", sa.Integer,
                  sa.ForeignKey("scan.id",
                                ondelete="CASCADE",
                                onupdate="CASCADE"),
                  nullable=False),
        sa.Column("service", sa.Unicode(63),
                  nullable=False),
        sa.Column("protocol", sa.Unicode(63),
                  nullable=False),
        sa.Column("port", sa.Integer(),
                  nullable=False),
        sa.Column("host", sa.types.VARCHAR(255),
                  nullable=False),
        sa.Column("priority", sa.Integer(),
                  nullable=False),
        sa.Column("weight", sa.Integer(),
                  nullable=False),
    )

    op.create_table(
        "xmppconnect_record",
        sa.Column("id", sa.Integer,
                  primary_key=True,
                  nullable=False,
                  autoincrement=True),
        sa.Column("scan_id", sa.Integer,
                  sa.ForeignKey("scan.id",
                                ondelete="CASCADE",
                                onupdate="CASCADE"),
                  nullable=False),
        sa.Column("attribute_name", sa.types.VARBINARY(1023),
                  nullable=False),
        sa.Column("attribute_value", sa.types.VARBINARY(1023),
                  nullable=False),
    )

    op.create_table(
        "host_meta_object",
        sa.Column("id", sa.Integer,
                  primary_key=True,
                  nullable=False,
                  autoincrement=True),
        sa.Column("scan_id", sa.Integer,
                  sa.ForeignKey("scan.id",
                                ondelete="CASCADE",
                                onupdate="CASCADE"),
                  nullable=False),
        sa.Column("url", sa.Unicode(1023),
                  nullable=False),
        sa.Column("format", sa.Unicode(32),
                  nullable=False),
    )

    op.create_table(
        "host_meta_link",
        sa.Column("id", sa.Integer,
                  primary_key=True,
                  nullable=False,
                  autoincrement=True),
        sa.Column("object_id", sa.Integer,
                  sa.ForeignKey("host_meta_object.id",
                                ondelete="CASCADE",
                                onupdate="CASCADE"),
                  nullable=False),
        sa.Column("rel", sa.Unicode(1023),
                  nullable=False),
        # NOTE(review): href holds a URL but is only 32 chars wide while
        # rel gets 1023 — the widths look swapped; confirm before release.
        sa.Column("href", sa.Unicode(32),
                  nullable=False),
    )

    op.create_table(
        "endpoint",
        sa.Column("id", sa.Integer(),
                  primary_key=True, autoincrement=True, nullable=False),
        sa.Column("scan_id", sa.Integer(),
                  sa.ForeignKey("scan.id",
                                ondelete="CASCADE", onupdate="CASCADE"),
                  nullable=False),
        sa.Column("transport", sa.Unicode(32),
                  nullable=False),
        # The three nullable FKs record where this endpoint was discovered
        # (SRV record, host-meta link or _xmppconnect TXT record, if any).
        sa.Column("srv_record_id", sa.Integer(),
                  sa.ForeignKey("srv_record.id",
                                ondelete="SET NULL", onupdate="CASCADE")),
        sa.Column("host_meta_link_id", sa.Integer(),
                  sa.ForeignKey("host_meta_link.id",
                                ondelete="SET NULL", onupdate="CASCADE")),
        sa.Column("xmppconnect_record_id", sa.Integer(),
                  sa.ForeignKey("xmppconnect_record.id",
                                ondelete="SET NULL", onupdate="CASCADE")),
    )

    op.create_table(
        "endpoint_tcp",
        sa.Column("endpoint_id", sa.Integer(),
                  sa.ForeignKey("endpoint.id",
                                ondelete="CASCADE", onupdate="CASCADE"),
                  primary_key=True, nullable=False),
        sa.Column("tls_mode", sa.Unicode(32),
                  nullable=False),
        sa.Column("hostname", sa.types.VARBINARY(255),
                  nullable=False),
        sa.Column("port", sa.Integer(),
                  nullable=False),
    )

    op.create_table(
        "endpoint_http",
        sa.Column("endpoint_id", sa.Integer(),
                  sa.ForeignKey("endpoint.id",
                                ondelete="CASCADE", onupdate="CASCADE"),
                  primary_key=True, nullable=False),
        sa.Column("url", sa.Unicode(1023),
                  nullable=False),
        sa.Column("http_mode", sa.Unicode(32),
                  nullable=False),
    )

    op.create_table(
        "tls_offering",
        sa.Column("endpoint_id", sa.Integer,
                  sa.ForeignKey("endpoint.id",
                                ondelete="CASCADE", onupdate="CASCADE"),
                  primary_key=True,
                  nullable=False),
        # NULL = not probed yet; True/False = protocol offered or not.
        sa.Column("sslv2", sa.Boolean(),
                  nullable=True),
        sa.Column("sslv3", sa.Boolean(),
                  nullable=True),
        sa.Column("tlsv1", sa.Boolean(),
                  nullable=True),
        sa.Column("tlsv1_1", sa.Boolean(),
                  nullable=True),
        sa.Column("tlsv1_2", sa.Boolean(),
                  nullable=True),
        sa.Column("tlsv1_3", sa.Boolean(),
                  nullable=True),
        sa.Column("server_cipher_order", sa.Boolean(),
                  nullable=True),
    )

    op.create_table(
        "certificate",
        sa.Column("id", sa.Integer,
                  primary_key=True,
                  nullable=False,
                  autoincrement=True),
        sa.Column("fp_sha1", sa.types.VARBINARY(20),
                  nullable=False),
        sa.Column("fp_sha256", sa.types.VARBINARY(32),
                  nullable=False),
        sa.Column("fp_sha512", sa.types.VARBINARY(64),
                  nullable=False),
        sa.Column("raw_der", sa.types.VARBINARY(8192),
                  nullable=False),
        sa.Column("not_before", sa.DateTime(),
                  nullable=False),
        sa.Column("not_after", sa.DateTime(),
                  nullable=False),
        sa.Column("public_key", sa.types.VARBINARY(2048),
                  nullable=False),
        sa.Column("public_key_type", sa.Unicode(128),
                  nullable=False),
        sa.Column("subject", sa.Unicode(1024),
                  nullable=False),
        sa.Column("issuer", sa.Unicode(1024),
                  nullable=False),
    )
    op.create_index(
        "certificate_ix_fingerprint_sha1",
        "certificate",
        ["fp_sha1"],
    )
    op.create_index(
        "certificate_ix_fingerprint_sha256",
        "certificate",
        ["fp_sha256"],
    )
    op.create_index(
        "certificate_ix_fingerprint_sha512",
        "certificate",
        ["fp_sha512"],
    )
    op.create_index(
        "certificate_ix_fp_sha1_2",
        "certificate",
        ["fp_sha1", "fp_sha256", "fp_sha512"],
    )

    op.create_table(
        "san",
        sa.Column("id", sa.Integer(),
                  primary_key=True,
                  nullable=False,
                  autoincrement=True),
        sa.Column("certificate_id", sa.Integer,
                  sa.ForeignKey("certificate.id",
                                ondelete="CASCADE", onupdate="CASCADE"),
                  nullable=False),
        sa.Column("type_id", sa.Integer,
                  sa.ForeignKey("san_type.id",
                                ondelete="RESTRICT", onupdate="CASCADE"),
                  nullable=False),
        sa.Column("value", sa.Unicode(256),
                  nullable=False),
    )
    op.create_index(
        "san_ix_certificate_san_type",
        "san",
        ["certificate_id", "type_id"],
    )

    op.create_table(
        "certificate_offering",
        sa.Column("endpoint_id", sa.Integer(),
                  sa.ForeignKey("endpoint.id",
                                ondelete="CASCADE", onupdate="CASCADE"),
                  primary_key=True,
                  nullable=False),
        # Position of the certificate within the presented chain.
        sa.Column("chain_index", sa.Integer(),
                  primary_key=True,
                  nullable=False),
        sa.Column("certificate_id", sa.Integer(),
                  sa.ForeignKey("certificate.id",
                                ondelete="CASCADE", onupdate="CASCADE"),
                  primary_key=False,
                  nullable=False),
    )
    op.create_index(
        "certificate_offering_ix_endpoint_id",
        "certificate_offering",
        ["endpoint_id"],
    )

    op.create_table(
        "cipher_offering",
        sa.Column("endpoint_id", sa.Integer(),
                  sa.ForeignKey("endpoint.id",
                                ondelete="CASCADE", onupdate="CASCADE"),
                  primary_key=True,
                  nullable=False),
        sa.Column("cipher_id", sa.Integer(),
                  sa.ForeignKey("cipher_metadata.id",
                                ondelete="RESTRICT", onupdate="CASCADE"),
                  primary_key=True,
                  nullable=False),
        sa.Column("key_exchange_info",
                  sa.Unicode(127),
                  nullable=True),
    )

    op.create_table(
        "cipher_offering_order",
        sa.Column(
            "endpoint_id",
            sa.Integer(),
            primary_key=True,
            nullable=False,
        ),
        sa.Column(
            "cipher_id",
            sa.Integer(),
            primary_key=True,
            nullable=False,
        ),
        sa.Column(
            "tls_version",
            sa.Unicode(32),
            primary_key=True,
            nullable=False,
        ),
        sa.Column(
            "order",
            sa.Integer(),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["endpoint_id", "cipher_id"],
            ["cipher_offering.endpoint_id", "cipher_offering.cipher_id"],
        ),
    )
    op.create_index(
        "cipher_offering_order_ix_endpoint_id",
        "cipher_offering_order",
        ["endpoint_id"],
    )
    op.create_index(
        "cipher_offering_order_ix_endpoint_cipher_id",
        "cipher_offering_order",
        ["endpoint_id", "cipher_id"],
    )

    op.create_table(
        "scan_task",
        # Task IDs are 128-bit (UUID-sized) binary values.
        sa.Column("id", sa.types.BINARY(16),
                  primary_key=True, nullable=False),
        sa.Column("scan_id", sa.Integer(),
                  sa.ForeignKey("scan.id",
                                ondelete="CASCADE", onupdate="CASCADE"),
                  nullable=False),
        sa.Column("type", sa.Unicode(32),
                  nullable=False),
        sa.Column("state", sa.Unicode(32),
                  nullable=False),
        sa.Column("fail_reason", sa.Unicode(32),
                  nullable=True),
        sa.Column("endpoint_id", sa.Integer(),
                  sa.ForeignKey("endpoint.id",
                                ondelete="CASCADE", onupdate="CASCADE"),
                  nullable=True),
        # Last heartbeat from the assigned worker; used for reassignment
        # on timeout (see doc/requirements.md).
        sa.Column("heartbeat", sa.DateTime(),
                  nullable=True),
        sa.Column("assigned_worker", sa.types.BINARY(16),
                  nullable=True),
    )

    op.create_table(
        "scan_task_dependency",
        # scan_task.id is BINARY(16), so the referencing columns must use
        # the same type (they were previously declared sa.Integer, which
        # does not match the referenced column).
        sa.Column("parent_task_id", sa.types.BINARY(16),
                  sa.ForeignKey("scan_task.id",
                                ondelete="CASCADE", onupdate="CASCADE"),
                  primary_key=True, nullable=False),
        sa.Column("child_task_id", sa.types.BINARY(16),
                  sa.ForeignKey("scan_task.id",
                                ondelete="CASCADE", onupdate="CASCADE"),
                  primary_key=True, nullable=False),
    )
    op.create_index(
        "scan_task_dependency_ix_parent",
        "scan_task_dependency",
        ["parent_task_id"],
    )
    op.create_index(
        "scan_task_dependency_ix_child",
        "scan_task_dependency",
        ["child_task_id"],
    )

    op.create_table(
        "endpoint_scan_result",
        sa.Column("endpoint_id", sa.Integer(),
                  sa.ForeignKey("endpoint.id",
                                ondelete="CASCADE", onupdate="CASCADE"),
                  nullable=False, primary_key=True),
        sa.Column("tls_offered", sa.Boolean(),
                  nullable=False),
        sa.Column("tls_negotiated", sa.Boolean(),
                  nullable=False),
        sa.Column("sasl_pre_tls", sa.Boolean(),
                  nullable=False),
        sa.Column("sasl_post_tls", sa.Boolean(),
                  nullable=False),
        sa.Column("errno", sa.Integer(),
                  nullable=True),
        sa.Column("error", sa.Unicode(1023),
                  nullable=True),
    )

    op.create_table(
        "endpoint_scan_sasl_offering",
        sa.Column("endpoint_scan_result_id", sa.Integer(),
                  sa.ForeignKey("endpoint_scan_result.endpoint_id",
                                ondelete="CASCADE", onupdate="CASCADE"),
                  nullable=False, primary_key=True),
        sa.Column("sasl_mechanism_id", sa.Integer(),
                  sa.ForeignKey("sasl_mechanism.id",
                                ondelete="RESTRICT", onupdate="CASCADE"),
                  nullable=False, primary_key=True),
        # "phase" distinguishes pre-TLS from post-TLS SASL offers.
        sa.Column("phase", sa.Unicode(32),
                  nullable=False, primary_key=True),
    )
    op.create_index(
        "endpoint_scan_sasl_offering_ix_scan_phase",
        "endpoint_scan_sasl_offering",
        ["endpoint_scan_result_id", "phase"],
    )
    op.create_index(
        "endpoint_scan_sasl_offering_ix_scan",
        "endpoint_scan_sasl_offering",
        ["endpoint_scan_result_id"],
    )
    op.create_index(
        "endpoint_scan_sasl_offering_ix_mechanism",
        "endpoint_scan_sasl_offering",
        ["sasl_mechanism_id"],
    )
    op.create_index(
        "endpoint_scan_sasl_offering_ix_mechanism_phase",
        "endpoint_scan_sasl_offering",
        ["sasl_mechanism_id", "phase"],
    )


def downgrade():
    """Drop everything created by :func:`upgrade`.

    Tables are dropped children-first so the statements also succeed on
    backends which enforce foreign key constraints.

    Fixes relative to the previous draft:

    * the ``cipher_offering_order`` index names now match the ones actually
      created in :func:`upgrade` (``..._ix_endpoint_id`` /
      ``..._ix_endpoint_cipher_id``, not ``..._ix_scan_id`` /
      ``..._ix_scan_cipher_id``);
    * ``sasl_mechanism`` (and its name index) is dropped exactly once — the
      duplicate second drop would have failed;
    * the composite ``certificate_ix_fp_sha1_2`` index is dropped as well;
    * the cipher/TLS result tables are dropped *before* ``endpoint``, which
      they reference.
    """
    op.drop_index("certificate_offering_ix_endpoint_id")
    op.drop_table("certificate_offering")
    op.drop_index("san_ix_certificate_san_type")
    op.drop_table("san")
    op.drop_index("certificate_ix_fp_sha1_2")
    op.drop_index("certificate_ix_fingerprint_sha512")
    op.drop_index("certificate_ix_fingerprint_sha256")
    op.drop_index("certificate_ix_fingerprint_sha1")
    op.drop_table("certificate")

    op.drop_index("endpoint_scan_sasl_offering_ix_mechanism_phase")
    op.drop_index("endpoint_scan_sasl_offering_ix_mechanism")
    op.drop_index("endpoint_scan_sasl_offering_ix_scan")
    op.drop_index("endpoint_scan_sasl_offering_ix_scan_phase")
    op.drop_table("endpoint_scan_sasl_offering")
    op.drop_index("sasl_mechanism_ix_name")
    op.drop_table("sasl_mechanism")
    op.drop_table("endpoint_scan_result")

    op.drop_index("scan_task_dependency_ix_parent")
    op.drop_index("scan_task_dependency_ix_child")
    op.drop_table("scan_task_dependency")
    op.drop_table("scan_task")

    # These reference endpoint.id, so they must go before the endpoint
    # tables.
    op.drop_index("cipher_offering_order_ix_endpoint_cipher_id")
    op.drop_index("cipher_offering_order_ix_endpoint_id")
    op.drop_table("cipher_offering_order")
    op.drop_table("cipher_offering")
    op.drop_table("tls_offering")

    op.drop_table("endpoint_http")
    op.drop_table("endpoint_tcp")
    op.drop_table("endpoint")

    op.drop_table("host_meta_link")
    op.drop_table("host_meta_object")
    op.drop_table("xmppconnect_record")
    op.drop_table("srv_record")
    op.drop_index("scan_ix_recent")
    op.drop_table("scan")

    op.drop_index("san_type_ix_asn1_name")
    op.drop_table("san_type")
    op.drop_index("cipher_metadata_ix_iana_name")
    op.drop_index("cipher_metadata_ix_openssl_name")
    op.drop_table("cipher_metadata")
-------------------------------------------------------------------------------- /build-requirements.txt: -------------------------------------------------------------------------------- 1 | pyscss~=1.3 2 | -------------------------------------------------------------------------------- /doc/design.md: -------------------------------------------------------------------------------- 1 | # Design 2 | 3 | ## Overview 4 | 5 | ### Components 6 | 7 | - Web frontend 8 | - testssl.sh backend 9 | - xmpp-blackbox-exporter backend 10 | - database 11 | - coordinator 12 | 13 | ### Communication 14 | 15 | ZeroMQ 16 | 17 | ### Data flow 18 | 19 | #### Web frontend 20 | 21 | - Sends scan requests (synchronously) to coordinator 22 | - Reads from database 23 | 24 | #### Coordinator 25 | 26 | - Receives scan requests from web frontend 27 | - Dispatches steps to backends (asynchronously) 28 | - Receives results from backends (asynchronously) 29 | - Writes results to database 30 | 31 | #### testssl.sh backend 32 | 33 | - Receives scan requests from coordinator 34 | - Emits results step-by-step 35 | 36 | #### xmpp-blackbox-exporter backend 37 | 38 | - Receives scan requests from coordinator (possibly directly via HTTP) 39 | 40 | 41 | ## Coordinator 42 | 43 | - Central broker between all backends and the database 44 | -------------------------------------------------------------------------------- /doc/privileged_scans.md: -------------------------------------------------------------------------------- 1 | # Privileged Scans 2 | 3 | ## Motivation 4 | 5 | - Offer an easy way for server operators to create extended scans 6 | - Vulnerability scanning should not be the default because it might be an attack in some sense of the law 7 | 8 | ## Properties 9 | 10 | - Allow enabling vulnerability scans 11 | - Allow testssl-scanning all endpoints 12 | - Allow scanning more often than every 30 mins (every 5 mins?) 13 | 14 | ## Authentication 15 | 16 | 1. XMPP based 17 | 18 | 1. 
Ask for subscription to $jid by one of the JIDs published in the contact info 19 | 2. If subscribed, send message with token (valid for 24h or so) 20 | 3. If token valid, allow privileged scan 21 | 22 | 2. DNS based 23 | 24 | 1. Generate token := HMAC(salt || domain name || '\0' || shared secret || salt) 25 | 2. Ask user to put token in TXT record 26 | 3. On scan, ask for input of shared secret 27 | 4. Validate shared secret against TXT record before creating scan 28 | 29 | 3. Admin override 30 | 31 | 1. Secret admin token 32 | -------------------------------------------------------------------------------- /doc/requirements.md: -------------------------------------------------------------------------------- 1 | # Requirements 2 | 3 | ## User Interaction 4 | 5 | - MUST accept domain name as well as s2s vs. c2s input for scan 6 | - MUST NOT block until all scans have finished 7 | - MUST show intermediate results 8 | - MAY support live updates (without reload) of results 9 | - MAY offer statistics based on result values 10 | - MUST provide the following results: 11 | 12 | - SRV record listing (RFC and XEP-0368) 13 | - TLSA record listing 14 | - TLSA validation 15 | - TLS OK/NOT OK result for all endpoints 16 | - TLS versions offered (for one endpoint only) 17 | - Certificate info: chain, SAN, fingerprint (for one endpoint only) 18 | - Cipher list and behaviour (for one endpoint only) 19 | 20 | - SHOULD offer scanning a specific endpoint as a follow-up scan or with initial 21 | parameter (NOTE: requires domain and SRV delegation for security) 22 | - MUST support IPv4 and IPv6 23 | 24 | ## Backend 25 | 26 | - SHOULD NOT lose in-flight and queued scans on restart of any component 27 | - MUST NOT require a replicated or network-reachable database, if all 28 | components live on the same machine 29 | - SHOULD NOT require a replicated or network-reachable database, if frontend 30 | and queue manager run on the same machine 31 | - SHOULD support multiple workers of each 
type in separate processes 32 | 33 | # Solutions 34 | 35 | ## SHOULD NOT lose in-flight and queued scans on restart 36 | 37 | - State is recorded in database 38 | - On system start, state is synchronised from database to workers 39 | 40 | - Open question: how to prevent multiple workers from taking the same job 41 | on startup? 42 | 43 | - Workers pull jobs and heartbeat that they are working on a job as long as 44 | they are 45 | - Workers report intermediate results when available 46 | - Workers report completion or abortion of tasks 47 | - Coordinator reassigns tasks on heartbeat timeout 48 | -------------------------------------------------------------------------------- /doc/scans.md: -------------------------------------------------------------------------------- 1 | # Scans 2 | 3 | - SRV Endpoint Discovery 4 | 5 | - Check for CNAMEs 6 | - Check if all endpoints have the advertised ports open 7 | 8 | - A/AAAA fallback test (c2s only) 9 | 10 | - Check if :5222 is open and responds to XMPP correctly, to handle weird 11 | DNS problems 12 | 13 | - _xmppconnect TXT check, also the .well-known thing 14 | 15 | - TLSA Record Discovery on all endpoints 16 | - TLS Scan: classic testssl.sh 17 | - Trust Check: Validate certificate chain against trust stores 18 | - DANE Check: Validate TLSA records against Certificates of all endpoints 19 | - Stream Features: 20 | 21 | - Scrape SASL pre- and post-TLS 22 | - With client certificate to allow discovery of EXTERNAL (s2s only) 23 | - Maybe even post-Auth if EXTERNAL is possible (s2s only) 24 | 25 | - Ping Check (s2s only): 26 | 27 | - Use two or three accounts to ping 28 | 29 | - TLS tolerance checks (s2s only): 30 | 31 | - Use badxmpp-style stuff and see if pings pass 32 | 33 | - Disco#info: 34 | 35 | - Obtain and list features 36 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: 
'3' 2 | 3 | services: 4 | testssl: 5 | image: testxmpp/testssl:latest 6 | build: 7 | context: . 8 | dockerfile: docker/testssl.Dockerfile 9 | network_mode: host 10 | command: 11 | - "-vvv" 12 | environment: 13 | TESTXMPP_COORDINATOR_URI: "tcp://127.0.0.1:5001" 14 | coordinator: 15 | image: testxmpp/coordinator:latest 16 | build: 17 | context: . 18 | dockerfile: docker/coordinator.Dockerfile 19 | user: "1000" 20 | network_mode: host 21 | command: 22 | - "-vvv" 23 | environment: 24 | TESTXMPP_DB_URI: sqlite:////tmp/src/test.sqlite 25 | volumes: 26 | - "./:/tmp/src/" 27 | xmpp: 28 | image: testxmpp/xmpp:latest 29 | build: 30 | context: . 31 | dockerfile: docker/xmpp.Dockerfile 32 | network_mode: host 33 | command: 34 | - "-vvv" 35 | environment: 36 | TESTXMPP_COORDINATOR_URI: "tcp://127.0.0.1:5001" 37 | TESTXMPP_S2S_FROM: tbd.dreckshal.de 38 | web: 39 | image: testxmpp/web:latest 40 | build: 41 | context: . 42 | dockerfile: docker/web.Dockerfile 43 | network_mode: host 44 | environment: 45 | TESTXMPP_COORDINATOR_URI: "tcp://127.0.0.1:5001" 46 | TESTXMPP_DB_URI: sqlite:////tmp/src/test.sqlite 47 | volumes: 48 | - "./:/tmp/src/" 49 | -------------------------------------------------------------------------------- /docker/coordinator.Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:bullseye-slim 2 | 3 | ARG uid=36919 4 | 5 | RUN apt-get update && \ 6 | DEBIAN_FRONTEND=noninteractive apt-get dist-upgrade -y && \ 7 | DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends --no-install-suggests python3-pip git && \ 8 | apt-get clean && \ 9 | rm -rf /var/lib/apt/lists/* 10 | 11 | COPY setup.py MANIFEST.in alembic.ini /src/ 12 | COPY testxmpp /src/testxmpp 13 | COPY alembic /src/alembic 14 | RUN cd /src && pip3 install '.[coordinator]' && pip3 install alembic && rm -rf /root/.cache 15 | COPY docker/coordinator.sh /coordinator.sh 16 | 17 | USER $uid 18 | 19 | ENTRYPOINT ["/coordinator.sh"] 20 | 
-------------------------------------------------------------------------------- /docker/coordinator.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | pushd /src >/dev/null 4 | python3 -m alembic upgrade head 5 | popd >/dev/null 6 | exec python3 -m testxmpp.coordinator "$@" 7 | -------------------------------------------------------------------------------- /docker/testssl.Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:bullseye-slim 2 | 3 | ARG uid=36919 4 | 5 | RUN apt-get update && \ 6 | DEBIAN_FRONTEND=noninteractive apt-get dist-upgrade -y && \ 7 | DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends --no-install-suggests python3-pip git && \ 8 | apt-get clean && \ 9 | rm -rf /var/lib/apt/lists/* 10 | 11 | RUN apt-get update && \ 12 | DEBIAN_FRONTEND=noninteractive apt-get dist-upgrade -y && \ 13 | DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends --no-install-suggests bsdmainutils dnsutils procps && \ 14 | apt-get clean && \ 15 | rm -rf /var/lib/apt/lists/* 16 | 17 | ARG testssl_branch=3.1dev 18 | 19 | RUN cd /tmp && git clone --depth 1 --branch $testssl_branch https://github.com/drwetter/testssl.sh && \ 20 | cd testssl.sh/ && git log --oneline | head -n1 && cd .. 
&& \ 21 | mkdir -p /opt/testssl/ && \ 22 | cp -r /tmp/testssl.sh/etc /tmp/testssl.sh/testssl.sh /opt/testssl/ && \ 23 | ln -s /opt/testssl/testssl.sh /usr/local/bin/testssl && \ 24 | cd / && \ 25 | rm -rf /tmp/testssl.sh 26 | 27 | ENV TESTSSL_INSTALL_DIR="/opt/testssl" 28 | ENV TESTXMPP_TESTSSL="/usr/local/bin/testssl" 29 | ENV TESTXMPP_OPENSSL_PATH="/usr/bin/openssl" 30 | 31 | COPY setup.py /src/ 32 | COPY MANIFEST.in /src/ 33 | COPY testxmpp /src/testxmpp 34 | RUN cd /src && pip install '.[testssl]' 35 | 36 | USER $uid 37 | 38 | ENTRYPOINT ["python3", "-m", "testxmpp.testssl"] 39 | -------------------------------------------------------------------------------- /docker/web.Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:bullseye-slim 2 | 3 | ARG uid=36919 4 | 5 | RUN apt-get update && \ 6 | DEBIAN_FRONTEND=noninteractive apt-get dist-upgrade -y && \ 7 | DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends --no-install-suggests python3-pip git && \ 8 | apt-get clean && \ 9 | rm -rf /var/lib/apt/lists/* 10 | 11 | RUN apt-get update && \ 12 | DEBIAN_FRONTEND=noninteractive apt-get dist-upgrade -y && \ 13 | DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends --no-install-suggests make && \ 14 | apt-get clean && \ 15 | rm -rf /var/lib/apt/lists/* 16 | 17 | ARG uid=36919 18 | 19 | COPY setup.py build-requirements.txt MANIFEST.in Makefile /src/ 20 | COPY testxmpp /src/testxmpp 21 | RUN cd /src && \ 22 | pip install -r build-requirements.txt && \ 23 | make build_css && \ 24 | pip install '.[web]' && \ 25 | pip install hypercorn && \ 26 | cd / && \ 27 | rm -rf /src /root/.cache 28 | 29 | USER $uid 30 | 31 | ENTRYPOINT ["hypercorn", "-b", "::", "testxmpp.web:create_app()"] 32 | -------------------------------------------------------------------------------- /docker/xmpp.Dockerfile: -------------------------------------------------------------------------------- 1 | FROM 
debian:bullseye-slim 2 | 3 | ARG uid=36919 4 | 5 | RUN apt-get update && \ 6 | DEBIAN_FRONTEND=noninteractive apt-get dist-upgrade -y && \ 7 | DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends --no-install-suggests python3-pip git && \ 8 | apt-get clean && \ 9 | rm -rf /var/lib/apt/lists/* 10 | 11 | ARG aioxmpp_branch=devel 12 | 13 | RUN cd /tmp && git clone --depth 1 --branch $aioxmpp_branch https://github.com/horazont/aioxmpp && \ 14 | cd aioxmpp/ && \ 15 | pip3 install . && \ 16 | cd / && \ 17 | rm -rf /tmp/aioxmpp 18 | 19 | RUN sed -ri 's/SECLEVEL=\d+/SECLEVEL=0/;s/MinProtocol\s*=/MinProtocol = SSLv2/' /etc/ssl/openssl.cnf 20 | 21 | COPY setup.py /src/ 22 | COPY MANIFEST.in /src/ 23 | COPY testxmpp /src/testxmpp 24 | RUN cd /src && pip3 install . 25 | 26 | USER $uid 27 | 28 | ENTRYPOINT ["python3", "-m", "testxmpp.xmpp"] 29 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | setup( 4 | name="testxmpp", 5 | version="0.0.1", 6 | packages=find_packages(), 7 | install_requires=[ 8 | "PyZMQ~=19.0", 9 | "schema~=0.7.2", 10 | "sqlalchemy~=1.3", 11 | "dnspython~=2.0", 12 | "environ-config~=20.1", 13 | "aiohttp", 14 | "defusedxml", 15 | 'pyasn1', 16 | 'pyasn1_modules', 17 | ], 18 | extras_require={ 19 | 'web': [ 20 | "Quart~=0.13", 21 | "Flask-SQLAlchemy~=2.4", 22 | "Flask-Babel~=2.0", 23 | "Flask-WTF~=0.14", 24 | ], 25 | 'xmpp': [ 26 | "aioxmpp~=0.11", 27 | ], 28 | 'testssl': [ 29 | ], 30 | 'coordinator': [ 31 | "aioxmpp~=0.11", 32 | ], 33 | }, 34 | include_package_data=True, 35 | ) 36 | -------------------------------------------------------------------------------- /testxmpp/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/xmpp-observatory/testxmpp/64dae01de0ec59650694afc90289e2ff59a88383/testxmpp/__init__.py -------------------------------------------------------------------------------- /testxmpp/api/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /testxmpp/api/common.py: -------------------------------------------------------------------------------- 1 | import enum 2 | 3 | from schema import Schema, Or 4 | 5 | 6 | class ErrorCode(enum.Enum): 7 | BAD_REQUEST = 400 8 | TOO_MANY_REQUESTS = 429 9 | 10 | INTERNAL_ERROR = 500 11 | 12 | 13 | error = Schema({ 14 | "code": Or(*(ec.value for ec in ErrorCode)), 15 | "message": Or(str, None), 16 | }) 17 | 18 | 19 | def mkerror(code, message=None): 20 | return { 21 | "code": code.value, 22 | "message": message or None, 23 | } 24 | -------------------------------------------------------------------------------- /testxmpp/api/coordinator.py: -------------------------------------------------------------------------------- 1 | import enum 2 | 3 | from schema import Schema, Or 4 | 5 | from .common import error 6 | 7 | 8 | def _worker_id(s): 9 | if not isinstance(s, str): 10 | return False 11 | return 32 <= len(s) <= 128 12 | 13 | 14 | class RequestType(enum.Enum): 15 | PING = "ping" 16 | 17 | # From the frontend 18 | SCAN_DOMAIN = "scan_domain" 19 | 20 | # From workers 21 | JOB_HEARTBEAT = "job-heartbeat" 22 | 23 | # From testssl workers 24 | GET_TESTSSL_JOB = "get-testssl-job" 25 | TESTSSL_RESULT_PUSH = "testssl-result-push" 26 | TESTSSL_COMPLETE = "testssl-complete" 27 | 28 | # From xmpp workers 29 | GET_XMPP_JOB = "get-xmpp-job" 30 | XMPP_COMPLETE = "xmpp-complete" 31 | 32 | 33 | class ResponseType(enum.Enum): 34 | ERROR = "error" 35 | PONG = "pong" 36 | OK = "ok" 37 | NO_TASKS = "no-tasks" 38 | 39 | SCAN_QUEUED = "scan-queued" 40 | 41 | GET_TESTSSL_JOB = "get-testssl-job" 42 | 
GET_XMPP_JOB = "get-xmpp-job" 43 | JOB_CONFIRMATION = "job-confirmation" 44 | 45 | 46 | cipher_info = Schema({ 47 | "id": int, 48 | "openssl_name": str, 49 | "key_exchange": str, 50 | "symmetric_cipher": { 51 | "name": str, 52 | "bits": int, 53 | }, 54 | "iana_name": str, 55 | }) 56 | 57 | cert_name = Schema([ 58 | { 59 | str: str, 60 | } 61 | ]) 62 | 63 | certificate_info = Schema({ 64 | "subject": cert_name, 65 | "issuer": cert_name, 66 | "subject_alt_names": Or({ 67 | str: [str], 68 | }, None), 69 | "not_before": str, 70 | "not_after": str, 71 | "public_key": str, 72 | "public_key_type": str, 73 | "fingerprints": { 74 | str: str, 75 | }, 76 | }) 77 | 78 | gen_echo = Schema({}) 79 | 80 | req_scan_domain = Schema({ 81 | "domain": str, 82 | "protocol": Or("c2s", "s2s"), 83 | }) 84 | 85 | req_get_testssl_job = Schema({ 86 | "worker_id": _worker_id, 87 | }) 88 | 89 | req_job_heartbeat = Schema({ 90 | "worker_id": _worker_id, 91 | "job_id": str, 92 | }) 93 | 94 | _testssl_data = Schema(Or( 95 | { 96 | "type": "tls_versions", 97 | "tls_versions": { 98 | str: bool, 99 | }, 100 | }, 101 | { 102 | "type": "cipherlists", 103 | "cipherlists": { 104 | str: [str], 105 | } 106 | }, 107 | { 108 | "type": "server_cipher_order", 109 | "server_cipher_order": bool, 110 | }, 111 | { 112 | "type": "cipher_info", 113 | "cipher": cipher_info, 114 | }, 115 | { 116 | "type": "certificate", 117 | "certificate": { 118 | "info": certificate_info, 119 | "raw_der": str, 120 | }, 121 | }, 122 | { 123 | "type": "intermediate_certificate", 124 | "certificate": { 125 | "index": int, 126 | "info": certificate_info, 127 | "raw_der": str, 128 | }, 129 | }, 130 | )) 131 | 132 | req_testssl_result_push = Schema({ 133 | "worker_id": _worker_id, 134 | "job_id": str, 135 | "testssl_data": _testssl_data, 136 | }) 137 | 138 | req_testssl_complete = Schema({ 139 | "worker_id": _worker_id, 140 | "job_id": str, 141 | "testssl_result": { 142 | "tls_versions": {str: bool}, 143 | "cipherlists": {str: [str]}, 144 
| "certificate": { 145 | "info": certificate_info, 146 | "raw_der": str, 147 | }, 148 | "intermediate_certificates": [{ 149 | "index": int, 150 | "info": certificate_info, 151 | "raw_der": str, 152 | }], 153 | "server_cipher_order": bool, 154 | "ciphers": [cipher_info], 155 | } 156 | }) 157 | 158 | req_get_xmpp_job = Schema({ 159 | "worker_id": _worker_id, 160 | }) 161 | 162 | rep_ok = Schema({}) 163 | 164 | rep_scan_queued = Schema({ 165 | "scan_id": int, 166 | }) 167 | 168 | rep_get_testssl_job = Schema({ 169 | "job_id": str, 170 | "domain": str, 171 | "hostname": str, 172 | "port": int, 173 | "protocol": Or("c2s", "s2s"), 174 | "tls_mode": Or("starttls", "direct"), 175 | }) 176 | 177 | rep_get_xmpp_job = Schema({ 178 | "job_id": str, 179 | "job": Schema(Or( 180 | { 181 | "type": "features", 182 | "domain": str, 183 | "hostname": str, 184 | "port": int, 185 | "protocol": Or("c2s", "s2s"), 186 | "tls_mode": Or("starttls", "direct"), 187 | }, 188 | )), 189 | }) 190 | 191 | req_xmpp_complete = Schema({ 192 | "worker_id": str, 193 | "job_id": str, 194 | "xmpp_result": { 195 | "tls_offered": bool, 196 | "tls_negotiated": bool, 197 | "error": Or(None, str), 198 | "errno": Or(None, int), 199 | "pre_tls_sasl_mechanisms": Or(None, [str]), 200 | "post_tls_sasl_mechanisms": Or(None, [str]), 201 | } 202 | }) 203 | 204 | rep_no_tasks = Schema({ 205 | "ask_again_after": int, 206 | }) 207 | 208 | rep_job_confirmation = Schema({ 209 | "continue": bool, 210 | }) 211 | 212 | _V1_API_VERSION = "coordinator/v1" 213 | 214 | api_request = Schema(Or( 215 | { 216 | "api_version": _V1_API_VERSION, 217 | "type": RequestType.PING.value, 218 | "payload": gen_echo, 219 | }, 220 | { 221 | "api_version": _V1_API_VERSION, 222 | "type": RequestType.SCAN_DOMAIN.value, 223 | "payload": req_scan_domain, 224 | }, 225 | { 226 | "api_version": _V1_API_VERSION, 227 | "type": RequestType.GET_TESTSSL_JOB.value, 228 | "payload": req_get_testssl_job, 229 | }, 230 | { 231 | "api_version": _V1_API_VERSION, 
232 | "type": RequestType.JOB_HEARTBEAT.value, 233 | "payload": req_job_heartbeat, 234 | }, 235 | { 236 | "api_version": _V1_API_VERSION, 237 | "type": RequestType.TESTSSL_RESULT_PUSH.value, 238 | "payload": req_testssl_result_push, 239 | }, 240 | { 241 | "api_version": _V1_API_VERSION, 242 | "type": RequestType.TESTSSL_COMPLETE.value, 243 | "payload": req_testssl_complete, 244 | }, 245 | { 246 | "api_version": _V1_API_VERSION, 247 | "type": RequestType.GET_XMPP_JOB.value, 248 | "payload": req_get_xmpp_job, 249 | }, 250 | { 251 | "api_version": _V1_API_VERSION, 252 | "type": RequestType.XMPP_COMPLETE.value, 253 | "payload": req_xmpp_complete, 254 | }, 255 | )) 256 | 257 | api_response = Schema(Or( 258 | { 259 | "api_version": _V1_API_VERSION, 260 | "type": ResponseType.PONG.value, 261 | "payload": gen_echo, 262 | }, 263 | { 264 | "api_version": _V1_API_VERSION, 265 | "type": ResponseType.ERROR.value, 266 | "payload": error, 267 | }, 268 | { 269 | "api_version": _V1_API_VERSION, 270 | "type": ResponseType.SCAN_QUEUED.value, 271 | "payload": rep_scan_queued, 272 | }, 273 | { 274 | "api_version": _V1_API_VERSION, 275 | "type": ResponseType.OK.value, 276 | "payload": rep_ok, 277 | }, 278 | { 279 | "api_version": _V1_API_VERSION, 280 | "type": ResponseType.GET_TESTSSL_JOB.value, 281 | "payload": rep_get_testssl_job, 282 | }, 283 | { 284 | "api_version": _V1_API_VERSION, 285 | "type": ResponseType.NO_TASKS.value, 286 | "payload": rep_no_tasks, 287 | }, 288 | { 289 | "api_version": _V1_API_VERSION, 290 | "type": ResponseType.JOB_CONFIRMATION.value, 291 | "payload": rep_job_confirmation, 292 | }, 293 | { 294 | "api_version": _V1_API_VERSION, 295 | "type": ResponseType.GET_XMPP_JOB.value, 296 | "payload": rep_get_xmpp_job, 297 | }, 298 | )) 299 | 300 | 301 | def mkv1request(type_, payload): 302 | return { 303 | "api_version": _V1_API_VERSION, 304 | "type": type_.value, 305 | "payload": payload, 306 | } 307 | 308 | 309 | def mkv1response(type_, payload): 310 | return { 311 | 
import base64
import dataclasses
import hashlib
import typing
from datetime import datetime

import pyasn1.codec.der.decoder
import pyasn1.codec.der.encoder
import pyasn1.type.base
import pyasn1_modules.rfc5280 as cert_rfc


# Map well-known X.509 attribute-type OIDs to their conventional short names.
OID_TO_SHORTNAME = {
    str(cert_rfc.id_at_name): "name",
    str(cert_rfc.id_at_surname): "surname",
    str(cert_rfc.id_at_givenName): "givenName",
    str(cert_rfc.id_at_initials): "initials",
    str(cert_rfc.id_at_generationQualifier): "generationQualifier",
    str(cert_rfc.id_at_commonName): "commonName",
    str(cert_rfc.id_at_localityName): "localityName",
    str(cert_rfc.id_at_stateOrProvinceName): "stateOrProvinceName",
    str(cert_rfc.id_at_organizationName): "organizationName",
    str(cert_rfc.id_at_organizationalUnitName): "organizationalUnitName",
    str(cert_rfc.id_at_title): "title",
    str(cert_rfc.id_at_dnQualifier): "dnQualifier",
    str(cert_rfc.id_at_countryName): "countryName",
    str(cert_rfc.id_at_serialNumber): "serialNumber",
    str(cert_rfc.id_at_pseudonym): "pseudonym",
    str(cert_rfc.id_domainComponent): "domainComponent",
    str(cert_rfc.id_emailAddress): "emailAddress",
}


def get_default_name(nm: typing.Any) -> str:
    """Render a decoded attribute value using its default str() form."""
    return str(nm)


def get_choice_name(cn: typing.Any) -> str:
    """Render a CHOICE value by stringifying its first set component.

    Returns the empty string when no component carries a value.
    """
    for comp in cn.components:
        if comp is pyasn1.type.base.noValue:
            continue
        return str(comp)
    return ""


# Attribute types whose values are CHOICE objects need special rendering.
OID_TO_LOOKUPFN = {
    cert_rfc.id_at_commonName: get_choice_name,
    cert_rfc.id_at_organizationName: get_choice_name,
}


@dataclasses.dataclass
class CertInfo:
    """JSON-serialisable summary of an X.509 certificate."""

    # Subject / issuer as an RDN sequence: one {oid: value} mapping per RDN.
    subject: typing.Sequence[typing.Mapping[str, str]]
    issuer: typing.Sequence[typing.Mapping[str, str]]
    # Mapping of GeneralName kind (e.g. "dNSName") to values, or None if
    # the certificate carries no subjectAltName extension.
    subject_alt_names: typing.Optional[
        typing.Mapping[str, typing.Collection[str]]
    ]
    not_before: datetime
    not_after: datetime
    public_key: bytes
    public_key_type: str
    # Mapping of hash algorithm name to raw digest bytes.
    fingerprints: typing.Mapping[str, bytes]

    def to_json(self) -> typing.Mapping[str, typing.Any]:
        """Serialise to a JSON-compatible dict (see api schema)."""
        return {
            "subject": self.subject,
            "issuer": self.issuer,
            "subject_alt_names": self.subject_alt_names,
            "not_before": self.not_before.isoformat(),
            "not_after": self.not_after.isoformat(),
            "public_key": base64.b64encode(self.public_key).decode("ascii"),
            "public_key_type": self.public_key_type,
            "fingerprints": {
                # Digests are rendered as colon-separated lowercase hex.
                type_: ":".join("{:02x}".format(octet) for octet in fp)
                for type_, fp in self.fingerprints.items()
            },
        }

    @classmethod
    def from_json(cls, data: typing.Mapping[str, typing.Any]) -> "CertInfo":
        """Inverse of :meth:`to_json`."""
        return cls(
            subject=data["subject"],
            issuer=data["issuer"],
            subject_alt_names=data["subject_alt_names"],
            not_before=datetime.fromisoformat(data["not_before"]),
            not_after=datetime.fromisoformat(data["not_after"]),
            public_key=base64.b64decode(data["public_key"]),
            public_key_type=data["public_key_type"],
            fingerprints={
                type_: bytes(
                    int(hexoctet, 16)
                    for hexoctet in hexdigest.split(":")
                )
                for type_, hexdigest in data["fingerprints"].items()
            },
        )


def unwrap_pem(blob: str) -> bytes:
    """Extract and base64-decode the first PEM certificate in *blob*.

    :raises ValueError: if no PEM certificate header or footer is found.
    """
    HEAD = "-----BEGIN CERTIFICATE-----"
    FOOT = "-----END CERTIFICATE-----"
    startidx = blob.find(HEAD)
    # BUG FIX: str.find returns -1 (never None) when the needle is absent,
    # so the previous `is None` check could never trigger.
    if startidx == -1:
        raise ValueError("input does not contain a certificate header")
    blob = blob[(startidx+len(HEAD)):]
    endidx = blob.find(FOOT)
    # Previously a missing footer silently truncated the last character
    # (blob[:-1]); reject such malformed input explicitly instead.
    if endidx == -1:
        raise ValueError("input does not contain a certificate footer")
    blob = blob[:endidx]
    return base64.b64decode(blob)


def decode_cert_der(blob: bytes) -> cert_rfc.Certificate:
    """Decode a DER-encoded certificate into a pyasn1 Certificate."""
    return pyasn1.codec.der.decoder.decode(
        blob,
        cert_rfc.Certificate(),
    )[0]


def decode_pem(blob: str) -> cert_rfc.Certificate:
    """Decode a PEM-armoured certificate into a pyasn1 Certificate."""
    der = unwrap_pem(blob)
    return decode_cert_der(der)


def decode_name(
        name: cert_rfc.Name,
) -> typing.Sequence[typing.Mapping[str, str]]:
    """Decode an X.509 Name into a sequence of {oid: value} mappings.

    :raises ValueError: for attribute types not in pyasn1's attribute map.
    """
    result = []
    # A Name is a Choice of just a single option: rdnSequence
    rdn_sequence = name.getComponentByName("rdnSequence")
    # A RDNSequence is a sequence of RelativeDistinguishedNames
    for rdn in rdn_sequence:
        # A RelativeDistinguishedName is a set of AttributeTypeAndValue
        # objects
        kv = {}
        for atav in rdn:
            # An AttributeTypeAndValue is magic.
            type_oidish = atav.getComponentByName("type")
            try:
                type_ = cert_rfc.certificateAttributesMap[type_oidish]
            except KeyError:
                raise ValueError(
                    f"unsupported name attribute type: {type_oidish}",
                )
            lookupfn = OID_TO_LOOKUPFN.get(type_oidish, get_default_name)
            values = pyasn1.codec.der.decoder.decode(
                atav.getComponentByName("value"),
                type_,
            )
            value, _ = values
            kv[str(type_oidish)] = lookupfn(value)
        result.append(kv)
    return result


def get_extension(
        exts: cert_rfc.Extensions,
        oid: cert_rfc.univ.ObjectIdentifier) -> typing.Any:
    """Return the decoded extension value for *oid*, or None if absent."""
    for ext in exts:
        if ext.getComponentByName("extnID") != oid:
            continue
        values = pyasn1.codec.der.decoder.decode(
            ext.getComponentByName("extnValue"),
            cert_rfc.certificateExtensionsMap[oid],
        )
        value, _ = values
        return value
    return None


def extract_alt_names(
        names: cert_rfc.GeneralNames,
) -> typing.Mapping[str, typing.Collection[str]]:
    """Group GeneralNames by kind (e.g. "dNSName") into string lists."""
    result = {}
    for name in names:
        for k in name.keys():
            v = name.getComponentByName(k)
            result.setdefault(k, []).append(str(v))
    return result


def get_subject_alt_names(
        extensions: cert_rfc.Extensions,
) -> typing.Optional[typing.Mapping[str, typing.Collection[str]]]:
    """Return grouped subjectAltName entries, or None if not present."""
    san_ext: cert_rfc.SubjectAltName = get_extension(
        extensions, cert_rfc.id_ce_subjectAltName,
    )
    if san_ext is None:
        return None
    return extract_alt_names(san_ext)


def time_to_datetime(t: cert_rfc.Time) -> datetime:
    """Convert an X.509 Time (utcTime or generalTime) to datetime."""
    utc_time = t.getComponentByName("utcTime")
    if utc_time is pyasn1.type.base.noValue:
        return t.getComponentByName("generalTime").asDateTime
    return utc_time.asDateTime


def extract_cert_info(cert: cert_rfc.Certificate) -> CertInfo:
    """Summarise a decoded certificate into a :class:`CertInfo`.

    Note: the parameter is a decoded pyasn1 Certificate, not raw DER
    (the previous ``bytes`` annotation was wrong — the body re-encodes
    it for fingerprinting and walks its components).
    """
    # Fingerprints are computed over the canonical DER encoding.
    cert_bytes = pyasn1.codec.der.encoder.encode(cert)
    fingerprints = {}
    for algo in ["sha1", "sha256", "sha512"]:
        hashfun = hashlib.new(algo)
        hashfun.update(cert_bytes)
        fingerprints[algo] = hashfun.digest()
    tbs = cert.getComponentByName("tbsCertificate")
    subject = decode_name(tbs.getComponentByName("subject"))
    issuer = decode_name(tbs.getComponentByName("issuer"))
    extensions = tbs.getComponentByName("extensions")
    validity = tbs.getComponentByName("validity")
    not_before = time_to_datetime(validity.getComponentByName("notBefore"))
    not_after = time_to_datetime(validity.getComponentByName("notAfter"))
    pk_info = tbs.getComponentByName("subjectPublicKeyInfo")
    pk_algo = str(
        pk_info.getComponentByName("algorithm")
        .getComponentByName("algorithm")
    )
    pk = pk_info.getComponentByName("subjectPublicKey").asOctets()
    return CertInfo(
        subject=subject,
        issuer=issuer,
        subject_alt_names=get_subject_alt_names(extensions),
        not_before=not_before,
        not_after=not_after,
        public_key=pk,
        public_key_type=pk_algo,
        fingerprints=fingerprints,
    )


if __name__ == "__main__":
    import argparse
    import json
    import sys

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "cert",
        type=argparse.FileType("r", encoding="ascii"),
    )

    args = parser.parse_args()

    with args.cert as f:
        cert = decode_pem(f.read())

    json.dump(extract_cert_info(cert).to_json(), sys.stdout, indent=2)
import asyncio
import pprint

import zmq.asyncio

import testxmpp.api.coordinator as coordinator_api


async def do_ping(args, sock):
    """Send a PING request to the coordinator and pretty-print the reply."""
    ping = coordinator_api.mkv1request(
        coordinator_api.RequestType.PING,
        {},
    )
    await sock.send_json(ping)
    reply = await sock.recv_json()
    pprint.pprint(reply)


async def do_scan(args, sock):
    """Queue a scan of args.domain over args.protocol; print the reply."""
    payload = {
        "domain": args.domain,
        "protocol": args.protocol,
    }
    await sock.send_json(coordinator_api.mkv1request(
        coordinator_api.RequestType.SCAN_DOMAIN,
        payload,
    ))
    reply = await sock.recv_json()
    pprint.pprint(reply)


async def do_get_testssl_job(args, sock):
    """Fetch a pending testssl job on behalf of args.worker_id."""
    await sock.send_json(coordinator_api.mkv1request(
        coordinator_api.RequestType.GET_TESTSSL_JOB,
        {"worker_id": args.worker_id},
    ))
    reply = await sock.recv_json()
    pprint.pprint(reply)


async def amain(args):
    """Connect a REQ socket to the coordinator and run the chosen command."""
    zctx = zmq.asyncio.Context()
    sock = zctx.socket(zmq.REQ)
    try:
        sock.connect(args.coordinator_url)
        await args.func(args, sock)
    finally:
        sock.close()


def main():
    """Command-line entry point: parse arguments and run the event loop."""
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-C", "--coordinator-url",
        default="tcp://localhost:5001",
    )

    subparsers = parser.add_subparsers()

    ping_parser = subparsers.add_parser("ping")
    ping_parser.set_defaults(func=do_ping)

    scan_parser = subparsers.add_parser("scan")
    scan_parser.set_defaults(func=do_scan)
    scan_parser.add_argument("domain")
    scan_parser.add_argument("protocol", choices=("c2s", "s2s"))

    job_parser = subparsers.add_parser("get-testssl-job")
    job_parser.set_defaults(func=do_get_testssl_job)
    job_parser.add_argument(
        "worker_id",
        nargs="?",
        default="cliclientxxxxxxxxxxxxxxxxxxxxxxx",
    )

    asyncio.run(amain(parser.parse_args()))
import abc
import asyncio
import secrets

import schema

import zmq
import zmq.asyncio

import testxmpp.api.coordinator as coordinator_api


class RequestProcessor(metaclass=abc.ABCMeta):
    """Validate/dispatch/reply loop over a ZeroMQ REP-style socket.

    Each incoming JSON message is validated against *request_schema*,
    dispatched to :meth:`_handle_message`, and the reply is validated
    against *response_schema* before being sent.  Any failure falls back
    to *internal_server_error_response* so the REQ/REP state machine
    never stalls without a reply.
    """

    def __init__(self,
                 request_schema, response_schema,
                 internal_server_error_response,
                 logger):
        super().__init__()
        self._request_schema = request_schema
        self._response_schema = response_schema
        # Validate the fallback reply eagerly so a broken one fails at
        # construction time, not in the middle of error handling.
        self._ise_response = response_schema.validate(
            internal_server_error_response
        )
        self.logger = logger

    @abc.abstractmethod
    async def _handle_message(self, msg):
        """Handle a schema-valid request and return the reply object."""

    @abc.abstractmethod
    async def _handle_schema_error(self, msg, exc):
        """Build the reply for a request which failed schema validation."""

    async def process_one(self, sock):
        """Receive, handle, and answer exactly one request on *sock*."""
        msg = await sock.recv_json()
        try:
            msg = self._request_schema.validate(msg)
        except schema.SchemaError as exc:
            reply = await self._handle_schema_error(
                msg, exc,
            )
        else:
            try:
                reply = await self._handle_message(msg)
            except Exception as exc:
                self.logger.error(
                    "handler failed for message %r",
                    msg,
                    exc_info=True,
                )
                reply = self._ise_response

        try:
            reply = self._response_schema.validate(reply)
        except schema.SchemaError as exc:
            self.logger.error(
                "handler generated invalid response (%s): %r",
                exc, reply,
            )
            reply = self._ise_response

        await sock.send_json(reply)

    async def run(self, sock):
        """Serve requests on *sock* forever."""
        while True:
            await self.process_one(sock)


class NoJob(Exception):
    """Raised when the coordinator has no job available right now."""

    def __init__(self, wait_time):
        super().__init__("no jobs available")
        # Seconds the caller should wait before asking again.
        self.wait_time = wait_time


class Worker:
    """Base class for workers that pull jobs from the coordinator.

    NOTE(review): the ``@abc.abstractmethod`` markers below are not
    enforced, because this class does not use ``abc.ABCMeta`` as its
    metaclass; subclasses are nevertheless expected to implement them.
    """

    def __init__(self, coordinator_uri, logger):
        super().__init__()
        self.logger = logger
        self._coordinator_uri = coordinator_uri
        # Random, per-process worker identity used in all job requests.
        self._worker_id = secrets.token_hex(16)
        self._zctx = zmq.asyncio.Context()
        self.logger.debug("I am %s", self)

    @property
    def worker_id(self):
        return self._worker_id

    def __repr__(self):
        return "<{}.{} id={!r} coordinator_uri={!r}>".format(
            type(self).__module__,
            type(self).__qualname__,
            self._worker_id,
            self._coordinator_uri,
        )

    @abc.abstractmethod
    def _mkjobrequest(self, worker_id):
        """Build the job-request message for this worker type."""

    @abc.abstractmethod
    def _decode_job(self, response):
        """Extract a job from a coordinator reply, or None if unexpected."""

    async def _get_job(self, sock):
        """Request a job from the coordinator.

        :raises NoJob: if the coordinator has no task available.
        :raises RuntimeError: on a reply the worker cannot decode.
        """
        # (Previous `-> int` annotation was wrong: this returns the
        # decoded job object produced by _decode_job.)
        await sock.send_json(self._mkjobrequest(self._worker_id))
        resp = coordinator_api.api_response.validate(await sock.recv_json())
        if resp["type"] == coordinator_api.ResponseType.NO_TASKS.value:
            raise NoJob(resp["payload"]["ask_again_after"])

        result = self._decode_job(resp)
        if result is None:
            raise RuntimeError("unexpected server reply: {!r}".format(resp))

        return result

    @abc.abstractmethod
    async def _run_job(self, coordinator_sock, job):
        """Execute *job* and report results through *coordinator_sock*."""

    async def _get_and_run_job(self, coordinator_sock):
        """Fetch and run one job; return the seconds to sleep afterwards."""
        try:
            job = await self._get_job(coordinator_sock)
        except NoJob as exc:
            self.logger.debug("no job, waiting for %ds", exc.wait_time)
            return exc.wait_time

        await self._run_job(coordinator_sock, job)
        return 1

    async def run(self):
        """Main loop: pull jobs, run them, back off exponentially on error."""
        sleep_interval = 1
        coordinator_sock = self._zctx.socket(zmq.REQ)
        try:
            self.logger.debug("talking to coordinator at %r",
                              self._coordinator_uri)
            coordinator_sock.connect(self._coordinator_uri)
            while True:
                try:
                    sleep_interval = await self._get_and_run_job(
                        coordinator_sock
                    )
                except Exception:
                    # Exponential backoff, capped at one minute.
                    sleep_interval = min(sleep_interval * 2, 60)
                    # BUG FIX: the %d placeholder previously had no
                    # argument, making the logging call itself fail.
                    self.logger.error(
                        "failed to get or run job. trying again in %d "
                        "seconds",
                        sleep_interval,
                        exc_info=True,
                    )

                await asyncio.sleep(sleep_interval)
        finally:
            coordinator_sock.close()
sleep_interval = min(sleep_interval * 2, 60) 144 | self.logger.error( 145 | "failed to get or run job. trying again in %d " 146 | "seconds", 147 | exc_info=True, 148 | ) 149 | 150 | await asyncio.sleep(sleep_interval) 151 | finally: 152 | coordinator_sock.close() 153 | -------------------------------------------------------------------------------- /testxmpp/coordinator/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xmpp-observatory/testxmpp/64dae01de0ec59650694afc90289e2ff59a88383/testxmpp/coordinator/__init__.py -------------------------------------------------------------------------------- /testxmpp/coordinator/__main__.py: -------------------------------------------------------------------------------- 1 | import testxmpp.coordinator.cli 2 | testxmpp.coordinator.cli.main() 3 | -------------------------------------------------------------------------------- /testxmpp/coordinator/auth.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import hashlib 3 | import hmac 4 | import secrets 5 | 6 | from datetime import datetime 7 | 8 | 9 | class InvalidSignature(ValueError): 10 | pass 11 | 12 | 13 | class ExpiredSignature(ValueError): 14 | pass 15 | 16 | 17 | TXT_DATE_FORMAT = "%Y%m%dT%H%M%S" 18 | HMAC_DATE_FORMAT = "%Y%m%dT%H%M%S" 19 | 20 | 21 | def sign_user_secret(domain: bytes, 22 | instance_secret: bytes, 23 | user_secret: bytes, 24 | valid_until: datetime) -> bytes: 25 | hash_input = [ 26 | b"\x00", # version identifier 27 | valid_until.strftime(HMAC_DATE_FORMAT).encode("ascii"), 28 | domain, 29 | b"\x00", # separator which cannot occur in the domain name 30 | user_secret, 31 | ] 32 | return hmac.digest( 33 | instance_secret, 34 | b"".join(hash_input), 35 | hashlib.sha3_256, 36 | ) 37 | 38 | 39 | def generate_dnsauth_pair(domain: bytes, 40 | instance_secret: bytes, 41 | valid_until: datetime) -> (str, str): 42 | user_secret = 
def validate_dnsauth_pair(txt_record: str,
                          domain: bytes,
                          instance_secret: bytes,
                          user_secret: bytes,
                          now: datetime = None):
    """Check that *txt_record* carries a valid, unexpired v0 signature for
    *domain* under *instance_secret*/*user_secret*.

    Raises ValueError for an unknown record version,
    :class:`InvalidSignature` if the HMAC does not match, and
    :class:`ExpiredSignature` if the (authentic) record has expired.
    """
    now = datetime.utcnow() if now is None else now

    version, *fields = txt_record.split(" ")
    if version != "v0":
        raise ValueError("unknown signature version: {!r}".format(
            version
        ))

    valid_until_text, encoded_signature = fields
    valid_until = datetime.strptime(valid_until_text, TXT_DATE_FORMAT)
    claimed_signature = base64.b64decode(encoded_signature)
    expected_signature = sign_user_secret(
        domain,
        instance_secret,
        user_secret,
        valid_until,
    )

    # Constant-time comparison to avoid leaking signature bytes via timing.
    if not hmac.compare_digest(claimed_signature, expected_signature):
        raise InvalidSignature(
            "signature is not valid for {!r} with the given key".format(
                domain.decode("ascii"),
            )
        )

    # Authenticity is checked first: the expiry timestamp is only
    # trustworthy once the record is known to be genuine.
    if valid_until < now:
        raise ExpiredSignature(
            "signature is valid but expired at {} "
            "(now is {})".format(valid_until.isoformat(), now.isoformat())
        )
async def amain(config):
    """Construct the coordinator from *config* and run it until cancelled."""
    coordinator = Coordinator(config)
    await coordinator.run()


def main():
    """CLI entry point: parse arguments, configure logging, run the
    coordinator (configuration comes from TESTXMPP_* environment
    variables via ``environ-config``)."""
    # Fixed: a redundant function-local ``import argparse`` shadowed the
    # module-level import for no benefit.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-v", "--verbose",
        dest="verbosity",
        action="count",
        default=0,
        help="Increase verbosity (up to -vvv)",
    )
    parser.add_argument(
        "--debug-libraries",
        action="store_true",
        default=False,
        help="If enabled, verbosity will also be increased for libraries",
    )

    args = parser.parse_args()

    # Default ERROR; -v WARNING, -vv INFO, -vvv (or more) DEBUG.
    verbosity_level = {
        0: logging.ERROR,
        1: logging.WARNING,
        2: logging.INFO,
    }.get(args.verbosity, logging.DEBUG)
    # Libraries stay at WARNING unless --debug-libraries was given.
    if args.debug_libraries:
        global_level = verbosity_level
    else:
        global_level = logging.WARNING

    logging.basicConfig(level=global_level)
    logging.getLogger("testxmpp").setLevel(verbosity_level)

    config = environ.to_config(AppConfig)
    asyncio.run(amain(config))
async def gather_srv_records(domain: bytes, services: typing.List[str]):
    """Yield ``(service, SRV record)`` pairs for each *service* under
    ``_<service>._tcp.<domain>``.

    Services whose SRV name does not exist (NXDOMAIN) are silently skipped.
    """
    # Explicit import: this module only imports ``dns`` and
    # ``dns.rdatatype``, so ``dns.resolver`` was previously reachable only
    # as a side effect of importing testxmpp.dns.  Make the dependency
    # explicit so the except clause below cannot AttributeError.
    import dns.resolver

    domain = testxmpp.dns.encode_domain(domain)
    for service in services:
        try:
            records = await testxmpp.dns.lookup_srv(domain, "tcp", service,
                                                    raise_on_no_answer=False)
        except dns.resolver.NXDOMAIN:
            continue

        for record in records:
            yield (service, record)


async def discover_srv_records(domain: bytes,
                               protocol: model.ScanType):
    """Yield unsaved :class:`model.SRVRecord` rows for *domain*, looking up
    the services appropriate for *protocol* (c2s or s2s)."""
    srv_services = {
        model.ScanType.C2S: ["xmpp-client", "xmpps-client"],
        model.ScanType.S2S: ["xmpp-server", "xmpps-server"],
    }[protocol]

    async for service, record in gather_srv_records(domain, srv_services):
        db_record = model.SRVRecord()
        db_record.service = service
        db_record.protocol = "tcp"
        db_record.weight = record.weight
        db_record.port = record.port
        db_record.priority = record.priority
        db_record.host = record.target.to_text().encode("ascii")
        yield db_record
def parse_json_host_meta(data: bytes, charset: typing.Optional[str]):
    """Yield ``(rel, href)`` pairs from a JSON host-meta document.

    *charset* defaults to UTF-8 when None; links missing either the
    ``rel`` or the ``href`` key are skipped.
    """
    if charset is None:
        charset = "utf-8"

    document = json.loads(data.decode(charset))
    for link in document.get("links", []):
        try:
            pair = (link["rel"], link["href"])
        except KeyError:
            # incomplete link entry -- ignore it
            continue
        yield pair
async def discover_host_meta_links(domain: bytes):
    """Fetch and parse both host-meta variants (XML and JSON) for *domain*.

    Returns a list of unsaved :class:`model.HostMetaObject` /
    :class:`model.HostMetaLink` rows.  Failed downloads are logged and
    skipped; a download that succeeds but fails to parse still records the
    object (without links).
    """
    logger = logging.getLogger(__name__).getChild(
        "discover_host_meta_links:{}".format(domain.decode("ascii"))
    )

    xml_url = "https://{}/.well-known/host-meta".format(
        domain.decode("ascii")
    )
    json_url = "{}.json".format(xml_url)
    db_objects = []
    async with aiohttp.ClientSession() as session:
        # Fetch both variants concurrently; return_exceptions=True so that
        # one failing download does not abort the other.
        xml_data, json_data = await asyncio.gather(
            download(session, xml_url, 1024**2),
            download(session, json_url, 1024**2),
            return_exceptions=True,
        )

    if isinstance(xml_data, tuple):
        _, xml_data = xml_data
        db_object = model.HostMetaObject()
        db_object.url = xml_url
        db_object.format_ = model.HostMetaFormat.XML
        # BUGFIX: record the object exactly once.  Previously the XML
        # branch appended db_object both here and again after a successful
        # parse, producing a duplicate row; the JSON branch appended only
        # on success.  Both branches now behave identically.
        db_objects.append(db_object)

        try:
            for rel, href in parse_xml_host_meta(xml_data):
                try:
                    db_link = model.HostMetaLink()
                    db_link.object_ = db_object
                    db_link.rel = rel
                    db_link.href = href
                    db_objects.append(db_link)
                except ValueError as exc:
                    logger.debug("discarding link: %s", exc)
        except ValueError as exc:
            logger.debug("discarding object: %s", exc)
    else:
        logger.debug("failed to download object from %r: %s",
                     xml_url, xml_data)

    if isinstance(json_data, tuple):
        charset, json_data = json_data

        db_object = model.HostMetaObject()
        db_object.url = json_url
        db_object.format_ = model.HostMetaFormat.JSON
        db_objects.append(db_object)

        try:
            for rel, href in parse_json_host_meta(json_data, charset):
                try:
                    db_link = model.HostMetaLink()
                    db_link.object_ = db_object
                    db_link.rel = rel
                    db_link.href = href
                    db_objects.append(db_link)
                except ValueError as exc:
                    logger.debug("discarding link: %s", exc)
        except ValueError as exc:
            logger.debug("discarding object: %s", exc)
    else:
        logger.debug("failed to download object from %r: %s",
                     json_url, json_data)

    return db_objects


def interpret_host_meta_link(rel: str, href: str):
    """Map a host-meta link to an unsaved :class:`model.EndpointHTTP`.

    Returns None when *rel* is not one of the known alternative-connection
    relations.
    """
    try:
        http_mode = {
            "urn:xmpp:alt-connections:xbosh":
                model.HTTPMode.XEP0206_BOSH,
            "urn:xmpp:alt-connections:websocket":
                model.HTTPMode.RFC7395_WEBSOCKETS,
            "urn:xmpp:alt-connections:httppoll":
                model.HTTPMode.XEP0025_POLLING,
        }[rel]
    except KeyError:
        return None

    endpoint = model.EndpointHTTP()
    endpoint.http_mode = http_mode
    endpoint.transport = model.TransportLayer.HTTP
    endpoint.url = href

    return endpoint
| "xmpps-client": model.TLSMode.DIRECT, 255 | "xmpps-server": model.TLSMode.DIRECT, 256 | }[db_record.service] 257 | db_endpoint.hostname = db_record.host 258 | db_endpoint.port = db_record.port 259 | db_objects.append(db_endpoint) 260 | 261 | if protocol == model.ScanType.C2S: 262 | async for db_record in discover_xmppconnect_records(domain): 263 | db_record.scan_id = scan_id 264 | db_objects.append(db_record) 265 | 266 | endpoint = interpret_xmppconnect_record( 267 | db_record.attribute_name, 268 | db_record.attribute_value, 269 | b"_xmpp-client-", 270 | ) 271 | if endpoint is not None: 272 | endpoint = get_or_add_altconnect_endpoint(endpoint) 273 | endpoint.xmppconnect_record = db_record 274 | 275 | for db_object in (await discover_host_meta_links(domain)): 276 | db_objects.append(db_object) 277 | if isinstance(db_object, model.HostMetaLink): 278 | endpoint = interpret_host_meta_link( 279 | db_object.rel, 280 | db_object.href, 281 | ) 282 | if endpoint is not None: 283 | endpoint = get_or_add_altconnect_endpoint(endpoint) 284 | endpoint.host_meta_link = db_object 285 | if isinstance(db_object, model.HostMetaObject): 286 | db_object.scan_id = scan_id 287 | 288 | fallback_port = { 289 | model.ScanType.C2S: 5222, 290 | model.ScanType.S2S: 5269, 291 | }[protocol] 292 | 293 | endpoint = model.EndpointTCP() 294 | endpoint.scan_id = scan_id 295 | endpoint.transport = model.TransportLayer.TCP 296 | endpoint.tls_mode = model.TLSMode.STARTTLS 297 | endpoint.hostname = domain 298 | endpoint.port = fallback_port 299 | db_objects.append(endpoint) 300 | 301 | return db_objects 302 | 303 | 304 | def select_endpoints(session, task_id): 305 | try: 306 | task = session.query(model.ScanTask).filter( 307 | model.ScanTask.id_ == task_id, 308 | ).one() 309 | except sqlalchemy.orm.exc.NoResultError: 310 | return 311 | 312 | endpoint_q = session.query( 313 | model.Endpoint, 314 | model.EndpointScanResult, 315 | model.SRVRecord, 316 | ).select_from(model.Endpoint).outerjoin( 317 | 
model.EndpointScanResult, 318 | ).outerjoin(model.SRVRecord).filter( 319 | model.Endpoint.scan_id == task.scan_id 320 | ) 321 | 322 | best_endpoint_score = None 323 | best_endpoint = None 324 | for ep, scan_result, srv_record in endpoint_q: 325 | if isinstance(ep, model.EndpointHTTP): 326 | # not supported 327 | continue 328 | 329 | if scan_result is None: 330 | # no successful scan -> cannot use 331 | continue 332 | 333 | if scan_result.error or scan_result.errno: 334 | # also not successful -> cannot use either 335 | continue 336 | 337 | if srv_record is None: 338 | score = (0, 0, 0) 339 | else: 340 | score = (1, -srv_record.priority, srv_record.weight) 341 | 342 | if best_endpoint_score is None or best_endpoint_score < score: 343 | best_endpoint = ep 344 | best_endpoint_score = score 345 | 346 | # TODO: handle case where no best endpoint arrives by marking the scan as 347 | # bad in some way 348 | if best_endpoint is None: 349 | return 350 | 351 | testssl_task = model.ScanTask() 352 | testssl_task.id_ = generate_task_id() 353 | testssl_task.type_ = model.TaskType.TLS_SCAN 354 | testssl_task.endpoint = best_endpoint 355 | testssl_task.scan_id = task.scan_id 356 | testssl_task.state = model.TaskState.WAITING 357 | session.add(testssl_task) 358 | 359 | task.mark_completed(session) 360 | -------------------------------------------------------------------------------- /testxmpp/coordinator/tasks.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | import time 4 | 5 | 6 | class TaskItem: 7 | __slots__ = ("func", "data", "run_at", "backoff") 8 | 9 | def __init__(self, func, data, run_at, backoff): 10 | self.func = func 11 | self.data = data 12 | self.run_at = run_at 13 | self.backoff = backoff 14 | 15 | def __eq__(self, other): 16 | return self.func == other.func and self.data == other.data 17 | 18 | def __ne__(self, other): 19 | return not (self == other) 20 | 21 | def __hash__(self): 22 | 
class TaskItem:
    """A queued callable plus its payload, due time and current backoff."""

    __slots__ = ("func", "data", "run_at", "backoff")

    def __init__(self, func, data, run_at, backoff):
        self.func = func
        self.data = data
        self.run_at = run_at
        self.backoff = backoff

    def __eq__(self, other):
        # Task identity is (func, data); the scheduling fields are ignored
        # so a re-pushed task deduplicates against the one already queued.
        return (self.func, self.data) == (other.func, other.data)

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        return hash(self.func) ^ hash(self.data)

    def __str__(self):
        return "{}({})".format(self.func, self.data)


class TaskQueue:
    """Deduplicating retry queue for async tasks.

    Tasks that raise are re-queued with exponential backoff
    (``backoff * backoff_base`` per failure, capped at ``backoff_max``).
    """

    def __init__(self, *,
                 backoff_base=2,
                 backoff_start=0.5,
                 backoff_max=120,
                 max_size=None,
                 logger=None):
        super().__init__()
        self._queue = []
        self._max_size = max_size
        self._backoff_base = backoff_base
        self._backoff_start = backoff_start
        self._backoff_max = backoff_max
        self._queue_changed = asyncio.Event()
        self.logger = logger or logging.getLogger(__name__)

    def _add(self, entry):
        # Keep the queue ordered by due time; re-sorting the whole list is
        # fine at the sizes this queue sees.
        self._queue.append(entry)
        self._queue.sort(key=lambda item: item.run_at)

    def push(self, func, data):
        """Queue ``func(data)`` for immediate execution; duplicates
        (same func and data) are dropped."""
        entry = TaskItem(func, data, 0, self._backoff_start)
        if entry in self._queue:
            return
        entry.run_at = time.monotonic()
        self._add(entry)
        self._queue_changed.set()

    async def run_next_task(self):
        """Wait until the next task is due, run it, and return.

        A failing task is logged and re-queued with increased backoff.
        """
        while True:
            self._queue_changed.clear()
            now = time.monotonic()
            if not self._queue:
                await self._queue_changed.wait()
                continue

            if self._queue[0].run_at > now:
                # Sleep until the head is due, but wake early on changes.
                try:
                    await asyncio.wait_for(self._queue_changed.wait(),
                                           self._queue[0].run_at - now)
                except asyncio.TimeoutError:
                    pass
                continue

            entry = self._queue.pop(0)
            try:
                await entry.func(entry.data)
            except Exception:
                entry.backoff = min(entry.backoff * self._backoff_base,
                                    self._backoff_max)
                entry.run_at = time.monotonic() + entry.backoff
                self.logger.error("task %s failed. retrying in %r",
                                  entry, entry.backoff,
                                  exc_info=True)
                self._add(entry)

            return

    async def run(self):
        """Process tasks forever."""
        while True:
            await self.run_next_task()
class TestSignature(unittest.TestCase):
    """Round-trip and negative tests for the DNS-auth signature scheme."""

    @classmethod
    def setUpClass(cls):
        cls.instance_secret = secrets.token_bytes(8)

    def setUp(self):
        self.NOW = datetime(2020, 10, 2, 14, 0, 0)
        self.domain = b"chat.example."

    def _generate(self):
        # Helper: a pair that is valid until one second past NOW.
        return auth.generate_dnsauth_pair(
            self.domain,
            self.instance_secret,
            self.NOW + timedelta(seconds=1)
        )

    def test_sign_and_validate(self):
        txt, user = self._generate()
        auth.validate_dnsauth_pair(
            txt,
            self.domain,
            self.instance_secret,
            base64.urlsafe_b64decode(user),
            now=self.NOW,
        )

    def test_reject_domain_mismatch(self):
        txt, user = self._generate()
        with self.assertRaises(auth.InvalidSignature):
            auth.validate_dnsauth_pair(
                txt,
                b"fnord." + self.domain,
                self.instance_secret,
                base64.urlsafe_b64decode(user),
                now=self.NOW,
            )

    def test_reject_spoofed_valid_until(self):
        txt, user = self._generate()

        # Tamper with the valid-until field without re-signing.
        version, valid_until, signature = txt.split(" ")
        valid_until = (
            self.NOW + timedelta(seconds=60)
        ).strftime(auth.TXT_DATE_FORMAT)
        txt = " ".join([version, valid_until, signature])

        with self.assertRaises(auth.InvalidSignature):
            auth.validate_dnsauth_pair(
                txt,
                self.domain,
                self.instance_secret,
                base64.urlsafe_b64decode(user),
                now=self.NOW,
            )

    def test_reject_instance_secret_mismatch(self):
        txt, user = self._generate()
        with self.assertRaises(auth.InvalidSignature):
            auth.validate_dnsauth_pair(
                txt,
                self.domain,
                self.instance_secret + b"foo",
                base64.urlsafe_b64decode(user),
                now=self.NOW,
            )

    def test_reject_user_secret_mismatch(self):
        txt, user = self._generate()
        with self.assertRaises(auth.InvalidSignature):
            auth.validate_dnsauth_pair(
                txt,
                self.domain,
                self.instance_secret,
                base64.urlsafe_b64decode(user) + b"foo",
                now=self.NOW,
            )

    def test_reject_expired(self):
        txt, user = self._generate()
        with self.assertRaises(auth.ExpiredSignature):
            auth.validate_dnsauth_pair(
                txt,
                self.domain,
                self.instance_secret,
                base64.urlsafe_b64decode(user),
                now=self.NOW + timedelta(seconds=2),
            )
def get_or_create_tls_offering(session, endpoint_id):
    """Return the TLSOffering row for *endpoint_id*, creating it if absent."""
    tls_offering = session.query(model.TLSOffering).filter(
        model.TLSOffering.endpoint_id == endpoint_id,
    ).one_or_none()
    if tls_offering is None:
        tls_offering = model.TLSOffering()
        tls_offering.endpoint_id = endpoint_id
        session.add(tls_offering)
    return tls_offering


def get_or_create_san_type(
        session,
        asn1_name: str,
        ) -> model.SubjectAltNameType:
    """Return the SubjectAltNameType row for *asn1_name*, creating it if
    absent."""
    san_type = session.query(model.SubjectAltNameType).filter(
        model.SubjectAltNameType.asn1_name == asn1_name,
    ).one_or_none()
    if san_type is None:
        san_type = model.SubjectAltNameType()
        san_type.asn1_name = asn1_name
        session.add(san_type)
    return san_type


def get_or_create_certificate(session, certinfo, raw_der):
    """Return the Certificate row matching *certinfo*/*raw_der*, creating it
    (including its SubjectAltName rows) if no stored certificate matches
    on all three fingerprints and identical DER bytes."""
    fingerprint_sha1 = certinfo.fingerprints["sha1"]
    fingerprint_sha256 = certinfo.fingerprints["sha256"]
    fingerprint_sha512 = certinfo.fingerprints["sha512"]
    candidates = session.query(model.Certificate).filter(
        model.Certificate.fingerprint_sha1 == fingerprint_sha1,
        model.Certificate.fingerprint_sha256 == fingerprint_sha256,
        model.Certificate.fingerprint_sha512 == fingerprint_sha512,
    ).all()
    for candidate in candidates:
        if candidate.raw_der == raw_der:
            return candidate
        # IS THIS A COLLISION?! OF SHA1 + SHA256 + SHA512? :-O

    subject_alt_names = certinfo.subject_alt_names or {}
    san_type_map = {
        asn1_name: get_or_create_san_type(session, asn1_name)
        for asn1_name in subject_alt_names
    }

    cert = model.Certificate()
    cert.fingerprint_sha1 = fingerprint_sha1
    cert.fingerprint_sha256 = fingerprint_sha256
    cert.fingerprint_sha512 = fingerprint_sha512
    cert.not_before = certinfo.not_before
    cert.not_after = certinfo.not_after
    cert.public_key = certinfo.public_key
    cert.public_key_type = certinfo.public_key_type
    cert.raw_der = raw_der
    # sort_keys for a canonical JSON representation
    cert.subject = json.dumps(certinfo.subject, sort_keys=True)
    cert.issuer = json.dumps(certinfo.issuer, sort_keys=True)
    session.add(cert)

    for asn1_name, values in subject_alt_names.items():
        san_type = san_type_map[asn1_name]
        for value in values:
            san = model.SubjectAltName()
            san.certificate = cert
            san.type_ = san_type
            san.value = value
            session.add(san)

    return cert


def upsert_certificate_offering(
        session,
        endpoint_id,
        certificate,
        chain_index):
    """Point the (endpoint, chain_index) slot at *certificate*, creating the
    CertificateOffering row if needed."""
    offering = session.query(model.CertificateOffering).filter(
        model.CertificateOffering.endpoint_id == endpoint_id,
        model.CertificateOffering.chain_index == chain_index,
    ).one_or_none()
    if offering is None:
        offering = model.CertificateOffering()
        offering.endpoint_id = endpoint_id
        offering.chain_index = chain_index
    offering.certificate = certificate
    session.add(offering)


def lookup_cipher_id_by_name(session, openssl_name):
    """Return the cipher id for an OpenSSL cipher name, or None if the
    cipher is not in the metadata table."""
    row = session.query(model.CipherMetadata.id_).filter(
        model.CipherMetadata.openssl_name == openssl_name
    ).one_or_none()
    if row is None:
        return None
    return row[0]
def upsert_cipher_metadata(session, cipher_id, openssl_name, iana_name):
    """Create or refresh the CipherMetadata row for *cipher_id*, filling in
    whichever of the two names are provided (empty names never overwrite)."""
    metadata = session.query(model.CipherMetadata).filter(
        model.CipherMetadata.id_ == cipher_id,
    ).one_or_none()
    if metadata is None:
        metadata = model.CipherMetadata()
        metadata.id_ = cipher_id
        session.add(metadata)

    if openssl_name and metadata.openssl_name != openssl_name:
        metadata.openssl_name = openssl_name
    if iana_name and metadata.iana_name != iana_name:
        metadata.iana_name = iana_name
    return metadata


def upsert_cipher_offering_order(session,
                                 endpoint_id, cipher_id, tls_version,
                                 order):
    """Create or update the negotiation-order entry for one cipher within
    one TLS version on one endpoint."""
    offering_order = session.query(model.CipherOfferingOrder).filter(
        model.CipherOfferingOrder.endpoint_id == endpoint_id,
        model.CipherOfferingOrder.cipher_id == cipher_id,
        model.CipherOfferingOrder.tls_version == tls_version,
    ).one_or_none()
    if offering_order is None:
        offering_order = model.CipherOfferingOrder()
        offering_order.endpoint_id = endpoint_id
        offering_order.cipher_id = cipher_id
        offering_order.tls_version = tls_version
        session.add(offering_order)

    if offering_order.order != order:
        offering_order.order = order
    return offering_order


def get_or_create_cipher_offering(session, endpoint_id, cipher_id):
    """Return the CipherOffering row for (endpoint, cipher), creating it if
    absent."""
    cipher_offering = session.query(model.CipherOffering).filter(
        model.CipherOffering.endpoint_id == endpoint_id,
        model.CipherOffering.cipher_id == cipher_id,
    ).one_or_none()
    if cipher_offering is None:
        cipher_offering = model.CipherOffering()
        cipher_offering.endpoint_id = endpoint_id
        cipher_offering.cipher_id = cipher_id
        session.add(cipher_offering)
    return cipher_offering


def handle_tls_versions_push(session, endpoint_id, data) -> bool:
    """Record which SSL/TLS protocol versions the endpoint offered."""
    tls_offering = get_or_create_tls_offering(session, endpoint_id)

    # no generic/procedural mapping here to avoid the worker being able to
    # manipulate arbitrary attributes of the object.
    keymap = {
        "SSLv2": "sslv2",
        "SSLv3": "sslv3",
        "TLSv1": "tlsv1",
        "TLSv1.1": "tlsv1_1",
        "TLSv1.2": "tlsv1_2",
        "TLSv1.3": "tlsv1_3",
    }

    for version_label, offered in data["tls_versions"].items():
        setattr(tls_offering, keymap[version_label], bool(offered))

    return True


def handle_cipherlists_push(session, endpoint_id, data) -> bool:
    # we ignore this push, because we need to access the cipher lists
    # based on the cipher ID, but we only get the OpenSSL name here.
    return True


def handle_cipherlists_complete(session, endpoint_id, data) -> bool:
    """Persist the per-TLS-version cipher ordering once complete lists have
    arrived; ciphers not present in the metadata table are skipped."""
    for tls_version, ciphers in data.items():
        for order, openssl_name in enumerate(ciphers):
            cipher_id = lookup_cipher_id_by_name(
                session, openssl_name,
            )
            if cipher_id is None:
                # unknown cipher name: no metadata row to attach to
                continue
            get_or_create_cipher_offering(
                session, endpoint_id, cipher_id
            )
            upsert_cipher_offering_order(
                session, endpoint_id, cipher_id, tls_version, order,
            )
    return True


def handle_server_cipher_order_push(session, endpoint_id, data) -> bool:
    """Record whether the server enforces its own cipher preference order."""
    tls_offering = get_or_create_tls_offering(session, endpoint_id)
    tls_offering.server_cipher_order = data["server_cipher_order"]
    return True


def handle_cipher_info_push(session, endpoint_id, data) -> bool:
    """Record metadata and key-exchange details for one offered cipher."""
    cipher = data["cipher"]
    upsert_cipher_metadata(
        session,
        cipher["id"],
        cipher["openssl_name"],
        cipher["iana_name"],
    )
    cipher_offering = get_or_create_cipher_offering(
        session,
        endpoint_id,
        cipher["id"],
    )
    cipher_offering.key_exchange_info = cipher["key_exchange"] or None
    return True


def handle_certificate_push(
        session,
        endpoint_id,
        data) -> bool:
    """Store a pushed certificate and link it into the endpoint's chain at
    the reported index (default 0)."""
    cert_payload = data["certificate"]
    raw_der = base64.b64decode(cert_payload["raw_der"])
    chain_index = cert_payload.get("index", 0)
    certinfo = testxmpp.certutil.CertInfo.from_json(cert_payload["info"])
    certificate = get_or_create_certificate(session, certinfo, raw_der)
    upsert_certificate_offering(
        session,
        endpoint_id=endpoint_id,
        certificate=certificate,
        chain_index=chain_index,
    )
    return True
# Per-thread resolver cache (see module note: the asyncresolver crashed
# with InvalidState, so lookups run in a thread pool instead).
_state = threading.local()


def get_resolver():
    """Return the thread-local resolver, creating it on first use."""
    global _state
    if not hasattr(_state, "resolver"):
        reconfigure_resolver()
    return _state.resolver


def reconfigure_resolver():
    """(Re)create the thread-local resolver from system configuration."""
    global _state
    _state.resolver = dns.resolver.Resolver()
    _state.overridden_resolver = False


def encode_domain(domain: typing.Union[str, bytes]) -> bytes:
    """Return *domain* as bytes, IDNA-encoding it if it is a str."""
    if isinstance(domain, str):
        domain = domain.encode("idna")
    return domain


async def achain(*aws):
    """Yield the elements of each awaitable's iterable result, interleaved
    as the awaitables complete; remaining futures are cancelled if the
    consumer stops early."""
    pending = list(map(asyncio.ensure_future, aws))
    try:
        while pending:
            done, pending = await asyncio.wait(
                pending,
                return_when=asyncio.FIRST_COMPLETED,
            )
            for fut in done:
                result = await fut
                for value in result:
                    yield value

    finally:
        for fut in pending:
            if not fut.done():
                fut.cancel()


async def resolve(qname: bytes, rdtype,
                  rdclass=dns.rdataclass.IN,
                  suppress_nxdomain=False,
                  search=False,
                  **kwargs):
    """Resolve *qname* via a thread-pool executor.

    With suppress_nxdomain=True, both NXDOMAIN and empty answers return []
    instead of raising.
    """
    loop = asyncio.get_event_loop()
    resolver = get_resolver()
    try:
        return await loop.run_in_executor(
            None,
            functools.partial(
                resolver.resolve,
                qname.decode("ascii"),
                rdtype=rdtype,
                rdclass=rdclass,
                search=search,
                **kwargs,
            )
        )
    except dns.resolver.NoAnswer:
        # treat as NXDOMAIN
        if suppress_nxdomain:
            return []
        # BUGFIX: this branch previously had no re-raise, so a NoAnswer
        # without suppress_nxdomain silently returned None instead of
        # propagating, unlike the NXDOMAIN branch below.
        raise
    except dns.resolver.NXDOMAIN:
        if suppress_nxdomain:
            return []
        raise


async def lookup_srv(domain: typing.Union[str, bytes],
                     protocol: str,
                     service: str,
                     **kwargs):
    """Resolve the SRV records of ``_<service>._<protocol>.<domain>``."""
    domain = encode_domain(domain)
    qname = b".".join([
        "_{}".format(service).encode("ascii"),
        "_{}".format(protocol).encode("ascii"),
        domain,
    ])
    return await resolve(qname, dns.rdatatype.SRV, **kwargs)
98 | domain = encode_domain(domain) 99 | async for record in achain(resolve(domain, dns.rdatatype.A, 100 | raise_on_no_answer=False, 101 | suppress_nxdomain=True), 102 | resolve(domain, dns.rdatatype.AAAA, 103 | raise_on_no_answer=False, 104 | suppress_nxdomain=True)): 105 | yield record 106 | -------------------------------------------------------------------------------- /testxmpp/model.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | import enum 3 | import json 4 | import typing 5 | 6 | from datetime import datetime 7 | 8 | import sqlalchemy 9 | from sqlalchemy import ( 10 | Column, 11 | Integer, 12 | Unicode, 13 | DateTime, 14 | ForeignKey, 15 | Boolean, 16 | ForeignKeyConstraint, 17 | ) 18 | from sqlalchemy.orm import ( 19 | relationship, 20 | ) 21 | from sqlalchemy.ext.declarative import declarative_base 22 | 23 | 24 | @contextlib.contextmanager 25 | def session_scope(sessionmaker, allow_autoflush=False): 26 | """Provide a transactional scope around a series of operations.""" 27 | session = sessionmaker() 28 | try: 29 | if allow_autoflush: 30 | yield session 31 | else: 32 | with session.no_autoflush: 33 | yield session 34 | except: # NOQA 35 | session.rollback() 36 | raise 37 | finally: 38 | session.close() 39 | 40 | 41 | def mkdir_exist_ok(path): 42 | try: 43 | path.mkdir(parents=True) 44 | except FileExistsError: 45 | if not path.is_dir(): 46 | raise 47 | 48 | 49 | def get_generic_engine(uri: str) -> sqlalchemy.engine.Engine: 50 | engine = sqlalchemy.create_engine(uri) 51 | 52 | if uri.startswith("sqlite://"): 53 | # https://stackoverflow.com/questions/1654857/ 54 | @sqlalchemy.event.listens_for(engine, "connect") 55 | def do_connect(dbapi_connection, connection_record): 56 | # disable pysqlite's emitting of the BEGIN statement entirely. 57 | # also stops it from emitting COMMIT before any DDL. 
            dbapi_connection.isolation_level = None
            # holy smokes, enforce foreign keys!!
            dbapi_connection.execute('pragma foreign_keys=ON')

        @sqlalchemy.event.listens_for(engine, "begin")
        def do_begin(conn):
            # emit our own BEGIN
            conn.execute("BEGIN")

    return engine


class SimpleEnum(sqlalchemy.types.TypeDecorator):
    """Column type storing a Python enum.Enum member as its string value.

    Values are persisted via ``member.value`` into a Unicode(32) column and
    reconstructed with ``enum_type(value)`` on load; None passes through
    unchanged in both directions.
    """
    cache_ok = True
    impl = sqlalchemy.types.Unicode

    def __init__(self, enum_type):
        super().__init__()
        self.__enum_type = enum_type

    def load_dialect_impl(self, dialect):
        # fixed-width string column regardless of dialect
        return sqlalchemy.types.Unicode(32)

    def process_bind_param(self, value, dialect):
        if value is None:
            return value
        return value.value

    def process_result_value(self, value, dialect):
        if value is None:
            return value
        return self.__enum_type(value)


class ScanType(enum.Enum):
    """Which XMPP stream role a scan targets."""
    C2S = "c2s"
    S2S = "s2s"


class SASLOfferingPhase(enum.Enum):
    """Whether a SASL offering was seen before or after TLS."""
    PRE_TLS = "pre-tls"
    POST_TLS = "post-tls"


class ScanState(enum.Enum):
    """Overall lifecycle state of a Scan."""
    IN_PROGRESS = "in-progress"
    CANCELLED = "cancelled"
    ERROR = "error"
    COMPLETED = "completed"


class TLSMode(enum.Enum):
    """How TLS is established on an endpoint."""
    STARTTLS = "starttls"
    DIRECT = "direct"


class HostMetaFormat(enum.Enum):
    """Serialization format of a retrieved host-meta object."""
    XML = "xml"
    JSON = "json"


class EndpointSource(enum.Enum):
    """Where knowledge of an endpoint came from."""
    FALLBACK = "fallback"
    SRV_RECORD = "srv"
    ALTERNATIVE_METHOD = "altconnect"


class TransportLayer(enum.Enum):
    """Transport used to reach an endpoint."""
    TCP = "tcp"
    HTTP = "http"


class HTTPMode(enum.Enum):
    """HTTP-based XMPP connection method."""
    XEP0025_POLLING = "polling"
    XEP0206_BOSH = "bosh"
    RFC7395_WEBSOCKETS = "ws"


class TaskType(enum.Enum):
    """Kind of work a ScanTask represents."""
    DISCOVER_ENDPOINTS = "srv-resolve"
    RESOLVE_TLSA = "tlsa-resolve"
    SASL_SCAN = "sasl-scan"
    TLS_SCAN = "tls-scan"
    XMPP_PROBE = "xmpp-probe"
    SELECT_ENDPOINTS = "select-endpoints"


class TaskState(enum.Enum):
    """Lifecycle state of a single ScanTask."""
    WAITING = "waiting"
    IN_PROGRESS = "in-progress"
    FAILED = "failed"
    DONE = "done"


class FailReason(enum.Enum):
    """Why a ScanTask failed."""
    TIMEOUT = "timeout"
    INTERNAL_ERROR = "internal-error"
    UNSUPPORTED = "unsupported"


class ConnectionPhase(enum.Enum):
    """Phase of an XMPP connection relative to TLS negotiation."""
    PRE_TLS = "pre-tls"
    POST_TLS = "post-tls"


class Base(declarative_base()):
    """Abstract declarative base shared by all model classes."""
    __abstract__ = True
    __table_args__ = {}


# GLOBAL DATA


class CipherMetadata(Base):
    """Static lookup table mapping a cipher ID to its OpenSSL/IANA names."""
    __tablename__ = "cipher_metadata"

    id_ = Column(
        "id",
        Integer(),
        primary_key=True,
        nullable=False,
    )

    openssl_name = Column(
        "openssl_name",
        Unicode(255),
        nullable=True,
    )

    iana_name = Column(
        "iana_name",
        Unicode(255),
        nullable=True,
    )


class SASLMechanism(Base):
    """Lookup table of SASL mechanism names."""
    __tablename__ = "sasl_mechanism"

    id_ = Column(
        "id",
        Integer(),
        nullable=False,
        primary_key=True,
        autoincrement=True,
    )

    name = Column(
        "name",
        Unicode(20),
        nullable=False,
    )


class SubjectAltNameType(Base):
    """Lookup table of subjectAltName ASN.1 type names."""
    __tablename__ = "san_type"

    id_ = Column(
        "id",
        Integer(),
        nullable=False,
        primary_key=True,
        autoincrement=True,
    )

    asn1_name = Column(
        "asn1_name",
        Unicode(128),
        nullable=False,
    )


# PER SCAN DATA


class Scan(Base):
    """A single scan request for one domain and protocol."""
    __tablename__ = "scan"

    id_ = Column(
        "id",
        Integer(),
        primary_key=True,
        nullable=False,
        # TODO: something non-auto-increment maybe?
242 | autoincrement=True, 243 | ) 244 | 245 | domain = Column( 246 | "domain", 247 | sqlalchemy.types.VARCHAR(1023), 248 | nullable=False, 249 | ) 250 | 251 | protocol = Column( 252 | "protocol", 253 | SimpleEnum(ScanType), 254 | nullable=False, 255 | ) 256 | 257 | created_at = Column( 258 | "created_at", 259 | DateTime(), 260 | nullable=False, 261 | ) 262 | 263 | state = Column( 264 | "state", 265 | SimpleEnum(ScanState), 266 | nullable=False, 267 | ) 268 | 269 | certificate_score = Column( 270 | "certificate_score", 271 | Integer(), 272 | nullable=True, 273 | ) 274 | 275 | kex_score = Column( 276 | "kex_score", 277 | Integer(), 278 | nullable=True, 279 | ) 280 | 281 | protocol_score = Column( 282 | "protocol_score", 283 | Integer(), 284 | nullable=True, 285 | ) 286 | 287 | cipher_score = Column( 288 | "cipher_score", 289 | Integer(), 290 | nullable=True, 291 | ) 292 | 293 | privileged = Column( 294 | "privileged", 295 | Boolean(), 296 | nullable=False, 297 | ) 298 | 299 | 300 | class SRVRecord(Base): 301 | __tablename__ = "srv_record" 302 | 303 | id_ = Column( 304 | "id", 305 | Integer(), 306 | primary_key=True, 307 | nullable=False, 308 | autoincrement=True, 309 | ) 310 | 311 | scan_id = Column( 312 | "scan_id", 313 | Integer(), 314 | ForeignKey(Scan.id_, ondelete="CASCADE", onupdate="CASCADE"), 315 | nullable=False, 316 | ) 317 | 318 | service = Column( 319 | "service", 320 | Unicode(63), 321 | nullable=False, 322 | ) 323 | 324 | protocol = Column( 325 | "protocol", 326 | Unicode(63), 327 | nullable=False, 328 | ) 329 | 330 | port = Column( 331 | "port", 332 | Integer(), 333 | nullable=False, 334 | ) 335 | 336 | host = Column( 337 | "host", 338 | sqlalchemy.types.VARCHAR(255), 339 | nullable=False, 340 | ) 341 | 342 | priority = Column( 343 | "priority", 344 | Integer(), 345 | nullable=False, 346 | ) 347 | 348 | weight = Column( 349 | "weight", 350 | Integer(), 351 | nullable=False, 352 | ) 353 | 354 | 355 | class XMPPConnectRecord(Base): 356 | __tablename__ = 
"xmppconnect_record" 357 | 358 | id_ = Column( 359 | "id", 360 | Integer(), 361 | primary_key=True, 362 | nullable=False, 363 | autoincrement=True, 364 | ) 365 | 366 | scan_id = Column( 367 | "scan_id", 368 | Integer(), 369 | ForeignKey(Scan.id_, ondelete="CASCADE", onupdate="CASCADE"), 370 | nullable=False, 371 | ) 372 | 373 | attribute_name = Column( 374 | "attribute_name", 375 | sqlalchemy.types.VARBINARY(1023), 376 | nullable=False, 377 | ) 378 | 379 | attribute_value = Column( 380 | "attribute_value", 381 | sqlalchemy.types.VARBINARY(1023), 382 | nullable=True, 383 | ) 384 | 385 | 386 | class HostMetaObject(Base): 387 | __tablename__ = "host_meta_object" 388 | 389 | id_ = Column( 390 | "id", 391 | Integer(), 392 | primary_key=True, 393 | nullable=False, 394 | autoincrement=True, 395 | ) 396 | 397 | scan_id = Column( 398 | "scan_id", 399 | Integer(), 400 | ForeignKey(Scan.id_, 401 | ondelete="CASCADE", onupdate="CASCADE"), 402 | nullable=False, 403 | ) 404 | 405 | url = Column( 406 | "url", 407 | Unicode(1023), 408 | nullable=False, 409 | ) 410 | 411 | format_ = Column( 412 | "format", 413 | SimpleEnum(HostMetaFormat), 414 | nullable=False, 415 | ) 416 | 417 | 418 | class HostMetaLink(Base): 419 | __tablename__ = "host_meta_link" 420 | 421 | id_ = Column( 422 | "id", 423 | Integer(), 424 | primary_key=True, 425 | nullable=False, 426 | autoincrement=True, 427 | ) 428 | 429 | object_id = Column( 430 | "object_id", 431 | Integer(), 432 | ForeignKey(HostMetaObject.id_, 433 | ondelete="CASCADE", onupdate="CASCADE"), 434 | nullable=False, 435 | ) 436 | 437 | object_ = relationship(HostMetaObject) 438 | 439 | rel = Column( 440 | "rel", 441 | Unicode(1023), 442 | nullable=False, 443 | ) 444 | 445 | href = Column( 446 | "href", 447 | Unicode(1023), 448 | nullable=False, 449 | ) 450 | 451 | 452 | class Endpoint(Base): 453 | __tablename__ = "endpoint" 454 | 455 | id_ = Column( 456 | "id", 457 | Integer(), 458 | autoincrement=True, 459 | primary_key=True, 460 | 
nullable=False, 461 | ) 462 | 463 | scan_id = Column( 464 | "scan_id", 465 | Integer(), 466 | ForeignKey(Scan.id_, ondelete="CASCADE", onupdate="CASCADE"), 467 | nullable=False, 468 | ) 469 | 470 | transport = Column( 471 | "transport", 472 | SimpleEnum(TransportLayer), 473 | nullable=False, 474 | ) 475 | 476 | # For SRV-sourced endpoints 477 | srv_record_id = Column( 478 | "srv_record_id", 479 | Integer(), 480 | ForeignKey(SRVRecord.id_, ondelete="SET NULL", onupdate="CASCADE"), 481 | nullable=True, 482 | ) 483 | 484 | srv_record = relationship(SRVRecord) 485 | 486 | # For xmppconnect endpoints 487 | xmppconnect_record_id = Column( 488 | "xmppconnect_record_id", 489 | Integer(), 490 | ForeignKey(XMPPConnectRecord.id_, 491 | ondelete="SET NULL", onupdate="CASCADE"), 492 | nullable=True, 493 | ) 494 | 495 | xmppconnect_record = relationship(XMPPConnectRecord) 496 | 497 | # For host-meta-sourced endpoints 498 | host_meta_link_id = Column( 499 | "host_meta_link_id", 500 | Integer(), 501 | ForeignKey(HostMetaLink.id_, ondelete="SET NULL", onupdate="CASCADE"), 502 | nullable=True, 503 | ) 504 | 505 | host_meta_link = relationship(HostMetaLink) 506 | 507 | __mapper_args__ = { 508 | "polymorphic_on": transport 509 | } 510 | 511 | 512 | class EndpointTCP(Endpoint): 513 | __tablename__ = "endpoint_tcp" 514 | 515 | endpoint_id = Column( 516 | "endpoint_id", 517 | Integer(), 518 | ForeignKey(Endpoint.id_, ondelete="CASCADE", onupdate="CASCADE"), 519 | primary_key=True, 520 | nullable=False, 521 | ) 522 | 523 | tls_mode = Column( 524 | "tls_mode", 525 | SimpleEnum(TLSMode), 526 | nullable=False, 527 | ) 528 | 529 | hostname = Column( 530 | "hostname", 531 | sqlalchemy.types.VARBINARY(255), 532 | nullable=False, 533 | ) 534 | 535 | port = Column( 536 | "port", 537 | Integer(), 538 | nullable=False, 539 | ) 540 | 541 | __mapper_args__ = { 542 | "polymorphic_identity": TransportLayer.TCP, 543 | } 544 | 545 | @property 546 | def uri(self): 547 | return 
"{}:{}".format(self.hostname.decode("idna").rstrip("."), 548 | self.port) 549 | 550 | 551 | class EndpointHTTP(Endpoint): 552 | __tablename__ = "endpoint_http" 553 | 554 | endpoint_id = Column( 555 | "endpoint_id", 556 | Integer(), 557 | ForeignKey(Endpoint.id_, ondelete="CASCADE", onupdate="CASCADE"), 558 | primary_key=True, 559 | nullable=False, 560 | ) 561 | 562 | url = Column( 563 | "url", 564 | Unicode(1023), 565 | nullable=False, 566 | ) 567 | 568 | http_mode = Column( 569 | "http_mode", 570 | SimpleEnum(HTTPMode), 571 | nullable=False, 572 | ) 573 | 574 | @property 575 | def uri(self): 576 | return self.url 577 | 578 | __mapper_args__ = { 579 | "polymorphic_identity": TransportLayer.HTTP, 580 | } 581 | 582 | 583 | class TLSOffering(Base): 584 | __tablename__ = "tls_offering" 585 | 586 | endpoint_id = Column( 587 | "endpoint_id", 588 | Integer(), 589 | ForeignKey(Endpoint.id_, ondelete="CASCADE", onupdate="CASCADE"), 590 | primary_key=True, 591 | nullable=False, 592 | ) 593 | 594 | sslv2 = Column("sslv2", Boolean(), 595 | nullable=True) 596 | sslv3 = Column("sslv3", Boolean(), 597 | nullable=True) 598 | tlsv1 = Column("tlsv1", Boolean(), 599 | nullable=True) 600 | tlsv1_1 = Column("tlsv1_1", Boolean(), 601 | nullable=True) 602 | tlsv1_2 = Column("tlsv1_2", Boolean(), 603 | nullable=True) 604 | tlsv1_3 = Column("tlsv1_3", Boolean(), 605 | nullable=True) 606 | 607 | server_cipher_order = Column("server_cipher_order", Boolean(), 608 | nullable=True) 609 | 610 | 611 | class Certificate(Base): 612 | __tablename__ = "certificate" 613 | 614 | id_ = Column( 615 | "id", 616 | Integer(), 617 | primary_key=True, 618 | nullable=False, 619 | # TODO: something non-auto-increment maybe? 
620 | autoincrement=True, 621 | ) 622 | 623 | fingerprint_sha1 = Column( 624 | "fp_sha1", 625 | sqlalchemy.types.VARBINARY(20), 626 | nullable=False, 627 | ) 628 | 629 | fingerprint_sha256 = Column( 630 | "fp_sha256", 631 | sqlalchemy.types.VARBINARY(32), 632 | nullable=False, 633 | ) 634 | 635 | fingerprint_sha512 = Column( 636 | "fp_sha512", 637 | sqlalchemy.types.VARBINARY(64), 638 | nullable=False, 639 | ) 640 | 641 | raw_der = Column( 642 | "raw_der", 643 | sqlalchemy.types.VARBINARY(8192), 644 | nullable=False, 645 | ) 646 | 647 | not_before = Column( 648 | "not_before", 649 | DateTime(), 650 | nullable=False, 651 | ) 652 | 653 | not_after = Column( 654 | "not_after", 655 | DateTime(), 656 | nullable=False, 657 | ) 658 | 659 | public_key = Column( 660 | "public_key", 661 | sqlalchemy.types.VARBINARY(2048), 662 | nullable=False, 663 | ) 664 | 665 | public_key_type = Column( 666 | "public_key_type", 667 | Unicode(128), 668 | nullable=False, 669 | ) 670 | 671 | subject = Column( 672 | "subject", 673 | Unicode(1024), 674 | nullable=False, 675 | ) 676 | 677 | issuer = Column( 678 | "issuer", 679 | Unicode(1024), 680 | nullable=False, 681 | ) 682 | 683 | 684 | class SubjectAltName(Base): 685 | __tablename__ = "san" 686 | 687 | # cannot use cert_id + asn1_name as PK because a cert may have multiple 688 | # SANs of the same type 689 | id_ = Column( 690 | "id", 691 | Integer(), 692 | autoincrement=True, 693 | primary_key=True, 694 | nullable=False, 695 | ) 696 | 697 | certificate_id = Column( 698 | "certificate_id", 699 | Integer(), 700 | ForeignKey(Certificate.id_, ondelete="CASCADE", onupdate="CASCADE"), 701 | nullable=False, 702 | ) 703 | 704 | type_id = Column( 705 | "type_id", 706 | Integer, 707 | ForeignKey(SubjectAltNameType.id_, 708 | ondelete="RESTRICT", onupdate="CASCADE"), 709 | nullable=False, 710 | ) 711 | 712 | value = Column( 713 | "value", 714 | Unicode(256), 715 | nullable=False, 716 | ) 717 | 718 | certificate = relationship(Certificate) 719 | type_ 
= relationship(SubjectAltNameType) 720 | 721 | 722 | class CertificateOffering(Base): 723 | __tablename__ = "certificate_offering" 724 | 725 | endpoint_id = Column( 726 | "endpoint_id", 727 | Integer(), 728 | ForeignKey(Endpoint.id_, ondelete="CASCADE", onupdate="CASCADE"), 729 | primary_key=True, 730 | nullable=False, 731 | ) 732 | 733 | chain_index = Column( 734 | "chain_index", 735 | Integer(), 736 | primary_key=True, 737 | nullable=False, 738 | ) 739 | 740 | certificate_id = Column( 741 | "certificate_id", 742 | Integer(), 743 | ForeignKey(Certificate.id_, ondelete="CASCADE", onupdate="CASCADE"), 744 | nullable=False, 745 | ) 746 | 747 | certificate = relationship(Certificate) 748 | 749 | 750 | class CipherOffering(Base): 751 | __tablename__ = "cipher_offering" 752 | 753 | endpoint_id = Column( 754 | "endpoint_id", 755 | Integer(), 756 | ForeignKey(Endpoint.id_, ondelete="CASCADE", onupdate="CASCADE"), 757 | primary_key=True, 758 | nullable=False, 759 | ) 760 | 761 | cipher_id = Column( 762 | "cipher_id", 763 | Integer(), 764 | ForeignKey(CipherMetadata.id_, 765 | onupdate="CASCADE"), 766 | primary_key=True, 767 | nullable=False, 768 | ) 769 | 770 | key_exchange_info = Column( 771 | "key_exchange_info", 772 | Unicode(127), 773 | nullable=True, 774 | ) 775 | 776 | 777 | class CipherOfferingOrder(Base): 778 | __tablename__ = "cipher_offering_order" 779 | 780 | __table_args__ = ( 781 | ForeignKeyConstraint( 782 | ["endpoint_id", "cipher_id"], 783 | ["cipher_offering.endpoint_id", "cipher_offering.cipher_id"] 784 | ), 785 | ) 786 | 787 | endpoint_id = Column( 788 | "endpoint_id", 789 | Integer(), 790 | primary_key=True, 791 | nullable=False, 792 | ) 793 | 794 | cipher_id = Column( 795 | "cipher_id", 796 | Integer(), 797 | primary_key=True, 798 | nullable=False, 799 | ) 800 | 801 | tls_version = Column( 802 | "tls_version", 803 | Unicode(32), 804 | primary_key=True, 805 | nullable=False, 806 | ) 807 | 808 | order = Column( 809 | "order", 810 | Integer(), 811 | 
nullable=False, 812 | ) 813 | 814 | 815 | class ScanTask(Base): 816 | __tablename__ = "scan_task" 817 | 818 | id_ = Column( 819 | "id", 820 | sqlalchemy.types.BINARY(16), 821 | primary_key=True, 822 | nullable=False, 823 | ) 824 | 825 | scan_id = Column( 826 | "scan_id", 827 | Integer(), 828 | ForeignKey(Scan.id_, ondelete="CASCADE", onupdate="CASCADE"), 829 | nullable=False, 830 | ) 831 | 832 | scan = relationship(Scan) 833 | 834 | type_ = Column( 835 | "type", 836 | SimpleEnum(TaskType), 837 | nullable=False, 838 | ) 839 | 840 | state = Column( 841 | "state", 842 | SimpleEnum(TaskState), 843 | nullable=False, 844 | ) 845 | 846 | fail_reason = Column( 847 | "fail_reason", 848 | SimpleEnum(FailReason), 849 | nullable=True, 850 | ) 851 | 852 | endpoint_id = Column( 853 | "endpoint_id", 854 | Integer(), 855 | ForeignKey(Endpoint.id_, ondelete="CASCADE", onupdate="CASCADE"), 856 | nullable=True, 857 | ) 858 | 859 | endpoint = relationship(Endpoint) 860 | 861 | heartbeat = Column( 862 | "heartbeat", 863 | DateTime(), 864 | nullable=True, 865 | ) 866 | 867 | assigned_worker = Column( 868 | "assigned_worker", 869 | sqlalchemy.types.BINARY(16), 870 | nullable=True, 871 | ) 872 | 873 | @classmethod 874 | def available_tasks(cls, session, cutoff: typing.Optional[datetime]): 875 | q = session.query( 876 | cls 877 | ).outerjoin( 878 | ScanTaskDependency, 879 | ScanTaskDependency.child_task_id == cls.id_, 880 | ).filter( 881 | ScanTaskDependency.parent_task_id == None, # NOQA 882 | cls.state != TaskState.FAILED, 883 | cls.state != TaskState.DONE, 884 | ) 885 | if cutoff is not None: 886 | q = q.filter(sqlalchemy.or_( 887 | cls.heartbeat == None, # NOQA 888 | cls.heartbeat < cutoff, 889 | )) 890 | return q 891 | 892 | def mark_completed(self, session, state=TaskState.DONE): 893 | self.state = TaskState.DONE 894 | session.query(ScanTaskDependency).filter( 895 | ScanTaskDependency.parent_task_id == self.id_ 896 | ).delete() 897 | 898 | 899 | class ScanTaskDependency(Base): 900 | 
    __tablename__ = "scan_task_dependency"

    # NOTE(review): ScanTask.id_ is BINARY(16), but these FK columns are
    # declared Integer() -- looks like a type mismatch; confirm against the
    # migration scripts.
    parent_task_id = Column(
        "parent_task_id",
        Integer(),
        ForeignKey("scan_task.id", ondelete="CASCADE", onupdate="CASCADE"),
        primary_key=True,
    )

    child_task_id = Column(
        "child_task_id",
        Integer(),
        ForeignKey("scan_task.id", ondelete="CASCADE", onupdate="CASCADE"),
        primary_key=True,
    )

    # the task that must finish before child_task becomes runnable
    parent_task = relationship(ScanTask, foreign_keys=parent_task_id)

    child_task = relationship(ScanTask, foreign_keys=child_task_id)


class EndpointScanResult(Base):
    """Per-endpoint scan outcome: TLS/SASL availability plus error info."""
    __tablename__ = "endpoint_scan_result"

    endpoint_id = Column(
        "endpoint_id",
        Integer(),
        ForeignKey(Endpoint.id_, ondelete="CASCADE", onupdate="CASCADE"),
        nullable=False,
        primary_key=True,
    )

    endpoint = relationship(Endpoint)

    tls_offered = Column(
        "tls_offered",
        Boolean(),
        nullable=False,
    )

    tls_negotiated = Column(
        "tls_negotiated",
        Boolean(),
        nullable=False,
    )

    sasl_pre_tls = Column(
        "sasl_pre_tls",
        Boolean(),
        nullable=False,
    )

    sasl_post_tls = Column(
        "sasl_post_tls",
        Boolean(),
        nullable=False,
    )

    errno = Column(
        "errno",
        Integer(),
        nullable=True,
    )

    error = Column(
        "error",
        Unicode(1023),
        nullable=True,
    )


class EndpointScanSASLOffering(Base):
    """A SASL mechanism offered by an endpoint in a given connection phase."""
    __tablename__ = "endpoint_scan_sasl_offering"

    endpoint_id = Column(
        "endpoint_scan_result_id",
        Integer(),
        ForeignKey(EndpointScanResult.endpoint_id,
                   ondelete="CASCADE",
                   onupdate="CASCADE"),
        nullable=False,
        primary_key=True,
    )

    endpoint_scan_result = relationship(EndpointScanResult)

    sasl_mechanism_id = Column(
        "sasl_mechanism_id",
        Integer(),
989 | ForeignKey(SASLMechanism.id_, 990 | ondelete="CASCADE", 991 | onupdate="CASCADE"), 992 | nullable=False, 993 | primary_key=True, 994 | ) 995 | 996 | sasl_mechanism = relationship(SASLMechanism) 997 | 998 | phase = Column( 999 | "phase", 1000 | SimpleEnum(ConnectionPhase), 1001 | nullable=False, 1002 | primary_key=True, 1003 | ) 1004 | -------------------------------------------------------------------------------- /testxmpp/testssl/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xmpp-observatory/testxmpp/64dae01de0ec59650694afc90289e2ff59a88383/testxmpp/testssl/__init__.py -------------------------------------------------------------------------------- /testxmpp/testssl/__main__.py: -------------------------------------------------------------------------------- 1 | import testxmpp.testssl.cli 2 | testxmpp.testssl.cli.main() 3 | -------------------------------------------------------------------------------- /testxmpp/testssl/cli.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import asyncio 3 | import logging 4 | import os 5 | import pathlib 6 | import shlex 7 | 8 | import environ 9 | 10 | from .daemon import TestSSLWorker 11 | 12 | 13 | @environ.config(prefix="TESTXMPP") 14 | class AppConfig: 15 | coordinator_uri = environ.var("tcp://localhost:5001") 16 | testssl = environ.var("testssl", converter=shlex.split) 17 | openssl_path = environ.var("/usr/bin/openssl") 18 | 19 | 20 | async def amain(coordinator_uri, testssl_argv_base, openssl_path): 21 | coordinator = TestSSLWorker(coordinator_uri, testssl_argv_base, 22 | openssl_path) 23 | await coordinator.run() 24 | 25 | 26 | def main(): 27 | import argparse 28 | 29 | parser = argparse.ArgumentParser() 30 | parser.add_argument( 31 | "-v", "--verbose", 32 | dest="verbosity", 33 | action="count", 34 | default=0, 35 | help="Increase verbosity (up to -vvv)", 36 | ) 37 | 
parser.add_argument( 38 | "--debug-libraries", 39 | action="store_true", 40 | default=False, 41 | help="If enabled, verbosity will also be increased for libraries", 42 | ) 43 | 44 | args = parser.parse_args() 45 | 46 | verbosity_level = { 47 | 0: logging.ERROR, 48 | 1: logging.WARNING, 49 | 2: logging.INFO, 50 | }.get(args.verbosity, logging.DEBUG) 51 | if args.debug_libraries: 52 | global_level = verbosity_level 53 | else: 54 | global_level = logging.WARNING 55 | 56 | logging.basicConfig(level=global_level) 57 | logging.getLogger("testxmpp").setLevel(verbosity_level) 58 | 59 | config = environ.to_config(AppConfig) 60 | asyncio.run(amain(config.coordinator_uri, 61 | config.testssl, 62 | config.openssl_path)) 63 | -------------------------------------------------------------------------------- /testxmpp/testssl/daemon.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import base64 3 | import csv 4 | import io 5 | import logging 6 | import secrets 7 | import os 8 | import re 9 | 10 | import zmq 11 | import zmq.asyncio 12 | 13 | import pyasn1 14 | import pyasn1_modules 15 | 16 | import testxmpp.certutil 17 | import testxmpp.common 18 | import testxmpp.api.coordinator as coordinator_api 19 | 20 | 21 | logger = logging.getLogger(__name__) 22 | 23 | 24 | def decode_line(s): 25 | f = io.StringIO(s) 26 | data = list(csv.reader(f))[0] 27 | return data 28 | 29 | 30 | ID_TLS_VERSION_RE = re.compile( 31 | r"^(SSLv2|SSLv3|TLS1(_[0-9]+)?)$" 32 | ) 33 | ID_CIPHERLIST_RE = re.compile( 34 | r"^((cipherorder|supportedciphers)_(?P(SSLv2|SSLv3|TLSv1(_[123])?)))$" 35 | ) 36 | ID_CERT_RE = re.compile( 37 | r"^cert$" 38 | ) 39 | ID_INTERMEDIATE_RE = re.compile( 40 | r"^intermediate_cert <#(?P\d+)>$" 41 | ) 42 | ID_CIPHER_RE = re.compile( 43 | r"^cipher_x(?P[0-9a-f]+)$" 44 | ) 45 | ID_CIPHER_OVERRIDE_RE = re.compile( 46 | r"^cipher_order$" 47 | ) 48 | ID_CLIENTSIMULATION_RE = re.compile( 49 | r"^clientsimulation-(?P.+)$" 50 | ) 51 
| ID_PFS_CURVES_RE = re.compile( 52 | r"^PFS_ECDHE_curves$", 53 | ) 54 | ID_IGNORE_RE = re.compile( 55 | r"^(cipher-.*|TLS_.*|sessionresumption_.*|protocol_negotiated|" 56 | r"cipher_negotiated)$" 57 | ) 58 | 59 | CIPHER_COLUMN_SEP_RE = re.compile(r"\s\s+") 60 | 61 | 62 | def unwrap_cert(cert: str) -> str: 63 | return cert.replace( 64 | " ", "\n", 65 | ).replace( 66 | "\nCERTIFICATE-", " CERTIFICATE-", 67 | ) 68 | 69 | 70 | def interpret_tls_version(id_, severity, finding, id_match): 71 | supported = "not" not in finding 72 | if id_.startswith("TLS") and not id_.startswith("TLSv"): 73 | id_ = "TLSv" + id_[3:] 74 | return ("tls-version-support", id_.replace("_", "."), supported) 75 | 76 | 77 | def interpret_cipherlist(id_, severity, finding, id_match): 78 | data = id_match.groupdict() 79 | return ("cipherlist", 80 | data["tls_version"].replace("_", "."), 81 | finding.split()) 82 | 83 | 84 | def interpret_cert(id_, severity, finding, id_match): 85 | return ("certificate", unwrap_cert(finding)) 86 | 87 | 88 | def interpret_intermediate_cert(id_, severity, finding, id_match): 89 | data = id_match.groupdict() 90 | return ("intermediate-certificate", 91 | unwrap_cert(finding), 92 | int(data["chain_index"])) 93 | 94 | 95 | def interpret_cipher(id_, severity, finding, id_match): 96 | data = id_match.groupdict() 97 | columns = CIPHER_COLUMN_SEP_RE.split(finding) 98 | _, openssl_name, kex, symm, symm_bits, iana_name = columns 99 | return ("cipher-offered", { 100 | "id": int(data["cipher_id"], 16), 101 | "openssl_name": openssl_name, 102 | "key_exchange": kex, 103 | "symmetric_cipher": { 104 | "name": symm, 105 | "bits": int(symm_bits), 106 | }, 107 | "iana_name": iana_name, 108 | }) 109 | 110 | 111 | def interpret_cipher_override(id_, severity, finding, id_match): 112 | return ("server-cipher-order", finding.lower() == "server") 113 | 114 | 115 | def interpret_clientsimulation(id_, severity, finding, id_match): 116 | data = id_match.groupdict() 117 | tls_version, cipher, *_ 
= finding.split() 118 | return ("client-simulation", data["client_name"], tls_version, cipher) 119 | 120 | 121 | def interpret_curves(id_, severity, finding, id_match): 122 | return ("ecdh-curves", finding.split()) 123 | 124 | 125 | def ignore(id_, severity, finding, id_match): 126 | pass 127 | 128 | 129 | INTERPRETERS = [ 130 | (ID_TLS_VERSION_RE, interpret_tls_version), 131 | (ID_CIPHERLIST_RE, interpret_cipherlist), 132 | (ID_CERT_RE, interpret_cert), 133 | (ID_INTERMEDIATE_RE, interpret_intermediate_cert), 134 | (ID_CIPHER_RE, interpret_cipher), 135 | (ID_CIPHER_OVERRIDE_RE, interpret_cipher_override), 136 | (ID_CLIENTSIMULATION_RE, interpret_clientsimulation), 137 | (ID_PFS_CURVES_RE, interpret_curves), 138 | (ID_IGNORE_RE, ignore), 139 | ] 140 | 141 | 142 | def interpret_line(id_, severity, finding): 143 | for rx, interpreter in INTERPRETERS: 144 | m = rx.match(id_) 145 | if m is None: 146 | continue 147 | return interpreter(id_, severity, finding, m) 148 | 149 | logger.debug("no interpreter matched ID %r: %r %r", id_, 150 | severity, finding) 151 | 152 | 153 | async def line_communicate(proc, reader, writer_fd): 154 | proc_done = asyncio.ensure_future(proc.wait()) 155 | next_line = asyncio.ensure_future(reader.readline()) 156 | 157 | pending = [proc_done, next_line] 158 | while pending: 159 | done, pending = await asyncio.wait( 160 | pending, 161 | return_when=asyncio.FIRST_COMPLETED, 162 | ) 163 | if next_line in done: 164 | yield (await next_line) 165 | if proc_done in done: 166 | os.close(writer_fd) 167 | if not reader.at_eof() and next_line not in pending: 168 | next_line = asyncio.ensure_future(reader.readline()) 169 | pending = list(pending) + [next_line] 170 | 171 | 172 | async def run_testssl(testssl, domain, hostname, port, starttls): 173 | loop = asyncio.get_event_loop() 174 | 175 | piper, pipew = os.pipe() 176 | 177 | pipe_reader = asyncio.StreamReader() 178 | await loop.connect_read_pipe( 179 | lambda: asyncio.StreamReaderProtocol(pipe_reader), 
180 | os.fdopen(piper, mode="rb"), 181 | ) 182 | 183 | argv = testssl + [ 184 | "--csvfile", "/proc/self/fd/{}".format(pipew), 185 | "-p", 186 | "-e", 187 | "-S", 188 | "-P", 189 | "-c", 190 | "--xmpphost", domain, 191 | ] 192 | if starttls is not None: 193 | argv.append("--starttls") 194 | argv.append(starttls) 195 | argv.append("{}:{}".format(hostname, port)) 196 | 197 | logger.debug("spawning testssl with %r", argv) 198 | proc = await asyncio.create_subprocess_exec( 199 | *argv, 200 | stdin=asyncio.subprocess.DEVNULL, 201 | # stdout=asyncio.subprocess.DEVNULL, 202 | # stderr=asyncio.subprocess.DEVNULL, 203 | pass_fds=[pipew], 204 | ) 205 | 206 | try: 207 | async for line in line_communicate(proc, pipe_reader, pipew): 208 | if not line: 209 | continue 210 | try: 211 | id_, _, _, severity, finding, _, _ = decode_line( 212 | line.decode("utf-8") 213 | ) 214 | except (ValueError, IndexError) as exc: 215 | logger.warning("failed to decode output line %r (%s)", 216 | line, exc) 217 | continue 218 | 219 | msg = interpret_line(id_, severity, finding) 220 | if msg is not None: 221 | yield msg 222 | 223 | if proc.returncode != 0: 224 | logger.info("testssl failed!") 225 | finally: 226 | if proc.returncode is None: 227 | proc.kill() 228 | await proc.wait() 229 | 230 | 231 | class TestSSLWorker(testxmpp.common.Worker): 232 | def __init__(self, coordinator_uri, testssl_argv_base, 233 | openssl_path): 234 | super().__init__(coordinator_uri, logger) 235 | self._testssl_argv_base = testssl_argv_base + [ 236 | "--openssl", openssl_path, 237 | ] 238 | logger.debug("I will use %r", self._testssl_argv_base) 239 | 240 | def _mkjobrequest(self, worker_id): 241 | return coordinator_api.mkv1request( 242 | coordinator_api.RequestType.GET_TESTSSL_JOB, 243 | { 244 | "worker_id": worker_id, 245 | } 246 | ) 247 | 248 | def _decode_job(self, resp): 249 | if resp["type"] == coordinator_api.ResponseType.GET_TESTSSL_JOB.value: 250 | return resp["payload"] 251 | 252 | async def 
_send_push_update(self, sock, job_id, data): 253 | msg = coordinator_api.mkv1request( 254 | coordinator_api.RequestType.TESTSSL_RESULT_PUSH, 255 | { 256 | "worker_id": self.worker_id, 257 | "job_id": job_id, 258 | "testssl_data": data, 259 | } 260 | ) 261 | 262 | await sock.send_json(msg) 263 | response = await sock.recv_json() 264 | if (response["type"] != 265 | coordinator_api.ResponseType.JOB_CONFIRMATION.value): 266 | raise RuntimeError( 267 | "unexpected push reply: {!r}".format(response) 268 | ) 269 | 270 | if not response["payload"]["continue"]: 271 | raise RuntimeError("cancelled job at server request") 272 | 273 | async def _run_job(self, sock, job): 274 | logger.info("got job: %r", job) 275 | if job["tls_mode"] == "starttls": 276 | if job["protocol"] == "c2s": 277 | starttls = "xmpp" 278 | else: 279 | starttls = "xmpp-server" 280 | else: 281 | starttls = None 282 | 283 | result = { 284 | "tls_versions": {}, 285 | "cipherlists": {}, 286 | "certificate": None, 287 | "intermediate_certificates": [], 288 | "server_cipher_order": False, 289 | "ciphers": [] 290 | } 291 | 292 | async for info_blob in run_testssl(self._testssl_argv_base, 293 | job["domain"], 294 | job["hostname"], 295 | job["port"], 296 | starttls): 297 | type_, *info = info_blob 298 | if type_ == "tls-version-support": 299 | tls_version, supported = info 300 | result["tls_versions"][tls_version] = supported 301 | await self._send_push_update(sock, job["job_id"], { 302 | "type": "tls_versions", 303 | "tls_versions": result["tls_versions"], 304 | }) 305 | 306 | elif type_ == "server-cipher-order": 307 | result["server_cipher_order"] = info[0] 308 | await self._send_push_update(sock, job["job_id"], { 309 | "type": "server_cipher_order", 310 | "server_cipher_order": result["server_cipher_order"], 311 | }) 312 | 313 | elif type_ == "cipherlist": 314 | tls_version, ciphers = info 315 | result["cipherlists"][tls_version] = ciphers 316 | await self._send_push_update(sock, job["job_id"], { 317 | "type": 
"cipherlists", 318 | "cipherlists": result["cipherlists"], 319 | }) 320 | 321 | elif type_ == "certificate": 322 | raw_der = testxmpp.certutil.unwrap_pem(info[0]) 323 | result["certificate"] = { 324 | "info": testxmpp.certutil.extract_cert_info( 325 | testxmpp.certutil.decode_cert_der(raw_der) 326 | ).to_json(), 327 | "raw_der": base64.b64encode(raw_der).decode("ascii"), 328 | } 329 | await self._send_push_update(sock, job["job_id"], { 330 | "type": "certificate", 331 | "certificate": result["certificate"], 332 | }) 333 | 334 | elif type_ == "intermediate-certificate": 335 | raw_der = testxmpp.certutil.unwrap_pem(info[0]) 336 | cert_block = { 337 | "index": info[1], 338 | "info": testxmpp.certutil.extract_cert_info( 339 | testxmpp.certutil.decode_cert_der(raw_der) 340 | ).to_json(), 341 | "raw_der": base64.b64encode(raw_der).decode("ascii"), 342 | } 343 | result["intermediate_certificates"].append(cert_block) 344 | await self._send_push_update(sock, job["job_id"], { 345 | "type": "intermediate_certificate", 346 | "certificate": cert_block, 347 | }) 348 | 349 | elif type_ == "cipher-offered": 350 | result["ciphers"].append(info[0]) 351 | await self._send_push_update(sock, job["job_id"], { 352 | "type": "cipher_info", 353 | "cipher": info[0], 354 | }) 355 | 356 | msg = coordinator_api.mkv1request( 357 | coordinator_api.RequestType.TESTSSL_COMPLETE, 358 | { 359 | "worker_id": self.worker_id, 360 | "job_id": job["job_id"], 361 | "testssl_result": result, 362 | } 363 | ) 364 | await sock.send_json(msg) 365 | resp = await sock.recv_json() 366 | if resp["type"] != coordinator_api.ResponseType.OK.value: 367 | self.logger.warning( 368 | "coordinator rejected our result: %r", 369 | resp 370 | ) 371 | -------------------------------------------------------------------------------- /testxmpp/web/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | 4 | import environ 5 | 6 | import quart.flask_patch 7 
@contextlib.contextmanager
def zmq_socket(type_):
    """Context manager yielding an asyncio ZMQ socket of *type_*.

    A dedicated context is created per socket and torn down (socket closed,
    context terminated) when the block exits, even on error.

    :param type_: a ZMQ socket type constant, e.g. ``zmq.REQ``.
    """
    zctx = zmq.asyncio.Context()
    try:
        # BUGFIX: the socket type was hard-coded to zmq.REQ, silently
        # ignoring the caller-supplied `type_` parameter.
        sock = zctx.socket(type_)
        try:
            yield sock
        finally:
            sock.close()
    finally:
        zctx.term()
def _get_recent_scans(protocol):
    """Return up to ten (id, domain, created_at) tuples for *protocol*,
    one per domain (its newest scan), newest first.

    Domains are stored IDNA-encoded bytes and decoded for display here.
    """
    # Newest scan timestamp per domain.  BUGFIX: the subquery applied
    # LIMIT 10 without any ORDER BY, so *which* ten domain groups survived
    # was left to the database; order by the aggregate so the ten most
    # recently scanned domains are selected deterministically.
    subquery = db.session.query(
        model.Scan.domain,
        sqlalchemy.func.max_(model.Scan.created_at).label("newest"),
    ).group_by(
        model.Scan.domain,
    ).filter(
        model.Scan.protocol == protocol,
    ).order_by(
        sqlalchemy.desc("newest"),
    ).limit(10).subquery()

    return [
        (id_, domain.decode("idna"), created_at)
        for id_, domain, created_at in db.session.query(
            model.Scan.id_,
            model.Scan.domain,
            model.Scan.created_at,
        ).select_from(
            model.Scan,
        ).join(
            subquery,
            sqlalchemy.and_(
                subquery.c.domain == model.Scan.domain,
                subquery.c.newest == model.Scan.created_at,
            ),
        ).filter(
            model.Scan.protocol == protocol,
        ).order_by(
            model.Scan.created_at.desc(),
        ).limit(10)
    ]


@bp.route("/", methods=["GET"])
async def index():
    """Landing page: quick-scan form plus recent scans for both protocols."""
    recent_scans_c2s = _get_recent_scans(model.ScanType.C2S)
    recent_scans_s2s = _get_recent_scans(model.ScanType.S2S)

    return await render_template(
        "index.html",
        recent_scans=[
            (model.ScanType.C2S, recent_scans_c2s),
            (model.ScanType.S2S, recent_scans_s2s),
        ],
        # Passed so templates can render relative timestamps.
        now=datetime.utcnow(),
    )


@bp.route("/scan/queue", methods=["GET", "POST"])
async def queue_scan():
    """Submit a scan request to the coordinator and redirect to its result.

    On SCAN_QUEUED, redirects to the result page; on ERROR, aborts with the
    coordinator-supplied HTTP code and message.
    """
    form_data = await request.form
    scan_request = coordinator_api.mkv1request(
        coordinator_api.RequestType.SCAN_DOMAIN,
        {
            "domain": form_data["domain"],
            "protocol": form_data["protocol"],
        },
    )

    with zmq_socket(zmq.REQ) as sock:
        sock.connect(current_app.config["COORDINATOR_URI"])
        await sock.send_json(scan_request)
        reply = await sock.recv_json()

    if reply["type"] == coordinator_api.ResponseType.SCAN_QUEUED.value:
        return redirect(url_for(
            "main.scan_result",
            scan_id=reply["payload"]["scan_id"]
        ))
    elif reply["type"] == coordinator_api.ResponseType.ERROR.value:
        return abort(reply["payload"]["code"], reply["payload"]["message"])

    raise RuntimeError("unexpected reply: {!r}".format(reply))


def evaluate_task_and_result(task, result):
    """Condense a (ScanTask, EndpointScanResult) pair for the templates.

    Returns (in_progress, passed, error) where error is a human-readable
    message or None.  Either argument may be None (outer joins upstream).
    """
    in_progress = task is not None and task.state in [
        model.TaskState.WAITING,
        model.TaskState.IN_PROGRESS
    ]

    passed = (
        result is not None and result.error is None and
        result.errno is None
    )

    if task is not None and task.state == model.TaskState.FAILED:
        error = task.fail_reason.value
    elif result is not None:
        if result.errno is not None and result.errno > 0:
            # Positive errno values come from the OS; translate to text.
            error = os.strerror(result.errno)
        else:
            error = result.error
    else:
        error = None

    return in_progress, passed, error
def fetch_host_meta_info(session,
                         scan_id: int):
    """Collect host-meta documents and their advertised links for a scan.

    :return: ``(object_info, links)`` where ``object_info`` is a sorted list
        of ``(format, url)`` pairs and ``links`` is a sorted list of
        ``(rel, href, [formats])`` tuples.
    """
    objects = {
        id_: (format_, url)
        for id_, format_, url in session.query(
            model.HostMetaObject.id_,
            model.HostMetaObject.format_,
            model.HostMetaObject.url,
        ).filter(
            model.HostMetaObject.scan_id == scan_id,
        )
    }
    object_info = sorted(objects.values(), key=lambda x: x[0].value)

    # Group links by (rel, href): the same link may be advertised by
    # several host-meta documents (formats).
    links_collection = {}
    for object_id, rel, href in session.query(
            model.HostMetaLink.object_id,
            model.HostMetaLink.rel,
            model.HostMetaLink.href,
            ).select_from(model.HostMetaLink).join(
                model.HostMetaObject,
            ).filter(
                model.HostMetaObject.scan_id == scan_id,
            ):
        links_collection.setdefault((rel, href), []).append(
            objects[object_id][0]
        )

    links = sorted(
        ((rel, href, formats)
         for (rel, href), formats in links_collection.items()),
        key=lambda x: (x[0], x[1])
    )

    return object_info, links


# NOTE(review): the converter `<int:scan_id>` is reconstructed; the original
# route parameter was lost to markup stripping in the dump.  Scan.id_ is used
# in integer comparisons and fetch_host_meta_info annotates scan_id as int,
# so an int converter matches the visible usage — confirm against history.
@bp.route("/scan/result/<int:scan_id>", methods=["GET"])
async def scan_result(scan_id):
    """Render the full result page for one scan, or 404 if it is unknown."""
    try:
        domain, protocol, created_at = \
            db.session.query(
                model.Scan.domain,
                model.Scan.protocol,
                model.Scan.created_at,
            ).filter(
                model.Scan.id_ == scan_id
            ).one()
    except sqlalchemy.orm.exc.NoResultFound:
        return abort(404)

    srv_records = list(db.session.query(
        model.SRVRecord.priority,
        model.SRVRecord.weight,
        model.SRVRecord.service,
        model.SRVRecord.host,
        model.SRVRecord.port,
    ).filter(
        model.SRVRecord.scan_id == scan_id,
    ).order_by(
        model.SRVRecord.priority.asc(),
        model.SRVRecord.weight.desc(),
        model.SRVRecord.service.asc(),
    ))

    xmppconnect_records = list(db.session.query(
        model.XMPPConnectRecord.attribute_name,
        model.XMPPConnectRecord.attribute_value,
    ).filter(
        model.XMPPConnectRecord.scan_id == scan_id,
    ).order_by(
        model.XMPPConnectRecord.attribute_name.asc(),
    ))

    host_meta_object_info, host_meta_links = fetch_host_meta_info(
        db.session,
        scan_id,
    )

    # NOTE(review): filtering on ScanTask.type_ after an outerjoin on
    # ScanTask discards endpoints that have no task row at all (the filter
    # rejects NULLs), effectively demoting the outer join to an inner join.
    # Preserved as-is since the intended semantics cannot be confirmed from
    # here — verify whether task-less endpoints should still be listed.
    endpoints = []
    for ep, task, result in db.session.query(
            model.EndpointTCP,
            model.ScanTask,
            model.EndpointScanResult,
            ).select_from(
                model.EndpointTCP,
            ).outerjoin(
                model.ScanTask,
            ).outerjoin(
                model.EndpointScanResult
            ).filter(
                model.EndpointTCP.scan_id == scan_id,
                model.ScanTask.type_ == model.TaskType.XMPP_PROBE,
            ).order_by(
                model.EndpointTCP.endpoint_id.asc()
            ):

        if ep.srv_record_id is not None:
            source = model.EndpointSource.SRV_RECORD.value
        else:
            source = model.EndpointSource.FALLBACK.value

        endpoints.append((
            source, ep.transport.value, ep.uri, ep.tls_mode.value,
            evaluate_task_and_result(task, result),
        ))

    for ep, task, result in db.session.query(
            model.EndpointHTTP,
            model.ScanTask,
            model.EndpointScanResult,
            ).select_from(
                model.EndpointHTTP,
            ).outerjoin(
                model.ScanTask,
            ).outerjoin(
                model.EndpointScanResult
            ).filter(
                model.EndpointHTTP.scan_id == scan_id,
                model.ScanTask.type_ == model.TaskType.XMPP_PROBE,
            ).order_by(
                model.EndpointHTTP.endpoint_id.asc()
            ):

        endpoints.append(
            (model.EndpointSource.ALTERNATIVE_METHOD.value,
             ep.transport.value, ep.uri, ep.http_mode.value,
             evaluate_task_and_result(task, result))
        )

    # SASL mechanisms offered, bucketed by connection phase (pre/post TLS).
    sasl_offerings = {
        v.value: []
        for v in model.ConnectionPhase
    }
    for phase, name in db.session.query(
            model.EndpointScanSASLOffering.phase,
            model.SASLMechanism.name
            ).select_from(model.Endpoint).join(
                model.EndpointScanResult
            ).join(
                model.EndpointScanSASLOffering
            ).join(
                model.SASLMechanism
            ).filter(
                model.Endpoint.scan_id == scan_id,
            ).distinct().order_by(
                model.SASLMechanism.name.asc(),
            ):
        sasl_offerings[phase.value].append(name)

    # Per TLS version: [score-if-not-offered, score-if-offered]; indexed
    # below by the boolean `offered` (False -> 0, True -> 1).  Legacy
    # versions score positively when absent, modern ones when present.
    tls_offering_schema = [
        ("SSL 2", [1, -1]),
        ("SSL 3", [1, -1]),
        ("TLS 1", [1, -1]),
        ("TLS 1.1", [1, 0]),
        ("TLS 1.2", [-1, 1]),
        ("TLS 1.3", [-1, 1]),
    ]

    try:
        # TODO: this is not safe with multiple endpoints being testssl'd
        # (`.one()` raises MultipleResultsFound).
        *tls_versions, server_cipher_order, endpoint_id = db.session.query(
            model.TLSOffering.sslv2,
            model.TLSOffering.sslv3,
            model.TLSOffering.tlsv1,
            model.TLSOffering.tlsv1_1,
            model.TLSOffering.tlsv1_2,
            model.TLSOffering.tlsv1_3,
            model.TLSOffering.server_cipher_order,
            model.Endpoint.id_,
        ).select_from(model.TLSOffering).join(
            model.Endpoint,
        ).filter(
            model.Endpoint.scan_id == scan_id,
        ).one()
    except sqlalchemy.orm.exc.NoResultFound:
        tls_versions = [None] * len(tls_offering_schema)
        server_cipher_order = None
        tls_scan_uri = None
    else:
        tls_scan_endpoint = db.session.query(
            model.Endpoint,
        ).filter(
            model.Endpoint.id_ == endpoint_id,
        ).one()
        tls_scan_uri = tls_scan_endpoint.uri

    tls_offering_info = [
        (label, scores[offered] if offered is not None else 0, offered)
        for (label, scores), offered in zip(tls_offering_schema, tls_versions)
    ]

    ciphers = list(db.session.query(
        model.CipherOffering.cipher_id,
        model.CipherMetadata.openssl_name,
        model.CipherOffering.key_exchange_info,
    ).select_from(
        model.CipherOffering
    ).join(
        model.CipherMetadata
    ).join(
        model.CipherOfferingOrder
    ).join(
        model.Endpoint
    ).filter(
        model.Endpoint.scan_id == scan_id,
    ).order_by(
        model.CipherOfferingOrder.order.asc(),
    ))

    # TODO: this is not safe with multiple endpoints being testssl'd
    certs = list(db.session.query(
        model.Certificate.id_,
        model.Certificate.subject,
        model.Certificate.issuer,
        model.Certificate.not_before,
        model.Certificate.not_after,
        model.Certificate.public_key,
        model.Certificate.public_key_type,
        model.Certificate.fingerprint_sha1,
        model.Certificate.fingerprint_sha256,
        model.Certificate.fingerprint_sha512,
    ).select_from(
        model.Endpoint,
    ).join(
        model.CertificateOffering,
    ).join(
        model.Certificate,
    ).filter(
        model.Endpoint.scan_id == scan_id,
    ).order_by(
        model.CertificateOffering.chain_index.asc(),
    ))

    cert_chain = []
    for (cert_id,
         subject, issuer,
         not_before, not_after,
         public_key, public_key_type,
         fp_sha1, fp_sha256, fp_sha512) in certs:
        # subjectAltName values grouped by their ASN.1 type name.
        sans = {}

        for asn1_name, value in db.session.query(
                model.SubjectAltNameType.asn1_name,
                model.SubjectAltName.value,
                ).select_from(
                    model.SubjectAltName,
                ).join(
                    model.SubjectAltNameType,
                ).filter(
                    model.SubjectAltName.certificate_id == cert_id,
                ).order_by(
                    model.SubjectAltNameType.asn1_name.asc(),
                ):
            sans.setdefault(asn1_name, []).append(value)

        cert_chain.append(
            testxmpp.certutil.CertInfo(
                subject=json.loads(subject),
                issuer=json.loads(issuer),
                subject_alt_names=sans,
                public_key=public_key,
                public_key_type=public_key_type,
                not_before=not_before,
                not_after=not_after,
                fingerprints={
                    "sha1": fp_sha1,
                    "sha256": fp_sha256,
                    "sha512": fp_sha512,
                },
            )
        )

    return await render_template(
        "scan_result.html",
        scan_id=scan_id,
        scan_info={
            "domain": domain.decode("idna"),
            "protocol": protocol,
            "created_at": created_at,
        },
        srv_records=srv_records,
        xmppconnect_records=xmppconnect_records,
        endpoints=endpoints,
        host_meta_object_info=host_meta_object_info,
        host_meta_links=host_meta_links,
        sasl_offerings=sasl_offerings,
        tls_offering_info=tls_offering_info,
        tls_scan_uri=tls_scan_uri,
        server_cipher_order=server_cipher_order,
        ciphers=ciphers,
        cert_chain=cert_chain,
    )
-------------------------------------------------------------------------------- 1 | $colours: ( 2 | "gray": [ 3 | hsl(0, 0%, 10%), 4 | hsl(0, 0%, 20%), 5 | hsl(0, 0%, 30%), 6 | hsl(0, 0%, 40%), 7 | hsl(0, 0%, 50%), 8 | hsl(0, 0%, 60%), 9 | hsl(0, 0%, 70%), 10 | hsl(0, 0%, 80%), 11 | hsl(0, 0%, 90%) 12 | ], 13 | "primary": [ 14 | hsl(220, 80%, 10%), 15 | hsl(220, 80%, 20%), 16 | hsl(220, 80%, 30%), 17 | hsl(220, 80%, 40%), 18 | hsl(220, 80%, 50%), 19 | hsl(220, 80%, 60%), 20 | hsl(220, 80%, 70%), 21 | hsl(220, 80%, 80%), 22 | hsl(220, 80%, 90%) 23 | ], 24 | "alert": [ 25 | hsl(0, 80%, 10%), 26 | hsl(0, 80%, 20%), 27 | hsl(0, 80%, 30%), 28 | hsl(0, 80%, 40%), 29 | hsl(0, 80%, 50%), 30 | hsl(0, 80%, 60%), 31 | hsl(0, 80%, 70%), 32 | hsl(0, 80%, 80%), 33 | hsl(0, 80%, 90%) 34 | ], 35 | "accent": [ 36 | hsl(150, 80%, 10%), 37 | hsl(150, 80%, 20%), 38 | hsl(150, 80%, 30%), 39 | hsl(150, 80%, 40%), 40 | hsl(150, 80%, 50%), 41 | hsl(150, 80%, 60%), 42 | hsl(150, 80%, 70%), 43 | hsl(150, 80%, 80%), 44 | hsl(150, 80%, 90%) 45 | ], 46 | "success": [ 47 | hsl(100, 80%, 10%), 48 | hsl(100, 80%, 20%), 49 | hsl(100, 80%, 30%), 50 | hsl(100, 80%, 40%), 51 | hsl(100, 80%, 50%), 52 | hsl(100, 80%, 60%), 53 | hsl(100, 80%, 70%), 54 | hsl(100, 80%, 80%), 55 | hsl(100, 80%, 90%) 56 | ] 57 | ); 58 | 59 | $gray-100: nth(map-get($colours, "gray"), 1); 60 | $gray-200: nth(map-get($colours, "gray"), 2); 61 | $gray-300: nth(map-get($colours, "gray"), 3); 62 | $gray-400: nth(map-get($colours, "gray"), 4); 63 | $gray-500: nth(map-get($colours, "gray"), 5); 64 | $gray-600: nth(map-get($colours, "gray"), 6); 65 | $gray-700: nth(map-get($colours, "gray"), 7); 66 | $gray-800: nth(map-get($colours, "gray"), 8); 67 | $gray-900: nth(map-get($colours, "gray"), 9); 68 | 69 | $primary-100: nth(map-get($colours, "primary"), 1); 70 | $primary-200: nth(map-get($colours, "primary"), 2); 71 | $primary-300: nth(map-get($colours, "primary"), 3); 72 | $primary-400: nth(map-get($colours, "primary"), 4); 73 | 
$primary-500: nth(map-get($colours, "primary"), 5); 74 | $primary-600: nth(map-get($colours, "primary"), 6); 75 | $primary-700: nth(map-get($colours, "primary"), 7); 76 | $primary-800: nth(map-get($colours, "primary"), 8); 77 | $primary-900: nth(map-get($colours, "primary"), 9); 78 | 79 | $alert-100: nth(map-get($colours, "alert"), 1); 80 | $alert-200: nth(map-get($colours, "alert"), 2); 81 | $alert-300: nth(map-get($colours, "alert"), 3); 82 | $alert-400: nth(map-get($colours, "alert"), 4); 83 | $alert-500: nth(map-get($colours, "alert"), 5); 84 | $alert-600: nth(map-get($colours, "alert"), 6); 85 | $alert-700: nth(map-get($colours, "alert"), 7); 86 | $alert-800: nth(map-get($colours, "alert"), 8); 87 | $alert-900: nth(map-get($colours, "alert"), 9); 88 | 89 | $accent-100: nth(map-get($colours, "accent"), 1); 90 | $accent-200: nth(map-get($colours, "accent"), 2); 91 | $accent-300: nth(map-get($colours, "accent"), 3); 92 | $accent-400: nth(map-get($colours, "accent"), 4); 93 | $accent-500: nth(map-get($colours, "accent"), 5); 94 | $accent-600: nth(map-get($colours, "accent"), 6); 95 | $accent-700: nth(map-get($colours, "accent"), 7); 96 | $accent-800: nth(map-get($colours, "accent"), 8); 97 | $accent-900: nth(map-get($colours, "accent"), 9); 98 | 99 | $success-100: nth(map-get($colours, "success"), 1); 100 | $success-200: nth(map-get($colours, "success"), 2); 101 | $success-300: nth(map-get($colours, "success"), 3); 102 | $success-400: nth(map-get($colours, "success"), 4); 103 | $success-500: nth(map-get($colours, "success"), 5); 104 | $success-600: nth(map-get($colours, "success"), 6); 105 | $success-700: nth(map-get($colours, "success"), 7); 106 | $success-800: nth(map-get($colours, "success"), 8); 107 | $success-900: nth(map-get($colours, "success"), 9); 108 | 109 | $w-s5: 0.0625rem; 110 | $w-s4: 0.125rem; 111 | $w-s3: 0.25rem; 112 | $w-s2: 0.5rem; 113 | $w-s1: 0.75rem; 114 | $w-0: 1rem; 115 | $w-l1: 1.5rem; 116 | $w-l2: 2rem; 117 | $w-l3: 3rem; 118 | $w-l4: 
4rem; 119 | $w-l5: 6rem; 120 | $w-l6: 8rem; 121 | $w-l7: 12rem; 122 | 123 | $font-sans: "Noto Sans", sans-serif; 124 | $font-serif: serif; 125 | $font-monospace: monospace; 126 | 127 | $font-heading: $font-sans; 128 | $font-bulk: $font-sans; 129 | $font-code: $font-monospace; 130 | 131 | /** 132 | * On the scaling of the headers. I’m a nerd, so here we go. 133 | * 134 | * I tried to determine a good scale a priori. It was clear to me that the 135 | * observed difference between a 48px and 64px font is much smaller than the 136 | * perceived difference between a 8px and 16px font size. 137 | * 138 | * Thus, the perception is *not* linear in the font size. 139 | * 140 | * I set the edge points to 200% and 100% (the h6 would get a bold font face) 141 | * to compensate. 142 | * 143 | * The first attempt to get a visually appealing header size scale was thus to 144 | * generate a logarithmic scale: 145 | * 146 | * numpy.logspace(np.log10(200), 2, 6, base=10) 147 | * 148 | * This leads to the following sizes: 149 | * 150 | * $_h-sizes: [200%, 174.11011266%, 151.57165665%, 131.95079108%, 114.8698355%, 100%]; 151 | * 152 | * This scale has too large differences between the larger font sizes, and too 153 | * small differences between the smaller font sizes. Thus, I tried to invert 154 | * this: 155 | * 156 | * 200 - numpy.logspace(2, np.log10(200), 6, base=10) + 100 157 | * 158 | * This leads to the following sizes: 159 | * 160 | * $_h-sizes: [200.0%, 185.13016450029647%, 168.0492089227105%, 148.42834334896025%, 125.88988734077518%, 100%]; 161 | * 162 | * While this was better, it still didn’t look quite right yet. The next 163 | * attempt was to go about a square function instead of log. The idea behind 164 | * this is that the font size is essentially one edge of a rectangle, where the 165 | * second edge depends on the first. 
A square function should thus generate a 166 | * nicely appealing sequence: 167 | * 168 | * Again, we want the large differences to be on the large scales, too: 169 | * 170 | * xs = numpy.linspace(5, 0, 6); 4*xs*xs + 100 171 | * 172 | * This leads to the following sizes: 173 | * 174 | * $_h-sizes: [200.0%, 164.0%, 136.0%, 116.0%, 104.0%, 100.0%]; 175 | * 176 | * While the first three headings looked nice with that, the others did not. 177 | * Further research has shown me that others use an exponential scale (instead 178 | * of a log scale), but with a rather small base (<1.6). 179 | * 180 | * Instead of taking one of the well-known factors (like golden ratio or major 181 | * second), I opted for choosing a factor which gives me a clean 200%-100% 182 | * range: 183 | * 184 | * numpy.power(math.pow(2, 1/5), numpy.linspace(5, 0, 6)) * 100 185 | * 186 | * The result (rounded to 8 digits) is: 187 | * 188 | * $_h-sizes: [200.0%, 174.11011266%, 151.57165665%, 131.95079108%, 114.8698355%, 100.0%]; 189 | * 190 | * And... This is the first logspace range. Derp. So why did I discard it in 191 | * the first place? Now that I look at it, it looks amazing. Brains are weird. 192 | */ 193 | $h-sizes: [200.0%, 174.11011266%, 151.57165665%, 131.95079108%, 114.8698355%, 100.0%]; 194 | 195 | /** 196 | * And for mobile devices, we want an even less aggressive scale. Let’s try 197 | * 150%-100%. 
main {
    /* BUGFIX: Sass has no `@extends` directive; the correct spelling is
     * `@extend`, matching the `.el-4` elevation placeholder above. */
    @extend .el-4;
    background-color: $gray-900;
    padding: $w-l1;
    margin: $w-l1;
}
ol.certchain, ol.xname {
    list-style-type: none;
    margin: 0;
    padding: 0;

    /* NOTE(review): the original nested rule was a bare `>` with no child
     * selector, which selects nothing; `> li` (resetting the list items,
     * in line with the parent reset) is almost certainly the intent —
     * confirm. */
    > li {
        margin: 0;
        padding: 0;
    }
}
div.recent-scans-col { 213 | flex: 1 0 auto; 214 | margin: $w-s1; 215 | padding: $w-s1; 216 | } 217 | 218 | /* misc */ 219 | 220 | span.cipher-id { 221 | font-size: $w-s1; 222 | font-weight: light; 223 | color: $gray-300; 224 | } 225 | 226 | ul.inline { 227 | display: inline; 228 | margin: 0; 229 | padding: 0; 230 | list-style-type: none; 231 | 232 | > li { 233 | display: inline; 234 | margin: 0; 235 | padding: 0; 236 | } 237 | 238 | > li:before { 239 | content: ', '; 240 | } 241 | 242 | > li:first-child:before { 243 | content: none; 244 | } 245 | } 246 | 247 | .hardwrap { 248 | line-break: anywhere; 249 | } 250 | 251 | /* a11y things */ 252 | 253 | .a11y-text { 254 | position: absolute; 255 | width: 1px; 256 | height: 1px; 257 | overflow: hidden; 258 | top: -100px; 259 | } 260 | 261 | .pres-text:before { 262 | content: attr(data-text); 263 | } 264 | -------------------------------------------------------------------------------- /testxmpp/web/templates/_base.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | {% block title %}{% endblock %} - TestXMPP 5 | 6 | 7 | {% block body %}{% endblock %} 8 | 9 | 10 | -------------------------------------------------------------------------------- /testxmpp/web/templates/_library.html: -------------------------------------------------------------------------------- 1 | {% macro protocol_name_short(protocol) -%} 2 | {%- if protocol.value == "c2s" -%} 3 | {%- trans %}client{% endtrans -%} 4 | {%- else -%} 5 | {%- trans %}server{% endtrans -%} 6 | {%- endif -%} 7 | {%- endmacro %} 8 | 9 | {% macro protocol_name(protocol) -%} 10 | {%- if protocol.value == "c2s" -%} 11 | {% trans %}client-to-server{% endtrans %} 12 | {%- else -%} 13 | {% trans %}server-to-server{% endtrans %} 14 | {%- endif -%} 15 | {%- endmacro %} 16 | 17 | {% macro connection_phase(phase) -%} 18 | {%- if phase == "pre-tls" -%} 19 | {% trans %}pre-TLS{% endtrans %} 20 | {%- else -%} 21 | {% trans %}post-TLS{% 
endtrans %} 22 | {%- endif -%} 23 | {%- endmacro %} 24 | 25 | {% macro badge(class_, title=None, caller=None) %} 26 | {{ caller() }} 27 | {% endmacro %} 28 | 29 | {% macro tls_mode_name(tls_mode) -%} 30 | {%- if tls_mode.value == "starttls" -%} 31 | StartTLS 32 | {%- else -%} 33 | Direct 34 | {%- endif -%} 35 | {%- endmacro %} 36 | -------------------------------------------------------------------------------- /testxmpp/web/templates/_page.html: -------------------------------------------------------------------------------- 1 | {% extends "_base.html" %} 2 | {% block body %}
{% block content %}{% endblock %}
{% endblock %} 3 | -------------------------------------------------------------------------------- /testxmpp/web/templates/index.html: -------------------------------------------------------------------------------- 1 | {% extends "_base.html" %} 2 | {% block title %}Hello{% endblock %} 3 | {% from "_library.html" import protocol_name_short, protocol_name %} 4 | {% block body %} 5 | 9 |
10 |

{% trans %}IM Observatory{% endtrans %}

11 |

{% trans %}Testing the security of the Jabber/XMPP network since '13.{% endtrans %}

12 |
13 |
14 |
15 |

{% trans %}Test a server{% endtrans %}

16 |
17 |
18 |
22 |
23 |
24 |
25 |
26 |

{% trans %}Recent scans{% endtrans %}

27 |
28 | {% for protocol, scans in recent_scans %} 29 |
30 |

{{ protocol_name(protocol).title() }}

31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | {% for id_, domain, created_at in scans %} 41 | 42 | 43 | 44 | 45 | 46 | {% endfor %} 47 | 48 |
{% trans %}Domain{% endtrans %}{% trans %}When{% endtrans %}{% trans %}Score{% endtrans %}
{{ domain }}{{ (created_at - now) | format_timedelta(add_direction=True) }}TBD
49 |
50 | {% endfor %} 51 |
52 |
53 |
54 | {% endblock %} 55 | -------------------------------------------------------------------------------- /testxmpp/web/templates/scan_result.html: -------------------------------------------------------------------------------- 1 | {% extends "_page.html" %} 2 | {% block title %}{% trans %}Scan result #{{ scan_id }}{% endtrans %}{% endblock %} 3 | {% from "_library.html" import connection_phase, badge, tls_mode_name %} 4 | {% block content %} 5 |

{% trans %}Scan result #{{ scan_id }}{% endtrans %}

6 |
7 |
8 |
9 |
Domain
10 |
{{ scan_info.domain }}
11 |
12 |
13 |
Start time
14 |
{{ scan_info.created_at }}
15 |
16 |
17 |
Protocol
18 |
{{ scan_info.protocol.value }}
19 |
20 |
21 |
22 |

Endpoint discovery

23 |
24 |

SRV records

25 | {% if srv_records %} 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | {% for prio, weight, service, host, port in srv_records %} 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | {% endfor %} 46 | 47 |
PWServiceHostPort
{{ prio }}{{ weight }}{{ service }}{{ host | decode_domain }}{{ port }}
48 | {% else %} 49 |

{% trans %}No valid records found.{% endtrans %}

50 | {% endif %} 51 |
52 |
53 |

_xmppconnect TXT records

54 | {% if xmppconnect_records %} 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | {% for name, value in xmppconnect_records %} 64 | 65 | 66 | 67 | 68 | {% endfor %} 69 | 70 |
NameValue
{{ name | printable_bytes }}{{ value | printable_bytes }}
71 | {% else %} 72 |

{% trans %}No valid records found.{% endtrans %}

73 | {% endif %} 74 |
75 |
76 |

host-meta

77 | {% if host_meta_object_info %} 78 |
79 | {% for format_, url in host_meta_object_info %} 80 |
81 |
{{ format_.value }}
82 |
{{ url }}
83 |
84 | {% endfor %} 85 |
86 | {% if host_meta_links %} 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | {% for rel, href, formats in host_meta_links %} 97 | 98 | 99 | 100 | 105 | 106 | {% endfor %} 107 | 108 |
TypeURLFormats
{{ rel }}{{ href }}
    101 | {%- for format_ in formats -%} 102 |
  • {{ format_.value }}
  • 103 | {%- endfor -%} 104 |
109 | {% else %} 110 |

{% trans %}No object contained any valid links.{% endtrans %}

111 | {% endif %} 112 | {% else %} 113 |

{% trans %}No valid host-meta objects found.{% endtrans %}

114 | {% endif %} 115 |
116 |

Endpoints

117 |
118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | {% for source, transport, uri, mode, (in_progress, passed, error) in endpoints %} 130 | 131 | 132 | 133 | 134 | 135 | 146 | 147 | {% endfor %} 148 | 149 |
OriginTransportURIModeStatus
{{ source }}{{ transport }}{{ uri }}{{ mode }} 136 | {%- if in_progress -%} 137 | {% call badge("gray") %}{% trans %}In progress{% endtrans %}{% endcall %} 138 | {%- else -%} 139 | {%- if passed -%} 140 | {% call badge("success") %}{% trans %}Passed{% endtrans %}{% endcall %} 141 | {%- else -%} 142 | {% call badge("error", title=error) %}{% trans %}Error{% endtrans %}{% endcall %} 143 | {%- endif -%} 144 | {%- endif -%} 145 |
150 |
151 | 152 |

Authentication

153 |
154 | {% for phase, mechanisms in sasl_offerings.items() %} 155 |

{{ phase }}

156 | {% if mechanisms %} 157 |
    158 | {% for mechanism in mechanisms %} 159 |
  • {{ mechanism }}
  • 160 | {% endfor %} 161 |
162 | {% else %} 163 |

{% trans %}No SASL mechanisms offered.{% endtrans %}

164 | {% endif %} 165 | {% endfor %} 166 |
167 | 168 |

TLS

169 | {% if not tls_scan_uri %} 170 |
171 |

The TLS scan has not been scheduled.

172 |
173 | {% else %} 174 |

The TLS scan was executed against {{ tls_scan_uri }}.

175 |

Versions

176 |
177 | 178 | 179 | 180 | {% for label, score, offered in tls_offering_info %} 181 | 182 | {% endfor %} 183 | 184 | 185 | 186 | {% for label, score, offered in tls_offering_info %} 187 | 188 | {% endfor %} 189 | 190 |
Version{{ label }}
Offered{% if offered is none %}?{% else %}{{ 'yes' if offered else 'no'}}{% endif %}
191 |
192 | 193 | {%- macro xname(nm, caller=None) %} 194 |
    195 | {% for rdn in nm %} 196 |
  1. 197 |
    198 | {% for type_, value in rdn.items() %} 199 |
    200 |
    {{ type_ | oidname }}
    201 |
    {{ value }}
    202 |
    203 | {% endfor %} 204 |
    205 |
  2. 206 | {% endfor %} 207 |
208 | {% endmacro -%} 209 | 210 |

Certificate Chain

211 | {% if cert_chain %} 212 |
    213 | {% for cert in cert_chain %} 214 |
  1. 215 |
    216 |
    217 |
    Subject
    218 |
    {% call xname(cert.subject) %}{% endcall %}
    219 |
    220 | {#
    221 |
    Issuer
    222 |
    {% call xname(cert.issuer) %}{% endcall %}
    223 |
    #} 224 |
    225 |
    Valid from
    226 |
    {{ cert.not_before }}
    227 |
    228 |
    229 |
    Valid until
    230 |
    {{ cert.not_after }}
    231 |
    232 |
    233 |
    Fingerprints
    234 |
    235 |
    236 | {% for algo in cert.fingerprints %} 237 |
    238 |
    {{ algo }} 239 |
    {{ cert.fingerprints[algo] | hexdigest }}
    240 |
    241 | {% endfor %} 242 |
    243 |
    244 |
    245 |
    246 |
  2. 247 | {% endfor %} 248 |
249 | {% else %} 250 |
251 |

No certificate chain discovered.

252 |
253 | {% endif %} 254 |

Ciphers

255 |
256 |
Cipher order
257 |
{% if server_cipher_order is none %}?{% elif server_cipher_order %}Server side{% else %}Client side{% endif %}
258 |
259 |
260 | {% if ciphers %} 261 | 262 | 263 | 264 | 265 | 266 | 267 | 268 | 269 | {% for id_, name, info in ciphers %} 270 | 271 | 272 | 273 | 274 | {% endfor %} 275 | 276 |
NameInfo
{{ name }} ({{ "0x%02x" | format(id_) }}){{ info }}
277 | {% else %} 278 |

No cipher information available.

279 | {% endif %} 280 |
281 | {% endif %} 282 | {% endblock %} 283 | -------------------------------------------------------------------------------- /testxmpp/xmpp/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /testxmpp/xmpp/__main__.py: -------------------------------------------------------------------------------- 1 | import testxmpp.xmpp.cli 2 | testxmpp.xmpp.cli.main() 3 | -------------------------------------------------------------------------------- /testxmpp/xmpp/cli.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import asyncio 3 | import logging 4 | import os 5 | import pathlib 6 | import shlex 7 | 8 | import environ 9 | 10 | from .daemon import XMPPWorker 11 | 12 | 13 | @environ.config(prefix="TESTXMPP") 14 | class AppConfig: 15 | coordinator_uri = environ.var("tcp://localhost:5001") 16 | s2s_from = environ.var() 17 | s2s_client_cert = environ.var(default=None) 18 | s2s_client_key = environ.var(default=None) 19 | 20 | 21 | async def amain(coordinator_uri, s2s_from, s2s_client_cert, s2s_client_key): 22 | coordinator = XMPPWorker(coordinator_uri, 23 | s2s_from, s2s_client_cert, s2s_client_key) 24 | await coordinator.run() 25 | 26 | 27 | def main(): 28 | import argparse 29 | 30 | parser = argparse.ArgumentParser() 31 | parser.add_argument( 32 | "-v", "--verbose", 33 | dest="verbosity", 34 | action="count", 35 | default=0, 36 | help="Increase verbosity (up to -vvv)", 37 | ) 38 | parser.add_argument( 39 | "--debug-libraries", 40 | action="store_true", 41 | default=False, 42 | help="If enabled, verbosity will also be increased for libraries", 43 | ) 44 | 45 | args = parser.parse_args() 46 | 47 | verbosity_level = { 48 | 0: logging.ERROR, 49 | 1: logging.WARNING, 50 | 2: logging.INFO, 51 | }.get(args.verbosity, logging.DEBUG) 52 | if args.debug_libraries: 53 | global_level = 
class CustomNonVerifier(aioxmpp.security_layer.PKIXCertificateVerifier):
    """PKIX certificate verifier that accepts every peer certificate.

    The scanner needs the TLS handshake to succeed even against
    endpoints presenting invalid, expired or self-signed certificates,
    presumably so the post-TLS stream features can still be probed;
    certificate quality is assessed elsewhere (TODO confirm against the
    coordinator/testssl side).
    """

    def verify_callback(self, ctx, x509, errno, errdepth, returncode):
        # The parent class' verdict is deliberately not consulted here.
        # (The previous code contained a dead assignment that bound
        # ``super().verify_callback`` without ever calling it; that
        # no-op has been removed.)  Accept unconditionally.
        return True
first_features_future = asyncio.Future() 63 | second_features = None 64 | stream = aioxmpp.protocol.XMLStream( 65 | default_namespace=namespace, 66 | from_=from_, 67 | to=domain, 68 | features_future=first_features_future, 69 | ) 70 | verifier = CustomNonVerifier() 71 | 72 | def context_factory(transport): 73 | ssl_context = OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD) 74 | ssl_context.set_verify( 75 | OpenSSL.SSL.VERIFY_PEER, 76 | aioxmpp.security_layer.default_verify_callback, 77 | ) 78 | if client_cert_path is not None: 79 | ssl_context.use_certificate_chain_file(client_cert_path) 80 | if client_key_path is not None: 81 | ssl_context.use_privatekey_file(client_key_path) 82 | if protocol == "s2s" and client_key_path is None: 83 | logging.warn( 84 | "running s2s tests without client certificate may" 85 | " be inaccurate or cause false negatives" 86 | ) 87 | verifier.setup_context(ssl_context, transport) 88 | return ssl_context 89 | 90 | if not use_starttls: 91 | result.pre_tls_features = None 92 | result.tls_offered = True 93 | await verifier.pre_handshake(None, domain, hostname, port) 94 | 95 | stream.deadtime_hard_limit = timedelta(seconds=negotiation_timeout) 96 | 97 | try: 98 | conn_future = asyncio.ensure_future( 99 | aioopenssl.create_starttls_connection( 100 | loop, 101 | lambda: stream, 102 | host=hostname, 103 | port=port, 104 | peer_hostname=hostname, 105 | server_hostname=str(domain).encode("idna").decode("ascii"), 106 | use_starttls=use_starttls, 107 | ssl_context_factory=context_factory, 108 | post_handshake_callback=verifier.post_handshake, 109 | ) 110 | ) 111 | done, pending = await asyncio.wait( 112 | [conn_future, first_features_future], 113 | return_when=asyncio.FIRST_EXCEPTION 114 | ) 115 | 116 | for fut in pending: 117 | fut.cancel() 118 | 119 | # this is the only special case we need to check: if the conn_future 120 | # raises, we’ll catch below immediately; if both succeed, everything 121 | # is fine. 
122 | # only if the first_features_future raises before the conn_future 123 | # completes (timeout!), we’re in trouble. 124 | if pending and first_features_future in done: 125 | # something went terribly wrong with the stream 126 | await first_features_future 127 | assert False 128 | 129 | transport, _ = await conn_future 130 | first_features = await first_features_future 131 | print(transport, first_features) 132 | 133 | if not use_starttls: 134 | result.post_tls_features = first_features 135 | result.tls_negotiated = True 136 | else: 137 | result.pre_tls_features = first_features 138 | 139 | try: 140 | first_features[aioxmpp.nonza.StartTLSFeature] 141 | except KeyError: 142 | result.tls_offered = False 143 | else: 144 | result.tls_offered = True 145 | 146 | # We always try STARTTLS, even if not offered! 147 | response = await aioxmpp.protocol.send_and_wait_for( 148 | stream, 149 | [ 150 | aioxmpp.nonza.StartTLS(), 151 | ], 152 | [ 153 | aioxmpp.nonza.StartTLSFailure, 154 | aioxmpp.nonza.StartTLSProceed, 155 | ] 156 | ) 157 | 158 | if isinstance(response, aioxmpp.nonza.StartTLSFailure): 159 | result.error = "received " 160 | else: 161 | await verifier.pre_handshake(None, domain, hostname, port) 162 | await stream.starttls( 163 | ssl_context=context_factory(transport), 164 | post_handshake_callback=verifier.post_handshake, 165 | ) 166 | 167 | result.tls_negotiated = True 168 | 169 | result.post_tls_features = \ 170 | await aioxmpp.protocol.reset_stream_and_get_features( 171 | stream, 172 | timeout=negotiation_timeout, 173 | ) 174 | except aioxmpp.errors.StreamError as exc: 175 | result.error = str(exc) 176 | try_abort(stream) 177 | return result 178 | except aioxmpp.errors.MultiOSError as exc: 179 | result.error = str(exc) 180 | try_abort(stream) 181 | return result 182 | except OpenSSL.SSL.Error as exc: 183 | result.error = str(exc) 184 | try_abort(stream) 185 | return result 186 | except OSError as exc: 187 | result.errno = exc.errno 188 | result.error = 
async def scan_features(domain: aioxmpp.JID,
                        hostname: str,
                        port: int,
                        protocol: str,
                        tls_mode: str,
                        client_cert_path: typing.Optional[str],
                        client_key_path: typing.Optional[str],
                        from_: typing.Optional[str],
                        timeout: float):
    """Run a feature scan against one endpoint and flatten the result.

    Delegates the actual stream negotiation to :func:`scan_xmpp` and
    converts the returned :class:`ScanResult` into a plain dict suitable
    for serialisation back to the coordinator.

    :param domain: JID of the domain under test.
    :param hostname: Host to connect to.
    :param port: Port to connect to.
    :param protocol: ``"c2s"`` or ``"s2s"``.
    :param tls_mode: ``"starttls"`` or ``"direct"``.
    :param client_cert_path: Optional client certificate for s2s tests.
    :param client_key_path: Optional client key for s2s tests.
    :param from_: Optional ``from`` attribute for the stream header.
    :param timeout: Negotiation timeout in seconds.
    :return: dict with TLS flags, error info and per-phase SASL lists.
    """
    outcome = await scan_xmpp(
        domain, hostname, port, protocol, tls_mode,
        client_cert_path, client_key_path, from_,
        timeout,
    )

    # SASL mechanism lists stay None when the corresponding stream
    # features were never observed.
    pre_tls_mechanisms = None
    if outcome.pre_tls_features is not None:
        pre_tls_mechanisms = extract_sasl_mechanisms(outcome.pre_tls_features)

    post_tls_mechanisms = None
    if outcome.post_tls_features is not None:
        post_tls_mechanisms = extract_sasl_mechanisms(
            outcome.post_tls_features,
        )

    return {
        # ``or False`` coerces a possible None into an explicit False.
        "tls_offered": outcome.tls_offered or False,
        "tls_negotiated": outcome.tls_negotiated or False,
        "error": outcome.error,
        "errno": outcome.errno,
        "pre_tls_sasl_mechanisms": pre_tls_mechanisms,
        "post_tls_sasl_mechanisms": post_tls_mechanisms,
    }
    async def _run_job(self, sock, job):
        """Execute one coordinator-assigned job and report the result.

        ``job`` is the payload handed out by the coordinator; it carries
        a ``job_id`` and a nested ``job`` description.  Only jobs of
        type ``"features"`` are handled; any other type is silently
        ignored (no else branch).
        """
        job_id = job["job_id"]
        # Rebind to the inner job description; the outer envelope is no
        # longer needed after the id has been extracted.
        job = job["job"]
        if job["type"] == "features":
            result = await scan_features(
                aioxmpp.JID.fromstr(job["domain"]),
                job["hostname"],
                job["port"],
                job["protocol"],
                job["tls_mode"],
                # Client certificate/key and the stream ``from`` are
                # only meaningful for s2s scans; suppress them for c2s.
                None if job["protocol"] == "c2s" else self._s2s_client_cert,
                None if job["protocol"] == "c2s" else self._s2s_client_key,
                None if job["protocol"] == "c2s" else self._s2s_from,
                # Negotiation timeout in seconds (hard-coded).
                10.0,
            )
            msg = coordinator_api.mkv1request(
                coordinator_api.RequestType.XMPP_COMPLETE,
                {
                    "job_id": job_id,
                    "worker_id": self.worker_id,
                    "xmpp_result": result,
                }
            )
            self.logger.debug("completed job %r: %r", job_id, msg)
            # Report back and wait for the coordinator's acknowledgement
            # on the same socket (request/reply pattern).
            await sock.send_json(msg)
            resp = await sock.recv_json()
            if resp["type"] != coordinator_api.ResponseType.OK.value:
                # Best-effort: a rejected result is logged but not
                # retried here.
                self.logger.warning(
                    "coordinator rejected our result: %r",
                    resp
                )