├── .dockerignore ├── tests ├── __init__.py ├── e2e │ ├── homepage.spec.ts │ ├── README.md │ ├── fixtures │ │ ├── temp-files.fixture.ts │ │ └── create-test-assets.fixture.ts │ ├── create_assets.spec.ts │ └── search_assets.spec.ts └── test_routes.py ├── webapp ├── __init__.py ├── lib │ ├── __init__.py │ ├── http_helpers.py │ ├── python_helpers.py │ ├── url_helpers.py │ ├── file_helpers.py │ └── processors.py ├── dummy-data │ ├── dummy.pdf │ ├── ubuntu.png │ └── ubuntu.svg ├── auth.py ├── database.py ├── utils.py ├── alembic │ ├── script.py.mako │ ├── versions │ │ ├── eb9f8639d610_add_tokens.py │ │ ├── a8efd843e4ad_add_deperecated_option_to_assets.py │ │ ├── 2ab5564cfe99_add_assets.py │ │ ├── a2f0126f69b8_add_redirects.py │ │ ├── 14e1e1dfca79_squash_assets_improvements.py │ │ ├── 6652ef3aa77f_add_tags.py │ │ ├── def1b50e89fa_add_categories_to_assets.py │ │ ├── 7059dcc76605_added_salesforce_campaign_and_asset_.py │ │ └── 62f9c9a26cce_.py │ └── env.py ├── decorators.py ├── param_parser.py ├── dataclass.py ├── integrations │ └── trino_service.py ├── sso.py ├── art │ ├── 404.ascii │ └── chbs.ascii ├── config.py ├── swift.py ├── app.py ├── commands.py └── models.py ├── app.py ├── alembic.ini ├── charm ├── requirements.txt ├── .gitignore ├── src │ └── charm.py ├── pyproject.toml ├── charmcraft.yaml ├── tox.ini └── lib │ └── charms │ └── redis_k8s │ └── v0 │ └── redis.py ├── migrate.sh ├── renovate.json ├── templates ├── _asset-list.html ├── shared │ ├── _asset-author.html │ └── _asset-card-actions.html ├── details.html ├── create-readonly.html ├── index.html ├── error.html ├── created.html ├── _search-form.html ├── _pagination.html ├── _asset-card-image.html └── _layout.html ├── CONTRIBUTING.md ├── .github ├── pull_request_template.md └── workflows │ ├── pr.yaml │ └── deploy.yaml ├── docker-compose.yaml ├── requirements.txt ├── static ├── sass │ ├── icons.scss │ └── main.scss └── js │ └── src │ ├── date-picker.js │ ├── navigation.js │ ├── generic-fields.js │ ├── 
search-and-filter-overflow.js │ ├── sf_campaign-search.js │ ├── authors-search.js │ └── main.js ├── entrypoint ├── playwright.config.ts ├── .env ├── .gitignore ├── HACKING.md ├── rockcraft.yaml ├── Dockerfile ├── package.json ├── config.yaml └── README.md /.dockerignore: -------------------------------------------------------------------------------- 1 | .venv -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /webapp/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /webapp/lib/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app.py: -------------------------------------------------------------------------------- 1 | from webapp.app import app # noqa: F401 2 | -------------------------------------------------------------------------------- /alembic.ini: -------------------------------------------------------------------------------- 1 | [alembic] 2 | script_location = webapp/alembic 3 | -------------------------------------------------------------------------------- /charm/requirements.txt: -------------------------------------------------------------------------------- 1 | ops ~= 2.17 2 | paas-charm>=1.0,<2 3 | -------------------------------------------------------------------------------- /migrate.sh: -------------------------------------------------------------------------------- 1 | #! 
def authenticate(token):
    """Return True when the given API token exists in the database."""
    match = (
        db_session.query(Token)
        .filter(Token.token == token)
        .one_or_none()
    )
    return match is not None

2 | Author:  3 | {% if asset.author | trim %} 4 | 7 | {% endif %} 8 |

def set_headers_for_type(response, content_type=None):
    """Set response headers appropriate for the served file type.

    When no explicit content type is supplied, fall back to the
    response's own Content-Type header.  Font files get a permissive
    CORS header so browsers can load them cross-origin.
    """
    effective_type = content_type or response.headers["Content-Type"]

    if "font" in effective_type:
        response.headers["Access-Control-Allow-Origin"] = "*"

    return response
def lru_cache(*, ttl_seconds, maxsize=128):
    """LRU cache decorator whose entries expire after ``ttl_seconds``.

    Built on :func:`functools.lru_cache`: an extra ``ttl_hash`` value is
    mixed into the cache key and changes every ``ttl_seconds`` seconds,
    so results from a previous time window are never hit again (stale
    entries are eventually evicted by the LRU policy).

    Args:
        ttl_seconds: lifetime of a cached result, in seconds.
        maxsize: maximum number of cached entries (see functools.lru_cache).

    Note: as with functools.lru_cache, all arguments of the wrapped
    function must be hashable.
    """
    # Local import: only `lru_cache as _lru_cache` is imported at module
    # level in this file.
    from functools import wraps

    def deco(foo):
        @_lru_cache(maxsize=maxsize)
        def cached_with_ttl(*args, ttl_hash, **kwargs):
            return foo(*args, **kwargs)

        # Fix: preserve the wrapped function's __name__/__doc__ — the
        # previous version returned an anonymous wrapper.
        @wraps(foo)
        def inner(*args, **kwargs):
            # Every call inside the same ttl window computes the same
            # ttl_hash, hence shares one cache entry per argument tuple.
            return cached_with_ttl(
                *args, ttl_hash=round(time.time() / ttl_seconds), **kwargs
            )

        # Expose cache management of the underlying functools cache.
        inner.cache_clear = cached_with_ttl.cache_clear
        inner.cache_info = cached_with_ttl.cache_info

        return inner

    return deco
7 |
8 |
9 |

Asset details

10 |
11 |
12 |
13 |
14 | {% with details=True %} 15 | {% include "_asset-card-image.html" %} 16 | {% endwith %} 17 |
18 |
19 | {% endblock %} 20 | -------------------------------------------------------------------------------- /webapp/alembic/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade(): 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade(): 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /static/sass/icons.scss: -------------------------------------------------------------------------------- 1 | .p-icon--edit { 2 | @extend %icon; 3 | background-image: url("data:image/svg+xml,%3Csvg width='16' height='16' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath d='M14 13.25v1.5H2v-1.5h12zM11.907 1.676l.58.58a2 2 0 010 2.829l-5.133 5.133c-.038.038-.079.073-.122.104l.01.008a3.77 3.77 0 01-1.718.983l-3.52.914.863-3.583c.138-.57.41-1.099.795-1.54l.17-.183.01.01c.03-.043.065-.084.103-.122l5.133-5.133a2 2 0 012.829 0zm-3.309 2.6L5.036 7.84l-.593.82 1.062 1.063.814-.591 3.567-3.568-1.288-1.288zm1.61-1.597l-.07.057-.479.48 1.288 1.288.48-.48a.5.5 0 00.057-.638l-.057-.069-.581-.58a.5.5 0 00-.638-.058z' fill='%23666' fill-rule='nonzero'/%3E%3C/svg%3E"); 4 | } 5 | -------------------------------------------------------------------------------- /charm/src/charm.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # Copyright 2025 goulin 3 | # See LICENSE file for licensing details. 
4 | 5 | """Flask Charm entrypoint.""" 6 | 7 | import logging 8 | import typing 9 | 10 | import ops 11 | 12 | import paas_charm.flask 13 | 14 | logger = logging.getLogger(__name__) 15 | 16 | 17 | class AssetsManagerCharm(paas_charm.flask.Charm): 18 | """Flask Charm service.""" 19 | 20 | def __init__(self, *args: typing.Any) -> None: 21 | """Initialize the instance. 22 | 23 | Args: 24 | args: passthrough to CharmBase. 25 | """ 26 | super().__init__(*args) 27 | 28 | 29 | if __name__ == "__main__": 30 | ops.main(AssetsManagerCharm) 31 | -------------------------------------------------------------------------------- /entrypoint: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env bash 2 | 3 | set -e 4 | export FLASK_APP=webapp.app 5 | 6 | activate() { 7 | # Create or activate virtualenv 8 | if [ ! -d ./.venv ]; then 9 | python3 -m venv .venv 10 | fi 11 | . ./.venv/bin/activate 12 | } 13 | 14 | { 15 | activate 16 | 17 | RUN_COMMAND="talisker.gunicorn webapp.app:app --bind $1 --worker-class sync --workers 3 --name talisker-$(hostname)" 18 | 19 | if [ "${FLASK_DEBUG}" = true ] || [ "${FLASK_DEBUG}" = 1 ]; then 20 | RUN_COMMAND="${RUN_COMMAND} --reload --log-level debug --timeout 9999" 21 | fi 22 | 23 | # Run migrations 24 | alembic upgrade head 25 | 26 | # Start the website 27 | ${RUN_COMMAND} 28 | 29 | } 30 | -------------------------------------------------------------------------------- /playwright.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig, devices } from '@playwright/test'; 2 | 3 | /** 4 | * @see https://playwright.dev/docs/test-configuration 5 | */ 6 | export default defineConfig({ 7 | testDir: './tests/e2e', 8 | timeout: 40_000, 9 | fullyParallel: false, 10 | forbidOnly: !!process.env.CI, 11 | retries: process.env.CI ? 4 : 2, 12 | workers: process.env.CI ? 
1 : 4, 13 | reporter: 'html', 14 | 15 | use: { 16 | baseURL: process.env.BASE_URL || 'http://0.0.0.0:8017', 17 | trace: 'on-first-retry', 18 | screenshot: 'only-on-failure', 19 | }, 20 | 21 | projects: [ 22 | { 23 | name: 'chromium', 24 | use: { ...devices['Desktop Chrome'] }, 25 | }, 26 | ], 27 | }); 28 | -------------------------------------------------------------------------------- /templates/shared/_asset-card-actions.html: -------------------------------------------------------------------------------- 1 |
2 | 5 | {% if asset.data.image %} 6 | 9 | {% endif %} 10 | Edit 12 |
13 | -------------------------------------------------------------------------------- /templates/create-readonly.html: -------------------------------------------------------------------------------- 1 | {% extends "_layout.html" %} 2 | {% block title %}Create assets disabled{% endblock %} 3 | {% block content %} 4 |
5 |
6 |

Creating assets is disabled

7 |

8 | We are in the process of migrating the asset manager to a new platform. Creating new assets is temporarily disabled. 9 |

10 |

11 | For more information, please reach out to the web team on Mattermost: ~web-design. 12 |

13 | Go to home 14 |
15 |
16 | {% endblock %} 17 | -------------------------------------------------------------------------------- /webapp/alembic/versions/eb9f8639d610_add_tokens.py: -------------------------------------------------------------------------------- 1 | """Add tokens 2 | 3 | Revision ID: eb9f8639d610 4 | Revises: 5 | Create Date: 2019-12-09 14:38:26.434966 6 | 7 | """ 8 | 9 | from alembic import op 10 | import sqlalchemy as sa 11 | 12 | 13 | # revision identifiers, used by Alembic. 14 | revision = "eb9f8639d610" 15 | down_revision = None 16 | branch_labels = None 17 | depends_on = None 18 | 19 | 20 | def upgrade(): 21 | op.create_table( 22 | "token", 23 | sa.Column("id", sa.Integer(), nullable=False), 24 | sa.Column("name", sa.String(), nullable=False), 25 | sa.Column("token", sa.String(), nullable=False), 26 | sa.PrimaryKeyConstraint("id"), 27 | ) 28 | 29 | 30 | def downgrade(): 31 | op.drop_table("token") 32 | -------------------------------------------------------------------------------- /webapp/alembic/versions/a8efd843e4ad_add_deperecated_option_to_assets.py: -------------------------------------------------------------------------------- 1 | """add deperecated option to assets 2 | 3 | Revision ID: a8efd843e4ad 4 | Revises: 6652ef3aa77f 5 | Create Date: 2023-04-12 18:08:21.932199 6 | 7 | """ 8 | 9 | from alembic import op 10 | import sqlalchemy as sa 11 | 12 | 13 | # revision identifiers, used by Alembic. 
14 | revision = "a8efd843e4ad" 15 | down_revision = "6652ef3aa77f" 16 | branch_labels = None 17 | depends_on = None 18 | 19 | 20 | def upgrade(): 21 | op.add_column( 22 | "asset", sa.Column("deprecated", sa.Boolean(), nullable=True) 23 | ) 24 | op.execute("UPDATE asset SET deprecated = FALSE") 25 | op.alter_column("asset", "deprecated", nullable=False) 26 | 27 | 28 | def downgrade(): 29 | op.drop_column("asset", "deprecated") 30 | -------------------------------------------------------------------------------- /webapp/alembic/versions/2ab5564cfe99_add_assets.py: -------------------------------------------------------------------------------- 1 | """add_assets 2 | 3 | Revision ID: 2ab5564cfe99 4 | Revises: eb9f8639d610 5 | Create Date: 2020-01-09 14:37:08.316607 6 | 7 | """ 8 | 9 | from alembic import op 10 | import sqlalchemy as sa 11 | 12 | 13 | # revision identifiers, used by Alembic. 14 | revision = "2ab5564cfe99" 15 | down_revision = "eb9f8639d610" 16 | branch_labels = None 17 | depends_on = None 18 | 19 | 20 | def upgrade(): 21 | op.create_table( 22 | "asset", 23 | sa.Column("id", sa.Integer(), nullable=False), 24 | sa.Column("created", sa.DateTime(), nullable=False), 25 | sa.Column("data", sa.JSON(), nullable=False), 26 | sa.Column("file_path", sa.String(), nullable=False), 27 | sa.PrimaryKeyConstraint("id"), 28 | ) 29 | 30 | 31 | def downgrade(): 32 | op.drop_table("asset") 33 | -------------------------------------------------------------------------------- /webapp/alembic/versions/a2f0126f69b8_add_redirects.py: -------------------------------------------------------------------------------- 1 | """add redirects 2 | 3 | Revision ID: a2f0126f69b8 4 | Revises: 2ab5564cfe99 5 | Create Date: 2020-01-14 14:46:21.862129 6 | 7 | """ 8 | 9 | from alembic import op 10 | import sqlalchemy as sa 11 | 12 | 13 | # revision identifiers, used by Alembic. 
def get_token_from_request(request):
    """Extract an API token from a request.

    Looks first for an ``Authorization: token <value>`` header, then
    falls back to a ``token`` query/form parameter.  Returns None when
    neither is present.
    """
    header_value = request.headers.get("Authorization", "")
    prefix, candidate = header_value[:6], header_value[6:]

    if prefix.lower() == "token ":
        return candidate

    return request.values.get("token", None)
26 | flask.abort(401, message) 27 | 28 | response = flask.make_response(f(*args, **kwargs)) 29 | response.cache_control.private = True 30 | 31 | return response 32 | 33 | return wrapped 34 | -------------------------------------------------------------------------------- /static/js/src/date-picker.js: -------------------------------------------------------------------------------- 1 | import flatpickr from "flatpickr"; 2 | 3 | import { addValueToHiddenInput, addValueToQueryParams, } from "./main"; 4 | 5 | document.addEventListener("DOMContentLoaded", function () { 6 | flatpickr(".js-date-picker", { 7 | mode: "range", 8 | dateFormat: "Y-m-d", 9 | onChange: function(selectedDates) { 10 | const [startDate, endDate] = selectedDates; 11 | if (startDate) { 12 | addValueToHiddenInput(startDate.toISOString(), document.querySelector(".js-hidden-field-startdate"), replace = true); 13 | addValueToQueryParams("start_date", startDate.toISOString(), replace = true); 14 | } 15 | if (endDate) { 16 | addValueToHiddenInput(endDate.toISOString(), document.querySelector(".js-hidden-field-enddate"), replace = true); 17 | addValueToQueryParams("end_date", endDate.toISOString(), replace = true); 18 | } 19 | } 20 | }); 21 | }); -------------------------------------------------------------------------------- /webapp/param_parser.py: -------------------------------------------------------------------------------- 1 | from flask import request 2 | from webapp.dataclass import AssetSearchParams 3 | 4 | 5 | def parse_asset_search_params() -> AssetSearchParams: 6 | """ 7 | Parse request arguments and return AssetSearchParams object with defaults. 
def shared_items(list_one, list_two):
    """Return the items of ``list_one`` that also appear in ``list_two``.

    Bug fix: the previous implementation called ``list_one.keys()``,
    which crashed for actual lists even though the docstring promises
    list support.  Iterating the argument directly works for both lists
    and dicts (dict iteration yields keys, so dict callers see the same
    result as before).
    """
    return [x for x in list_one if x in list_two]


def sanitize_like_input(raw: str) -> str:
    """Escape ``\\``, ``%`` and ``_`` so ``raw`` can be embedded in a SQL
    LIKE pattern and matched literally."""
    escaped = raw.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_")
    return escaped
23 | """ 24 | try: 25 | serialization.load_pem_private_key( 26 | data, password=None, backend=default_backend() 27 | ) 28 | return True 29 | except Exception: 30 | return False 31 | -------------------------------------------------------------------------------- /charm/pyproject.toml: -------------------------------------------------------------------------------- 1 | # Testing tools configuration 2 | [tool.coverage.run] 3 | branch = true 4 | 5 | [tool.coverage.report] 6 | show_missing = true 7 | 8 | [tool.pytest.ini_options] 9 | minversion = "6.0" 10 | log_cli_level = "INFO" 11 | 12 | # Linting tools configuration 13 | [tool.ruff] 14 | line-length = 99 15 | lint.select = ["E", "W", "F", "C", "N", "D", "I001"] 16 | lint.extend-ignore = [ 17 | "D105", 18 | "D107", 19 | "D203", 20 | "D204", 21 | "D213", 22 | "D215", 23 | "D400", 24 | "D404", 25 | "D406", 26 | "D407", 27 | "D408", 28 | "D409", 29 | "D413", 30 | ] 31 | extend-exclude = ["__pycache__", "*.egg_info"] 32 | lint.per-file-ignores = {"tests/*" = ["D100","D101","D102","D103","D104"]} 33 | 34 | [tool.ruff.lint.mccabe] 35 | max-complexity = 10 36 | 37 | [tool.codespell] 38 | skip = "build,lib,venv,icon.svg,.tox,.git,.mypy_cache,.ruff_cache,.coverage" 39 | 40 | [tool.pyright] 41 | include = ["src/**.py"] 42 | -------------------------------------------------------------------------------- /tests/e2e/homepage.spec.ts: -------------------------------------------------------------------------------- 1 | import { test, expect } from '@playwright/test'; 2 | 3 | test.describe('Index page loading test', () => { 4 | test('should load the manager homepage', async ({ page }) => { 5 | await page.goto('/manager'); 6 | 7 | // Should stay on manager page (not redirect to login) 8 | await expect(page).toHaveURL(/.*\/manager/); 9 | 10 | // Wait for page to load 11 | await page.waitForLoadState('networkidle'); 12 | }); 13 | 14 | test('should have visible content', async ({ page }) => { 15 | await page.goto('/manager'); 16 | 
const body = page.locator('body'); 17 | await expect(body).toBeVisible(); 18 | }); 19 | 20 | test('should display main navigation header', async ({ page }) => { 21 | await page.goto('/manager'); 22 | 23 | const banner = page.locator('.p-navigation__banner'); 24 | await expect(banner).toBeVisible(); 25 | await expect(banner).toContainText('Canonical asset manager'); 26 | }); 27 | }); 28 | -------------------------------------------------------------------------------- /.env: -------------------------------------------------------------------------------- 1 | SECRET_KEY=development 2 | FLASK_SECRET_KEY=development 3 | PORT=8017 4 | FLASK_DEBUG=1 5 | FLASK_APP=webapp.app 6 | FLASK_READ_ONLY_MODE=false 7 | 8 | # Database 9 | FLASK_DATABASE_URL=postgresql://assets:password@localhost:5432/assets 10 | 11 | # Swift server 12 | FLASK_OS_AUTH_URL=http://localhost:8080/auth/v1.0 13 | FLASK_OS_USERNAME=test:tester 14 | FLASK_OS_PASSWORD=testing 15 | FLASK_OS_AUTH_VERSION=1.0 16 | FLASK_OS_TENANT_NAME="" 17 | 18 | # Canonical Directory API 19 | FLASK_DIRECTORY_API_URL=https://api.directory.canonical.com/graphql/ 20 | FLASK_DIRECTORY_API_TOKEN=ADD_DIRECTORY_API_TOKEN_HERE 21 | 22 | # Trino Creds (need to be replaced with actual values) 23 | FLASK_TRINO_SF_PROJECT_ID=trino_project-id 24 | FLASK_TRINO_SF_PRIVATE_KEY_ID=trino_private_key_id 25 | FLASK_TRINO_SF_CLIENT_EMAIL=trino_client_email 26 | FLASK_TRINO_SF_CLIENT_ID=trino_client_id 27 | FLASK_TRINO_SF_PRIVATE_KEY=trino_private_key 28 | 29 | # This needs to be set when doing e2e tests 30 | # FLASK_DISABLE_AUTH_FOR_TESTS=1 31 | 32 | 33 | -------------------------------------------------------------------------------- /webapp/alembic/env.py: -------------------------------------------------------------------------------- 1 | # Standard library 2 | import os 3 | import sys 4 | 5 | # Packages 6 | from alembic import context 7 | 8 | # Local 9 | sys.path.append(os.getcwd()) 10 | from webapp.database import db_engine # noqa: E402 11 | 
@dataclass
class AssetSearchParams:
    """Filters for an asset search, parsed from request arguments."""

    tag: str = ""
    asset_type: str = ""
    product_types: list = field(default_factory=list)
    author_email: str = ""
    name: str = ""
    start_date: str = None
    end_date: str = None
    language: str = ""
    file_types: list = field(default_factory=list)
    categories: list = field(default_factory=list)

    def __post_init__(self):
        # Normalise the list filters: strip surrounding whitespace and
        # drop entries that are empty after stripping.
        #
        # Bug fix: the previous loop stripped into a throwaway local and
        # then called list.remove("") — which raised ValueError for
        # whitespace-only entries (the list held " ", not "") and never
        # actually trimmed the surviving values.
        self.product_types = self._cleaned(self.product_types)
        self.categories = self._cleaned(self.categories)
        self.file_types = self._cleaned(self.file_types)

    @staticmethod
    def _cleaned(values):
        """Return ``values`` with each entry stripped and empties removed."""
        return [v.strip() for v in values if v.strip()]
from slugify import slugify
from pathlib import Path


# Python 2/3 compatibility shim: quote_plus/unquote_plus moved to
# urllib.parse in Python 3.
try:
    from urllib.parse import quote_plus, unquote_plus
except ImportError:
    from urllib import quote_plus, unquote_plus


def normalize(url_to_normalize: str) -> str:
    """
    Given a URL, it will unquote it and requote it
    with "quote_plus" so that spaces become "+"
    """

    # Unquote first so an already-encoded URL is not double-encoded.
    unquoted_url = unquote_plus(url_to_normalize)
    requoted_url = quote_plus(unquoted_url)
    return requoted_url


def sanitize_filename(file_name: str) -> str:
    """
    Sanitize file names using slugify:
    - Keeps A-Z, a-z, 0-9
    - Removes spaces
    - Replaces any other character with '_'
    Preserves `.` characters in the file extension.
    """
    if not file_name:
        return ""

    p = Path(file_name)
    # All suffixes are joined so compound extensions such as ".tar.gz"
    # survive intact. NOTE(review): a dotted base name like
    # "v1.2-logo.png" therefore treats ".2-logo.png" as the extension —
    # confirm this is intended.
    ext = "".join(p.suffixes)
    base = file_name.removesuffix(ext)

    sanitized_base = slugify(base, separator="_")

    # slugify can return "" (e.g. the base was only symbols); fall back
    # to a generic base so we never produce a bare extension.
    if not sanitized_base:
        sanitized_base = "file"

    return f"{sanitized_base}{ext}"
32 | directory-api: 33 | type: secret 34 | description: Canonical Directory API credentials, must contain (url, token) 35 | read-only-mode: 36 | type: boolean 37 | description: Enabling this will prevent users from creating new assets (useful during migration to a new server) 38 | default: false 39 | trino-sf: 40 | type: secret 41 | description: Trino credentials, must contain (project-id, private-key-id, client-email, client-id, private-key) -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.rock 2 | 3 | # [generated] Bracketed sections updated by Yeoman generator 4 | # generator-canonical-webteam@3.4.1 5 | 6 | # [os] OS & editor files 7 | Desktop.ini 8 | Thumbs.db 9 | ._* 10 | *.DS_Store 11 | *~ 12 | \#*\# 13 | .AppleDouble 14 | .LSOverride 15 | .spelling 16 | .vscode 17 | 18 | # [cache] Cache and backup 19 | *.bak 20 | *.pyc 21 | *-cache/ 22 | 23 | # [data] Local data 24 | *.sqlite* 25 | *.log 26 | logs/ 27 | pids 28 | *.pid 29 | *.seed 30 | .*-metadata 31 | 32 | # [deps] Local dependencies 33 | .bundle/ 34 | node_modules/ 35 | vendor/ 36 | bower_components/ 37 | vendor/ 38 | .venv* 39 | .dotrun.json 40 | # Normally lockfiles would be committed, but we use yarn instead of NPM for locking dependencies 41 | package-lock.json 42 | 43 | # [build] Built files 44 | /build/ 45 | /parts/ 46 | /prime/ 47 | /stage/ 48 | *.egg-info 49 | .snapcraft/ 50 | *.snap 51 | _site/ 52 | *.*.map 53 | static/js/dist 54 | 55 | # [env] Local environment settings 56 | .docker-project 57 | .*.hash 58 | .envrc 59 | .env.local 60 | env/ 61 | env[23]/ 62 | 63 | # [sass] Files generated by Sass 64 | *.css 65 | 66 | # Project-specific ignores 67 | django-error.log 68 | 69 | # [playwright] Playwright test artifacts 70 | /playwright-report/ 71 | /test-results/ 72 | -------------------------------------------------------------------------------- 
"""squash_assets_improvements

Revision ID: 14e1e1dfca79
Revises: 62f9c9a26cce
Create Date: 2025-04-03 13:42:24.559843

"""

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "14e1e1dfca79"
down_revision = "62f9c9a26cce"
branch_labels = None
depends_on = None


def upgrade():
    """Add the author table and link assets to authors by email."""
    op.create_table(
        "author",
        sa.Column("first_name", sa.String(), nullable=False),
        sa.Column("last_name", sa.String(), nullable=False),
        sa.Column("email", sa.String(), nullable=False),
        sa.PrimaryKeyConstraint("first_name", "last_name", "email"),
        # email is also unique on its own so asset.author_email can
        # target it as a foreign key.
        sa.UniqueConstraint("email"),
    )
    # Nullable: pre-existing asset rows have no author or file_type yet.
    op.add_column(
        "asset", sa.Column("author_email", sa.String(), nullable=True)
    )
    op.add_column("asset", sa.Column("file_type", sa.String(), nullable=True))
    op.create_foreign_key(
        "fk_asset_author", "asset", "author", ["author_email"], ["email"]
    )


def downgrade():
    """Reverse upgrade(): drop the FK and columns before the table."""
    # The FK must go first; the columns and author table depend on it.
    op.drop_constraint("fk_asset_author", "asset", type_="foreignkey")
    op.drop_column("asset", "file_type")
    op.drop_column("asset", "author_email")
    op.drop_table("author")
10 |
{% include "_search-form.html" %}
11 |
12 | {% if assets %} 13 |

Search results

14 | {% elif is_search %} 15 |

No results. Please try another search.

16 | {% else %} 17 |

Start a search to show assets.

18 | {% endif %} 19 |
20 |
21 |
22 | 23 | {% if total_assets %} 24 |
25 |

26 | {{ total_assets }} asset{{ "s" if total_assets > 1 }} match{{ "es" if total_assets < 2 }} your search 27 |

28 |
29 |
30 | {% endif %} 31 | 32 |
33 |
34 | {% if assets %} 35 | {% include "_asset-list.html" %} 36 | {% endif %} 37 |
38 |
39 | 40 | {% include "_pagination.html" %} 41 | 42 | {% endblock content %} 43 | -------------------------------------------------------------------------------- /webapp/alembic/versions/6652ef3aa77f_add_tags.py: -------------------------------------------------------------------------------- 1 | """add tags 2 | 3 | Revision ID: 6652ef3aa77f 4 | Revises: a2f0126f69b8 5 | Create Date: 2021-11-04 18:42:16.308477 6 | 7 | """ 8 | 9 | from alembic import op 10 | import sqlalchemy as sa 11 | 12 | 13 | # revision identifiers, used by Alembic. 14 | revision = "6652ef3aa77f" 15 | down_revision = "a2f0126f69b8" 16 | branch_labels = None 17 | depends_on = None 18 | 19 | 20 | def upgrade(): 21 | # ### commands auto generated by Alembic - please adjust! ### 22 | op.create_table( 23 | "tag", 24 | sa.Column("name", sa.String(), nullable=False), 25 | sa.PrimaryKeyConstraint("name"), 26 | ) 27 | op.create_table( 28 | "asset_tag_association", 29 | sa.Column("asset_id", sa.Integer(), nullable=False), 30 | sa.Column("tag_name", sa.String(), nullable=False), 31 | sa.ForeignKeyConstraint( 32 | ["asset_id"], 33 | ["asset.id"], 34 | ), 35 | sa.ForeignKeyConstraint( 36 | ["tag_name"], 37 | ["tag.name"], 38 | ), 39 | sa.PrimaryKeyConstraint("asset_id", "tag_name"), 40 | ) 41 | # ### end Alembic commands ### 42 | 43 | 44 | def downgrade(): 45 | # ### commands auto generated by Alembic - please adjust! ### 46 | op.drop_table("asset_tag_association") 47 | op.drop_table("tag") 48 | # ### end Alembic commands ### 49 | -------------------------------------------------------------------------------- /templates/error.html: -------------------------------------------------------------------------------- 1 | {% extends "_layout.html" %} 2 | 3 | {% block title %}{{ code }}: {{ reason }}{% endblock %} 4 | 5 | {% block content %} 6 |
7 |
8 |
9 | 13 |
14 |
15 |
16 |

{{ code }}: {{ reason }}

17 |

Something's gone wrong.

18 | {% if message %} 19 |
20 | {{ message }} 21 |
22 | {% endif %} 23 | {% if code != 404 %} 24 |

25 | Try reloading the page. 26 | If the error persists, please note that it may be a known issue. 28 | If not, please file a new issue. 30 |

31 | {% endif %} 32 |
33 |
34 |
35 |
36 | {% endblock content %} 37 | -------------------------------------------------------------------------------- /static/js/src/navigation.js: -------------------------------------------------------------------------------- 1 | function toggleDropdown(toggle, open) { 2 | var parentElement = toggle.parentNode; 3 | var dropdown = document.getElementById(toggle.getAttribute("aria-controls")); 4 | dropdown.setAttribute("aria-hidden", !open); 5 | 6 | if (open) { 7 | parentElement.classList.add("is-active"); 8 | } else { 9 | parentElement.classList.remove("is-active"); 10 | } 11 | } 12 | 13 | function closeAllDropdowns(toggles) { 14 | toggles.forEach(function (toggle) { 15 | toggleDropdown(toggle, false); 16 | }); 17 | } 18 | 19 | function handleClickOutside(toggles, containerClass) { 20 | document.addEventListener("click", function (event) { 21 | var target = event.target; 22 | 23 | if (target.closest) { 24 | if (!target.closest(containerClass)) { 25 | closeAllDropdowns(toggles); 26 | } 27 | } 28 | }); 29 | } 30 | 31 | function initNavDropdowns(containerClass) { 32 | var toggles = [].slice.call( 33 | document.querySelectorAll(containerClass + " [aria-controls]") 34 | ); 35 | 36 | handleClickOutside(toggles, containerClass); 37 | 38 | toggles.forEach(function (toggle) { 39 | toggle.addEventListener("click", function (e) { 40 | e.preventDefault(); 41 | 42 | const shouldOpen = !toggle.parentNode.classList.contains("is-active"); 43 | closeAllDropdowns(toggles); 44 | toggleDropdown(toggle, shouldOpen); 45 | }); 46 | }); 47 | } 48 | 49 | initNavDropdowns(".p-navigation__item--dropdown-toggle"); 50 | -------------------------------------------------------------------------------- /webapp/alembic/versions/def1b50e89fa_add_categories_to_assets.py: -------------------------------------------------------------------------------- 1 | """add categories to assets 2 | 3 | Revision ID: def1b50e89fa 4 | Revises: 7059dcc76605 5 | Create Date: 2025-09-23 13:54:08.000678 6 | 7 | """ 8 | 9 
def upgrade():
    """Create the category table and the asset<->category join table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "category",
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("updated", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("name"),
    )
    # Join table: the composite primary key also prevents duplicate
    # asset/category pairs.
    op.create_table(
        "asset_category_association",
        sa.Column("asset_id", sa.Integer(), nullable=False),
        sa.Column("category_name", sa.String(), nullable=False),
        sa.ForeignKeyConstraint(
            ["asset_id"],
            ["asset.id"],
        ),
        sa.ForeignKeyConstraint(
            ["category_name"],
            ["category.name"],
        ),
        sa.PrimaryKeyConstraint("asset_id", "category_name"),
    )
    # ### end Alembic commands ###
def is_hex(hex_string):
    """
    Check if a string is hexadecimal.

    NOTE(review): int(..., 16) also accepts surrounding whitespace, a
    sign, a "0x" prefix and "_" digit separators — confirm callers only
    pass plain digit runs.
    """

    try:
        int(hex_string, 16)
        return True
    except ValueError:
        return False


def remove_filename_hash(filename):
    """
    Remove the 8-digit unique hexadecimal hash
    from a filename ("a1b2c3d4-logo.png" -> "logo.png").

    Filenames without such a prefix are returned unchanged.
    """

    # The length guard prevents an IndexError on filename[8] for names
    # of 8 characters or fewer whose whole prefix parses as hex
    # (e.g. "deadbeef").
    if len(filename) > 8 and is_hex(filename[:8]) and filename[8] == "-":
        filename = filename[9:]

    return filename
37 | """ 38 | 39 | mappings = {".woff2": "font/woff2"} 40 | 41 | extension = os.path.splitext(filepath)[1] 42 | 43 | return mappings.get(extension) or mimetypes.guess_type(filepath)[0] 44 | 45 | 46 | SVG_R = r"(?:<\?xml\b[^>]*>[^<]*)?(?:[^<]*)*(?: Optional[str]: 26 | try: 27 | service_account_dict = config.trino_sf.model_dump() 28 | creds = service_account.Credentials.from_service_account_info( 29 | service_account_dict, 30 | scopes=self.scopes, 31 | ) 32 | creds.refresh(self._request) 33 | return creds.token 34 | except Exception as e: 35 | logger.exception("Unable to refresh Trino account token: %s", e) 36 | return None 37 | 38 | def get_cursor(self): 39 | token = self._get_token() 40 | if not token: 41 | return None 42 | try: 43 | conn = connect( 44 | host=config.trino_sf.host, 45 | port=config.trino_sf.connection_port, 46 | http_scheme=config.trino_sf.http_scheme, 47 | auth=trino.auth.JWTAuthentication(token), 48 | verify=True, 49 | catalog=config.trino_sf.catalog, 50 | schema=config.trino_sf.schema, 51 | ) 52 | return conn.cursor() 53 | except Exception as e: 54 | logger.exception("Unable to connect to Trino: %s", e) 55 | return None 56 | 57 | 58 | trino_client = TrinoClient() 59 | trino_cur = trino_client.get_cursor() 60 | -------------------------------------------------------------------------------- /webapp/sso.py: -------------------------------------------------------------------------------- 1 | import functools 2 | import os 3 | 4 | import flask 5 | from django_openid_auth.teams import TeamsRequest, TeamsResponse 6 | from flask_openid import OpenID 7 | 8 | 9 | SSO_LOGIN_URL = "https://login.ubuntu.com" 10 | SSO_TEAM = "canonical-content-people" 11 | 12 | 13 | def init_sso(app): 14 | open_id = OpenID( 15 | store_factory=lambda: None, 16 | safe_roots=[], 17 | extension_responses=[TeamsResponse], 18 | ) 19 | 20 | @app.route("/login", methods=["GET", "POST"]) 21 | @open_id.loginhandler 22 | def login(): 23 | if "openid" in flask.session: 24 | return 
def login_required(func):
    """
    Decorator that checks if a user is logged in, and redirects
    to the login page if not.

    Authenticated responses are marked ``Cache-Control: private`` so
    shared caches never store user-specific pages.
    """

    @functools.wraps(func)
    def is_user_logged_in(*args, **kwargs):
        # E2E test runs bypass SSO entirely (set via the environment,
        # see the commented flag in .env). os.getenv already returns a
        # str (or the "" default here), so no str() coercion is needed.
        disable_auth = os.getenv(
            "FLASK_DISABLE_AUTH_FOR_TESTS", ""
        ).lower() in ("1", "true")
        if disable_auth:
            return func(*args, **kwargs)

        if "openid" not in flask.session:
            return flask.redirect("/login?next=" + flask.request.path)
        response = flask.make_response(func(*args, **kwargs))
        response.cache_control.private = True
        return response

    return is_user_logged_in
"test-e2e-headed": "playwright test --headed", 15 | "build": "yarn run build-css && yarn run build-js", 16 | "build-css": "sass static/sass/main.scss static/css/main.css --load-path=node_modules --style=compressed && postcss --use autoprefixer --replace 'static/css/**/*.css' --no-map", 17 | "build-js": "node build.js", 18 | "watch": "concurrently --kill-others --raw 'yarn run watch-css' 'yarn run watch-js'", 19 | "watch-css": "watch -p 'static/sass/**/*.scss' -c 'yarn run build-css'", 20 | "watch-js": "watch -p 'static/js/**/*.js' -c 'yarn run build-js'", 21 | "serve": "./entrypoint 0.0.0.0:${PORT}", 22 | "serve:test": "./entrypoint 0.0.0.0:${PORT}", 23 | "test": "yarn run lint-scss && yarn run lint-python && yarn run test-python", 24 | "start": "yarn run build && concurrently --raw 'yarn run watch' 'yarn run serve'", 25 | "clean": "rm -rf node_modules yarn-error.log css static/css *.log *.sqlite _site/ build/ .jekyll-metadata .bundle" 26 | }, 27 | "dependencies": { 28 | "autoprefixer": "10.4.14", 29 | "flatpickr": "4.6.13", 30 | "fuse.js": "7.0.0", 31 | "postcss": "8.4.21", 32 | "postcss-cli": "10.1.0", 33 | "sass": "1.79.0", 34 | "sass-lint": "1.13.1", 35 | "vanilla-framework": "4.34.1", 36 | "watch-cli": "0.2.3" 37 | }, 38 | "devDependencies": { 39 | "@playwright/test": "^1.56.1", 40 | "@types/node": "^24.9.1", 41 | "concurrently": "8.0.1", 42 | "esbuild": "0.24.0", 43 | "typescript": "^5.9.3" 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /tests/e2e/README.md: -------------------------------------------------------------------------------- 1 | # E2E Tests with Playwright 2 | 3 | End-to-end tests for the Assets Manager application. 4 | 5 | ## Setup 6 | 7 | 1. Install Playwright browsers: 8 | ```bash 9 | npx playwright install chromium 10 | ``` 11 | 12 | 2. 
Make sure your application is running with all required Docker containers: 13 | Note: It is recommended to start from a clean environment by resetting the Docker volumes first: 14 | ```bash 15 | # Start your Docker containers 16 | docker compose down -v 17 | docker compose up -d --build 18 | 19 | 20 | # Start the app with auth disabled for testing 21 | FLASK_DISABLE_AUTH_FOR_TESTS=true 22 | dotrun 23 | ``` 24 | 25 | ## Running Tests 26 | 27 | ```bash 28 | # Run all e2e tests (headless) 29 | yarn run test-e2e 30 | 31 | # Run tests with UI mode (interactive) 32 | yarn run test-e2e-ui 33 | 34 | # Run tests in headed mode (see browser) 35 | yarn run test-e2e-headed 36 | 37 | # Run specific test file 38 | npx playwright test homepage.spec.ts 39 | 40 | # Run only search tests (fixtures create data automatically) 41 | npx playwright test search_assets.spec.ts 42 | ``` 43 | 44 | ## Configuration 45 | 46 | - Test directory: `tests/e2e/` 47 | - Configuration file: `playwright.config.ts` 48 | 49 | ## Test Structure 50 | 51 | - `homepage.spec.ts` - Tests homepage and navigation 52 | - `create_assets.spec.ts` - Tests asset creation (single and multiple) 53 | - `search_assets.spec.ts` - Tests search and filter functionality (uses fixtures for test data) 54 | - `fixtures/create-test-assets.fixture.ts` - Fixture that creates test assets for search tests 55 | - `fixtures/temp-files.fixture.ts` - Fixture for creating temporary test files dynamically 56 | 57 | ## Test Files 58 | 59 | All test files are generated **dynamically** at runtime. There are no static test files checked into the repository. This is achieved via `temp-files.fixture.ts`. 60 | 61 | ## Writing Tests 62 | 63 | Test files should follow the pattern `*.spec.ts` and be placed in `tests/e2e/`.
64 | 65 | ### Basic Example: 66 | ```typescript 67 | import { test, expect } from '@playwright/test'; 68 | 69 | test('my test', async ({ page }) => { 70 | await page.goto('/manager'); 71 | await expect(page).toHaveTitle(/Manager/); 72 | }); 73 | ``` 74 | -------------------------------------------------------------------------------- /charm/tox.ini: -------------------------------------------------------------------------------- 1 | # Copyright 2025 goulin 2 | # See LICENSE file for licensing details. 3 | 4 | [tox] 5 | no_package = True 6 | skip_missing_interpreters = True 7 | env_list = format, lint, static 8 | min_version = 4.0.0 9 | 10 | [vars] 11 | src_path = {tox_root}/src 12 | ;tests_path = {tox_root}/tests 13 | ;lib_path = {tox_root}/lib/charms/operator_name_with_underscores 14 | all_path = {[vars]src_path} 15 | 16 | [testenv] 17 | set_env = 18 | PYTHONPATH = {tox_root}/lib:{[vars]src_path} 19 | PYTHONBREAKPOINT=pdb.set_trace 20 | PY_COLORS=1 21 | pass_env = 22 | PYTHONPATH 23 | CHARM_BUILD_DIR 24 | MODEL_SETTINGS 25 | 26 | [testenv:format] 27 | description = Apply coding style standards to code 28 | deps = 29 | ruff 30 | commands = 31 | ruff format {[vars]all_path} 32 | ruff check --fix {[vars]all_path} 33 | 34 | [testenv:lint] 35 | description = Check code against coding style standards 36 | deps = 37 | ruff 38 | codespell 39 | commands = 40 | # if this charm owns a lib, uncomment "lib_path" variable 41 | # and uncomment the following line 42 | # codespell {[vars]lib_path} 43 | codespell {tox_root} 44 | ruff check {[vars]all_path} 45 | ruff format --check --diff {[vars]all_path} 46 | 47 | [testenv:unit] 48 | description = Run unit tests 49 | deps = 50 | pytest 51 | coverage[toml] 52 | -r {tox_root}/requirements.txt 53 | commands = 54 | coverage run --source={[vars]src_path} \ 55 | -m pytest \ 56 | --tb native \ 57 | -v \ 58 | -s \ 59 | {posargs} \ 60 | {[vars]tests_path}/unit 61 | coverage report 62 | 63 | [testenv:static] 64 | description = Run static 
// Here we define the handlers for generic fields

import { addValueToQueryParams } from "./main";

// Define whether we are in search and thus need to update query params
const updateQueryParams = document.querySelector('.js-asset-search');

/*
 * Function to handle multiselects as they were not submitting all the selected values.
 **/
const multiSelects = document.querySelectorAll('.js-multiselect');
multiSelects?.forEach(multiSelect => {
  const hiddenField = multiSelect.querySelector('.js-hidden-field');
  multiSelect = multiSelect.tagName.toLowerCase() === 'select' ? multiSelect : multiSelect.querySelector('select');
  multiSelect.addEventListener('change', function() {
    const values = Array.from(multiSelect.selectedOptions).map(option => option.value).join(',');
    hiddenField.value = values;
    if (updateQueryParams) {
      // Pass the flag positionally: the previous `replace = true` was an
      // assignment to an undeclared identifier, which throws a
      // ReferenceError in strict mode (this file is an ES module).
      addValueToQueryParams(hiddenField.name, values, true);
    }
  });
});
/*
 * Function to handle select as they were not submitting all the selected values.
 **/
function handleSelectInputs() {
  const selects = document.querySelectorAll('.js-select');
  selects?.forEach(select => {
    select = select.querySelector('select');
    select.addEventListener('change', function() {
      // Pass the flag positionally: `replace = true` assigned an
      // undeclared identifier, which throws a ReferenceError in strict
      // mode (this file is an ES module).
      addValueToQueryParams(select.name, select.value, true);
    });
  });
}

/*
 * Function to handle tags input
 */
function handleTagInput() {
  const tagsInput = document.querySelector('.js-tags');
  tagsInput?.addEventListener('input', function() {
    // Positional flag — see note in handleSelectInputs.
    addValueToQueryParams(tagsInput.name, tagsInput.value, true);
  });
}

/*
 * Function to handle salesforce id input
 */
function handleSalesforceIdInput() {
  const salesforceIdInput = document.querySelector('.js-salesforce-campaign-id');
  salesforceIdInput?.addEventListener('input', function() {
    // Positional flag — see note in handleSelectInputs.
    addValueToQueryParams(salesforceIdInput.name, salesforceIdInput.value, true);
  });
}

// These field will auto submit and only need to be handled if we are on search and want to add them to the query params
if (updateQueryParams) {
  handleSelectInputs();
  handleTagInput();
  handleSalesforceIdInput();
}
8 |
9 |
10 |

Upload complete

11 |
12 |
13 |
14 | {% if assets %} 15 |
16 |
17 |
18 |
19 |

{{ assets|length }} asset(s) added to asset manager

20 | Add another asset 21 |
22 |
23 |
24 |
{% include "_asset-list.html" %}
25 |
26 | {% endif %} 27 | {% if existing %} 28 | {% set assets = existing %} 29 |
30 |
31 |
32 |
33 |

{{ assets|length }} existing asset(s)

34 | Add another asset 35 |
36 |
37 |
38 |
{% include "_asset-list.html" %}
39 |
40 | {% endif %} 41 | {% if failed %} 42 | {% set assets = failed %} 43 |
44 |
45 |
46 |
47 |

{{ assets|length }} asset(s) failed to upload

48 | Add another asset 49 |
50 |
51 |
52 |
53 |
    54 | {% for asset in assets %} 55 |
  • 56 | {{ asset.file_path }} 57 | {% if asset.error %}:{{ asset.error }}{% endif %} 58 |
  • 59 | {% endfor %} 60 |
61 |
62 |
63 | {% endif %} 64 |
65 | {% endblock %} 66 | -------------------------------------------------------------------------------- /webapp/art/404.ascii: -------------------------------------------------------------------------------- 1 | `-/+osssoo+:-` 2 | -ossssssssssssss/. 3 | .ossssssssssssssssss/` 4 | -ssssssso:-.-:osssssss+` 5 | `ossssssss/++- `ssssssss/ 6 | -sssssssssss:``/sssssssss 7 | -ssssssssss- -sssssssssss 8 | `osssssssss/:+ssssssssss/ 9 | -sssssssss-`:ssssssssso` 10 | `ssssssssssssssssssss/` 11 | `` /ssssssssssssssssss/. 12 | .-:/++oooooo++/:-. `o+/:-.-/+osssso+/-` 13 | `:/+/:---:/+oo+/:---:/+/-` 14 | -+o:. .. ./o/- 15 | -+o+. -+o+- 16 | .+ooo- `.` `.` :ooo/` 17 | -+oooo. +o: +o: -oooo+. 18 | -+ooooo: /ooooo+. 19 | `+ooooooo: `/ooooooo+ 20 | `...../ooooooooo+:. `.::. .:+ooooooooo:.....` 21 | /ooooooooooooooooooo++///+o+-:+o+///+ooooooooooooooooooo+- 22 | ooooooooooooooooooooooooooo: /oooooooooooooooooooooooooo+ 23 | ooooo+/oooooooooooooooooooo- /ooooooooooooooooooo+/ooooo+ 24 | +oooo: +ooooooooooooooooooo/..+ooooooooooooooooooo/ /oooo/ 25 | ./+/: -oooooooooooooooooooooooooooooooooooooooooo. `://:` 26 | /oooooooooooooooooooooooooooooooooooooooo/ 27 | `+oooooooooooooooooooooooooooooooooooooo/ 28 | `/oooooooooooooooooooooooooooooooooooo/ 29 | :+oooooooooooooooooooooooooooooooo+- 30 | `:+oooooooooooooooooooooooooooo+:` 31 | `-+oooooooooooooooooooooooo/-` 32 | `:+oooooooooooooooooooooo+- 33 | /oooooooooooooooooooooooooo: 34 | ./+oooooooooooooooooooooo+/. 35 | 36 | ... WHUT?! 
(404) 37 | -------------------------------------------------------------------------------- /tests/e2e/fixtures/temp-files.fixture.ts: -------------------------------------------------------------------------------- 1 | import { test as base } from '@playwright/test'; 2 | import * as fs from 'fs'; 3 | import * as path from 'path'; 4 | import * as os from 'os'; 5 | 6 | type TempFilesFixture = { 7 | tempFiles: { 8 | createFile: (fileName: string, content: string) => string; 9 | getFilePath: (fileName: string) => string; 10 | cleanup: () => void; 11 | }; 12 | }; 13 | 14 | // Worker-scoped fixture for creating temporary test files 15 | export const test = base.extend({ 16 | tempFiles: async ({}, use) => { 17 | const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'playwright-test-')); 18 | const createdFiles: string[] = []; 19 | 20 | const fixture = { 21 | /** 22 | * @param fileName - Name of the file to create 23 | * @param content - Content to write to the file 24 | * @returns Full path to the created file 25 | */ 26 | createFile: (fileName: string, content: string): string => { 27 | const filePath = path.join(tempDir, fileName); 28 | fs.writeFileSync(filePath, content, 'utf-8'); 29 | createdFiles.push(filePath); 30 | return filePath; 31 | }, 32 | 33 | /** 34 | * @param fileName - Name of the file 35 | * @returns Full path to the file 36 | */ 37 | getFilePath: (fileName: string): string => { 38 | return path.join(tempDir, fileName); 39 | }, 40 | 41 | /** 42 | * Cleanup all created files and the temp directory 43 | */ 44 | cleanup: (): void => { 45 | createdFiles.forEach((filePath) => { 46 | if (fs.existsSync(filePath)) { 47 | fs.unlinkSync(filePath); 48 | } 49 | }); 50 | if (fs.existsSync(tempDir)) { 51 | fs.rmdirSync(tempDir); 52 | } 53 | }, 54 | }; 55 | 56 | await use(fixture); 57 | 58 | fixture.cleanup(); 59 | }, 60 | }); 61 | 62 | export { expect } from '@playwright/test'; 63 | 64 | /** 65 | * Helper function to create multiple test files at once 66 | * @param 
tempFiles - The tempFiles fixture 67 | * @param files - Object mapping file names to their content 68 | * @returns Object mapping file names to their full paths 69 | */ 70 | export function createTestFiles( 71 | tempFiles: TempFilesFixture['tempFiles'], 72 | files: Record 73 | ): Record { 74 | const filePaths: Record = {}; 75 | 76 | for (const [fileName, content] of Object.entries(files)) { 77 | filePaths[fileName] = tempFiles.createFile(fileName, content); 78 | } 79 | 80 | return filePaths; 81 | } 82 | -------------------------------------------------------------------------------- /webapp/config.py: -------------------------------------------------------------------------------- 1 | from pydantic import AliasChoices, SecretStr, Field, field_validator 2 | from pydantic_settings import BaseSettings, SettingsConfigDict 3 | from webapp.lib.python_helpers import is_pem_private_key 4 | import base64 5 | 6 | ENV_FILES = (".env", ".env.local") 7 | 8 | 9 | class SwiftConfig(BaseSettings): 10 | model_config = SettingsConfigDict( 11 | env_file=ENV_FILES, extra="ignore", env_prefix="flask_os_" 12 | ) 13 | auth_url: str 14 | username: str 15 | password: SecretStr 16 | auth_version: str 17 | tenant_name: str = "" 18 | 19 | 20 | class DirectoryApiConfig(BaseSettings): 21 | model_config = SettingsConfigDict( 22 | env_file=ENV_FILES, extra="ignore", env_prefix="flask_directory_api_" 23 | ) 24 | url: str 25 | token: SecretStr 26 | 27 | 28 | # Salesforce Trino Config 29 | 30 | 31 | class TrinoSFConfig(BaseSettings): 32 | model_config = SettingsConfigDict( 33 | env_file=ENV_FILES, extra="ignore", env_prefix="flask_trino_sf_" 34 | ) 35 | type: str = "service_account" 36 | universe_domain: str = "googleapis.com" 37 | project_id: str 38 | private_key_id: str 39 | private_key: str 40 | client_email: str 41 | client_id: str 42 | auth_uri: str = "https://accounts.google.com/o/oauth2/auth" 43 | token_uri: str = "https://oauth2.googleapis.com/token" 44 | auth_provider_x509_cert_url: str = ( 
    @field_validator("private_key", mode="before")
    @classmethod
    def decode_private_key(cls, v: str) -> str:
        """
        Automatically base64-decode the private_key if it looks base64-encoded.
        Otherwise, leave as is.
        """
        if not v:
            return v
        try:
            # Secrets are often supplied base64-wrapped with literal
            # "\n" escape sequences; decode, then restore real newlines
            # before checking whether the payload is a PEM key.
            decoded_bytes = base64.b64decode(v, validate=True).replace(
                b"\\n", b"\n"
            )
            # NOTE(review): returns bytes for a str-annotated field —
            # presumably relying on pydantic's lax bytes->str coercion;
            # confirm this is intended.
            if is_pem_private_key(decoded_bytes):
                return decoded_bytes
        except Exception:
            # Not valid base64 (or not PEM after decoding): assume the
            # value was already a plain key string.
            pass
        return v
30%; 26 | max-height: 30%; 27 | object-fit: scale-down; 28 | object-position: center; 29 | } 30 | } 31 | 32 | .p-asset-card-image__container { 33 | position: relative; 34 | min-height: 200px; 35 | max-height: 200px; 36 | overflow: hidden; 37 | display: grid; 38 | place-items: center; 39 | 40 | &.is-deprecated img { 41 | opacity: 0.5; 42 | } 43 | 44 | &.is-deprecated::after { 45 | position: absolute; 46 | top: 50%; 47 | left: 50%; 48 | transform: translate(-50%, -50%); 49 | content: "Deprecated asset"; 50 | width: max-content; 51 | padding: 0.8rem; 52 | background: red; 53 | color: white; 54 | font-weight: bold; 55 | } 56 | } 57 | 58 | .asset-additional-edit-row { 59 | display: flex; 60 | flex-direction: row; 61 | justify-content: space-between; 62 | align-items: center; 63 | flex-wrap: wrap; 64 | & > * { 65 | margin: 0 !important; 66 | } 67 | } 68 | 69 | .p-filter-panel-section__header { 70 | display: flex; 71 | justify-content: space-between; 72 | align-items: center; 73 | } 74 | 75 | .p-filter-panel-section__chips-toggle { 76 | padding-top: 6px; 77 | margin-bottom: 18px; 78 | } 79 | 80 | .p-asset-created-details__header { 81 | display: flex; 82 | align-items: flex-end; 83 | justify-content: space-between; 84 | flex-wrap: wrap; 85 | } 86 | 87 | .p-link--disabled { 88 | pointer-events: none; 89 | } 90 | 91 | .added-chips { 92 | background-color: #f7f7f7; 93 | } 94 | 95 | a.p-chip { 96 | background-color: var(--vf-color-background-neutral-default); 97 | } 98 | 99 | .p-form--per-page-select { 100 | .p-form__label { 101 | margin-right: 0; 102 | } 103 | .p-form__control { 104 | max-width: fit-content; 105 | min-width: 0; 106 | padding-right: 2rem; 107 | } 108 | .p-button { 109 | margin-left: 0.5rem; 110 | } 111 | } 112 | 113 | .p-search-and-filter__panel.js-chips-panel { 114 | max-height: 30rem; 115 | overflow-y: scroll; 116 | } 117 | 118 | .p-filter-panel-section__chips { 119 | padding-right: 2rem !important; 120 | } 121 | 122 | .p-chip--selected { 123 | 
background-color: rgba(0, 0, 0, 0.1); 124 | border: 1.5px solid var(--vf-color-border-neutral) !important; 125 | } 126 | 127 | .p-filter-panel-section__selected-count { 128 | color: var(--vf-color-link-default); 129 | cursor: pointer; 130 | position: absolute; 131 | right: 0.5rem; 132 | top: 0.3rem; 133 | } 134 | -------------------------------------------------------------------------------- /templates/_search-form.html: -------------------------------------------------------------------------------- 1 | 80 | -------------------------------------------------------------------------------- /webapp/swift.py: -------------------------------------------------------------------------------- 1 | # Standard library 2 | from hashlib import sha1 3 | from typing import Optional 4 | 5 | # Packages 6 | import swiftclient 7 | import swiftclient.exceptions 8 | from swiftclient.exceptions import ClientException as SwiftException 9 | 10 | # Local 11 | from webapp.config import config 12 | from webapp.lib.url_helpers import normalize 13 | 14 | 15 | class FileManager: 16 | """ 17 | Manage asset files: 18 | - creation 19 | - retrieval 20 | - searching 21 | - deletion 22 | """ 23 | 24 | container_name = "assets" 25 | swift_connection: swiftclient.client.Connection 26 | 27 | def __init__(self, swift_connection): 28 | self.swift_connection = swift_connection 29 | 30 | def create(self, file_data, file_path): 31 | """ 32 | Create a new asset and return its file_path 33 | If it already exists, 34 | return the file_path for the existing asset 35 | (don't create it again) 36 | """ 37 | 38 | try: 39 | # Create object 40 | self.swift_connection.put_object( 41 | self.container_name, normalize(file_path), file_data 42 | ) 43 | except SwiftException as swift_error: 44 | if swift_error.http_status != 404: 45 | raise swift_error 46 | 47 | # Not found, assuming container doesn't exist 48 | self.swift_connection.put_container(self.container_name) 49 | 50 | # And try to create again 51 | 
self.swift_connection.put_object( 52 | self.container_name, normalize(file_path), file_data 53 | ) 54 | 55 | def exists(self, file_path: str) -> bool: 56 | file_exists = True 57 | 58 | try: 59 | self.swift_connection.head_object( 60 | self.container_name, normalize(file_path) 61 | ) 62 | except SwiftException as error: 63 | if error.http_status == 404: 64 | file_exists = False 65 | 66 | return file_exists 67 | 68 | def fetch(self, file_path: str) -> Optional[bytes]: 69 | try: 70 | asset = self.swift_connection.get_object( 71 | self.container_name, normalize(file_path) 72 | ) 73 | return asset[1] 74 | except swiftclient.exceptions.ClientException as error: 75 | if error.http_status == 404: 76 | return None 77 | raise error 78 | 79 | def headers(self, file_path: str) -> dict: 80 | return self.swift_connection.head_object( 81 | self.container_name, normalize(file_path) 82 | ) 83 | 84 | def delete(self, file_path): 85 | if self.exists(file_path): 86 | self.swift_connection.delete_object( 87 | self.container_name, normalize(file_path) 88 | ) 89 | return True 90 | 91 | def generate_asset_path(self, file_data, friendly_name): 92 | """ 93 | Generate a unique asset file_path 94 | based on a friendly name 95 | """ 96 | 97 | path = sha1(file_data).hexdigest()[:8] 98 | if friendly_name: 99 | path += "-" + friendly_name 100 | 101 | return path 102 | 103 | 104 | swift_connection = swiftclient.client.Connection( 105 | config.swift.auth_url, 106 | config.swift.username, 107 | config.swift.password.get_secret_value(), 108 | auth_version=config.swift.auth_version, 109 | os_options={"tenant_name": config.swift.tenant_name}, 110 | ) 111 | 112 | file_manager = FileManager(swift_connection) 113 | -------------------------------------------------------------------------------- /templates/_pagination.html: -------------------------------------------------------------------------------- 1 | {% set max_pagination_items = 3 %} 2 |
3 |
4 |
5 | {% if total_pages > 1 %} 6 | 44 | {% endif %} 45 |
46 |
47 |
50 |
51 | 52 | 65 | 66 | {% for param_name, param_value in request.args.items() %} 67 | {% if param_name != 'per_page' and param_name != 'page' %} 68 | 69 | {% endif %} 70 | {% endfor %} 71 | 74 |
75 |
76 |
77 |
78 |
79 | -------------------------------------------------------------------------------- /webapp/app.py: -------------------------------------------------------------------------------- 1 | import errno 2 | import http.client 3 | from typing import Optional 4 | 5 | import flask 6 | from canonicalwebteam.flask_base.app import FlaskBase 7 | from flask import redirect 8 | from flask.globals import request 9 | from flask_wtf.csrf import CSRFProtect 10 | from swiftclient.exceptions import ClientException as SwiftException 11 | from werkzeug.exceptions import NotFound 12 | 13 | from webapp.commands import db_group, token_group 14 | from webapp.database import db_session 15 | from webapp.lib.processors import ImageProcessingError 16 | from webapp.routes import api_blueprint, ui_blueprint 17 | from webapp.sso import init_sso 18 | 19 | app = FlaskBase( 20 | __name__, 21 | "assets.ubuntu.com", 22 | static_folder="../static", 23 | template_folder="../templates", 24 | ) 25 | 26 | 27 | csrf = CSRFProtect() 28 | csrf.init_app(app) 29 | csrf.exempt(api_blueprint) 30 | init_sso(app) 31 | 32 | 33 | # Error pages 34 | # === 35 | def render_error(code, message): 36 | # return JSON format in case of api route (with prefix /v1) 37 | if request.blueprint == api_blueprint.name: 38 | return {"code": code, "message": message}, code 39 | else: 40 | return ( 41 | flask.render_template( 42 | "error.html", 43 | code=code, 44 | reason=http.client.responses.get(code), 45 | message=message, 46 | ), 47 | code, 48 | ) 49 | 50 | 51 | @app.errorhandler(400) 52 | @app.errorhandler(401) 53 | @app.errorhandler(403) 54 | def error_handler(error=None): 55 | code = getattr(error, "code") 56 | return render_error(code, str(error)) 57 | 58 | 59 | @app.errorhandler(500) 60 | def error_500(error=None): 61 | app.extensions["sentry"].captureException() 62 | return render_error(500, str(error)) 63 | 64 | 65 | @app.errorhandler(OSError) 66 | def error_os(error=None): 67 | app.extensions["sentry"].captureException() 
68 | 69 | status = 500 70 | 71 | if error.errno in [errno.EPERM, errno.EACCES]: 72 | status = 403 # Forbidden 73 | if error.errno in [errno.ENOENT, errno.ENXIO]: 74 | status = 404 # Not found 75 | if error.errno in [errno.EEXIST]: 76 | status = 409 # Conflict 77 | if error.errno in [errno.E2BIG]: 78 | status = 413 # Request Entity Too Large 79 | 80 | return render_error(status, str(error.strerror)) 81 | 82 | 83 | @app.errorhandler(ImageProcessingError) 84 | def error_pillbox(error=None): 85 | app.extensions["sentry"].captureException() 86 | 87 | status = error.status_code 88 | return render_error(status, f"Pilbox Error: {error.log_message}") 89 | 90 | 91 | @app.errorhandler(SwiftException) 92 | def error_swift(error=None): 93 | app.extensions["sentry"].captureException() 94 | 95 | status = 500 96 | if error.http_status > 99: 97 | status = error.http_status 98 | elif error.msg[:12] == "Unauthorised": 99 | # Special case for swiftclient.exceptions.ClientException 100 | status = 511 101 | return render_error(status, f"Swift Error: {error.msg}") 102 | 103 | 104 | # Apply blueprints 105 | # === 106 | @app.route("/") 107 | def index(): 108 | return redirect(api_blueprint.url_prefix, code=302) 109 | 110 | 111 | @app.errorhandler(404) 112 | def redirect_v1(error: Optional[NotFound] = None): 113 | # Redirect to /v1/ if the route is not found 114 | if request.path.startswith(api_blueprint.url_prefix): 115 | return render_error(404, error.description if error else "Not found") 116 | else: 117 | return redirect( 118 | api_blueprint.url_prefix + "/" + request.path, code=302 119 | ) 120 | 121 | 122 | app.register_blueprint(ui_blueprint) 123 | app.register_blueprint(api_blueprint) 124 | 125 | # Teardown 126 | # === 127 | 128 | 129 | @app.teardown_appcontext 130 | def remove_db_session(response): 131 | db_session.remove() 132 | return response 133 | 134 | 135 | # CLI commands 136 | # === 137 | app.cli.add_command(token_group) 138 | app.cli.add_command(db_group) 139 | 
-------------------------------------------------------------------------------- /.github/workflows/pr.yaml: -------------------------------------------------------------------------------- 1 | name: PR checks 2 | on: pull_request 3 | 4 | jobs: 5 | run-image: 6 | runs-on: ubuntu-latest 7 | 8 | services: 9 | postgres: 10 | image: postgres 11 | env: 12 | POSTGRES_DB: assets 13 | POSTGRES_USER: assets 14 | POSTGRES_PASSWORD: password 15 | ports: 16 | - 5432:5432 17 | swift: 18 | image: bouncestorage/swift-aio 19 | ports: 20 | - 8080:8080 21 | steps: 22 | - uses: actions/checkout@v3 23 | 24 | - name: Build image 25 | run: DOCKER_BUILDKIT=1 docker build --tag assets-ubuntu-com . 26 | 27 | - name: Run image 28 | run: | 29 | docker run --detach --env-file .env --network host assets-ubuntu-com 30 | sleep 1 31 | curl --head --fail --retry-delay 1 --retry 30 --retry-connrefused http://localhost 32 | 33 | run-tests: 34 | runs-on: ubuntu-latest 35 | services: 36 | postgres: 37 | image: postgres 38 | env: 39 | POSTGRES_DB: assets 40 | POSTGRES_USER: assets 41 | POSTGRES_PASSWORD: password 42 | ports: 43 | - 5432:5432 44 | swift: 45 | image: bouncestorage/swift-aio 46 | ports: 47 | - 8080:8080 48 | steps: 49 | - uses: actions/checkout@v3 50 | 51 | - name: Install Dotrun 52 | run: | 53 | sudo pip3 install dotrun requests==2.31.0 # requests version is pinned to avoid breaking changes, can be removed once issue is resolved: https://github.com/docker/docker-py/issues/3256 54 | chmod -R 777 . 55 | echo "FLASK_DISABLE_AUTH_FOR_TESTS=true" >> .env 56 | 57 | - name: Install dependencies 58 | run: dotrun install 59 | 60 | - name: Build project 61 | run: dotrun build 62 | 63 | - name: Test site 64 | run: | 65 | dotrun & sleep 25 66 | echo "Waiting for server to start..." 
67 | curl --head --fail --retry-delay 1 --retry 30 --retry-connrefused http://localhost:8017 68 | 69 | - name: Install additional dependencies 70 | run: dotrun exec pip3 install coverage 71 | 72 | - name: Run unit tests with coverage 73 | run: dotrun exec coverage run --source=. --module unittest discover tests && bash <(curl -s https://codecov.io/bash) -cF python 74 | 75 | - name: Install Playwright 76 | run: yarn playwright install --with-deps 77 | 78 | - name: Run E2E tests 79 | run: yarn playwright test --reporter=line,html 80 | 81 | - name: Upload test results 82 | if: always() 83 | uses: actions/upload-artifact@v4 84 | with: 85 | name: e2e-test-results 86 | path: | 87 | test-results/ 88 | screenshots/ 89 | playwright-report/ 90 | retention-days: 30 91 | 92 | - name: Upload Playwright trace 93 | if: failure() 94 | uses: actions/upload-artifact@v4 95 | with: 96 | name: playwright-traces 97 | path: test-results/ 98 | retention-days: 30 99 | 100 | lint-scss: 101 | runs-on: ubuntu-latest 102 | 103 | steps: 104 | - uses: actions/checkout@v3 105 | 106 | - name: Install dependencies 107 | run: yarn install --immutable 108 | 109 | - name: Lint scss 110 | run: yarn lint-scss 111 | 112 | lint-python: 113 | runs-on: ubuntu-latest 114 | 115 | steps: 116 | - uses: actions/checkout@v3 117 | 118 | - name: Install node dependencies 119 | run: yarn install --immutable 120 | 121 | - name: Install python dependencies 122 | run: | 123 | python3 -m pip install --upgrade pip 124 | sudo pip3 install flake8 black 125 | 126 | - name: Lint python 127 | run: yarn lint-python 128 | 129 | check-inclusive-naming: 130 | runs-on: ubuntu-latest 131 | 132 | steps: 133 | - name: Checkout 134 | uses: actions/checkout@v3 135 | 136 | - name: Check inclusive naming 137 | uses: canonical-web-and-design/inclusive-naming@main 138 | with: 139 | github-token: ${{ secrets.GITHUB_TOKEN }} 140 | reporter: github-pr-review 141 | fail-on-error: true 142 | 
/**
 * Render an overflow badge ("+N") into the given element, or clear it
 * when there is no positive overflow.
 * @param {Integer} overflowCount - Number of overflowing chips
 * @param {Element} overflowCountEl - Element that displays the badge
 */
export const setOverflowCounter = (overflowCount, overflowCountEl) => {
  // No badge element on this pattern: nothing to do.
  if (!overflowCountEl) return;
  overflowCountEl.textContent = overflowCount > 0 ? `+${overflowCount}` : "";
};
// Focus the search input when the "+N selected" counter is clicked.
document
  .querySelectorAll(".p-search-and-filter__selected-count")
  .forEach((el) => {
    el.addEventListener("click", function () {
      // Bug fix: the original assigned to an undeclared `searchEl` inside
      // the if-condition.  This file is an ES module (strict mode), so
      // that assignment throws a ReferenceError at click time.  Declare
      // the variable properly and keep the same null-safe lookup.
      const searchEl = el
        .closest(".p-search-and-filter__search-container")
        ?.querySelector(".p-search-and-filter__input");
      if (searchEl) {
        searchEl.focus();
      }
    });
  });
def downgrade():
    """Revert :func:`upgrade`: drop every column and table it created.

    Drop order matters: ``asset_product_association`` must be removed
    before ``product`` because it holds a foreign key into it.
    """
    # Timestamp columns added to the auxiliary tables.
    op.drop_column("token", "updated")
    op.drop_column("token", "created")
    op.drop_column("tag", "updated")
    op.drop_column("tag", "created")
    op.drop_column("redirect", "updated")
    op.drop_column("redirect", "created")
    # Metadata columns added to the asset table.
    op.drop_column("asset", "updated")
    op.drop_column("asset", "language")
    op.drop_column("asset", "salesforce_campaign_id")
    op.drop_column("asset", "google_drive_link")
    op.drop_column("asset", "author")
    op.drop_column("asset", "name")
    op.drop_column("asset", "asset_type")
    # Association table first (it references product.name), then product.
    op.drop_table("asset_product_association")
    op.drop_table("product")
name: assets-ubuntu-postgres 69 | - name: OS_AUTH_URL 70 | valueFrom: 71 | secretKeyRef: 72 | key: os-auth-url 73 | name: assets-ubuntu-swift 74 | - name: OS_USERNAME 75 | valueFrom: 76 | secretKeyRef: 77 | key: os-username 78 | name: assets-ubuntu-swift 79 | - name: OS_PASSWORD 80 | valueFrom: 81 | secretKeyRef: 82 | key: os-password 83 | name: assets-ubuntu-swift 84 | - name: OS_AUTH_VERSION 85 | valueFrom: 86 | secretKeyRef: 87 | key: os-auth-version 88 | name: assets-ubuntu-swift 89 | - name: OS_TENANT_NAME 90 | valueFrom: 91 | secretKeyRef: 92 | key: os-tenant-name 93 | name: assets-ubuntu-swift 94 | - name: OS_REGION_NAME 95 | valueFrom: 96 | secretKeyRef: 97 | key: os-region-name 98 | name: assets-ubuntu-swift 99 | readinessProbe: 100 | httpGet: 101 | path: /_status/check 102 | port: 80 103 | periodSeconds: 5 104 | timeoutSeconds: 3 105 | 106 | resources: 107 | limits: 108 | memory: 1024Mi 109 | requests: 110 | memory: 128Mi 111 | 112 | --- 113 | kind: Ingress 114 | apiVersion: networking.k8s.io/v1 115 | metadata: 116 | name: assets-ubuntu-com 117 | namespace: production 118 | annotations: 119 | kubernetes.io/ingress.class: "nginx" 120 | nginx.ingress.kubernetes.io/proxy-body-size: 8m 121 | nginx.ingress.kubernetes.io/use-regex: "true" 122 | nginx.ingress.kubernetes.io/configuration-snippet: | 123 | if ($host = 'manager.assets.ubuntu.com' ) { 124 | rewrite ^ https://assets.ubuntu.com/manager$request_uri? 
permanent; 125 | } 126 | more_set_headers "X-Robots-Tag: noindex"; 127 | 128 | labels: 129 | app: "assets.ubuntu.com" 130 | 131 | spec: 132 | tls: 133 | - secretName: assets-ubuntu-com-tls 134 | 135 | hosts: 136 | - assets.ubuntu.com 137 | 138 | - secretName: manager-assets-ubuntu-com-tls 139 | hosts: 140 | - manager.assets.ubuntu.com 141 | 142 | rules: 143 | - host: assets.ubuntu.com 144 | http: &http_service 145 | paths: 146 | - path: / 147 | pathType: ImplementationSpecific 148 | backend: 149 | service: 150 | name: assets-ubuntu-com 151 | port: 152 | number: 80 153 | 154 | - host: manager.assets.ubuntu.com 155 | http: *http_service 156 | -------------------------------------------------------------------------------- /webapp/commands.py: -------------------------------------------------------------------------------- 1 | # Standard library 2 | from datetime import datetime 3 | import re 4 | import uuid 5 | 6 | # Packages 7 | import click 8 | import flask 9 | import requests 10 | 11 | # Local 12 | from webapp.database import db_session 13 | from webapp.models import Asset, Redirect, Token 14 | from webapp.services import asset_service 15 | 16 | token_group = flask.cli.AppGroup("token") 17 | db_group = flask.cli.AppGroup("database") 18 | 19 | 20 | @token_group.command("create") 21 | @click.argument("name") 22 | def create_token(name): 23 | if db_session.query(Token).filter(Token.name == name).one_or_none(): 24 | print(f"Token exists: {name}") 25 | else: 26 | token = Token(name=name, token=uuid.uuid4().hex) 27 | db_session.add(token) 28 | db_session.commit() 29 | print(f"Token created: {token.name} - {token.token}") 30 | 31 | 32 | @token_group.command("delete") 33 | @click.argument("name") 34 | def delete_token(name): 35 | token = db_session.query(Token).filter(Token.name == name).one_or_none() 36 | 37 | if not token: 38 | print(f"Token not found: '{name}'") 39 | else: 40 | db_session.delete(token) 41 | db_session.commit() 42 | print(f"Token deleted: '{name}'") 43 
@db_group.command("import-assets-from-prod")
@click.argument("token")
def import_assets_from_prod(token):
    """Copy every asset record from production into the local database.

    Fetches the full asset listing from assets.ubuntu.com with the given
    API token, then creates each asset locally (or refreshes its tags when
    it already exists).  Progress is printed every 1000 records.
    """
    print("Assets in DB count (before):", db_session.query(Asset).count())
    data = requests.get(f"https://assets.ubuntu.com/v1?token={token}").json()
    print("Data to insert:", len(data))

    # Loop-invariant work hoisted out of the loop: the extension list and
    # its regex never change between iterations.
    image_extensions = [
        "jpeg",
        "jpg",
        "gif",
        "png",
        "apng",
        "svg",
        "bmp",
        "webp",
    ]
    image_re = re.compile(
        f".+\\.({'|'.join(image_extensions)})$", flags=re.IGNORECASE
    )

    for index, entry in enumerate(data):
        if index % 1000 == 0:
            print(f"{index}/{len(data)}")
        file_path = entry.get("file_path")
        created = datetime.strptime(
            entry.get("created"), "%a %b %d %H:%M:%S %Y"
        )
        # Tags arrive as a comma- or whitespace-separated string.
        tags = re.split(",|\\s", entry.get("tags", ""))
        entry.pop("file_path", None)
        entry.pop("created", None)
        entry.pop("tags", None)

        # Rename the legacy "optimized" flag to "optimize".
        # NOTE(review): the original leaves the old "optimized" key in the
        # payload as well; preserved here for parity — confirm whether it
        # should be popped.
        if entry.get("optimized", None):
            entry["optimize"] = entry.get("optimized", None)

        # Flag image assets by file extension.
        entry["image"] = bool(image_re.match(file_path))

        asset = asset_service.find_asset(file_path)
        if asset:
            # Already imported: just refresh its tags.
            asset_service.update_asset(file_path, tags)
        else:
            asset = Asset(file_path=file_path, data=entry, created=created)
            db_session.add(asset)
            asset.tags = asset_service.create_tags_if_not_exist(tags)
        db_session.commit()
    print("Assets in DB count (after):", db_session.query(Asset).count())
@db_group.command("insert-dummy-data")
def insert_dummy_data():
    """Seed the database with a few known dummy assets for local dev.

    Creates a PDF, a PNG (with optimization enabled) and an SVG, all
    tagged ``dummy_asset``.
    """

    def _read_bytes(path):
        # Read via a context manager so the handle is closed promptly —
        # the original used bare open(...).read() and leaked three files.
        with open(path, "rb") as file:
            return file.read()

    dummy_pdf = {
        "name": "dummy.pdf",
        "file": _read_bytes("./webapp/dummy-data/dummy.pdf"),
    }
    ubuntu_png = {
        "name": "ubuntu.png",
        "file": _read_bytes("./webapp/dummy-data/ubuntu.png"),
        "optimize": True,
    }
    ubuntu_svg = {
        "name": "ubuntu.svg",
        "file": _read_bytes("./webapp/dummy-data/ubuntu.svg"),
    }

    assets_to_create = [dummy_pdf, ubuntu_png, ubuntu_svg]
    for asset in assets_to_create:
        asset_service.create_asset(
            file_content=asset["file"],
            friendly_name=asset["name"],
            optimize=asset.get("optimize", False),
            tags=["dummy_asset"],
        )
2 |
3 |
4 | {% if details %} 5 | {% include "shared/_asset-card-actions.html" %} 6 | {% endif %} 7 |
8 | 29 | {% if asset.name %} 30 |

31 | {{ asset.name }} 32 |

33 | {% else %} 34 |

35 | {{ asset.file_path.split(".")[0] }} 36 |

37 | {% endif %} 38 |

39 | File type: .{{ asset.file_type or asset.file_path.split(".")[-1] .lower() }} 40 |

41 |

42 | Resolution: 43 | 44 | {% if asset.data.width and asset.data.height %}{{ asset.data.width }} x {{ asset.data.height }}px{% endif %} 45 | 46 |

47 |

48 | Date added: {{ asset.created.strftime("%d %B %Y") }} 49 |

50 |
51 |
52 |
53 | Tags:  54 | {% for product in asset.products %} 55 | 59 | {{ product.name }} 60 | 61 | {% endfor %} 62 | {% for tag in asset.tags %} 63 | 65 | {{ tag.name }} 66 | 67 | {% endfor %} 68 |
69 | {% if details %} 70 |
71 |

72 | Salesforce Campaigns: 73 |

    74 | {% for campaign in asset.salesforce_campaigns %}
  • {{ campaign.name }}
  • {% endfor %} 75 |
76 |

77 |
78 | {% include "shared/_asset-author.html" %} 79 |

80 | Language: {{ asset.language }} 81 |

82 |

83 | Salesforce campaign ID: {{ asset.salesforce_campaign_id }} 84 |

85 |

86 | Google Drive link: {{ asset.google_drive_link }} 87 |

88 | {% elif created %} 89 | {% include "shared/_asset-card-actions.html" %} 90 |

91 | Asset details 92 |

93 |
94 | Salesforce Campaigns: 95 |
    96 | {% for campaign in asset.salesforce_campaigns %}
  • {{ campaign.name }}
  • {% endfor %} 97 |
98 |
99 | {% include "shared/_asset-author.html" %} 100 |

101 | Created: {{ asset.created.strftime("%d %B %Y") }} 102 |

103 |

104 | Language: {{ asset.language }} 105 |

106 |

107 | Salesforce campaign ID: {{ asset.salesforce_campaign_id }} 108 |

109 |

110 | Google Drive link: {{ asset.google_drive_link }} 111 |

112 | {% else %} 113 | {% include "shared/_asset-card-actions.html" %} 114 | {% endif %} 115 |
116 |
117 |
118 | -------------------------------------------------------------------------------- /webapp/art/chbs.ascii: -------------------------------------------------------------------------------- 1 | i81 .f@@L fC . 2 | ;1fft1. .8; ;C LL 3 | L, .L01@. 0i :;i: 4 | Ct 08Li fL. 5 | tC 6 | ;0 7 | Lt 8 | @;;;;. 9 | 1f 18L 10 | ;@. 11 | i1 i: ,1t; ,1t; ttti ttt; tttti it1.1ttttt, t0 12 | CL@@880Lf8fC .8. .@. 8, @ .@ @ ;0 @ 0; @ @, Lf1C; 13 | i0@@@8L@ft; ,CG. ,0 .8 ft @tf8. @tf8 @ 8. @ CC C, L: 14 | 180C@@0 :8. i01;tL i8iiCL @ 8, @ 8. @i;;; ,8t;1G @ C8, ,Gi,Lf .C t1 15 | ;:t@8@@@i, :8 ,1C@f .Gi;G, 16 | : ,0@@@t;1 : LC ,0L 17 | t08@@8CL 08.@; f t0 1ftt 18 | ,G@@88i; 1, 81 .L 8: .LL: 19 | L0GGtG@t ,8 LG .C ,@. .. 20 | .i10@@@0; i0:@; ,@f .8 .8i .G01. :t8G: 21 | 0G0@@8L. iC i@t 1f C t8@f. L8, i8i 22 | G1@@@L. ;G ,8L GC. .8 f, 01 1@. LG 23 | @@G01, ,0 1@0GCLi .8 , 8: Ct .@, 24 | tt, :@. . LCti101 8. LL Ct iG 25 | G; CL ;@, ,@, .8. 26 | ;G .@@8 0f 0C :@; :8, 27 | 0: ,@, tG@@@1 CG .L@C. t@C;itG@0; 28 | tCG@1 1G G@@8 :@i i.G0. :8L 29 | G1 f0; CL :C@Gt, .8i G@8C 30 | :8 :0 ;;iiiii. ;: .@, i@@,Ct 31 | G08;;1 1 . L 0; 088 8; 32 | ,,:Gt .C;,::0@@@@@@@@@@@. . 8: tCtC i8. 33 | C1 1G 0@@@@@@@@@@@ f0 8.Cf CL 34 | 1C i:CCi 8@@@@@@@@@@@ .t; ;8f Ci.8i ,@1 35 | CG. .L; tL @@@@@@@@@@@@. t8 t88000@L, 10 L@8. 36 | .G@8G0@LGL .0;;;:@@@@@@@@@@@@, :@ .@iLf 37 | 10 fG L0 .@, 38 | 10 ,@G 18 .@; fC 39 | ,88t;,:iG@i,@, @@f :8f f0 0i 40 | ::. .00, f8 t@8Cf1fC8@Gi @i :8 41 | i0@CttfG@L LC t0 42 | @i Ct 43 | t@ 8; 44 | 81 :@ 45 | t8 fL 46 | .@; 01 47 | CG ,@. 
48 | ,0, ;8 49 | 50 | All hail the great [Randall Munroe][1] and his [beautiful comics][2] 51 | 52 | 1: http://en.wikipedia.org/wiki/Randall_Munroe 53 | 2: http://xkcd.com/936/ -------------------------------------------------------------------------------- /tests/e2e/fixtures/create-test-assets.fixture.ts: -------------------------------------------------------------------------------- 1 | import { test as base, Page } from '@playwright/test'; 2 | import * as fs from 'fs'; 3 | import * as path from 'path'; 4 | import * as os from 'os'; 5 | 6 | type TestAssets = { 7 | asset1: { name: string; type: string; tags: string[] }; 8 | asset2: { name: string; type: string; tags: string[] }; 9 | }; 10 | 11 | type WorkerFixtures = { 12 | testAssets: TestAssets; 13 | }; 14 | 15 | // Temp directory for worker-scoped file generation 16 | let workerTempDir: string | null = null; 17 | 18 | function ensureTempDir(): string { 19 | if (!workerTempDir) { 20 | workerTempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'playwright-worker-')); 21 | } 22 | return workerTempDir; 23 | } 24 | 25 | function createTempFile(fileName: string, content: string): string { 26 | const tempDir = ensureTempDir(); 27 | const filePath = path.join(tempDir, fileName); 28 | fs.writeFileSync(filePath, content, 'utf-8'); 29 | return filePath; 30 | } 31 | 32 | function cleanupTempDir(): void { 33 | if (workerTempDir && fs.existsSync(workerTempDir)) { 34 | const files = fs.readdirSync(workerTempDir); 35 | files.forEach((file) => { 36 | fs.unlinkSync(path.join(workerTempDir!, file)); 37 | }); 38 | fs.rmdirSync(workerTempDir); 39 | workerTempDir = null; 40 | } 41 | } 42 | 43 | // Fixture to create assets before search 44 | export const test = base.extend<{}, WorkerFixtures>({ 45 | testAssets: [async ({ browser }, use) => { 46 | // Create a new page for setup 47 | const context = await browser.newContext(); 48 | const page = await context.newPage(); 49 | console.log('Creating test assets (runs once for all tests)...'); 
50 | 51 | const assets: TestAssets = { 52 | asset1: { 53 | name: 'pre_test_asset_1', 54 | type: 'guide', 55 | tags: ['e2e-search-test', 'test'], 56 | }, 57 | asset2: { 58 | name: 'pre_test_asset_2', 59 | type: 'image', 60 | tags: ['e2e-search-test', 'image-test'], 61 | }, 62 | }; 63 | 64 | // Create temporary test files 65 | const file1Path = createTempFile('pre_test_asset_1.txt', 'pre test asset 1 content'); 66 | const file2Path = createTempFile('pre_test_asset_2.txt', 'pre test asset 2 content'); 67 | 68 | // Create first test asset 69 | await createAsset(page, { 70 | filePath: file1Path, 71 | assetType: 'guide', 72 | assetName: assets.asset1.name, 73 | productTagSearch: 'digital', 74 | productTagLabel: 'Digital Signage', 75 | customTags: assets.asset1.tags, 76 | language: 'English', 77 | }); 78 | 79 | console.log(`Created ${assets.asset1.name}`); 80 | 81 | // Create second test asset 82 | await createAsset(page, { 83 | filePath: file2Path, 84 | assetType: 'image', 85 | assetName: assets.asset2.name, 86 | productTagSearch: 'amd', 87 | productTagLabel: 'amd', 88 | customTags: assets.asset2.tags, 89 | language: 'Chinese', 90 | }); 91 | use(assets) 92 | console.log(`Created ${assets.asset2.name}`); 93 | 94 | await context.close(); 95 | 96 | // Cleanup temporary files 97 | cleanupTempDir(); 98 | }, { scope: 'worker' }], 99 | }); 100 | 101 | export { expect } from '@playwright/test'; 102 | 103 | // Helper function to create an asset 104 | async function createAsset( 105 | page: Page, 106 | options: { 107 | filePath: string; 108 | assetType: string; 109 | assetName: string; 110 | productTagSearch: string; 111 | productTagLabel: string; 112 | customTags: string[]; 113 | language: string; 114 | } 115 | ) { 116 | await page.goto('/manager/create'); 117 | 118 | // Upload file 119 | await page 120 | .getByRole('button', { name: 'Choose files to upload' }) 121 | .setInputFiles(options.filePath); 122 | 123 | // Set asset type 124 | await 
page.locator('#asset-type-select').selectOption(options.assetType); 125 | 126 | // Set asset name 127 | await page.getByRole('textbox', { name: 'Asset name' }).fill(options.assetName); 128 | 129 | // Add product tag 130 | const productTagInput = page.getByRole('textbox', { 131 | name: 'Product or category tags', 132 | }); 133 | await productTagInput.click(); 134 | await productTagInput.fill(options.productTagSearch); 135 | await page.getByRole('button', { name: options.productTagLabel }).click(); 136 | await page.locator('html').click(); 137 | 138 | // Add custom tags 139 | const customTagInput = page.getByRole('textbox', { 140 | name: 'Enter tag and press enter', 141 | }); 142 | for (const tag of options.customTags) { 143 | await customTagInput.click(); 144 | await customTagInput.fill(tag); 145 | await customTagInput.press('Enter'); 146 | } 147 | 148 | // Set language 149 | await page.getByLabel('language').selectOption(options.language); 150 | 151 | // Submit 152 | await page.getByRole('button', { name: 'Upload asset' }).click(); 153 | 154 | // Wait for completion 155 | await page.waitForSelector('h1:has-text("Upload complete")', { 156 | timeout: 1000000, 157 | }); 158 | } 159 | -------------------------------------------------------------------------------- /tests/e2e/create_assets.spec.ts: -------------------------------------------------------------------------------- 1 | import { test, expect } from './fixtures/temp-files.fixture'; 2 | import { Page, Locator } from '@playwright/test'; 3 | 4 | async function navigateToCreatePage(page: Page) { 5 | await page.goto('/manager/create'); 6 | } 7 | 8 | async function uploadFiles(page: Page, filePaths: string[]) { 9 | const uploadButton = page.getByRole('button', { 10 | name: 'Choose files to upload', 11 | }); 12 | await uploadButton.setInputFiles(filePaths); 13 | } 14 | 15 | async function fillAssetMetadata( 16 | page: Page, 17 | options: { 18 | assetType: string; 19 | assetName: string; 20 | productSearch: string; 
21 | productTagLabel: string; 22 | customTags: string[]; 23 | language: string; 24 | } 25 | ) { 26 | // Select asset type 27 | await page.locator('#asset-type-select').selectOption(options.assetType); 28 | 29 | // Fill asset name 30 | const assetNameInput = page.getByRole('textbox', { name: 'Asset name' }); 31 | await assetNameInput.fill(options.assetName); 32 | 33 | // Add and verify product tag 34 | const productTagInput = page.getByRole('textbox', { 35 | name: 'Product or category tags', 36 | }); 37 | await productTagInput.click(); 38 | await productTagInput.fill(options.productSearch); 39 | await page.getByRole('button', { name: options.productTagLabel }).click(); 40 | await expect(page.locator('#create-update-asset')).toContainText( 41 | options.productTagLabel 42 | ); 43 | await page.locator('html').click(); 44 | 45 | // Add custom tags 46 | const customTagInput = page.getByRole('textbox', { 47 | name: 'Enter tag and press enter', 48 | }); 49 | for (const tag of options.customTags) { 50 | await customTagInput.click(); 51 | await customTagInput.fill(tag); 52 | await customTagInput.press('Enter'); 53 | await expect(page.locator('#create-update-asset')).toContainText(tag); 54 | } 55 | 56 | // Select language 57 | await page.getByLabel('language').selectOption(options.language); 58 | } 59 | 60 | async function submitUpload(page: Page) { 61 | await page.getByRole('button', { name: 'Upload asset' }).click(); 62 | await expect( 63 | page.getByRole('heading', { name: 'Upload complete' }) 64 | ).toBeVisible(); 65 | } 66 | 67 | async function verifyAssetContent(page: Page, pageElement: Locator, expectedText: string) { 68 | const popupPromise = page.waitForEvent('popup'); 69 | await pageElement.click(); 70 | const popup = await popupPromise; 71 | await expect(popup.locator('pre')).toContainText(expectedText); 72 | await popup.close(); 73 | } 74 | 75 | test.describe('Asset creation flow', () => { 76 | test('single asset creation', async ({ page, tempFiles }) => { 77 | // 
Create temporary test file 78 | const file1Path = tempFiles.createFile('test_upload_1.txt', 'test upload 1'); 79 | 80 | await navigateToCreatePage(page); 81 | 82 | // Upload single file 83 | await uploadFiles(page, [file1Path]); 84 | 85 | // Fill metadata 86 | await fillAssetMetadata(page, { 87 | assetType: 'guide', 88 | assetName: 'test_asset', 89 | productSearch: 'digital', 90 | productTagLabel: 'Digital Signage', 91 | customTags: ['single-upload', 'e2e'], 92 | language: 'Chinese', 93 | }); 94 | 95 | // Submit and verify 96 | await submitUpload(page); 97 | const assetCard = page.locator('.p-card__content') 98 | await expect(assetCard).toContainText('test_asset'); 99 | await expect(assetCard).toContainText( 100 | 'digital-signage' 101 | ); 102 | await expect(assetCard).toContainText( 103 | 'e2e' 104 | ); 105 | await expect(assetCard).toContainText( 106 | 'single-upload' 107 | ); 108 | await expect(assetCard).toContainText( 109 | 'Language: Chinese' 110 | ); 111 | const assetCardThumbnail = page.locator('.p-asset-card--thumbnail') 112 | await verifyAssetContent(page, assetCardThumbnail, 'test upload 1'); 113 | }); 114 | 115 | test('multiple assets creation', async ({ page, tempFiles }) => { 116 | // Create temporary test files 117 | const file2Path = tempFiles.createFile('test_upload_2.txt', 'test upload 2'); 118 | const file3Path = tempFiles.createFile('test_upload_3.txt', 'test upload 3'); 119 | 120 | await navigateToCreatePage(page); 121 | 122 | // Upload multiple files 123 | await uploadFiles(page, [file2Path, file3Path]); 124 | 125 | // Fill metadata 126 | await fillAssetMetadata(page, { 127 | assetType: 'guide', 128 | assetName: 'multi_test_asset', 129 | productSearch: 'amd', 130 | productTagLabel: 'AMD', 131 | customTags: ['multi-upload', 'e2e'], 132 | language: 'English', 133 | }); 134 | 135 | // Submit and verify 136 | await submitUpload(page); 137 | 138 | const firstCard = page.locator('.p-card__content').first(); 139 | await 
expect(firstCard).toContainText('multi_test_asset'); 140 | await expect(firstCard).toContainText('e2e'); 141 | await expect(firstCard).toContainText('amd'); 142 | await expect(firstCard).toContainText('multi-upload'); 143 | 144 | // Verify correct number of asset cards 145 | const assetCards = page.locator('.p-asset-card--thumbnail'); 146 | await expect(assetCards).toHaveCount(2); 147 | 148 | // Verify first asset content 149 | await verifyAssetContent(page, assetCards.first(), 'test upload 2'); 150 | 151 | 152 | // Verify second asset content 153 | await verifyAssetContent(page, assetCards.nth(1), 'test upload 3'); 154 | }); 155 | }); 156 | -------------------------------------------------------------------------------- /charm/lib/charms/redis_k8s/v0/redis.py: -------------------------------------------------------------------------------- 1 | """Library for the redis relation. 2 | 3 | This library contains the Requires and Provides classes for handling the 4 | redis interface. 5 | 6 | Import `RedisRequires` in your charm by adding the following to `src/charm.py`: 7 | ``` 8 | from charms.redis_k8s.v0.redis import RedisRequires 9 | ``` 10 | Define the following attributes in charm charm class for the library to be able to work with it 11 | ``` 12 | on = RedisRelationCharmEvents() 13 | ``` 14 | And then wherever you need to reference the relation data it will be available 15 | in the property `relation_data`: 16 | ``` 17 | redis_host = self.redis.relation_data.get("hostname") 18 | redis_port = self.redis.relation_data.get("port") 19 | ``` 20 | You will also need to add the following to `metadata.yaml`: 21 | ``` 22 | requires: 23 | redis: 24 | interface: redis 25 | ``` 26 | """ 27 | import logging 28 | import socket 29 | from typing import Dict, Optional 30 | 31 | from ops.charm import CharmEvents 32 | from ops.framework import EventBase, EventSource, Object 33 | 34 | # The unique Charmhub library identifier, never change it. 
LIBID = "fe18a608cec5465fa5153e419abcad7b"

# Increment this major API version when introducing breaking changes.
LIBAPI = 0

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version.
LIBPATCH = 7

logger = logging.getLogger(__name__)

# NOTE(review): "REALTION" is a typo, but the name is part of this published
# library's public surface, so it is kept for backward compatibility.
DEFAULT_REALTION_NAME = "redis"


class RedisRelationUpdatedEvent(EventBase):
    """An event for the redis relation having been updated."""


class RedisRelationCharmEvents(CharmEvents):
    """A class to carry custom charm events so requires can react to relation changes."""

    redis_relation_updated = EventSource(RedisRelationUpdatedEvent)


class RedisRequires(Object):
    """A class implementing the requires side of the redis relation."""

    def __init__(self, charm, relation_name: str = DEFAULT_REALTION_NAME):
        super().__init__(charm, relation_name)
        # relation_joined and relation_changed share one handler: in both
        # cases we simply notify the charm that relation data may have moved.
        self.framework.observe(
            charm.on[relation_name].relation_joined, self._on_relation_changed
        )
        self.framework.observe(
            charm.on[relation_name].relation_changed, self._on_relation_changed
        )
        self.framework.observe(
            charm.on[relation_name].relation_broken, self._on_relation_broken
        )
        self.charm = charm
        self.relation_name = relation_name

    def _on_relation_changed(self, event):
        """Handle the relation joined/changed events."""
        if not event.unit:
            return

        # Trigger an event that our charm can react to.
        self.charm.on.redis_relation_updated.emit()

    def _on_relation_broken(self, event):
        """Handle the relation broken event."""
        # Trigger an event that our charm can react to.
        self.charm.on.redis_relation_updated.emit()

    @property
    def app_data(self) -> Optional[Dict[str, str]]:
        """Return the app databag of the redis relation, or None if unrelated."""
        relation = self.model.get_relation(self.relation_name)
        if not relation:
            return None
        return relation.data[relation.app]

    @property
    def relation_data(self) -> Optional[Dict[str, str]]:
        """Return the databag of an arbitrary remote unit, or None if none exist."""
        relation = self.model.get_relation(self.relation_name)
        if not relation or not relation.units:
            return None
        unit = next(iter(relation.units))
        return relation.data[unit]

    @property
    def url(self) -> Optional[str]:
        """Return the Redis URL, or None when no relation data is available."""
        if not (relation_data := self.relation_data):
            return None

        redis_host = relation_data.get("hostname")
        # Prefer the leader host advertised in the app databag when present.
        # FIX: the original wrapped this in try/except KeyError, but
        # dict.get() never raises KeyError, so the handler was dead code;
        # it also re-evaluated the app_data property instead of reusing the
        # walrus-bound value.
        if app_data := self.app_data:
            redis_host = app_data.get("leader-host", redis_host)
        redis_port = relation_data.get("port")
        return f"redis://{redis_host}:{redis_port}"


class RedisProvides(Object):
    """A class implementing the provides side of the redis relation."""

    def __init__(self, charm, port):
        super().__init__(charm, DEFAULT_REALTION_NAME)
        self.framework.observe(
            charm.on.redis_relation_changed, self._on_relation_changed
        )
        self._port = port
        self._charm = charm

    def _on_relation_changed(self, event):
        """Publish the master hostname and port into our unit databag."""
        event.relation.data[self.model.unit]["hostname"] = self._get_master_ip()
        event.relation.data[self.model.unit]["port"] = str(self._port)
        # The reactive Redis charm also exposes 'password'. When tackling
        # https://github.com/canonical/redis-k8s/issues/7 add 'password'
        # field so that it matches the exposed interface information from it.
        # event.relation.data[self.unit]['password'] = ''

    def _bind_address(self, event):
        """Convenience function for getting the unit address."""
        relation = self.model.get_relation(event.relation.name, event.relation.id)
        if address := self.model.get_binding(relation).network.bind_address:
            return address
        # NOTE(review): ops.Object does not appear to define `self.app`; this
        # fallback looks like it should be `self.model.app.name` -- confirm
        # before relying on it.
        return self.app.name

    def _get_master_ip(self) -> str:
        """Resolve the IP of the current redis master via DNS."""
        return socket.gethostbyname(self._charm.current_master)
(function () {
  const campaignsSearchComponent = document.querySelector(
    ".js-campaign-search"
  );
  if (campaignsSearchComponent) {
    if (campaignsSearchInput) {
      campaignsSearchInput.addEventListener("input", function (e) {
        const shouldOpen = e.target.value.trim().length > 0;
        openPanel(campaignsSearchComponent, shouldOpen);
      });
      campaignsSearchInput.addEventListener("focus", function () {
        closePanels([".js-products-search", ".js-authors-search"]);
      });
      setUpCampaignSearchField();
    }
  }
})();

// Sets up a query to the Salesforce Campaign DB via backend to search for campaigns.
// Calls the function that shows the search results
function setUpCampaignSearchField() {
  campaignsSearchInput.addEventListener(
    "input",
    debounce(async function () {
      const query = this.value;
      chipContainer.innerHTML = "Loading...";
      if (query.trim() !== "") {
        try {
          const response = await fetch(
            `/manager/salesforce_campaigns/${query}`,
            {
              method: "GET",
            }
          );
          if (response.ok) {
            const data = await response.json();
            updateSearchResults(data.campaigns);
          }
        } catch (error) {
          console.error("Error fetching campaign data:", error);
        }
      } else {
        updateSearchResults([]);
      }
    }, 700)
  );
}

// Parse the JSON list of selected chips stored in a hidden input, tolerating
// empty or corrupted values. FIX: updateSearchResults already guarded its
// JSON.parse, but addValueToHiddenInput/removeValueFromHiddenInput did not,
// so a malformed hidden field would throw; the guard is now shared here.
function readHiddenChips(hiddenField) {
  try {
    return JSON.parse(hiddenField.value || "[]");
  } catch (e) {
    console.error("Failed to parse selected chips JSON:", e);
    return [];
  }
}

function updateSearchResults(data) {
  const selectedChips = readHiddenChips(hidden_input_store);
  const selectedIds = new Set(selectedChips.map((chip) => chip.id));

  // Remove already selected campaigns from the results
  const filteredData = data.filter((campaign) => !selectedIds.has(campaign.id));

  chipContainer.innerHTML = "";
  if (filteredData.length === 0) {
    // NOTE(review): the original wrapper markup around this message was lost
    // when this file was exported; confirm the exact element/classes against
    // version control history.
    chipContainer.innerHTML = "<p>No results found...</p>";
  }
  filteredData.forEach((campaign) => {
    const chipClone = template.content.cloneNode(true);
    const chip = chipClone.querySelector(".p-chip.js-unselected");
    chip.querySelector(".js-content").textContent = campaign.name;
    chip.setAttribute("data-id", campaign.id);
    chip.setAttribute("data-name", campaign.name);
    chipContainer.appendChild(chip);
  });
}

// Drop the chip's entry (matched by data-id) from the hidden JSON store.
function removeValueFromHiddenInput(chip, hiddenField) {
  const remaining = readHiddenChips(hiddenField).filter(
    (item) => item.id !== chip.dataset.id
  );
  hiddenField.value = JSON.stringify(remaining);
}

function deselectCampaignChip(chip) {
  removeValueFromHiddenInput(chip, hidden_input_store);
  chip.remove();
}

// This function adds a value to a hidden input field.
// This is a bit different since it is made to add a json string.
// Returns true when the value was added, false when it already existed.
function addValueToHiddenInput(chip, hiddenField) {
  const chipDetails = {
    id: chip.dataset.id,
    name: chip.dataset.name,
  };

  const currentValues = readHiddenChips(hiddenField);

  const alreadyExists = currentValues.some(
    (item) => item.id === chipDetails.id
  );

  if (!alreadyExists) {
    currentValues.push(chipDetails);
    hiddenField.value = JSON.stringify(currentValues);
    return true; // Indicates that the value was added
  }
  return false; // Indicates that the value already exists
}

function selectCampaignChip(chip) {
  const unique = addValueToHiddenInput(chip, hidden_input_store);
  if (!unique) {
    return; // If the value already exists, do not proceed
  }

  const chipClone = template.content.cloneNode(true);

  const dismissButton = document.createElement("button");
  dismissButton.classList.add("p-chip__dismiss");
  dismissButton.textContent = "Dismiss";

  const newchip = chipClone.querySelector(".p-chip.js-unselected");
  newchip.appendChild(dismissButton);
  newchip.classList.remove("js-unselected");
  newchip.classList.add("js-selected");
  newchip.setAttribute("data-id", chip.dataset.id);
  newchip.setAttribute("data-name", chip.dataset.name);
  newchip.querySelector(".js-content").textContent = chip.dataset.name;
  selectedChipContainer.appendChild(newchip);

  // clear the entered value after selecting a chip
  campaignsSearchInput.value = "";
  campaignsSearchInput.focus();

  // Hide panel and reset the chips panel
  const chipsPanel = document.querySelector(
    ".js-campaign-search .js-chips-panel"
  );
  chipsPanel.setAttribute("aria-hidden", "true");
  chipContainer.innerHTML = "Loading...";
}

// Dispatch a click on a campaign chip to select or deselect it.
export default function handleCampaignChip(targetChip) {
  if (targetChip.classList.contains("js-unselected")) {
    selectCampaignChip(targetChip);
  } else if (targetChip.classList.contains("js-selected")) {
    deselectCampaignChip(targetChip);
  }
}
| -------------------------------------------------------------------------------- /static/js/src/authors-search.js: -------------------------------------------------------------------------------- 1 | import { 2 | openPanel, 3 | addValueToHiddenInput, 4 | removeValueFromHiddenInput, 5 | addValueToQueryParams, 6 | removeValueFromQueryParams, 7 | debounce, 8 | closePanels, 9 | } from "./main.js"; 10 | 11 | // Define whether we are in search and thus need to update query params 12 | const updateQueryParams = document.querySelector(".js-asset-search"); 13 | 14 | /* 15 | * Sets up the event listeners for opening the panel. 16 | * Also calls the specific setup function. 17 | **/ 18 | (function () { 19 | const authorsSearchComponent = document.querySelector(".js-authors-search"); 20 | if (authorsSearchComponent) { 21 | const authorsSearchInput = 22 | authorsSearchComponent.querySelector(".js-authors-input"); 23 | if (authorsSearchInput) { 24 | authorsSearchInput.addEventListener("input", function (e) { 25 | const shouldOpen = e.target.value.trim().length > 0; 26 | openPanel(authorsSearchComponent, shouldOpen); 27 | }); 28 | authorsSearchInput.addEventListener("focus", function () { 29 | closePanels([".js-products-search", ".js-campaign-search"]); 30 | }); 31 | setUpAuthorSearchField(); 32 | } 33 | } 34 | })(); 35 | 36 | /* 37 | * Sets up a query to the directory API to search for authors. 
38 | * Calls the function that shows the search results 39 | **/ 40 | function setUpAuthorSearchField() { 41 | const authorsInput = document.querySelector(".js-authors-input"); 42 | authorsInput.addEventListener( 43 | "input", 44 | debounce(async function () { 45 | const username = this.value; 46 | if (username.trim() !== "") { 47 | try { 48 | const response = await fetch(`/v1/get-users/${username}`, { 49 | method: "GET", 50 | }); 51 | if (response.ok) { 52 | const data = await response.json(); 53 | updateSearchResults(data); 54 | } 55 | } catch (error) { 56 | console.error("Error fetching user data:", error); 57 | } 58 | } else { 59 | updateSearchResults([]); 60 | } 61 | }, 300) 62 | ); 63 | } 64 | 65 | /* 66 | * Function that handles the click event on a product chip. 67 | * It checks if the chip is selected or unselected and calls the specific function. 68 | * @param {HTMLElement} targetChip - The chip that was clicked. 69 | **/ 70 | export default function handleAuthorsChip(targetChip) { 71 | const selectedAuthorChip = document.querySelector(".js-author-chip"); 72 | if (targetChip.classList.contains("js-unselected")) { 73 | selectAuthorChip(targetChip, selectedAuthorChip); 74 | } else { 75 | deselectAuthorChip(targetChip); 76 | } 77 | } 78 | 79 | /* 80 | * Adds and removes the author chips to the search panel. 81 | * As this comes from an API call, we can not setup the chips on load (like with products). 82 | * We have to create the chips on the fly. Limited to 10 results. 83 | * @param {Array} data - The data from the API call. 84 | **/ 85 | function updateSearchResults(data) { 86 | const chipContainer = document.querySelector(".js-authors-chip-container"); 87 | chipContainer.innerHTML = ""; 88 | if (data.length === 0) { 89 | chipContainer.innerHTML = "

No results found...

"; 90 | } 91 | const template = document.querySelector("#author-unselected-chip-template"); 92 | const limitedData = data.slice(0, 10); 93 | limitedData.forEach((author) => { 94 | const chipClone = template.content.cloneNode(true); 95 | const chip = chipClone.querySelector(".p-chip.js-unselected"); 96 | chip.querySelector(".js-content").textContent = 97 | author.firstName + " " + author.surname; 98 | chip.setAttribute("data-email", author.email); 99 | chip.setAttribute("data-firstName", author.firstName); 100 | chip.setAttribute("data-lastName", author.surname); 101 | chipContainer.appendChild(chip); 102 | }); 103 | } 104 | 105 | /* 106 | * When a chip is selected, it adds the chip to the search panel. 107 | * It also adds the chip value to three hidden inputs, as we have to pass the email, firstname and lastname. 108 | * Attaches a dismiss handler to the active chip. Limited to 1 selected chip. 109 | * @param {HTMLElement} chip - The chip to select. 110 | * @param {HTMLElement} activeChipContainer - The container to add the active chip. 
111 | **/ 112 | function selectAuthorChip(chip, selectedAuthorChip) { 113 | selectedAuthorChip.classList.remove("u-hide"); 114 | selectedAuthorChip.querySelector(".js-content").textContent = 115 | chip.dataset.firstname + " " + chip.dataset.lastname; 116 | selectedAuthorChip.setAttribute("data-email", chip.dataset.email); 117 | selectedAuthorChip.setAttribute("data-firstname", chip.dataset.firstname); 118 | selectedAuthorChip.setAttribute("data-lastname", chip.dataset.lastname); 119 | addValueToHiddenInput( 120 | chip.dataset.email, 121 | document.querySelector(".js-hidden-field-email"), 122 | (replace = true) 123 | ); 124 | addValueToHiddenInput( 125 | chip.dataset.firstname, 126 | document.querySelector(".js-hidden-field-firstname"), 127 | (replace = true) 128 | ); 129 | addValueToHiddenInput( 130 | chip.dataset.lastname, 131 | document.querySelector(".js-hidden-field-lastname"), 132 | (replace = true) 133 | ); 134 | // clear the entered value after selecting a chip 135 | const inputField = document.querySelector( 136 | ".js-authors-search .js-authors-input" 137 | ); 138 | inputField.value = ""; 139 | // close the chips panel 140 | const chipsPanel = document.querySelector( 141 | ".js-authors-search .js-chips-panel" 142 | ); 143 | chipsPanel.setAttribute("aria-hidden", "true"); 144 | if (updateQueryParams) { 145 | addValueToQueryParams("author_email", chip.dataset.email, (replace = true)); 146 | addValueToQueryParams( 147 | "author_firstname", 148 | chip.dataset.firstname, 149 | (replace = true) 150 | ); 151 | addValueToQueryParams( 152 | "author_lastname", 153 | chip.dataset.lastname, 154 | (replace = true) 155 | ); 156 | } 157 | } 158 | 159 | function deselectAuthorChip(chip) { 160 | chip.classList.add("u-hide"); 161 | removeValueFromHiddenInput( 162 | chip.dataset.email, 163 | document.querySelector(".js-hidden-field-email") 164 | ); 165 | removeValueFromHiddenInput( 166 | chip.dataset.firstname, 167 | document.querySelector(".js-hidden-field-firstname") 168 | 
); 169 | removeValueFromHiddenInput( 170 | chip.dataset.lastname, 171 | document.querySelector(".js-hidden-field-lastname") 172 | ); 173 | if (updateQueryParams) { 174 | removeValueFromQueryParams("author_email", chip.dataset.email); 175 | removeValueFromQueryParams("author_firstname", chip.dataset.firstname); 176 | removeValueFromQueryParams("author_lastname", chip.dataset.lastname); 177 | } 178 | chip.removeAttribute("data-email", chip.dataset.email); 179 | chip.removeAttribute("data-firstname", chip.dataset.firstname); 180 | chip.removeAttribute("data-lastname", chip.dataset.lastname); 181 | } 182 | -------------------------------------------------------------------------------- /tests/e2e/search_assets.spec.ts: -------------------------------------------------------------------------------- 1 | import { test, expect } from './fixtures/create-test-assets.fixture'; 2 | import { Page } from '@playwright/test'; 3 | 4 | // So that all the tests run in a single worker and fixtures are only run once 5 | test.describe.configure({ mode: "serial" }); 6 | 7 | 8 | // Helper functions 9 | async function navigateToSearchPage(page: Page) { 10 | await page.goto('/manager'); 11 | await expect(page.locator('h1.p-muted-heading')).toContainText( 12 | 'Search and filter' 13 | ); 14 | } 15 | 16 | async function searchByText(page: Page, searchText: string) { 17 | const searchInput = page.locator('input#tag'); 18 | await searchInput.fill(searchText); 19 | } 20 | 21 | async function selectAssetType(page: Page, assetType: string) { 22 | await page.locator('select[name="asset_type"]').selectOption(assetType); 23 | } 24 | 25 | async function selectCategory(page: Page, category: string) { 26 | await page.locator('select#category-select').selectOption(category); 27 | } 28 | 29 | async function submitSearch(page: Page) { 30 | await page.getByRole('button', { name: 'Search' }).click(); 31 | await page.waitForLoadState('networkidle'); 32 | } 33 | 34 | async function getSearchResultsCount(page: 
Page): Promise { 35 | const countText = await page.locator('#assets_count').textContent(); 36 | if (!countText) return 0; 37 | const match = countText.match(/(\d+)\s+asset/); 38 | return match ? parseInt(match[1], 10) : 0; 39 | } 40 | 41 | async function verifySearchResults(page: Page, shouldHaveResults: boolean) { 42 | if (shouldHaveResults) { 43 | await expect( 44 | page.locator('h3.p-heading--5').filter({ hasText: 'Search results' }) 45 | ).toBeVisible(); 46 | } else { 47 | await expect( 48 | page 49 | .locator('h3.p-heading--5') 50 | .filter({ hasText: 'No results. Please try another search.' }) 51 | ).toBeVisible(); 52 | } 53 | } 54 | 55 | test.describe('Asset search and filter', () => { 56 | // Use the testAssets fixture to ensure test data exists 57 | test('all search fields are visible', async ({ page, testAssets }) => { 58 | await navigateToSearchPage(page); 59 | 60 | // Verify all search elements are present 61 | await expect(page.locator('input#tag')).toBeVisible(); 62 | await expect(page.locator('select[name="asset_type"]')).toBeVisible(); 63 | await expect(page.locator('select#category-select')).toBeVisible(); 64 | await expect(page.locator('select[name="file_types"]')).toBeVisible(); 65 | await expect( 66 | page.getByRole('button', { name: 'Search' }) 67 | ).toBeVisible(); 68 | }); 69 | 70 | test('search by asset name', async ({ page, testAssets }) => { 71 | await navigateToSearchPage(page); 72 | 73 | // Search for a specific tag 74 | await searchByText(page, 'pre_test_asset'); 75 | await submitSearch(page); 76 | 77 | // Verify URL contains search parameter 78 | await expect(page).toHaveURL(/tag=pre_test_asset/); 79 | 80 | // Verify search results 81 | await verifySearchResults(page, true); 82 | 83 | // Verify we found the expected number of assets 84 | const count = await getSearchResultsCount(page); 85 | expect(count).toEqual(2); 86 | }); 87 | 88 | test('should filter by asset type', async ({ page, testAssets }) => { 89 | await 
navigateToSearchPage(page); 90 | // To limit the results to our testcase 91 | await searchByText(page, 'pre_test_asset'); 92 | 93 | // Select a specific asset type 94 | await selectAssetType(page, 'guide'); 95 | await submitSearch(page); 96 | 97 | // Verify URL contains asset_type parameter 98 | await expect(page).toHaveURL(/asset_type=guide/); 99 | 100 | // Verify the filter is applied 101 | const assetTypeSelect = page.locator('select[name="asset_type"]'); 102 | await expect(assetTypeSelect).toHaveValue('guide'); 103 | 104 | // Verify we found the expected number of assets 105 | const count = await getSearchResultsCount(page); 106 | expect(count).toEqual(1); 107 | 108 | }); 109 | 110 | test('should filter by category', async ({ page, testAssets }) => { 111 | await navigateToSearchPage(page); 112 | // To limit the results to our testcase 113 | await searchByText(page, 'pre_test_asset'); 114 | 115 | 116 | // select category 117 | const categorySelect = page.locator('select#category-select'); 118 | await selectCategory(page, 'partners') 119 | await submitSearch(page); 120 | 121 | // Verify the filter is applied 122 | await expect(categorySelect).toHaveValue('partners'); 123 | 124 | // Verify the count 125 | const count = await getSearchResultsCount(page); 126 | expect(count).toEqual(1); 127 | }) 128 | 129 | 130 | test('should clear filters and show all assets', async ({ page, testAssets }) => { 131 | await navigateToSearchPage(page); 132 | 133 | // First apply some filters 134 | await searchByText(page, 'test'); 135 | await selectAssetType(page, 'guide'); 136 | await submitSearch(page); 137 | 138 | // Clear filters by selecting "All" options 139 | await searchByText(page, ''); 140 | await selectAssetType(page, ''); 141 | await selectCategory(page, ''); 142 | await submitSearch(page); 143 | 144 | // Verify URL has no filter parameters 145 | await expect( 146 | page.locator('#main-content').filter({ hasText: 'Start a search' }) 147 | ).toBeVisible(); 148 | }); 149 | 
150 | test('should show no results message for non-existent search', async ({ 151 | page, 152 | testAssets, 153 | }) => { 154 | await navigateToSearchPage(page); 155 | 156 | // Search for something that do not exist 157 | await searchByText(page, 'veryveryveryvery_random_random_string_string_1234'); 158 | await submitSearch(page); 159 | 160 | // Verify no results message 161 | await verifySearchResults(page, false); 162 | }); 163 | 164 | test('should persist search filters after page reload', async ({ page, testAssets }) => { 165 | await navigateToSearchPage(page); 166 | 167 | // Apply filters 168 | await searchByText(page, 'test'); 169 | await selectAssetType(page, 'guide'); 170 | await submitSearch(page); 171 | 172 | // Get current URL 173 | const currentUrl = page.url(); 174 | 175 | // Reload page 176 | await page.reload(); 177 | 178 | // Verify filters are still applied 179 | await expect(page.locator('input#tag')).toHaveValue('test'); 180 | await expect(page.locator('select[name="asset_type"]')).toHaveValue( 181 | 'guide' 182 | ); 183 | await expect(page).toHaveURL(currentUrl); 184 | }); 185 | 186 | test('should navigate to asset details from search results', async ({ 187 | page, testAssets 188 | }) => { 189 | await navigateToSearchPage(page); 190 | 191 | // Search for assets 192 | await searchByText(page, 'test'); 193 | await submitSearch(page); 194 | 195 | // Check if there are any results 196 | const assetCards = page.locator('.p-asset-card--thumbnail'); 197 | 198 | await assetCards.first().click(); 199 | 200 | // Verify popup opened with asset content 201 | await expect(page.locator('#main-content h1')).toHaveText('Asset details'); 202 | }); 203 | 204 | }); -------------------------------------------------------------------------------- /templates/_layout.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | {% block title %}{% endblock %} 6 | | Assets manager 7 | 8 | 9 | 10 | 12 | 13 | 14 | 18 | 22 | 23 
| 24 | 56 |
57 | {% block content %}{% endblock %} 58 |
59 | 125 | 126 |
127 |
128 |

129 | 130 |
131 |
from datetime import datetime

from sqlalchemy import JSON, Boolean, Column, DateTime, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy.sql.schema import ForeignKey, Table

Base = declarative_base()


class DateTimeMixin(Base):
    """Abstract mixin adding automatic created/updated timestamp columns."""

    __abstract__ = True
    # Set once on INSERT
    created = Column(
        DateTime,
        nullable=False,
        default=datetime.now,
    )
    # Refreshed on every UPDATE via onupdate
    updated = Column(
        DateTime,
        nullable=False,
        default=datetime.now,
        onupdate=datetime.now,
    )


class Token(DateTimeMixin):
    """Named API token used to authenticate clients."""

    __tablename__ = "token"

    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False)
    token = Column(String, nullable=False)


# Many-to-many join tables linking Asset to its labelling entities.
asset_tag_association_table = Table(
    "asset_tag_association",
    Base.metadata,
    Column("asset_id", ForeignKey("asset.id"), primary_key=True),
    Column("tag_name", ForeignKey("tag.name"), primary_key=True),
)

asset_campaign_association_table = Table(
    "asset_campaign_association",
    Base.metadata,
    Column("asset_id", ForeignKey("asset.id"), primary_key=True),
    Column(
        "campaign_id", ForeignKey("salesforce_campaign.id"), primary_key=True
    ),
)

asset_product_association_table = Table(
    "asset_product_association",
    Base.metadata,
    Column("asset_id", ForeignKey("asset.id"), primary_key=True),
    Column("product_name", ForeignKey("product.name"), primary_key=True),
)

asset_category_association_table = Table(
    "asset_category_association",
    Base.metadata,
    Column("asset_id", ForeignKey("asset.id"), primary_key=True),
    Column("category_name", ForeignKey("category.name"), primary_key=True),
)


class Author(Base):
    """Asset author, identified by (first_name, last_name, email)."""

    __tablename__ = "author"

    first_name = Column(String, nullable=False, primary_key=True)
    last_name = Column(String, nullable=False, primary_key=True)
    email = Column(String, nullable=False, unique=True, primary_key=True)

    def __str__(self):
        return f"{self.first_name} {self.last_name}"


class Asset(DateTimeMixin):
    """A stored file plus its metadata, relations and deprecation flag."""

    __tablename__ = "asset"

    id = Column(Integer, primary_key=True)
    asset_type = Column(String, nullable=True)
    name = Column(String, nullable=True)
    google_drive_link = Column(String, nullable=True)
    language = Column(String, nullable=True)
    # Free-form metadata blob; merged verbatim into as_json()
    data = Column(JSON, nullable=False)
    file_path = Column(String, nullable=False)
    author_email = Column(String, ForeignKey("author.email"), nullable=True)
    author = relationship("Author")
    tags = relationship(
        "Tag", secondary=asset_tag_association_table, back_populates="assets"
    )
    products = relationship(
        "Product",
        secondary=asset_product_association_table,
        back_populates="assets",
    )
    salesforce_campaigns = relationship(
        "Salesforce_Campaign",
        secondary=asset_campaign_association_table,
        back_populates="assets",
    )
    categories = relationship(
        "Category",
        secondary=asset_category_association_table,
        back_populates="assets",
    )
    file_type = Column(String, nullable=True)
    deprecated = Column(Boolean, nullable=False, default=False)

    def as_json(self):
        """Serialize the asset (and its relations) to a JSON-safe dict.

        The raw ``data`` blob is spread first so the explicit keys below
        take precedence over any stale copies stored inside it.
        """
        return {
            **self.data,
            "created": self.created.strftime("%a, %d %b %Y %H:%M:%S"),
            "file_path": self.file_path,
            "tags": ", ".join([tag.name for tag in self.tags]),
            "products": ", ".join([product.name for product in self.products]),
            "categories": ", ".join(
                [category.name for category in self.categories]
            ),
            "deprecated": self.deprecated,
            "asset_type": self.asset_type,
            "name": self.name,
            "author": (
                {
                    "first_name": (
                        self.author.first_name if self.author else None
                    ),
                    "last_name": (
                        self.author.last_name if self.author else None
                    ),
                    "email": self.author.email if self.author else None,
                }
                if self.author
                else None
            ),
            "google_drive_link": self.google_drive_link,
            "salesforce_campaigns": [
                campaign.as_json() for campaign in self.salesforce_campaigns
            ],
            "language": self.language,
            "file_type": self.file_type,
        }


class Tag(DateTimeMixin):
    """Free-form label attached to assets."""

    __tablename__ = "tag"
    name = Column(String, primary_key=True)
    assets = relationship(
        "Asset", secondary=asset_tag_association_table, back_populates="tags"
    )

    def as_json(self):
        # Join asset *names*: joining the Asset objects themselves would
        # raise TypeError because str.join requires string items.
        return {
            "name": self.name,
            "assets": ", ".join(str(asset.name) for asset in self.assets),
        }


class Salesforce_Campaign(DateTimeMixin):
    """Salesforce campaign an asset may be associated with."""

    __tablename__ = "salesforce_campaign"
    name = Column(String, nullable=True)
    id = Column(String, primary_key=True)
    assets = relationship(
        "Asset",
        secondary=asset_campaign_association_table,
        back_populates="salesforce_campaigns",
    )

    def as_json(self):
        return {
            "name": self.name,
            "id": self.id,
        }


class Product(DateTimeMixin):
    """Product an asset belongs to."""

    __tablename__ = "product"
    name = Column(String, primary_key=True)
    assets = relationship(
        "Asset",
        secondary=asset_product_association_table,
        back_populates="products",
    )

    def as_json(self):
        # Join asset *names* — see Tag.as_json for rationale.
        return {
            "name": self.name,
            "assets": ", ".join(str(asset.name) for asset in self.assets),
        }


class Category(DateTimeMixin):
    """Category an asset is filed under."""

    __tablename__ = "category"
    name = Column(String, primary_key=True)
    assets = relationship(
        "Asset",
        secondary=asset_category_association_table,
        back_populates="categories",
    )

    def as_json(self):
        # Join asset *names* — see Tag.as_json for rationale.
        return {
            "name": self.name,
            "assets": ", ".join(str(asset.name) for asset in self.assets),
        }


class Redirect(DateTimeMixin):
    """Path-to-URL redirect served by the app (301 when permanent)."""

    __tablename__ = "redirect"

    id = Column(Integer, primary_key=True)
    redirect_path = Column(String, nullable=False)
    target_url = Column(String, nullable=False)
    permanent = Column(Boolean, nullable=False)

    def as_json(self):
        return {
            "redirect_path": self.redirect_path,
            "target_url": self.target_url,
            "permanent": self.permanent,
        }
document.addEventListener("click", function (e) {
  const targetChip = e.target.closest(".p-chip");
  // Handle chip clicks
  if (targetChip) {
    e.preventDefault();
    if (targetChip.closest(".js-products-search")) {
      handleProductsChip(targetChip);
      return;
    }
    if (targetChip.closest(".js-authors-search")) {
      handleAuthorsChip(targetChip);
      return;
    }
    if (targetChip.closest(".js-campaign-search")) {
      handleCampaignChip(targetChip);
      return;
    }
    // Handle clicks outside the search and filter
  } else if (!e.target.closest(".js-active-search")) {
    openPanel(document.querySelector(".js-active-search"), false);
  }
});

/*
 * Generic function to show and hide the chips selection panel.
 * @param {HTMLElement} searchComponent - The whole panel container.
 * @param {Boolean} opening - Whether the panel is being opened. Default is false.
 **/
export function openPanel(searchComponent, opening = false) {
  // NOTE: the default must be the boolean false — the previous string
  // default ("false") was truthy and silently opened the panel.
  if (searchComponent) {
    const searchContainer = searchComponent.querySelector(
      ".p-search-and-filter__search-container"
    );
    const panel = searchComponent.querySelector(".p-search-and-filter__panel");
    if (panel && searchContainer) {
      if (opening) {
        panel.setAttribute("aria-hidden", "false");
        searchContainer.setAttribute("aria-expanded", "true");
        searchComponent.classList.add("js-active-search");
      } else {
        panel.setAttribute("aria-hidden", "true");
        searchContainer.setAttribute("aria-expanded", "false");
        searchComponent.classList.remove("js-active-search");
      }
    }
  }
}

/*
 * Generic function to close all open panels.
 * @param {Array} searchInputs - An array of search input selectors to close.
 **/
export function closePanels(searchInputs = []) {
  searchInputs.forEach((searchInput) => {
    const searchComponent = document.querySelector(searchInput);
    if (searchComponent) openPanel(searchComponent, false);
  });
}

/*
 * Generic function to add the value of a selected chip, to the value of a hidden input.
 * We have to do this as the chips will not be submitted with the form.
 * @param {String} value - The value of the chip.
 * @param {HTMLElement} input - The hidden input to store the values.
 * @param {Boolean} replace - Whether to replace the current value or not. Default is false.
 **/
export function addValueToHiddenInput(value, input, replace = false) {
  let selectedChips = replace ? [] : input.value.split(",").filter(Boolean);
  if (!selectedChips.includes(value)) {
    selectedChips.push(value);
  }
  input.setAttribute("value", selectedChips.join(","));
}

/*
 * Generic function to remove the value of a selected chip, from the value of a hidden input.
 * @param {String} value - The value of the chip.
 * @param {HTMLElement} input - The hidden input to store the values.
 **/
export function removeValueFromHiddenInput(value, input) {
  let selectedChips = input.value.split(",").filter(Boolean);
  selectedChips = selectedChips.filter((id) => id !== value);
  input.setAttribute("value", selectedChips.join(","));
}

/*
 * Function to add a value to a query parameter.
 * @param {String} key - The query parameter key.
 * @param {String} value - The value to add.
 * @param {Boolean} replace - Whether to replace the current value or not. Default is false.
 **/
export function addValueToQueryParams(key, value, replace = false) {
  const url = new URL(window.location);
  if (replace) {
    url.searchParams.set(key, value);
  } else {
    const currentValues = url.searchParams.get(key)?.split(",") || [];
    if (!currentValues.includes(value)) {
      currentValues.push(value);
    }
    url.searchParams.set(key, currentValues.join(","));
  }
  // replaceState so back/forward history is not polluted per chip click
  window.history.replaceState({}, "", url);
}

/*
 * Function to remove a value from a query parameter.
 * @param {String} key - The query parameter key.
 * @param {String} value - The value to remove.
 **/
export function removeValueFromQueryParams(key, value) {
  const url = new URL(window.location);
  let currentValues = url.searchParams.get(key)?.split(",") || [];
  currentValues = currentValues.filter((v) => v !== value);
  if (currentValues.length) {
    url.searchParams.set(key, currentValues.join(","));
  } else {
    // Drop the parameter entirely once its last value is removed
    url.searchParams.delete(key);
  }
  window.history.replaceState({}, "", url);
}

/**
 * Sanitizes a given input string by performing the following transformations:
 * 1. Trims leading and trailing whitespace.
 * 2. Converts all characters to lowercase.
 * 3. Replaces spaces with hyphens.
 * 4. Removes all characters that are not lowercase letters, digits, or hyphens.
 * 5. Removes leading and trailing hyphens.
 *
 * @param {string} input - The input string to sanitize.
 * @returns {string} - The sanitized string.
 */
export function sanitizeInput(input) {
  return input
    .trim()
    .toLowerCase()
    .replace(/\s+/g, "-")
    .replace(/[^a-z0-9-]/g, "")
    .replace(/^-+|-+$/g, "");
}

/**
 * Replace a submit button's label with a spinner and disable it,
 * preserving the button's rendered size so the layout does not shift.
 * @param {HTMLButtonElement} submitButton - The button being processed.
 */
function attachLoadingSpinner(submitButton) {
  let spinnerClassName = "p-icon--spinner u-animation--spin";
  if (submitButton.classList.contains("p-button--positive")) {
    spinnerClassName += " is-light";
  }

  const spinnerIcon = document.createElement("i");
  spinnerIcon.className = spinnerClassName;
  const buttonRect = submitButton.getBoundingClientRect();
  submitButton.style.width = buttonRect.width + "px";
  submitButton.style.height = buttonRect.height + "px";
  submitButton.classList.add("is-processing");
  submitButton.disabled = true;
  submitButton.innerText = "";
  submitButton.appendChild(spinnerIcon);
}

// Show a loading spinner on every submit button when any form is submitted.
document.querySelectorAll("form").forEach((form) => {
  form.addEventListener("submit", (event) => {
    const submitButtons = form.querySelectorAll("button[type='submit']");
    submitButtons?.forEach((submitButton) => {
      attachLoadingSpinner(submitButton);
    });
  });
});
import os
from io import BytesIO
from uuid import uuid4

from more_itertools import unique_everseen
from PIL import Image as PILImage
from scour.scour import scourString
from sh import jpegtran, optipng
from wand.image import Image as WandImage

from webapp.lib.file_helpers import guess_mime
from webapp.lib.python_helpers import shared_items


class ImageProcessingError(Exception):
    """Raised when an image cannot be converted or transformed.

    Carries an HTTP-style status code and a loggable message.
    """

    def __init__(self, status_code, log_message):
        self.status_code = status_code
        self.log_message = log_message
        super().__init__(log_message)


class ImageProcessor:
    """Reformat, optimize and transform image bytes per an options dict."""

    # Parameters each "op" accepts (see _missing_param_error)
    operation_parameters = {
        "region": ["rect"],
        "rotate": ["deg"],
        "resize": ["w", "h", "max-width", "max-height"],
    }

    def __init__(self, image_contents, options=None):
        """Store raw image bytes and the requested options.

        `options` defaults to None instead of `{}`: a mutable default
        would be shared across every instance and could leak state.
        """
        self.data = image_contents
        self.options = {} if options is None else options

    def process(self):
        """
        Reformat, optimize or transform an image
        """

        target_format = self.options.get("fmt")
        optimize = self.options.get("opt") is not None

        # Reformat images
        converted = self.convert(target_format)

        # Do transformations
        transformed = self.transform()

        # Optimize images
        if converted or transformed or optimize:
            self.optimize(allow_svg_errors=converted or transformed)

        return target_format

    def optimize(self, allow_svg_errors=False):
        """
        Optimize SVGs, PNGs or Jpegs
        Unfortunately, this needs to write temporary files
        by making use of the /tmp directory
        """

        mimetype = guess_mime(self.data)
        tmp_filename = "/tmp/" + uuid4().hex

        if mimetype == "image/svg+xml":
            try:
                self.data = str(scourString(self.data))
            except Exception:
                # SVG contains bad data, we can't optimise it
                pass

        elif mimetype == "image/jpeg":
            self.data = jpegtran("-optimize", _in=self.data).stdout

        elif mimetype == "image/png":
            # Clean up the temp file even if optipng fails
            try:
                with open(tmp_filename, "wb") as tmp:
                    tmp.write(self.data)
                optipng(tmp_filename)
                with open(tmp_filename, "rb") as tmp:
                    self.data = tmp.read()
            finally:
                if os.path.exists(tmp_filename):
                    os.remove(tmp_filename)

    def convert(self, target_format):
        """Convert to `target_format` (png/jpg/gif); return True on change.

        Raises ImageProcessingError(400) for unsupported formats.
        """
        if not target_format:
            return False
        if target_format in ["png", "jpg", "gif"]:
            # Do conversion with wand
            with WandImage(blob=self.data) as image:
                self.data = image.make_blob(target_format)
            return True
        else:
            raise ImageProcessingError(
                400, log_message="Cannot convert to '{}'".format(target_format)
            )

    def transform(self):
        """
        Perform transformations on an image
        Using Pillow
        The self.options follow the provided API

        Return True if transformation happened
        """

        # Operations (region, rotate, resize...)
        # ---
        mimetype = guess_mime(self.data)

        if mimetype in ["image/png", "image/jpeg", "image/gif"]:
            operation = self.options.get("op")

            # Presence of any resize parameter implies op=resize
            if not operation and shared_items(
                self.options, self.operation_parameters["resize"]
            ):
                operation = "resize"

            operations = operation.split(",") if operation else []
            # Remove duplicate operations from list
            operations = unique_everseen(operations)

            for operation in operations:
                if operation or "q" in self.options:
                    try:
                        self._pillow_operation(operation)
                    except (
                        IndexError,
                        ValueError,
                        TypeError,
                        AttributeError,
                    ):
                        # Bad/missing parameters surface as a 400
                        self._missing_param_error(operation)

            if operation:
                return True

    # Private helper methods
    # ===

    def _pillow_operation(self, operation):
        """
        Use Pillow to transform an image
        """

        image = PILImage.open(BytesIO(self.data))

        if operation == "region":
            rect = tuple(map(int, self.options.get("rect").split(",")))
            image = image.crop(rect)

        elif operation == "rotate":
            # Negated so positive "deg" rotates clockwise
            deg = -1 * int(self.options.get("deg"))
            expand = self.options.get("expand")
            image = image.rotate(deg, expand=expand)
        elif operation == "resize":
            max_width = self.options.get("max-width")
            max_height = self.options.get("max-height")

            resize_width = self.options.get("w")
            resize_height = self.options.get("h")

            # Make sure widths and heights are integers
            if resize_width:
                resize_width = int(resize_width)
            if resize_height:
                resize_height = int(resize_height)
            if max_width:
                max_width = int(max_width)
            if max_height:
                max_height = int(max_height)

            # Image size management
            image_width, image_height = image.size

            # Don't allow expanding of images
            if (resize_width and resize_width > image_width) or (
                resize_height and resize_height > image_height
            ):
                expand_message = (
                    "Resize error: Maximum dimensions for this image "
                    "are {0}px wide by {1}px high."
                ).format(image_width, image_height)

                raise ImageProcessingError(400, log_message=expand_message)

            # Process max_width and max_height
            if not resize_width and max_width:
                if max_width < image_width:
                    resize_width = max_width

            if not resize_height and max_height:
                if max_height < image_height:
                    resize_height = max_height

            # Conserve the image ratio
            if resize_height and not resize_width:
                image_ratio = image_height / resize_height
                resize_width = int(image_width / image_ratio)
            elif not resize_height and resize_width:
                image_ratio = image_width / resize_width
                resize_height = int(image_height / image_ratio)

            if resize_height or resize_width:
                image = image.resize((resize_width, resize_height))

        image_format = image.format or "PNG"
        with BytesIO() as output:
            image.save(
                output, format=image_format, quality=self.options.get("q")
            )
            self.data = output.getvalue()

    def _missing_param_error(self, operation):
        """Raise a 400 error listing the parameters `operation` accepts."""
        message = (
            "Invalid image operation. '{0}' accepts: {1}. "
            "See https://github.com/agschwender/pilbox for more detail."
        ).format(operation, ", ".join(self.operation_parameters[operation]))

        raise ImageProcessingError(400, log_message=message)
--contains ${{ github.event.inputs.commit }} | grep -c main) 59 | if [ $is_main -eq 0 ]; then 60 | echo "Commit is not from the main branch" 61 | echo -e "> [!WARNING]\n> Commit is not from the main branch" >> $GITHUB_STEP_SUMMARY 62 | exit 1 63 | fi 64 | fi 65 | else 66 | echo "ref=${GITHUB_SHA}" >> $GITHUB_OUTPUT 67 | fi 68 | 69 | rockcraft-pack: 70 | runs-on: ubuntu-22.04 71 | needs: commit-check 72 | outputs: 73 | image_url: ${{ steps.image_url.outputs.image_url }} 74 | steps: 75 | - name: Checkout code 76 | uses: actions/checkout@v4 77 | with: 78 | fetch-depth: 0 79 | ref: ${{ needs.commit-check.outputs.REF }} 80 | 81 | - name: Install rockcraft 82 | run: | 83 | sudo snap install --classic rockcraft 84 | 85 | - name: Pack project 86 | if: steps.rock-cache.outputs.cache-hit != 'true' 87 | id: rockcraft-pack 88 | run: | 89 | sudo rockcraft pack --destructive-mode -v 90 | 91 | - name: Upload rock file 92 | uses: actions/upload-artifact@v4 93 | with: 94 | name: ${{ env.ROCK_BUILD_NAME }} 95 | path: ./*.rock 96 | 97 | - name: Set image URL 98 | id: image_url 99 | run: | 100 | IMAGE_URL=ghcr.io/${{ github.repository }}:$(date +%s)-${GITHUB_SHA:0:7} 101 | echo -e "> [!NOTE]\n> Rockcraft OCI image: $IMAGE_URL" >> $GITHUB_STEP_SUMMARY 102 | echo $DOCKERHUB_MIRROR 103 | echo "ghcr_image_url=$IMAGE_URL" >> $GITHUB_OUTPUT 104 | echo "image_url=$IMAGE_URL" >> $GITHUB_OUTPUT 105 | - name: Push to GHCR 106 | run: | 107 | echo "Pushing to GHCR.." 
108 | rockcraft.skopeo --insecure-policy copy oci-archive:$(ls *.rock) docker://${{ steps.image_url.outputs.ghcr_image_url }} --dest-creds ${{ github.repository_owner }}:${{ secrets.GITHUB_TOKEN }} 109 | 110 | charmcraft-pack: 111 | runs-on: ubuntu-22.04 112 | needs: commit-check 113 | steps: 114 | - name: Checkout code 115 | uses: actions/checkout@v4 116 | with: 117 | fetch-depth: 0 118 | ref: ${{ needs.commit-check.outputs.REF }} 119 | 120 | # This pack task takes a while with less likely to change files 121 | - name: Cache charm 122 | id: charm-cache 123 | uses: actions/cache@v4 124 | with: 125 | path: ./*.charm 126 | key: ${{ runner.os}}-charmcraft-${{ hashFiles('./charm/**') }} 127 | 128 | - name: Install charmcraft 129 | if: steps.charm-cache.outputs.cache-hit != 'true' 130 | run: | 131 | sudo snap install --classic charmcraft 132 | 133 | - name: Pack charm 134 | if: steps.charm-cache.outputs.cache-hit != 'true' 135 | id: charmcraft-pack 136 | run: | 137 | # --project-dir option doesn't work with destructive-mode 138 | cd ./charm 139 | sudo charmcraft pack -v --destructive-mode 140 | mv *.charm ../ 141 | 142 | - name: Upload charm file 143 | uses: actions/upload-artifact@v4 144 | id: charm-upload 145 | with: 146 | name: ${{ env.CHARM_BUILD_NAME }} 147 | path: ./*.charm 148 | 149 | - name: Set charm URL 150 | id: charm_url 151 | run: | 152 | if [ -f ${{ steps.charm-cache.outputs.cache-hit }} ]; then 153 | echo -e "> [!NOTE]\n> Charm pack file (cached): ${{ steps.charm-upload.outputs.artifact-url }}" >> $GITHUB_STEP_SUMMARY 154 | else 155 | echo -e "> [!NOTE]\n> Charm pack file: ${{ steps.charm-upload.outputs.artifact-url }}" >> $GITHUB_STEP_SUMMARY 156 | fi 157 | 158 | deploy: 159 | needs: [commit-check, rockcraft-pack, charmcraft-pack] 160 | runs-on: 161 | [self-hosted, self-hosted-linux-amd64-jammy-private-endpoint-medium] 162 | environment: 163 | name: ${{ inputs.environment || 'Staging' }} 164 | url: ${{ vars.WEBSITE_URL }} 165 | steps: 166 | - name: 
Checkout code 167 | uses: actions/checkout@v4 168 | with: 169 | fetch-depth: 0 170 | ref: ${{ needs.commit-check.outputs.REF }} 171 | 172 | - name: Install juju 173 | run: | 174 | sudo snap install --channel=${{ vars.JUJU_VERSION }} juju 175 | sudo snap install --classic vault 176 | 177 | - name: Running env 178 | run: | 179 | echo "${{ env.DEPLOYMENT_ENV }}" 180 | 181 | - name: Download Charm Artifact 182 | uses: actions/download-artifact@v4 183 | with: 184 | name: ${{ env.CHARM_BUILD_NAME }} 185 | 186 | - name: Configure Vault and Juju 187 | env: 188 | VAULT_ADDR: ${{ vars.VAULT_ADDR }} 189 | VAULT_SECRET_PATH_ROLE: ${{ vars.VAULT_SECRET_PATH_ROLE }} 190 | VAULT_SECRET_PATH_COMMON: ${{ vars.VAULT_SECRET_PATH_COMMON }} 191 | JUJU_CONTROLLER: ${{ vars.JUJU_CONTROLLER }} 192 | run: | 193 | export TF_VAR_login_approle_role_id=${{ secrets.VAULT_APPROLE_ROLE_ID }} 194 | export TF_VAR_login_approle_secret_id=${{ secrets.VAULT_APPROLE_SECRET_ID }} 195 | export VAULT_TOKEN=$(vault write -f -field=token auth/approle/login role_id=${TF_VAR_login_approle_role_id} secret_id=${TF_VAR_login_approle_secret_id}) 196 | mkdir -p ~/.local/share/juju 197 | vault read -field=controller_config "${VAULT_SECRET_PATH_COMMON}/controllers/$JUJU_CONTROLLER" | base64 -d > ~/.local/share/juju/controllers.yaml 198 | USERNAME=$(vault read -field=username "${VAULT_SECRET_PATH_ROLE}/juju") 199 | PASSWORD=$(vault read -field=password "${VAULT_SECRET_PATH_ROLE}/juju") 200 | printf "controllers:\n $JUJU_CONTROLLER:\n user: %s\n password: %s\n" "$USERNAME" "$PASSWORD" > ~/.local/share/juju/accounts.yaml 201 | 202 | - name: Deploy charm 203 | env: 204 | JUJU_MODEL: ${{ vars.JUJU_MODEL }} 205 | run: | 206 | export JUJU_MODEL=admin/$JUJU_MODEL 207 | echo "Deploying to $JUJU_MODEL" 208 | echo "{\"ImageName\": \"${{ needs.rockcraft-pack.outputs.image_url }}\", \"username\":\"${{ secrets.GHCR_READ_USERNAME }}\", \"password\":\"${{ secrets.GHCR_READ_TOKEN }}\"}" > ./image_metadata.json 209 | # run the deploy 
command 210 | # in a fresh environment first 211 | # juju deploy ./assets-manager_ubuntu-22.04-amd64.charm --resource flask-app-image=./image_metadata.json assets-manager 212 | juju refresh assets-manager --path=./assets-manager_ubuntu-22.04-amd64.charm --resource flask-app-image=./image_metadata.json 213 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # assets.![ubuntu](https://assets.ubuntu.com/v1/9f61b97f-logo-ubuntu.svg "Ubuntu").com codebase 2 | 3 | The assets server manages the assets (mainly images and PDFs) for different Canonical and Ubuntu websites. 4 | 5 | It has 2 main parts: 6 | 7 | - https://assets.ubuntu.com/v1 : The API, which is a flask app that provides a RESTful API for storing and serving binary assets over HTTP 8 | - https://assets.ubuntu.com/manager : The web interface, which is jinja2 templates that provide a web interface for managing the assets server 9 | 10 | ## Summary 11 | 12 | - [assets..com codebase](#assetscom-codebase) 13 | - [Summary](#summary) 14 | - [Creating assets using the web interface](#creating-assets-using-the-web-interface) 15 | - [Transforming images](#transforming-images) 16 | - [Using the RestAPI](#using-the-restapi) 17 | - [Authentication](#authentication) 18 | - [Managing tokens](#managing-tokens) 19 | - [Generating a new token](#generating-a-new-token) 20 | - [Removing a token](#removing-a-token) 21 | - [Listing all the tokens](#listing-all-the-tokens) 22 | - [Managing assets](#managing-assets) 23 | - [Uploading assets](#uploading-assets) 24 | - [Deleting assets](#deleting-assets) 25 | - [Listing assets](#listing-assets) 26 | - [Pagination](#pagination) 27 | - [Managing redirects](#managing-redirects) 28 | - [Creating redirects](#creating-redirects) 29 | - [Updating redirects](#updating-redirects) 30 | - [Deleting redirects](#deleting-redirects) 31 | - [Security](#security) 32 | - 
[Caching](#caching) 33 | 34 | ## Creating assets using the web interface 35 | 36 | You will need to login with an SSO account that is in the `canonical-content-people` team. 37 | 38 | You can then create assets using the web interface at https://assets.ubuntu.com/manager/create. 39 | 40 | ## Transforming images 41 | 42 | When getting an image asset, the asset can be transformed using the `op` (operation) option. 43 | 44 | You can manually specify one of these operations along with their corresponding options: 45 | 46 | - `region`: 47 | - `rect`: The region as x,y,w,h; x,y: top-left position, w,h: width/height of region 48 | - `resize`: 49 | - `w`: Width 50 | - `h`: Height 51 | - `max-width` 52 | - `max-height`: 53 | - `rotate`: 54 | - `deg`: Degrees to rotate the image 55 | 56 | The default operation is `resize` and can be used without setting `op`, e.g.: 57 | 58 | 59 | 60 | ``` 61 | https://assets.ubuntu.com/v1/4d7a830e-logo-ubuntuone.png?w=30 62 | ``` 63 | 64 | Or you can use another feature, like `region` e.g.: 65 | 66 | ``` 67 | https://assets.ubuntu.com/v1/4d7a830e-logo-ubuntuone.png?op=region&rect=0,0,50,50 68 | ``` 69 | 70 | ## Using the RestAPI 71 | 72 | Creating a new asset can be done using the [assets manager](https://assets.ubuntu.com/manager); however, for advanced options such as image transformation or creating redirects, you can use the API directly. 73 | 74 | ### Authentication 75 | 76 | The API uses a token based authentication system. You can specify your token in 3 different ways: 77 | 78 | - As a `token` query parameter: e.g.
`https://assets.ubuntu.com/v1?token=1234` 79 | - As an `Authorization` HTTP header: 80 | 81 | ``` 82 | Authorization: "token 1234" 83 | ``` 84 | 85 | - In case of a POST request, along with the request body: 86 | 87 | ``` 88 | key: 1234 89 | ``` 90 | 91 | ### Managing tokens 92 | 93 | #### Generating a new token 94 | 95 | You can generate a new token by running the following command: 96 | 97 | ```bash 98 | curl -X POST --data name=token-name https://assets.ubuntu.com/v1/tokens?token={your-existing-token} 99 | ``` 100 | 101 | #### Removing a token 102 | 103 | You can remove a token by running the following command: 104 | 105 | ```bash 106 | curl -X DELETE https://assets.ubuntu.com/v1/tokens/{token-to-be-deleted}?token={your-token} 107 | ``` 108 | 109 | #### Listing all the tokens 110 | 111 | You can list all the tokens by running the following command: 112 | 113 | ```bash 114 | curl https://assets.ubuntu.com/v1/tokens?token={your-token} 115 | ``` 116 | 117 | ### Managing assets 118 | 119 | #### Uploading assets 120 | 121 | You can upload assets with the cryptically named [`upload-assets`](https://github.com/canonical/canonicalwebteam.upload-assets) tool. This can be installed with `snap install upload-assets` or `sudo pip3 install upload-assets`. 122 | 123 | It's usually best to store your API key in your RC file (e.g. `~/.bashrc`) by adding a line like `export UPLOAD_ASSETS_API_TOKEN=xxxxxx`. (You then probably need to `source ~/.bashrc` to load the environment variable).
124 | 125 | You can then upload assets: 126 | 127 | ```bash 128 | $ upload-asset --api-domain localhost:8017 MY-IMAGE.png 129 | {'url': u'http://localhost:8017/v1/xxxxx-MY-IMAGE.png', 'image': True, 'created': u'Tue Sep 27 16:13:22 2016', 'file_path': u'xxxxx-MY-IMAGE.png', 'tags': u''} 130 | ``` 131 | 132 | You can also directly upload assets using the API: 133 | 134 | ```bash 135 | echo "asset=$(base64 -w 0 MY-IMAGE.png)" | \ 136 | curl --request POST --data @- --data "friendly-name=MY-IMAGE.png" "https://assets.ubuntu.com/v1/?token={your-api-token}" 137 | ``` 138 | 139 | In the example above, we used an option called `friendly-name` which is an option among others: 140 | 141 | - `asset`: (**required**) The base64 encoded asset 142 | - `friendly-name`: (optional) The name of the asset to be included in the asset's URL 143 | - `url-path`: (optional, default: the SHA1 of the `asset`) The path of the asset as it will be served over HTTP 144 | - `optimize`: (optional, default: `false`) Whether to optimize the image, only works for images of type PNG, JPEG and SVG, this option is ignored for other types of assets 145 | - `tags`: (optional, default: `[]`) A comma separated list of tags to be associated with the asset 146 | 147 | #### Deleting assets 148 | 149 | **Warning: Please read this before deleting anything** 150 | 151 | _The assets server serves all assets with a `cache-control` header instructing all clients to cache the asset for a year. This is to get the best possible performance on our websites. You need to bear this in mind before deleting any assets. 
If the asset has been cached by any clients - from our own Content Cache, to other intermediary caches, to users' browsers - you may struggle to get the assets deleted from those caches._ 152 | 153 | _For this reason it's best to find ways around needing to regularly delete assets._ 154 | 155 | _This is also why the [assets manager](https://assets.ubuntu.com/manager) doesn't support deleting assets through the interface._ 156 | 157 | To delete an asset, simply use `curl` with the `DELETE` method: 158 | 159 | ```bash 160 | curl --request DELETE "https://assets.ubuntu.com/v1/{asset-filename}?token={your-api-token}" 161 | ``` 162 | 163 | #### Listing assets 164 | 165 | You can list all the assets by running the following command: 166 | 167 | ```bash 168 | curl https://assets.ubuntu.com/v1/?token={your-api-token} 169 | ``` 170 | 171 | You can also filter the assets by: 172 | 173 | - `tag`: Filter the assets by a specific tag or by a query string to filter the assets by filename, e.g. `tag=ubuntu` will return all the assets with `ubuntu` in their filename 174 | - `type`: The type of the asset, e.g. `type=png` will return all the assets with the `png` extension 175 | - `include_deprecated`: Whether or not to include deprecated assets. Default is `false` 176 | 177 | ##### Pagination 178 | 179 | By default, the API will return the first 20 assets. You can specify the number of assets to return by using the `per_page` query parameter, e.g. `per_page=100`. 180 | 181 | You can also specify the page number by using the `page` query parameter, e.g. `page=2` (this will return the second page of assets). 182 | 183 | ### Managing redirects 184 | 185 | Since assets are cached for a very long time, if you know you will want to update the version of an asset behind a specific URL, this should be achieved by setting up a (non-permanent) redirect to the asset. 186 | 187 | E.g., Every day we upload the latest Server Guide to e.g.
`https://assets.ubuntu.com/v1/25868d7a-ubuntu-server-guide-2022-07-11.pdf`, and then we change the `https://assets.ubuntu.com/ubuntu-server-guide` URL to redirect to this latest version. 188 | 189 | #### Creating redirects 190 | 191 | You can set up a new redirect with `curl --data redirect_path={the-path-to-redirect} --data target_url={the-redirect-target} https://assets.ubuntu.com/v1/redirects?token={your-api-token}`. 192 | 193 | E.g. this would create the `https://assets.ubuntu.com/ubuntu-server-guide` redirect mentioned above: 194 | 195 | ```bash 196 | $ curl --data redirect_path=ubuntu-server-guide --data target_url=https://assets.ubuntu.com/v1/25868d7a-ubuntu-server-guide-2022-07-11.pdf "https://assets.ubuntu.com/v1/redirects?token=xxxxxxxxxxx" 197 | { 198 | "permanent": false, 199 | "message": "Redirect created", 200 | "target_url": "https://assets.ubuntu.com/v1/25868d7a-ubuntu-server-guide-2022-07-11.pdf", 201 | "redirect_path": "ubuntu-server-guide" 202 | } 203 | ``` 204 | 205 | #### Updating redirects 206 | 207 | Once a redirect already exists, you can use the `PUT` method to update it using `curl --request PUT --data target_url={target-url} https://assets.ubuntu.com/v1/redirects/{redirect-path}?token={your-api-token}`. 208 | 209 | E.g. (following the above example): 210 | 211 | ```bash 212 | $ curl --request PUT --data target_url=https://assets.ubuntu.com/v1/fe8d7514-ubuntu-server-guide-2022-07-13.pdf "https://assets.ubuntu.com/v1/redirects/ubuntu-server-guide?token=xxxxxxxxx" 213 | { 214 | "target_url": "https://assets.ubuntu.com/v1/fe8d7514-ubuntu-server-guide-2022-07-13.pdf", 215 | "permanent": false, 216 | "redirect_path": "ubuntu-server-guide" 217 | } 218 | ``` 219 | 220 | #### Deleting redirects 221 | 222 | Deleting redirects is similarly simple, with `curl --request DELETE https://assets.ubuntu.com/v1/redirects/{redirect-path}?token={your-api-token}`, e.g. 
`curl --request DELETE https://assets.ubuntu.com/v1/redirects/ubuntu-server-guide?token=xxxxxxxxx`. 223 | 224 | ## Security 225 | 226 | As the server only uses a basic token for authentication, it is paramount that in a production setting, the API functions are only accessed over HTTPS, to keep the API token secret. For this reason, when `DEBUG==false` the server will force a redirect to HTTPS for all API calls. 227 | 228 | ## Caching 229 | 230 | The server is intended to be run in production behind a caching layer (e.g. [squid cache](http://www.squid-cache.org/)), and as the server stores assets by default with a unique hash corresponding to the file's contents (e.g. `a2f56da4-some-image.png`), the cache expiration time should be as long as possible to maximise performance. 231 | --------------------------------------------------------------------------------