├── lib
├── external_api
│ ├── __init__.py
│ ├── group_requests.py
│ └── event_requests.py
├── import_mp
│ └── __init__.py
├── __init__.py
├── utility
│ ├── __init__.py
│ └── enum.py
├── data_handling
│ ├── __init__.py
│ ├── namespaces.py
│ ├── flavor_matcher.py
│ ├── elements.py
│ ├── transform_acl.py
│ ├── asset_util.py
│ ├── errors.py
│ ├── types.py
│ ├── compare_assets.py
│ ├── parse_acl.py
│ └── get_assets_from_oaipmh.py
├── input_output
│ ├── __init__.py
│ ├── get_dummy_series_dc.py
│ ├── read_file.py
│ ├── log_writer.py
│ └── unique_names.py
├── rest_requests
│ ├── __init__.py
│ ├── group_requests.py
│ ├── search_requests.py
│ ├── get_response_content.py
│ ├── tenant_requests.py
│ ├── assetmanager_requests.py
│ ├── stream_security_requests.py
│ ├── oaipmh_requests.py
│ └── file_requests.py
├── args
│ ├── __init__.py
│ ├── args_parser.py
│ ├── args_error.py
│ ├── digest_login.py
│ └── url_builder.py
└── dummy_series_dc.xml
├── delete_dead_distribution_artefacts
├── __init__.py
├── util
│ ├── __init__.py
│ └── count.py
├── input
│ ├── __init__.py
│ ├── delete_question.py
│ ├── present_dead_distribution_artefacts.py
│ └── parse_args.py
└── delete_artefacts
│ ├── __init__.py
│ ├── check_distribution_artefacts.py
│ ├── delete_dead_distribution_artefacts.py
│ └── find_distribution_artefacts.py
├── xml-to-yaml-workflow
├── requierments.txt
└── readme.md
├── lti-test-consumer
├── requirements.txt
├── readme.md
└── lticonsumer.py
├── dev-scripts
├── ocmk
├── ocstop
├── ocrebuild
├── ocread
├── oclog
├── ocreindex
├── ocindexdrop
├── ocpr
├── ocpicbr
├── download-release
├── ocbuild
├── ocstart
└── mpr
├── opencast-migration-scripts
├── requirements.txt
├── .gitignore
├── opencast
│ ├── __init__.py
│ ├── client.py
│ ├── info.py
│ ├── assetmanager.py
│ ├── archive.py
│ ├── staticfile.py
│ ├── search.py
│ ├── captureadmin.py
│ ├── scheduler.py
│ └── series.py
├── create-archive.sh
├── tests
│ ├── test_assetmanager.py
│ ├── test_search.py
│ └── test_externalapi.py
├── LICENSE
├── README.md
├── check_search_episodes_in_archive.py
└── check_search_episodes_in_assetmanager.py
├── simulate-ingests
├── __init__.py
├── mediapackages
│ ├── templates
│ │ ├── org.opencastproject.capture.agent.properties
│ │ ├── series.xml
│ │ ├── episode.xml
│ │ └── manifest.xml
│ └── tracks
│ │ └── README.md
└── README.md
├── ingest
├── video.webm
├── ingest-single-request.sh
├── readme.md
├── ingest.py
└── ingest-addtrack.sh
├── recover_backup
├── __init__.py
├── input
│ ├── __init__.py
│ ├── check_recovery_start.py
│ └── parse_args.py
├── recover
│ └── __init__.py
└── workflow_example.xml
├── start-workflow-from-archive
├── requirments.txt
└── README.md
├── check_data_integrity
├── fix
│ ├── io
│ │ ├── __init__.py
│ │ ├── print_events_to_be_fixed.py
│ │ ├── input.py
│ │ └── results_parser.py
│ ├── __init__.py
│ ├── fixer
│ │ ├── __init__.py
│ │ ├── fixer.py
│ │ └── series_dc_of_event_fixer.py
│ └── workflows
│ │ ├── __init__.py
│ │ ├── workflow.py
│ │ └── workflow_definitions
│ │ └── add_or_update_series_dc.xml
├── check
│ ├── check_data
│ │ ├── __init__.py
│ │ └── malformed.py
│ ├── __init__.py
│ └── args
│ │ ├── __init__.py
│ │ └── check_settings.py
└── __init__.py
├── multilang-subtitles
├── webvtt-example.mp4
├── sample-en.vtt
├── sample-de.vtt
└── ingest-subtitles.sh
├── export-workflow-statistics
├── demo_plot.png
├── requirements.txt
├── plot.gp
├── config.py
├── tenant_plot.gp
└── README.md
├── export-videos
├── requirements.txt
├── config.py
└── parse_args.py
├── create-groups
├── config.py
├── requirements.txt
├── main.py
└── README.md
├── import-to-other-tenant
├── requirements.txt
├── config.py
└── parse_args.py
├── find-and-delete-empty-series
├── requirements.txt
├── config.py
├── README.md
├── delete_empty_series.py
└── find_empty_series.py
├── start-workflow-from-archive-multitenancy
├── config.py
├── requirements.txt
├── README.md
└── main.py
├── create-series
├── readme.md
├── create-series.sh
└── create-series-ocomplex.sh
├── csv-export
├── readme.md
├── getEvents.sh
└── events2csv.py
├── visualize-workflow
├── get-workflow.sh
├── viewSvg.html
├── plot-workflow.gnuplot
├── prep-workflow.py
├── plot-mulit-workflow-3D.gnuplot
└── readme.md
├── generate-maven-notices
├── print-licenses.sh
└── parse-licenses.py
├── multi-node-test-setup
├── readme.md
└── multi-node-test-setup.sh
├── readme.md
├── release-management
├── translation-progress
│ ├── translation-progress.sh
│ └── readme.md
└── create-changelog
│ ├── changelog.py
│ └── readme.md
├── whisper-AI-exec-script
├── whisper-getvtt.sh
└── whisper-generate.sh
├── fake-hls
├── readme.md
├── nginx.conf
└── fake-hls.sh
├── schedule-now
├── readme.md
└── schedule-now.sh
├── migrate-events-to-another-opencast
├── import.yaml
└── readme.md
├── auto-restart-services
└── Sanitizer.py
├── LICENSE
└── .travis.yml
/lib/external_api/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/lib/import_mp/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/delete_dead_distribution_artefacts/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/xml-to-yaml-workflow/requierments.txt:
--------------------------------------------------------------------------------
1 | PyYAML>=6.0
--------------------------------------------------------------------------------
/lti-test-consumer/requirements.txt:
--------------------------------------------------------------------------------
1 | flask
2 | lti
3 |
--------------------------------------------------------------------------------
/dev-scripts/ocmk:
--------------------------------------------------------------------------------
#!/bin/bash

# Build a single Opencast module and everything that depends on it.
# $1: module path passed to Maven's -pl; all remaining args are forwarded to ocbuild.
ocbuild "${@:2}" -am -pl $1
4 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/requirements.txt:
--------------------------------------------------------------------------------
1 | httpx
2 | lxml
3 | python-magic
--------------------------------------------------------------------------------
/simulate-ingests/__init__.py:
--------------------------------------------------------------------------------
1 | __author__ = 'andrew wilson, james perrin'
2 |
--------------------------------------------------------------------------------
/lib/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains modules used by multiple scripts.
3 | """
--------------------------------------------------------------------------------
/ingest/video.webm:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opencast/helper-scripts/HEAD/ingest/video.webm
--------------------------------------------------------------------------------
/recover_backup/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains the recovery script.
3 | """
4 |
--------------------------------------------------------------------------------
/start-workflow-from-archive/requirments.txt:
--------------------------------------------------------------------------------
1 | argparse>=1.4.0
2 | requests>=2.19.0
3 |
4 |
--------------------------------------------------------------------------------
/lib/utility/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains modules providing utility functionality.
3 | """
4 |
--------------------------------------------------------------------------------
/check_data_integrity/fix/io/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains modules used for any kind of io.
3 | """
--------------------------------------------------------------------------------
/lib/data_handling/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains all shared modules handling opencast data.
3 | """
--------------------------------------------------------------------------------
/lib/input_output/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains modules that deal with input and output of data.
3 | """
--------------------------------------------------------------------------------
/opencast-migration-scripts/.gitignore:
--------------------------------------------------------------------------------
1 | venv
2 | .idea
3 | .vscode
4 | *.pyc
5 | downloads
6 | theme_id_mappings_*.json
--------------------------------------------------------------------------------
/delete_dead_distribution_artefacts/util/__init__.py:
--------------------------------------------------------------------------------
1 | """ This package contains modules with utility functionality"""
2 |
--------------------------------------------------------------------------------
/check_data_integrity/check/check_data/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains modules for checking the data integrity.
3 | """
--------------------------------------------------------------------------------
/delete_dead_distribution_artefacts/input/__init__.py:
--------------------------------------------------------------------------------
1 | """ This package contains modules that read in and process user input"""
2 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/opencast/__init__.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# Local working directory for downloaded files (matches the 'downloads' entry in .gitignore).
tmp_dir = 'downloads/'
--------------------------------------------------------------------------------
/check_data_integrity/fix/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains modules used only by the script fixing data inconsistencies.
3 | """
--------------------------------------------------------------------------------
/check_data_integrity/fix/fixer/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains modules that can fix specific errors in inconsistent data.
3 | """
--------------------------------------------------------------------------------
/dev-scripts/ocstop:
--------------------------------------------------------------------------------
#!/bin/bash

# Stop the development dependency containers (the all-SQL docker-compose setup).
docker compose -f docs/scripts/devel-dependency-containers/docker-compose-all-sql.yml down
4 |
--------------------------------------------------------------------------------
/multilang-subtitles/webvtt-example.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opencast/helper-scripts/HEAD/multilang-subtitles/webvtt-example.mp4
--------------------------------------------------------------------------------
/check_data_integrity/check/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains modules used only by the script checking for data inconsistencies.
3 | """
--------------------------------------------------------------------------------
/export-workflow-statistics/demo_plot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opencast/helper-scripts/HEAD/export-workflow-statistics/demo_plot.png
--------------------------------------------------------------------------------
/recover_backup/input/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains modules that provide input functionality for the recovery script.
3 | """
4 |
--------------------------------------------------------------------------------
/lib/rest_requests/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains modules that deal with making requests to the REST API of an opencast instance.
3 | """
--------------------------------------------------------------------------------
/check_data_integrity/check/args/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains modules used for parsing and representing the arguments of the check script.
3 | """
--------------------------------------------------------------------------------
/delete_dead_distribution_artefacts/delete_artefacts/__init__.py:
--------------------------------------------------------------------------------
1 | """This package contains modules that contain the main functionality of this script."""
2 |
--------------------------------------------------------------------------------
/export-videos/requirements.txt:
--------------------------------------------------------------------------------
1 | certifi>=2020.6.20
2 | chardet>=3.0.4
3 | idna>=2.10
4 | requests>=2.24.0
5 | requests-toolbelt>=0.9.1
6 | urllib3>=1.25.11
7 |
--------------------------------------------------------------------------------
/recover_backup/recover/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains modules that provide necessary functionality for the recovery of media packages.
3 | """
4 |
--------------------------------------------------------------------------------
/create-groups/config.py:
--------------------------------------------------------------------------------
1 | admin_url = "http://localhost:8080" # CHANGE ME
2 | digest_user = "opencast_system_account"
3 | digest_pw = "CHANGE_ME" # CHANGE ME
4 |
--------------------------------------------------------------------------------
/import-to-other-tenant/requirements.txt:
--------------------------------------------------------------------------------
1 | certifi>=2020.6.20
2 | chardet>=3.0.4
3 | idna>=2.10
4 | requests>=2.24.0
5 | requests-toolbelt>=0.9.1
6 | urllib3>=1.25.10
7 |
--------------------------------------------------------------------------------
/export-workflow-statistics/requirements.txt:
--------------------------------------------------------------------------------
1 | certifi>=2020.11.8
2 | chardet>=3.0.4
3 | idna>=2.10
4 | requests>=2.25.0
5 | requests-toolbelt>=0.9.1
6 | urllib3>=1.26.2
7 |
--------------------------------------------------------------------------------
/lib/args/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains modules responsible for arguments parsing and wrapping those arguments into objects for easier
3 | usability.
4 | """
5 |
--------------------------------------------------------------------------------
/find-and-delete-empty-series/requirements.txt:
--------------------------------------------------------------------------------
1 | certifi==2021.10.8
2 | charset-normalizer==2.0.12
3 | idna==3.3
4 | requests==2.27.1
5 | requests-toolbelt==0.9.1
6 | urllib3==1.26.9
--------------------------------------------------------------------------------
/create-groups/requirements.txt:
--------------------------------------------------------------------------------
1 | certifi==2021.5.30
2 | chardet==4.0.0
3 | idna==2.10
4 | pkg-resources==0.0.0
5 | requests==2.25.1
6 | requests-toolbelt==0.9.1
7 | urllib3==1.26.5
8 |
--------------------------------------------------------------------------------
/dev-scripts/ocrebuild:
--------------------------------------------------------------------------------
#!/bin/bash

# Rebuild the current module, then move its artifact into the assembled
# distribution's system repository so the running Opencast picks it up.
ocbuild clean install && mv -f target/*`basename $(pwd)`* ../../build/opencast-dist-*/system/org/opencastproject/*`basename $(pwd)`/*/
4 |
--------------------------------------------------------------------------------
/find-and-delete-empty-series/config.py:
--------------------------------------------------------------------------------
1 | # Configuration
2 |
3 | admin_url = "https://develop.opencast.org"
4 | digest_user = "opencast_system_account"
5 | digest_pw = "CHANGE_ME"
6 |
--------------------------------------------------------------------------------
/start-workflow-from-archive-multitenancy/config.py:
--------------------------------------------------------------------------------
1 | url_pattern = "https://{}.opencast.com" # CHANGE ME
2 | digest_user = "opencast_system_account"
3 | digest_pw = "CHANGE_ME" # CHANGE ME
4 |
--------------------------------------------------------------------------------
/dev-scripts/ocread:
--------------------------------------------------------------------------------
#!/bin/bash

# View an Opencast log file with less.
# With an argument: the log of the named distribution (e.g. admin, worker);
# without: the first matching distribution's log.
if [ $# -ge 1 ]; then
# NOTE(review): '.build' here vs './build' in the else-branch — one of the two
# is probably a typo; confirm which build directory is actually used.
less .build/opencast-dist-$1-*/data/log/opencast.log
else
less ./build/opencast-dist*/data/log/opencast.log
fi
8 |
--------------------------------------------------------------------------------
/dev-scripts/oclog:
--------------------------------------------------------------------------------
#!/bin/bash

# Follow (tail -f) an Opencast log file.
# With an argument: the log of the named distribution (e.g. admin, worker);
# without: the first matching distribution's log.
if [ $# -ge 1 ]; then
# NOTE(review): '.build' here vs './build' in the else-branch — one of the two
# is probably a typo; confirm which build directory is actually used.
tail -f .build/opencast-dist-$1-*/data/log/opencast.log
else
tail -f ./build/opencast-dist*/data/log/opencast.log
fi
8 |
--------------------------------------------------------------------------------
/start-workflow-from-archive-multitenancy/requirements.txt:
--------------------------------------------------------------------------------
1 | certifi==2021.5.30
2 | chardet==4.0.0
3 | idna==2.10
4 | pkg-resources==0.0.0
5 | requests==2.25.1
6 | requests-toolbelt==0.9.1
7 | urllib3==1.26.5
8 |
--------------------------------------------------------------------------------
/check_data_integrity/fix/workflows/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains the module responsible for parsing the definitions of the workflows used for fixing as well as
3 | the definitions themselves in a subdirectory.
4 | """
--------------------------------------------------------------------------------
/create-series/readme.md:
--------------------------------------------------------------------------------
1 | Create Opencast Series
2 | ======================
3 |
4 | This script creates a number of test series. The configuration is placed on
5 | top of the file.
6 |
7 | Usage:
8 |
9 | bash create-series.sh
10 |
--------------------------------------------------------------------------------
/lib/dummy_series_dc.xml:
--------------------------------------------------------------------------------
1 |
2 | *
3 | Dummy Series
4 |
--------------------------------------------------------------------------------
/multilang-subtitles/sample-en.vtt:
--------------------------------------------------------------------------------
1 | WEBVTT
2 |
3 | 00:00:00.500 --> 00:00:02.000
4 | The Web is always changing
5 |
6 | 00:00:02.500 --> 00:00:04.300
7 | and the way we access it is changing
8 |
9 | 00:00:04.700 --> 00:00:08.300
10 | The END
11 |
--------------------------------------------------------------------------------
/check_data_integrity/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This package contains the modules for both the script checking for data inconsistencies in an opencast
3 | instance as well as for the script for fixing some of those inconsistencies and modules shared by both scripts.
4 | """
--------------------------------------------------------------------------------
/multilang-subtitles/sample-de.vtt:
--------------------------------------------------------------------------------
1 | WEBVTT
2 |
3 | 00:00:00.500 --> 00:00:02.000
4 | Das Web verändert sich stetig
5 |
6 | 00:00:02.500 --> 00:00:04.300
 7 | und wie wir auf das Web zugreifen ändert sich
8 |
9 | 00:00:04.700 --> 00:00:08.300
10 | Das ENDE.
11 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/create-archive.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

set -e

# Package the repository's current HEAD as a dated tar.gz, with all files
# placed under a fixed 'opencast-migration-scripts/' prefix directory.
OUT_FILENAME="opencast-migration-scripts-$(date +%F).tar.gz"
git archive --prefix "opencast-migration-scripts/" -o "$OUT_FILENAME" HEAD
echo "Created archive: $OUT_FILENAME"
8 |
--------------------------------------------------------------------------------
/export-workflow-statistics/plot.gp:
--------------------------------------------------------------------------------
1 | set terminal pdf
2 | set output 'plot.pdf'
3 | set title "Processed Recordings per Week"
4 | set xlabel "Calendar Week"
5 | set ylabel "Amount of Processed Recordings per Week"
6 | plot 'workflow-statistics.dat' w lines notitle lt rgb "red"
7 |
--------------------------------------------------------------------------------
/csv-export/readme.md:
--------------------------------------------------------------------------------
1 | Export Events from Opencast to a csv file
2 | =========================================
3 |
4 | 1. Edit credentials in `getEvents.sh`
5 | 2. Run `./getEvents.sh` to get a JSON file which contains all events
6 | 3. Run `./events2csv.py` to generate the CSV file
7 | 4. Open `events.csv`
8 |
--------------------------------------------------------------------------------
/dev-scripts/ocreindex:
--------------------------------------------------------------------------------
#!/bin/bash

# Trigger an Opencast index rebuild via its REST endpoint.
# Optional $1 overrides the default rebuild URL.
TARGET="http://localhost:8080/index/rebuild"

if [ $# -ge 1 ]; then
TARGET="$1"
fi

echo "Reindexing at $TARGET"

# Digest authentication with the system account; CHANGE_ME is a placeholder password.
curl -v -f --digest -u 'opencast_system_account:CHANGE_ME' -H 'X-Requested-Auth: Digest' -X POST $TARGET
12 |
--------------------------------------------------------------------------------
/visualize-workflow/get-workflow.sh:
--------------------------------------------------------------------------------
#!/bin/sh
set -ue

# Download one workflow instance as JSON from an Opencast node.
# $1: workflow instance id. Output file: workflow-<id>.json.
LOGIN=opencast_system_account:CHANGE_ME
HOST=https://develop.opencast.org
ID=$1

curl -f --digest -u "${LOGIN}" -H "X-Requested-Auth: Digest" \
-o "workflow-${ID}.json" "${HOST}/workflow/instance/${ID}.json"
10 |
--------------------------------------------------------------------------------
/create-series/create-series.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Create a single test series on a local Opencast instance using basic auth.
URL=http://localhost:8080
USER=admin
PASSWORD=opencast

# The series gets a title, a read ACL for ROLE_USER, and a creator.
curl -u "${USER}:${PASSWORD}" "${URL}/series/" \
-F title="I 🖤 Opencast" \
-F acl='{"acl": {"ace": [{"allow": true,"role": "ROLE_USER","action": "read"}]}}' \
-F creator=lk
11 |
--------------------------------------------------------------------------------
/lib/utility/enum.py:
--------------------------------------------------------------------------------
1 | """
2 | This module makes enum definitions easier.
3 | """
4 |
5 |
def enum(**named_values):
    """
    Create an enum-like class whose attributes are the given named values.

    :param named_values: name/value pairs to expose as class attributes
    :return: enum
    :rtype: Enum
    """
    enum_class = type('Enum', (), {})
    for name, value in named_values.items():
        setattr(enum_class, name, value)
    return enum_class
15 |
--------------------------------------------------------------------------------
/generate-maven-notices/print-licenses.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# Generate Maven third-party license reports, then print the parsed license
# list per module via parse-licenses.py.
echo "generating html files"
mvn license:third-party-report > /dev/null
echo ""

# Each generated dependencies.html belongs to one module; print the module
# name (third path component) followed by its licenses.
for f in $(find . -name 'dependencies.html' | sort); do
echo $f | cut -d '/' -f 3
python parse-licenses.py $f
echo ""
echo ""
done
13 |
--------------------------------------------------------------------------------
/lib/args/args_parser.py:
--------------------------------------------------------------------------------
1 | import argparse
2 |
3 |
def get_args_parser():
    """
    Build an argument parser whose 'required arguments' group is listed
    before the default optional group in the help output.

    :return: tuple of (parser, optional argument group, required argument group)
    """
    parser = argparse.ArgumentParser()
    # Temporarily pop the default (optional) group so the required group is
    # registered first, then re-append it to restore normal behavior.
    optional_group = parser._action_groups.pop()
    required_group = parser.add_argument_group('required arguments')
    parser._action_groups.append(optional_group)
    return parser, optional_group, required_group
11 |
--------------------------------------------------------------------------------
/check_data_integrity/check/check_data/malformed.py:
--------------------------------------------------------------------------------
1 | """
2 | This module defines a Malformed object to replace an element or asset if any errors were encountered while checking it.
3 | It contains the error messages for the encountered errors.
4 | """
5 |
6 | from collections import namedtuple
7 |
# Stand-in record for an element/asset whose check failed; holds the error messages.
Malformed = namedtuple('Malformed', 'errors')
9 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/opencast/client.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | from httpx import Client
4 |
5 |
class OpencastClient(Client):
    """httpx client bound to a fixed Opencast host via its base URL."""

    def __init__(self, hostname, *args, **kwargs):
        # Forward everything to httpx.Client, pinning base_url to the host.
        super().__init__(*args, base_url=hostname, **kwargs)
13 |
14 |
--------------------------------------------------------------------------------
/import-to-other-tenant/config.py:
--------------------------------------------------------------------------------
1 | # Configuration
2 |
3 | # urls
4 | source_url = "https://tenant1.opencast.com"
5 | target_url = "https://tenant2.opencast.com"
6 |
7 | # digest login
8 | digest_user = "opencast_system_account"
9 | digest_pw = "CHANGE_ME"
10 |
11 | # workflow config
12 | workflow_id = "reimport-workflow"
13 | workflow_config = {"autopublish": "false"}
14 |
--------------------------------------------------------------------------------
/dev-scripts/ocindexdrop:
--------------------------------------------------------------------------------
#!/bin/bash

# Query all Opencast indexes from an Opensearch host and drop each of them.
if [ $# -ne 1 ]; then
echo "Usage: $0 host -> Queries the list of OC indexes direct from Opensearch and then drops them"
exit 1
fi
set -uxe
# Default Opensearch credentials; adjust if the instance is secured differently.
USER=admin
PASS=admin

HOST=$1

# List indices, keep names starting with 'opencast_', and delete each one.
curl -s -u "$USER:$PASS" "$HOST/_cat/indices?v" | grep -o "opencast_\w*" | while read name
do
curl -u "$USER:$PASS" -X DELETE $HOST/$name
done
17 |
--------------------------------------------------------------------------------
/simulate-ingests/mediapackages/templates/org.opencastproject.capture.agent.properties:
--------------------------------------------------------------------------------
1 | event.location={{ location }}
2 | event.series={{ series_id }}
3 | event.title={{ title }}
4 | org.opencastproject.workflow.definition={{ workflow }}
5 | org.opencastproject.workflow.config.emailAddresses={{ email }}
6 | org.opencastproject.workflow.config.publishToOaiPmh={{ publish }}
7 | org.opencastproject.workflow.config.editRecording={{ edit }}
8 |
--------------------------------------------------------------------------------
/lib/args/args_error.py:
--------------------------------------------------------------------------------
def args_error(parser, error=None):
    """
    Report an invocation problem: print the optional error message followed by
    the script's usage string, then terminate via the parser.

    :param parser: The arguments parser
    :type parser: argparse.ArgumentParser
    :param error: Optional error message
    :type error: str
    """
    if error:
        print(error)

    parser.print_usage()
    parser.exit()
16 |
--------------------------------------------------------------------------------
/csv-export/getEvents.sh:
--------------------------------------------------------------------------------
#!/bin/sh

# Fetch all events within PERIOD from the Opencast admin node and store them
# as JSON (consumed afterwards by events2csv.py).
DIGEST_LOGIN='opencast_system_account:CHANGE_ME'
PERIOD='2018-04-01T00:00:00.000Z/2018-11-11T00:00:00.000Z'
FILENAME='events.json'
OPENCASTADMINNODE=https://stable.opencast.org

set -eu

# Filter events whose technical start date falls within PERIOD.
filter="technical_start:${PERIOD}"
curl -f --digest -o "${FILENAME}" -u "${DIGEST_LOGIN}" \
-H "X-Requested-Auth: Digest" \
"${OPENCASTADMINNODE}/admin-ng/event/events.json?filter=${filter}"
14 |
--------------------------------------------------------------------------------
/lib/data_handling/namespaces.py:
--------------------------------------------------------------------------------
1 | """
2 | Namespaces for parsing the XML returned by the opencast REST api.
3 | """
4 |
# Mapping of namespace prefixes to URIs, passed to XML lookups when parsing
# Opencast REST API responses.
namespaces = {'inlined': 'http://www.opencastproject.org/oai/matterhorn-inlined',
              'dc': 'http://www.opencastproject.org/xsd/1.0/dublincore/',
              'dcterms': 'http://purl.org/dc/terms/',
              'xsi': 'http://www.w3.org/2001/XMLSchema-instance',
              'acl': 'urn:oasis:names:tc:xacml:2.0:policy:schema:os'}
10 |
--------------------------------------------------------------------------------
/multi-node-test-setup/readme.md:
--------------------------------------------------------------------------------
1 | Build and Setup Local Multi Node Cluster
2 | ========================================
3 |
 4 | This script builds Opencast, extracts the admin, presentation and worker
5 | distribution and configures all three to run on the same machine on different
6 | ports as a local distributed setup for testing.
7 |
8 | The setup requires ActiveMQ and a PostgreSQL database as provided by:
9 |
10 | docs/scripts/devel-dependency-containers/docker-compose-postgresql.yml
11 |
--------------------------------------------------------------------------------
/simulate-ingests/README.md:
--------------------------------------------------------------------------------
1 | # Opencast Simulate Ingests
 2 | Simulate ingesting of recordings using multiple mediapackage profiles.
3 |
4 | ## Requirements
5 |
6 | * depends on python-jinja2, python-requests
 7 | * appropriate presentation and presenter tracks that match the mediapackage profiles,
8 | see mediapackages/tracks/README.md for suggestions on how to locate suitable data.
9 |
10 | ## Configuration
11 |
12 | Copy simulation-example.properties to simulation.properties and edit according to the comments.
--------------------------------------------------------------------------------
/lib/args/digest_login.py:
--------------------------------------------------------------------------------
1 | import getpass
2 | from collections import namedtuple
3 |
# Credentials used for Opencast digest authentication.
DigestLogin = namedtuple('DigestLogin', 'user password')
5 |
6 |
def read_digest_password():
    """
    Prompt for the digest password, repeating until a non-empty value is entered.

    :return: The entered password
    :rtype: str
    """
    prompt = 'No password provided, enter digest password: '
    while True:
        digest_pw = getpass.getpass(prompt)
        if digest_pw:
            return digest_pw
        # After the first attempt, switch to the retry prompt.
        prompt = 'Password cannot be empty, please try again: '
17 |
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
1 | Helper Scripts For Opencast
2 | ===========================
3 |
 4 | [![Build Status](https://travis-ci.org/opencast/helper-scripts.svg?branch=master)
 6 | ](https://travis-ci.org/opencast/helper-scripts)
7 |
8 | This repository contains several small helper scripts to automate certain tasks
9 | mostly for *testing* purposes. Please note that these scripts are not part of
10 | Opencast and may be doing weird and/or insecure things. Please read the scripts
11 | and their documentation carefully before using them!
12 |
--------------------------------------------------------------------------------
/visualize-workflow/viewSvg.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | 2D Workflow Operation Time SVG
8 |
9 |
10 |
11 | 3D Workflows Operation Times SVG
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/simulate-ingests/mediapackages/templates/series.xml:
--------------------------------------------------------------------------------
1 |
2 | {{ title }}
3 | {{ created }}
4 | {{ source }}
5 | false
6 | {{ identifier }}
7 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/opencast/info.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 |
def get_me_json(opencast_client, **kwargs):
    """
    Fetch information about the current user from /info/me.json.

    :param opencast_client: Client used to talk to Opencast
    :param kwargs: Additional keyword arguments forwarded to the client's get()
    :return: Parsed JSON response
    """
    response = opencast_client.get('/info/me.json', **kwargs)
    response.raise_for_status()
    return response.json()
9 |
10 |
def get_version(opencast_client, **kwargs):
    """
    Fetch the versions of Opencast's own bundles from /sysinfo/bundles/version.

    :param opencast_client: Client used to talk to Opencast
    :param kwargs: Additional keyword arguments forwarded to the client's get()
    :return: Parsed JSON response
    """
    url = '/sysinfo/bundles/version'
    # Bug fix: the 'prefix' query parameter was built but never passed to the
    # request, so the filter for Opencast bundles had no effect. Forward it,
    # unless the caller explicitly supplied their own params.
    kwargs.setdefault('params', {'prefix': 'opencast'})
    response = opencast_client.get(url, **kwargs)
    response.raise_for_status()
    return response.json()
19 |
--------------------------------------------------------------------------------
/check_data_integrity/fix/workflows/workflow.py:
--------------------------------------------------------------------------------
1 | import os
2 |
# Directory (below fix/workflows) that holds the workflow definition files.
workflow_dir = "workflow_definitions"


def get_workflow_definition(workflow):
    """
    Get workflow definition as string from file.

    :param workflow: Requested workflow
    :type workflow: Workflow
    :return: Workflow definition
    :rtype: str
    """
    definition_path = os.path.abspath(
        os.path.join("fix", "workflows", workflow_dir, workflow))
    # newline='' keeps the file's line endings untouched.
    with open(definition_path, 'r', newline='') as definition_file:
        return definition_file.read()
22 |
--------------------------------------------------------------------------------
/simulate-ingests/mediapackages/templates/episode.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | {{ created }}
4 | {{ identifier }}
5 | {{ title }}
6 | {{ is_part_of }}
7 | {{ source }}
8 | {{ location }}
9 |
--------------------------------------------------------------------------------
/dev-scripts/ocpr:
--------------------------------------------------------------------------------
#!/bin/bash
# List open, non-draft pull requests against the currently checked-out branch
# of the opencast/* repository derived from the local git remotes.

# Derive the "opencast/<repo>" slug from the fetch remotes (ignoring ghsa forks).
repo=$(git remote -v | grep -oe 'opencast/.*' | grep -v 'ghsa' | grep '(fetch)' | sort | uniq | sed 's/.git .*//g' | sed 's/ (fetch)//g')

#If it's tuesday
if [ 2 == `date +"%u"` ]; then
    # Cutoff is today 15:15 UTC; the leading '-' in the filter negates it,
    # i.e. PRs created at or after the cutoff are excluded.
    dateParams="-created:\>=`date -d "today + 15 hours + 15 minutes UTC" +"%Y-%m-%dT%H:%M:%S%:z"`"
else
    # On other days, last Tuesday 15:15 UTC is the cutoff.
    dateParams="-created:\>=`date -d "last tuesday + 15 hours + 15 minutes UTC" +"%Y-%m-%dT%H:%M:%S%:z"`"
fi

# Open, non-draft PRs against the current branch, newest first, skipping
# anything labeled has-conflicts.
gh search prs --repo=$repo --base `git rev-parse --abbrev-ref HEAD` --limit 120 --draft=false --state=open --sort=created -- $dateParams -label:has-conflicts
13 |
--------------------------------------------------------------------------------
/export-workflow-statistics/config.py:
--------------------------------------------------------------------------------
# Configuration

# Set this to your global admin node
url = "https://develop.opencast.org"

# If you have multiple tenants use something like
# url_pattern = "https://{}.example.org"
# otherwise, url_pattern should be the same as the url variable above
url_pattern = "https://develop.opencast.org"

# Digest authentication credentials of the Opencast system account.
digest_user = "opencast_system_account"
digest_pw = "CHANGE_ME"

# Ids of the workflow definitions to include in the statistics.
workflow_definitions = ["import", "fast"]

# Tenants to skip (empty list = include all tenants).
exclude_tenants = []

# Reporting period; presumably inclusive ISO dates — verify against the export script.
start_date = "2020-01-06"
end_date = "2020-11-29"
# Offset applied to calendar-week numbering — verify against the export script.
week_offset = 1

# Directory the exported statistics are written to.
export_dir = "."
23 |
--------------------------------------------------------------------------------
/export-workflow-statistics/tenant_plot.gp:
--------------------------------------------------------------------------------
# Plot processed recordings per week, one line per tenant.
# Expects filenames.txt to list one data file per tenant.
set terminal pdf size 6,4
set output 'tenant_plot.pdf'
set title "Processed Recordings per Week and Tenant"
set xlabel "Calendar Week"
set ylabel "Amount of Processed Recordings per Week"
set key outside

# Read the list of per-tenant data files.
file_names = system("cat filenames.txt")
nr_files = words(file_names)

# Extract the tenant name from a data file name by stripping a fixed-length
# prefix (18 chars) and suffix — assumes the export's naming scheme; confirm.
tenant(file_name)=sprintf("%s",substr(file_name,19,strlen(file_name)-24))

# Per-series HSV color helpers (currently unused; see commented lc option below).
hue(i)=(i*1.0)/nr_files
light(i)=(nr_files-(i*0.5))/nr_files

plot for [i=1:nr_files] word(file_names, i) with lines title tenant(word(file_names, i)) \
# lc rgbcolor hsv2rgb(hue(i),1.0,light(i))
--------------------------------------------------------------------------------
/lib/input_output/get_dummy_series_dc.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from input_output.read_file import read_file
4 |
5 |
def get_dummy_series_dc(series_id):
    """
    Get an empty series dublin core catalog with the given id.

    :param series_id: The series ID
    :type series_id: str
    :return: A dummy series dublin core catalog
    :rtype: str
    """
    template_path = os.path.abspath("dummy_series_dc.xml")
    template = read_file(template_path)
    # The template contains exactly one '*' placeholder for the series id.
    before, after = template.split("*")
    return before + series_id + after
21 |
--------------------------------------------------------------------------------
/ingest/ingest-single-request.sh:
--------------------------------------------------------------------------------
#!/bin/sh
# Ingest a single test recording into Opencast using the one-request
# /ingest/addMediaPackage endpoint. Adjust the settings below as needed.

SERVER='https://develop.opencast.org'
LOGIN='admin:opencast'
VIDEO_FILE='video.webm'
WORKFLOW='fast'

set -eux

# Generate title and name
# NOTE(review): both generators below are external services and may be
# unreachable; title/name would then be empty.
TITLE=$(telnet bofh.jeffballard.us 666 2>&- | \
    sed -e 's/^Your excuse is: //;tx;d;:x')
NAME=$(curl -s 'http://www.richyli.com/randomname/' | \
    sed -e 's/^.*>\([^>(]*\) (Try in .*$/\1/;tx;d;:x')

# Ingest media
# Single request: media file, metadata and workflow start in one POST.
curl -f -i -u ${LOGIN} \
    "${SERVER}/ingest/addMediaPackage/${WORKFLOW}" \
    -F flavor="presentation/source" \
    -F "BODY=@${VIDEO_FILE}" -F title="${TITLE}" \
    -F creator="${NAME}"
--------------------------------------------------------------------------------
/lti-test-consumer/readme.md:
--------------------------------------------------------------------------------
1 | Opencast LTI Test Consumer
2 | ==========================
3 |
4 |
5 | Run Consumer
6 | ------------
7 |
8 | ```sh
9 | # optionally setup virtual environment
10 | virtualenv venv
11 | . ./venv/bin/activate
12 |
13 | # install requirements
14 | pip install -r requirements.txt
15 |
16 | # run test consumer
17 | python3 lticonsumer.py
18 | ```
19 |
20 | Configuration
21 | -------------
22 |
You can configure some basic settings by editing the following variables in
24 | `lticonsumer.py`:
25 |
26 | ```
27 | CONSUMER_KEY = 'CONSUMERKEY'
28 | CONSUMER_SECRET = 'CONSUMERSECRET'
29 | LAUNCH_URL = 'http://localhost:8080/lti'
30 | ```
31 |
--------------------------------------------------------------------------------
/dev-scripts/ocpicbr:
--------------------------------------------------------------------------------
#!/bin/bash
# Build the admin UI frontend, pack it as a tarball, and point a local
# Opencast checkout's admin-ui-interface module at the resulting archive
# (URL + sha256), then rebuild via ocrebuild.

if [ $# -eq 0 ]; then
    echo "Assuming you want to test in the upstream dir!"
    target="upstream"
else
    target="$1"
fi

#npm ci
npm ci || exit 1
export PUBLIC_URL="/admin-ui"
npm run build || exit 1
cd build
# Package the build output for local serving.
tar cvzf ../test.tar.gz *
cd ..
sha=`sha256sum test.tar.gz | cut -f 1 -d " "`

pushd .

# Rewrite the module's pom.xml so Maven fetches the locally served tarball
# and accepts its checksum.
cd ~/opencast/$target/modules/admin-ui-interface
sed -i 's/url>.*<\/interface/url>http:\/\/localhost\/public\/adminui\/test.tar.gz<\/interface/' pom.xml
sed -i "s/sha256>.*<\/interface/sha256>$sha<\/interface/" pom.xml
ocrebuild

popd
27 |
--------------------------------------------------------------------------------
/release-management/translation-progress/translation-progress.sh:
--------------------------------------------------------------------------------
#!/bin/sh
# Print the translation progress of all Opencast target languages on Crowdin,
# one "<percent> <language>" line each, sorted by completion (descending).

set -ue

# The Crowdin API key is read from ~/.crowdin.yaml.
if [ ! -f ~/.crowdin.yaml ]; then
    echo ~/.crowdin.yaml is missing
    # Bug fix: previously the script continued after the warning and only
    # failed later when reading the missing file; abort early instead.
    exit 1
fi

echo 'Opencast Translation Status'
date --utc
echo ''

# Second whitespace-separated token of the config file is the API key.
key="$(awk '{print $2};' < ~/.crowdin.yaml)"
# Fetch the per-language status XML and reduce it to "<percent> <name>" lines.
curl -s "https://api.crowdin.com/api/project/opencast-community/status?key=$key" \
    | grep 'name\|translated_progress' \
    | tr '\n' ' ' \
    | sed "s/<\\/translated_progress>/\\n/g" \
    | sed 's#^.*\(.*\)\(.*\)#\2 \1#' \
    | sort -hr \
    | sed 's/^\(.[^ ]\) / \1 /' \
    | sed 's/^\([^ ]\) / \1 /' \
    | sed '/^ *$/d'
23 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/opencast/assetmanager.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 |
4 | from opencast.mediapackage import Mediapackage, PublicationMediapackageElement
5 |
6 |
def get_mediapackage(opencast_admin_client, mediapackage_id):
    """
    Fetch the archived media package with the given id from the asset manager
    and wrap it in a Mediapackage object.

    :param opencast_admin_client: Client for the Opencast admin node
    :param mediapackage_id: Id of the media package to fetch
    :return: The parsed media package
    """
    episode_url = f'/assets/episode/{mediapackage_id}'
    result = opencast_admin_client.get(episode_url)
    result.raise_for_status()
    return Mediapackage(result.text.encode('utf-8'))
12 |
13 |
def get_workflow_properties(opencast_admin_client, mediapackage_id):
    """
    Fetch the stored workflow properties of a media package as JSON.

    :param opencast_admin_client: Client for the Opencast admin node
    :param mediapackage_id: Id of the media package
    :return: Parsed JSON response
    """
    properties_url = f'/assets/{mediapackage_id}/workflowProperties.json'
    result = opencast_admin_client.get(properties_url)
    result.raise_for_status()
    return result.json()
19 |
--------------------------------------------------------------------------------
/find-and-delete-empty-series/README.md:
--------------------------------------------------------------------------------
1 | # Find and delete empty series
2 |
3 | These scripts can be used to
4 |
5 | 1. find series that don't have events (`find-empy-series`), and
6 | 2. use that output to delete those series (`delete-empty-series`).
7 |
8 | The affected series ids will be written into a text file.
9 |
10 | To use, first update `config.py` for your installation.
11 |
12 | **Use the second script with caution since it will delete data that cannot be
13 | recovered!**
14 |
15 | ## Requirements
16 |
This script was written for Python 3.11. You can install the necessary packages with
18 |
19 | ```
20 | pip install -r requirements.txt
21 | ```
22 |
23 | Additionally, this script uses modules contained in the _lib_ directory.
--------------------------------------------------------------------------------
/release-management/translation-progress/readme.md:
--------------------------------------------------------------------------------
1 | Format Translation Status
2 | =========================
3 |
A simple script to generate a list of all of Opencast's target languages ranked
5 | by their current translation state:
6 |
7 | ```sh
8 | % ./translation-progress.sh
9 | 100 Greek
10 | 100 German
11 | 100 English, United Kingdom
12 | 99 Spanish
13 | 99 Galician
14 | 90 Danish
15 | 86 Hebrew
16 | 86 French
17 | 86 Dutch
18 | 83 Chinese Traditional
19 | 80 Swedish
20 | 79 Tagalog
21 | 79 Slovenian
22 | 77 Turkish
23 | 75 Polish
24 | 74 Chinese Simplified
25 | 72 Filipino
26 | 52 Portuguese, Brazilian
27 | 22 Italian
28 | 11 Japanese
29 | 7 Russian
30 | 0 Portuguese
31 | 0 Norwegian
32 | 0 Finnish
33 | ```
34 |
--------------------------------------------------------------------------------
/check_data_integrity/fix/io/print_events_to_be_fixed.py:
--------------------------------------------------------------------------------
def print_events_to_be_fixed(events_to_be_fixed, progress_printer, level):
    """
    Print a small table of the events that can be fixed.

    :param events_to_be_fixed: IDs of events that can be fixed
    :type events_to_be_fixed: list
    :param progress_printer: Object to print progress messages
    :type progress_printer: ProgressPrinter
    :param level: The level of indentation
    :type level: int
    """
    header = ["Media package", "------------------------------------"]
    for line in header + list(events_to_be_fixed):
        progress_printer.print_message(line, level)
    # Trailing blank line separates the table from subsequent output.
    print()
18 |
--------------------------------------------------------------------------------
/export-videos/config.py:
--------------------------------------------------------------------------------
# Configuration

# server settings
# URL of the Opencast admin node; digest credentials must match its setup.
admin_url = "http://develop.opencast.org" # CHANGE ME
# presentation_url = # defaults to admin url, configure for separate presentation node
digest_user = "opencast_system_account"
digest_pw = "CHANGE_ME" # CHANGE ME
# Presumably: whether published URLs need stream-security signing — verify
# against the export script.
stream_security = False

# export settings
# Which sources to export: the archived version and/or the search publication.
export_archived = True
export_search = True
# Restrict the export to these publication channels / mimetypes / flavors /
# catalogs (empty list presumably means no restriction — verify).
export_publications = ["internal"]
export_mimetypes = ["video/mp4"]
export_flavors = []
export_catalogs = ["smil/cutting", "dublincore/*"]

# target directory settings
target_directory = "/home/user/Desktop/videos" # CHANGE ME
# Presumably: group events per series / keep original file names / name
# folders after event titles — verify against the export script.
create_series_dirs = False
original_filenames = False
title_folders = False
23 |
--------------------------------------------------------------------------------
/dev-scripts/download-release:
--------------------------------------------------------------------------------
#!/bin/bash
# Download the allinone tarball of an Opencast release from GitHub and copy
# its Opencast artifacts into the local ~/.m2 Maven repository.

if [ $# -ne 1 ]; then
    echo "Usage: $0 release"
    echo "  eg: $0 17.5 -> downloads the 17.5 allinone release tarball and copies its jars to your ~/.m2 repository"
    exit 1
fi

# Query the GitHub releases API for the tag and pick the allinone asset URL(s).
curl -s -L \
    -H "Accept: application/vnd.github+json" \
    -H "X-GitHub-Api-Version: 2022-11-28" \
    https://api.github.com/repos/opencast/opencast/releases/tags/$1 | \
    jq -r '.assets[] | select(.name | contains("allinone")) | .browser_download_url' | while read url
do
    echo "Downloading $url"
    curl -LO $url
    tar xzf `basename $url`
    # Copy the extracted Opencast artifacts into the local Maven repository.
    rsync -a ./opencast-dist-allinone/system/org/opencastproject/ ~/.m2/repository/org/opencastproject/
    rm -rf opencast-dist-allinone*
done
21 |
--------------------------------------------------------------------------------
/csv-export/events2csv.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python

import json


def main():
    """
    Convert the event list in events.json into a semicolon-separated CSV file
    (events.csv) with title, start date, series title and id columns.
    """
    csv_string = 'titel;start date;series;id\n'

    with open('events.json') as data_file:
        data = json.load(data_file)

    for event in data['results']:
        # Events without a series get an empty series column.
        series_title = ''
        if event.get('series'):
            series_title = event['series']['title']

        # NOTE(review): fields are not escaped; a ';' inside a title would
        # break the column layout.
        csv_string += ';'.join((event['title'],
                                event['technical_start'],
                                series_title,
                                event['id']))
        csv_string += '\n'

    print(csv_string)

    # Bug fix: the file was opened in text mode while bytes were written
    # (csvString.encode('utf-8')), raising TypeError. Write text with an
    # explicit UTF-8 encoding instead.
    with open('events.csv', 'w', encoding='utf-8') as file:
        file.write(csv_string)


if __name__ == '__main__':
    main()
27 |
--------------------------------------------------------------------------------
/whisper-AI-exec-script/whisper-getvtt.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
# Extract the audio track of a video, send it to a whisper-asr-webservice
# instance, and store the resulting WebVTT subtitle file.
# Usage: whisper-getvtt.sh <whisperServer> <videoFile> <eventId> <outputVTT> [translate]

whisperServer=${1}
videoFile=${2}
eventId=${3}
outputVTT=${4}
# Pass the literal string "translate" as fifth argument to translate to English.
translate=${5:-0}

# detach audio from video for a faster transfer
ffmpeg -y -nostdin -nostats -i "$videoFile" -vn -acodec copy "/tmp/$eventId.aac"

if [[ "$translate" == "translate" ]]
then
    echo "Translating into english"
    curl --max-time 7200 --location --request POST "$whisperServer/asr?task=translate&output=vtt" \
        --form 'audio_file=@"/tmp/'"$eventId"'.aac"' -o "$outputVTT"
else
    # Send audio for transcription
    echo "Transcribing audio"
    curl --max-time 7200 --location --request POST "$whisperServer/asr?task=transcribe&output=vtt" \
        --form 'audio_file=@"/tmp/'"$eventId"'.aac"' -o "$outputVTT"
fi

# Clean up the temporary audio file.
rm -f "/tmp/$eventId.aac"
25 |
--------------------------------------------------------------------------------
/fake-hls/readme.md:
--------------------------------------------------------------------------------
1 | Fake HLS Streaming
2 | ======================
3 |
4 | This script uses ffmpeg (use the Opencast release, or something very recent), and nginx's docker image to create a fake
5 | HLS streaming source. This can be used to test that your live-streaming workflow and player work as expected.
6 |
7 | This script requires minor configuration of Opencast's `LiveScheduleServiceImpl.cfg`:
8 | - `live.streamingUrl=http://localhost:8888/`
9 | - `live.streamName=master.m3u8`
10 |
11 | Your workflow also needs to have `publishLive` set to true for the live streaming event to be properly listed in the
12 | search index. Stream should be playable via VLC, ffplay, or other video players from http://localhost:8888/master.m3u8.
13 | You may need to wait a minute or two after starting the stream before it is properly playable.
14 |
15 | Usage:
16 |
17 | bash fake-hls.sh
18 |
--------------------------------------------------------------------------------
/schedule-now/readme.md:
--------------------------------------------------------------------------------
1 | Schedule 1min Event in 1min
2 | ===========================
3 |
4 | *This script is for Opencast >= 4.0*
5 |
By default, this script will schedule a one minute test recording in one
7 | minute. Capture agent and server can be specified at the top of the script:
8 |
9 | - `CAPTURE_AGENT="pyca"`
10 | - `HOST="https://octestallinone.virtuos.uos.de"`
11 | - `USER="opencast_system_account"`
12 | - `PASSWORD="CHANGE_ME"`
13 |
You can overwrite the default times by passing new ones to the script as command
15 | line argument. For example, to schedule a 15min recording in 5 min run:
16 |
17 | ./schedule-now.sh 5 15
18 |
19 | A more complex example: To schedule 100 recordings of 5 minutes each, starting
20 | in 1 minute, and at 15 minute intervals after that (using bash):
21 |
22 | for i in {0..100}; do ./schedule-now.sh $((15 * i + 1)) 5; done
23 |
--------------------------------------------------------------------------------
/visualize-workflow/plot-workflow.gnuplot:
--------------------------------------------------------------------------------
# Plot individual Workflow Operation Charts
# Change WF_COUNT to the number of workflows to process
# Expects workflow data files to be named "workflow<N>.dat", incremented 1-N
# Output is a single SVG ("workflow.svg") with one chart per workflow.

WF_COUNT = 1
# Data file name for workflow number n.
file(n) = sprintf("workflow%d.dat",n)

set terminal svg size 1600,WF_COUNT * 900 font "Sans bold, 20" background rgb 'white'
set output 'workflow.svg'


# Stack one chart per workflow vertically.
set multiplot layout WF_COUNT, 1

set boxwidth 0.8
set style fill solid 1.0 border -1
set xtics rotate
set ylabel "Time [s]"

do for [f=1:WF_COUNT] {
    set title "Workflow Operation Times from ".file(f)
    # Column 3 = operation time, column 1 = operation name, column 5 = label.
    plot file(f) using 3:xticlabels(1) with boxes lt rgb "#000000" notitle,\
         '' using 0:3:5 with labels offset 0,0.5 notitle
    # Use "using 0:3:4" for percentages of the overall time
}

unset multiplot
27 |
--------------------------------------------------------------------------------
/export-videos/parse_args.py:
--------------------------------------------------------------------------------
1 | from args.args_parser import get_args_parser
2 | from args.args_error import args_error
3 |
4 |
def parse_args():
    """
    Parse the command line arguments and check them for correctness.

    :return: list of event ids, list of series ids (one of them will be None)
    :rtype: list, list
    """
    parser, optional_args, required_args = get_args_parser()

    optional_args.add_argument("-e", "--events", type=str, nargs='+', help="list of event ids")
    optional_args.add_argument("-s", "--series", type=str, nargs='+', help="list of series ids")

    args = parser.parse_args()

    events_given = bool(args.events)
    series_given = bool(args.series)

    # Exactly one of the two id lists must be supplied.
    if events_given and series_given:
        args_error(parser, "You can only provide either events or series, not both")
    if not (events_given or series_given):
        args_error(parser, "You have to provide at least one series or event id")

    return args.events, args.series
26 |
--------------------------------------------------------------------------------
/import-to-other-tenant/parse_args.py:
--------------------------------------------------------------------------------
1 | from args.args_parser import get_args_parser
2 | from args.args_error import args_error
3 |
4 |
def parse_args():
    """
    Parse and validate the command line arguments.

    :return: list of event ids, list of series ids (one of them will be None)
    :rtype: list, list
    """
    parser, optional_args, required_args = get_args_parser()

    # Register both id-list options with identical argument settings.
    for short_flag, long_flag, description in (
            ("-e", "--events", "list of event ids"),
            ("-s", "--series", "list of series ids")):
        optional_args.add_argument(short_flag, long_flag, type=str, nargs='+', help=description)

    args = parser.parse_args()

    # Exactly one of the two id lists must be supplied.
    if args.events:
        if args.series:
            args_error(parser, "You can only provide either events or series, not both")
    elif not args.series:
        args_error(parser, "You have to provide at least one series or event id")

    return args.events, args.series
26 |
--------------------------------------------------------------------------------
/lib/rest_requests/group_requests.py:
--------------------------------------------------------------------------------
1 | from rest_requests.request import post_request
2 |
3 |
def create_group(base_url, digest_login, name, description="", roles="", users=""):
    """
    Create a group via the admin-ng endpoint.

    :param base_url: The URL for the request
    :type base_url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :param name: The name of the group
    :type name: str
    :param description: The description of the group
    :type description: str
    :param roles: The roles of the group
    :type roles: str
    :param users: The users of the group
    :type users: str
    :raise RequestError:
    """
    group_data = {
        'name': name,
        'description': description,
        'roles': roles,
        'users': users,
    }
    post_request('{}/admin-ng/groups'.format(base_url), digest_login,
                 element_description="/admin-ng/groups", data=group_data)
25 |
--------------------------------------------------------------------------------
/ingest/readme.md:
--------------------------------------------------------------------------------
1 | Ingest Scripts
2 | ==============
3 |
4 | ingest-single-request.sh
5 | ------------------------
6 |
7 | Repeatedly ingest files using a single request ingest. Parameters can be
8 | adjusted at the top of the file.
9 |
10 | ingest-addtrack.sh
11 | ------------------
12 |
13 | Ingest files using multiple requests, ingesting metadata, audio and video files
14 | separately. Parameters can be adjusted at the top of the file.
15 |
16 | ingest-with-smil.sh
17 | -------------------
18 |
19 | A small extension to the ingest-addtrack script. SMIL cutting information is
20 | ingested along with the video track, allowing workflows to immediately trim
21 | videos if the editor step is included. The end result is similar to a
22 | publication subject to the user's editing preference.
23 |
24 | ingest-with-acl.sh
25 | ------------------
26 |
27 | Another modification of the ingest-addtrack script.
28 | This script uploads an access control list in addition to the tracks and metadata.
29 |
--------------------------------------------------------------------------------
/lib/rest_requests/search_requests.py:
--------------------------------------------------------------------------------
1 | from rest_requests.get_response_content import get_json_content
2 | from rest_requests.request import get_request
3 |
4 |
def get_episode_from_search(base_url, digest_login, event_id):
    """
    Get an episode from the search service as json.

    :param base_url: The base URL for the request
    :type base_url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :param event_id: The event id
    :type event_id: str
    :return: The episode as json, or None if it is not found in the index
    :rtype: dict or None
    :raise RequestError:
    """
    episode_url = '{}/search/episode.json?id={}'.format(base_url, event_id)
    response = get_request(episode_url, digest_login, "search episode")
    search_results = get_json_content(response)["search-results"]
    # A missing "result" key means the episode is not in the search index.
    return search_results.get("result")
27 |
--------------------------------------------------------------------------------
/dev-scripts/ocbuild:
--------------------------------------------------------------------------------
#!/bin/bash
# Build Opencast with the Maven wrapper, selecting a JDK by version number.
# Usage: ocbuild [JDK_VERSION] <maven args...>
# If the first argument is not a number, the JDK version is read from the
# java.release property of the nearest pom.xml.

if [ $# -lt 1 ]; then
    echo "Usage: $0 [JDK_VERSION] args"
    exit 1
fi

MVNPATH="mvn"

# Locate the Maven wrapper relative to the current directory and read the
# default JDK version from the matching pom.xml.
if [ -f "./mvnw" ]; then #base dir
    MVNPATH="./mvnw"
    DEFAULT_JDK=`grep -o 'java.release>.*' pom.xml | sed 's#java.release>\(.*\)#\1#g'`
elif [ -f "../mvnw" ]; then #for assemblies
    MVNPATH="../mvnw"
    DEFAULT_JDK=`grep -o 'java.release>.*' ../pom.xml | sed 's#java.release>\(.*\)#\1#g'`
elif [ -f "../../mvnw" ]; then #for modules
    MVNPATH="../../mvnw"
    DEFAULT_JDK=`grep -o 'java.release>.*' ../../pom.xml | sed 's#java.release>\(.*\)#\1#g'`
else
    echo "WARNING: Defaulting to system maven!"
fi
#Looking for integers
re='^[0-9]+$'
if [[ $1 =~ $re ]] ; then
    # First argument is a JDK version: use that JDK, pass the rest to Maven.
    JDK="$1"
    JAVA_HOME=/usr/lib/jvm/java-$JDK-openjdk-amd64 $MVNPATH "${@:2}"
    exit $?
else
    # Otherwise pass all arguments to Maven using the default JDK.
    JAVA_HOME=/usr/lib/jvm/java-$DEFAULT_JDK-openjdk-amd64 $MVNPATH "${@:1}"
    exit $?
fi
32 |
33 |
34 |
--------------------------------------------------------------------------------
/migrate-events-to-another-opencast/import.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | id: import
3 | title: Import and publish media from another Opencast
4 | operations:
5 |
6 | - id: ingest-download
7 | description: Download external media package elements
8 | max-attempts: 4
9 | exception-handler-workflow: partial-error
10 | fail-on-error: 'true'
11 | configurations:
12 | - delete-external: 'true'
13 |
14 | - id: publish-engage
15 | description: Publishing media
16 | max-attempts: 2
17 | fail-on-error: 'true'
18 | exception-handler-workflow: partial-error
19 | configurations:
20 | - download-source-flavors: '*/*'
21 | - check-availability: false
22 |
23 | - id: snapshot
24 | description: Archiving media
25 | exception-handler-workflow: partial-error
26 | fail-on-error: true
27 | configurations:
28 | - source-flavors: '*/*'
29 |
30 | - id: cleanup
31 | description: Cleaning up
32 | fail-on-error: 'false'
33 | configurations:
34 | - delete-external: 'false'
35 | - preserve-flavors: 'security/*'
36 |
--------------------------------------------------------------------------------
/whisper-AI-exec-script/whisper-generate.sh:
--------------------------------------------------------------------------------
#! /usr/bin/sh

# Generate Whisper AI REST API containers
#
# The Quadro P4000 only has 8GB of VRAM, unfortunately it can't run all models at the same time
# The VRAM requirements for each model are:
# tiny: ~1GB
# base ~1 GB
# small ~2 GB
# medium ~5 GB
# large ~10 GB
#
# Uncomment or comment the next lines depending what containers you want to generate
#
# Each container exposes the ASR web service on its own host port (9000-9004).
#

#docker run -d --name Whisper_tiny --gpus all -p 9000:9000 -e ASR_MODEL=tiny onerahmet/openai-whisper-asr-webservice-gpu
docker run -d --name Whisper_base --gpus all -p 9001:9000 -e ASR_MODEL=base onerahmet/openai-whisper-asr-webservice-gpu
#docker run -d --name Whisper_small --gpus all -p 9002:9000 -e ASR_MODEL=small onerahmet/openai-whisper-asr-webservice-gpu
docker run -d --name Whisper_medium --gpus all -p 9003:9000 -e ASR_MODEL=medium onerahmet/openai-whisper-asr-webservice-gpu
#docker run -d --name Whisper_large --gpus all -p 9004:9000 -e ASR_MODEL=large onerahmet/openai-whisper-asr-webservice-gpu
23 |
--------------------------------------------------------------------------------
/auto-restart-services/Sanitizer.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from requests.auth import HTTPDigestAuth
3 | import sys
4 |
5 |
def main(argv):
    """
    Script entry point; expects exactly three arguments: url, username, password.
    """
    if len(argv) != 3:
        print('wrong number of arguments. 3 expected : url, username, password')
        return
    url, username, password = argv
    sanitize(url, username, password)
11 |
12 |
def sanitize(url, username, password):
    """
    Find all registered services that are not in the NORMAL state and ask
    Opencast to sanitize each of them via /services/sanitize.
    """
    headers = {'X-Requested-Auth': 'Digest'}
    response = requests.get(url + '/services/services.json',
                            auth=HTTPDigestAuth(username, password),
                            headers=headers)
    data = response.json()
    # Collect every non-NORMAL service state from the nested response.
    broken_states = [state
                     for group in data.values()
                     for service_states in group.values()
                     for state in service_states
                     if state['service_state'] != 'NORMAL']
    for state in broken_states:
        resp = requests.post(url + '/services/sanitize',
                             files={'serviceType': state['type'], 'host': state['host']},
                             auth=HTTPDigestAuth(username, password),
                             headers=headers)
        if resp.status_code > 204:
            print('sth went wrong')
23 |
24 |
25 | if __name__ == '__main__':
26 | main(sys.argv[1:])
27 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/tests/test_assetmanager.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | import unittest
4 |
5 | from httpx import Timeout
6 |
7 | from opencast.assetmanager import get_mediapackage
8 | from opencast.client import OpencastClient
9 | from opencast.externalapi import get_events
10 |
11 |
class AssetmanagerTestCase(unittest.TestCase):
    """Integration test for the asset manager wrapper; runs against a live
    Opencast instance (develop.opencast.org) with its default credentials."""

    def __init__(self, *args, **kwargs):
        super(AssetmanagerTestCase, self).__init__(*args, **kwargs)
        # Connection settings for the public Opencast test server.
        self.hostname = 'https://develop.opencast.org'
        self.username = 'admin'
        self.password = 'opencast'

    def test_get_mediapackage(self):
        # Take the first event from the external API and check that its
        # archived media package can be fetched and carries the same id.
        with OpencastClient(self.hostname, auth=(self.username, self.password), timeout=Timeout(5.0)) as opencast_client:
            for event in get_events(opencast_client):
                mp = get_mediapackage(opencast_client, event['identifier'])
                self.assertEqual(mp.get_identifier(), event['identifier'])
                break
26 |
27 |
28 | if __name__ == '__main__':
29 | unittest.main()
30 |
--------------------------------------------------------------------------------
/xml-to-yaml-workflow/readme.md:
--------------------------------------------------------------------------------
1 | # Script to convert XML workflows to YAML
2 |
3 | With this script, you can convert XML workflows to YAML workflows.\
It does not convert comments at the moment.
5 |
6 | ## How to Use
7 | ### Usage
8 |
9 | The script can be called with the following parameters (all parameters in brackets are optional):
10 |
11 | `main.py [-i INPUTFILE] [-o OUTPUTFILE]`
12 |
Either both input file (`-i`) and output file (`-o`) have to be provided, or neither. \
14 | If none is provided the script will convert all `.xml` files which are in the folder of the script.
15 |
16 | | Short Option | Long Option | Description |
17 | |:------------:|:------------|:-----------------------------------------------|
| `-i` | `--input` | The xml file to be converted (with extension) |
19 | | `-o` | `--output` | The file to output (with extension) |
20 |
21 | #### Usage example
22 |
23 | `main.py -i fast.xml -o fast.yaml`
24 |
25 | ## Requirements
26 |
27 | This script was written for Python 3. You can install the necessary packages with
28 |
29 | `pip install -r requirements.txt`
--------------------------------------------------------------------------------
/lib/args/url_builder.py:
--------------------------------------------------------------------------------
DEFAULT_TENANT = "mh_default_org"


class URLBuilder:

    def __init__(self, opencast_url, https):
        """
        Constructor

        :param opencast_url: URL to an opencast instance
        :type opencast_url: str
        :param https: Whether to use https
        :type https: bool
        """
        self.opencast = opencast_url
        self.protocol = "https" if https else "http"

    def get_base_url(self, tenant=None):
        """
        Build a basic url for requests using the chosen protocol, possibly a tenant, and the opencast URL.
        Default protocol is http. Handing over the default tenant is equivalent to handing over None.

        :param tenant: Tenant ID or None
        :type tenant: str or None
        :return: Base url for requests
        :rtype: str
        """
        # A non-default tenant is addressed via a subdomain prefix.
        if tenant and tenant != DEFAULT_TENANT:
            return "{}://{}.{}".format(self.protocol, tenant, self.opencast)
        return "{}://{}".format(self.protocol, self.opencast)
--------------------------------------------------------------------------------
/recover_backup/input/check_recovery_start.py:
--------------------------------------------------------------------------------
1 | from input_output.input import get_yes_no_answer
2 |
3 |
def check_recovery_start(mps_to_recover, configured_mps=None):
    """
    Present the media packages about to be recovered to the user and ask whether a recovery should be attempted under
    these circumstances.

    :param mps_to_recover: The media packages that can be recovered.
    :type mps_to_recover: list
    :param configured_mps: The configured media packages to recover, if any.
    :type configured_mps: list
    :return: Whether the recovery should be started.
    :rtype: bool
    """

    print()
    print("The following {} media packages can be recovered:".format(len(mps_to_recover)))
    print("Media package | Version | Path")

    for recoverable in mps_to_recover:
        print("%36s | %7s | %s" % (recoverable.id, recoverable.version, recoverable.path))
    print()

    # Warn when some explicitly configured media packages cannot be recovered.
    if configured_mps and len(configured_mps) > len(mps_to_recover):
        missing_count = len(configured_mps) - len(mps_to_recover)
        print("{} media packages cannot be recovered.\n".format(missing_count))

    return get_yes_no_answer("Start recovery?")
31 |
--------------------------------------------------------------------------------
/delete_dead_distribution_artefacts/input/delete_question.py:
--------------------------------------------------------------------------------
1 | from input_output.input import get_configurable_answer
2 | from utility.enum import enum
3 |
# Possible answers for delete_question: delete the artefacts of the next media
# package, of all remaining media packages, or quit the script.
DeleteAnswer = enum(
    NEXT="n",
    ALL="a",
    QUIT="q"
)
9 |
10 |
def delete_question(media_package, level=0):
    """
    Ask user the question whether they want to delete the distribution artefacts for the next media package or for all
    remaining media packages.

    :param media_package: The media package to ask the question for
    :type media_package: str
    :param level: The level to indent the question to
    :type level: int
    :return: The answer.
    :rtype: DeleteAnswer
    """

    long_descriptions = ["deleting the distribution artefacts of the next media package",
                         "deleting all (remaining) distribution artefacts",
                         "quitting the script"]
    short_descriptions = ["next", "all", "quit"]
    # Keep the option characters in sync with the DeleteAnswer enum values.
    options = ['n', 'a', 'q']

    question = "Delete distribution artefacts of media package {}?".format(media_package)

    return get_configurable_answer(options, short_descriptions, long_descriptions, question, level)
34 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/tests/test_search.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | import unittest
4 |
5 | from httpx import Timeout
6 |
7 | from opencast.client import OpencastClient
8 | from opencast.externalapi import get_events
9 | from opencast.search import get_mediapackage
10 |
11 |
class SearchTestCase(unittest.TestCase):

    def __init__(self, *args, **kwargs):
        """Set up the connection details for the test Opencast instance."""
        super(SearchTestCase, self).__init__(*args, **kwargs)
        self.hostname = 'https://develop.opencast.org'
        self.username = 'admin'
        self.password = 'opencast'

    def test_get_mediapackage(self):
        """The first published event's search media package must carry a matching ID."""
        credentials = (self.username, self.password)
        with OpencastClient(self.hostname, auth=credentials, timeout=Timeout(5.0)) as opencast_client:
            for event in get_events(opencast_client):
                # get mediapackage from search service (if published)
                mediapackage = get_mediapackage(opencast_client, event['identifier'])
                if mediapackage:
                    self.assertEqual(event['identifier'], mediapackage.get_identifier())
                    return


if __name__ == '__main__':
    unittest.main()
34 |
--------------------------------------------------------------------------------
/check_data_integrity/fix/io/input.py:
--------------------------------------------------------------------------------
1 | from input_output.input import get_configurable_answer
2 | from utility.enum import enum
3 |
# Possible answers for fix_question: fix the next event, all remaining events
# with this error, everything without asking again, skip the rest, or quit.
FixAnswer = enum(
    NEXT="n",
    ALL="a",
    REST="r",
    SKIP="s",
    QUIT="q"
)
11 |
12 |
def fix_question(level=0):
    """
    Ask user the question whether they want to fix one/more element(s) to give them the chance to change their mind,
    skip some errors or check previous fixes for correctness before continuing.

    :param level: The level to indent the question to
    :type level: int
    :return: The answer.
    :rtype: FixAnswer
    """

    long_descriptions = ["fixing the next event with this error of the current tenant",
                         "fixing all remaining events with this error of the current tenant",
                         "fixing all events with all errors for all tenants without asking again",
                         "skipping the rest of the events with this error of the current tenant",
                         "quitting the script completely"]
    short_descriptions = ["next", "remaining", "all", "skip", "quit"]
    # Keep the option characters in sync with the FixAnswer enum values.
    options = ['n', 'r', 'a', 's', 'q']
    question = "Fix?"

    return get_configurable_answer(options, short_descriptions, long_descriptions, question, level)
33 |
--------------------------------------------------------------------------------
/dev-scripts/ocstart:
--------------------------------------------------------------------------------
#!/bin/bash

# Start a built Opencast distribution with a selectable JDK.
# Usage: ocstart [distribution] [jdk-major-version]

# Print the Opencast major version of the distribution in directory $1,
# taken from the Karaf features configuration. Exits if the file is missing.
getversion() {
    if [ ! -f "$1/etc/org.apache.karaf.features.cfg" ]; then
        exit 1
    fi
    sed -n 's/.*opencast-karaf-features\/\([0-9]*\).*/\1/p' "$1/etc/org.apache.karaf.features.cfg"
}

# First argument: distribution to start (default: develop)
if [ $# -ge 1 ];
then
    search="$1"
else
    search="develop"
fi

# Second argument: JDK major version (default: read from pom.xml java.release)
if [ $# -ge 2 ];
then
    jdk="$2"
else
    jdk=$(grep -o 'java.release>.*' pom.xml | sed 's#java.release>\(.*\)#\1#g')
fi

if [ ! -d "/usr/lib/jvm/java-$jdk-openjdk-amd64" ];
then
    echo "/usr/lib/jvm/java-$jdk-openjdk-amd64 does not exist!"
    exit 1
fi

# Quote the -name pattern so the shell does not glob-expand it before find runs.
target=$(find ./build -type d -name "opencast-dist-$search*" | head -n 1)
version=$(getversion "$target")

echo "Attempting to start $search with jdk $jdk"
if [ "" == "$target" ]; then
    echo "$search does not exist!"
    exit 1
elif [ "$target" == "./build/opencast-dist-$search-$version-SNAPSHOT" ]; then
    echo "Guessing that $target is right, starting that"
elif [ "$target" != "./build/opencast-dist-$search" ]; then
    echo "Found $target, which doesn't match ./build/opencast-dist-$search"
    exit 1
fi

JAVA_HOME="/usr/lib/jvm/java-$jdk-openjdk-amd64" "$target/bin/start-opencast"
45 |
--------------------------------------------------------------------------------
/lib/input_output/read_file.py:
--------------------------------------------------------------------------------
1 | import io
2 |
3 |
def read_file(file_path):
    """
    Return the content of a file as a string without newlines.

    :param file_path: The path to the file
    :type file_path: str
    :return: File content
    :rtype: str
    """

    # Build the result with a single join instead of repeated string
    # concatenation, which is quadratic in the number of lines.
    with io.open(file_path, 'r', newline='', encoding='utf8') as file:
        return ''.join(line.rstrip('\n') for line in file)
21 |
22 |
def write_list_to_file(file_path, list):
    """
    Write each item of a list into a new line of the file.

    :param file_path: The path to the file
    :type file_path: str
    :param list: A list of strings to write
    :type list: list
    """

    # NOTE(review): the parameter name shadows the builtin 'list'; it is kept
    # for backward compatibility with callers using it as a keyword argument.
    with io.open(file_path, 'w', encoding='utf8') as file:
        # writelines batches the writes instead of one call per item.
        file.writelines("{}\n".format(item) for item in list)
36 |
37 |
def read_list_from_file(file_path):
    """
    Read list of strings from a file where each item is on a new line.

    :param file_path: The path to the file
    :type file_path: str
    :return: File content
    :rtype: list
    """

    with io.open(file_path, 'r', encoding='utf8') as file:
        content = file.read()
    # splitlines drops the trailing newline and handles all line endings.
    return content.splitlines()
50 |
--------------------------------------------------------------------------------
/check_data_integrity/fix/fixer/fixer.py:
--------------------------------------------------------------------------------
1 | from abc import ABCMeta, abstractmethod
2 |
3 |
class Fixer(metaclass=ABCMeta):
    """
    Abstract class defining the methods every subclass has to implement for fixing inconsistent data.

    Bug fix: the class previously set the Python 2 ``__metaclass__`` attribute,
    which is ignored in Python 3 and silently disabled abstract-method
    enforcement; ``metaclass=ABCMeta`` restores it.
    """

    @abstractmethod
    def fix(self, opencast_url, digest_login, event_id):
        """
        Fix the given event.

        :param opencast_url: URL to opencast instance
        :type opencast_url: str
        :param digest_login: User and password for digest authentication
        :type digest_login: DigestLogin
        :param event_id: ID of event to be fixed
        :type event_id: str
        """
        raise NotImplementedError

    @staticmethod
    @abstractmethod
    def get_errors():
        """
        Return which errors this fixer can fix.

        :return: A list of errors this fixer can fix
        :rtype: list
        """
        raise NotImplementedError

    @staticmethod
    @abstractmethod
    def get_fix_description():
        """
        Return a description of what this fixer does to fix inconsistent description.

        :return: Description of what this fixer does.
        :rtype: str
        """
        raise NotImplementedError
45 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/opencast/archive.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | from opencast.mediapackage import Mediapackage
4 |
5 |
def get_all_episodes(opencast_admin_client, sort='DATE_CREATED_DESC', **kwargs):
    """
    Yield all episodes from the archive endpoint, transparently paging through the results.

    :param opencast_admin_client: HTTP client for the Opencast admin node
    :param sort: Sort order passed to the endpoint
    :param kwargs: Additional query parameters, overriding the defaults
    """
    url = '/archive/episode.json'
    limit = 10
    offset = 0
    params = {
        'limit': limit,
        'sort': sort,
        'onlyLatest': True,
        'admin': True,
        'episodes': False,
    }
    # Caller-supplied parameters override the defaults.
    params.update(kwargs)
    while offset >= 0:
        params['offset'] = offset
        response = opencast_admin_client.get(url, params=params)
        response.raise_for_status()
        # Parse the body once instead of re-parsing it on every access.
        body = response.json()
        results = body.get('search-results', {}).get('result', [])
        if results:
            yield from results
            offset += limit
        else:
            # Missing 'search-results' or an empty page ends the pagination.
            offset = -1
32 |
33 |
def get_mediapackage(opencast_admin_client, episode_id):
    """
    Fetch the archived media package XML for an episode and wrap it in a Mediapackage.

    :param opencast_admin_client: HTTP client for the Opencast admin node
    :param episode_id: ID of the archived episode
    :return: Parsed media package
    :rtype: Mediapackage
    """
    url = f'/archive/archive/mediapackage/{episode_id}'
    response = opencast_admin_client.get(url)
    response.raise_for_status()
    # Mediapackage expects raw bytes, so re-encode the decoded response text.
    return Mediapackage(response.text.encode('utf-8'))
39 |
--------------------------------------------------------------------------------
/lib/rest_requests/get_response_content.py:
--------------------------------------------------------------------------------
1 | """
2 | This module provides methods to decode the response of a rest request to UTF8 and parse the content in different
3 | formats.
4 | """
5 |
6 | import json
7 | import xml.etree.ElementTree as ElementTree
8 |
9 |
def get_json_content(response):
    """
    Decodes the given response to UTF8 and returns it in JSON

    :param response: The response to a request
    :return: The response content as json
    :rtype: dict
    """

    return json.loads(response.content.decode('utf8'))
22 |
23 |
def get_xml_content(response):
    """
    Decodes the given response to UTF8 and returns it in XML

    :param response: The response to a request
    :return: The response content as xml
    :rtype: ElementTree.Element
    """

    return ElementTree.fromstring(response.content.decode('utf8'))
36 |
37 |
def get_string_content(response):
    """
    Decodes the given response to UTF8 and returns it as a string

    :param response: The response to a request
    :return: response content as string
    :rtype: str
    """

    return response.content.decode('utf8')
49 |
--------------------------------------------------------------------------------
/lib/input_output/log_writer.py:
--------------------------------------------------------------------------------
1 | import csv
2 | import datetime
3 | import os
4 |
5 |
class LogWriter:
    """
    Writer for writing any fixes made by this script to a log in case something goes wrong.
    """

    def __init__(self, logname, *header):
        """
        Open (or create) a CSV log file named <logname>_<date>.csv and write the
        header row if the file is new.

        :param logname: Base name (optionally with path) of the log file
        :type logname: str
        :param header: Column names written after the implicit timestamp column
        """

        today = datetime.date.today().strftime("%Y-%m-%d")
        filename = "{}_{}.csv".format(logname, today)

        new = not os.path.isfile(filename)

        # Append to the log in case it already exists. newline='' is required
        # by the csv module (avoids spurious blank rows on Windows), and an
        # explicit encoding keeps the log portable across locales.
        self.file = open(filename, 'a', newline='', encoding='utf8')
        self.writer = csv.writer(self.file)

        if new:
            self.writer.writerow(['timestamp', *header])
            self.file.flush()

    def write_to_log(self, *line):
        """
        Write the result of fixing on event into the log file

        :param line: Line to write to log
        :type line: str
        """
        self.writer.writerow([datetime.datetime.now(), *line])
        # Flush immediately so the log survives a crash.
        self.file.flush()

    def close_log(self):
        """
        Close log when script is done or an error occurs (very important!)
        """
        self.file.close()
49 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 2-Clause License
2 |
3 | Copyright (c) 2024 The Apereo Foundation
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | * Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | * Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
17 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
20 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
23 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 2-Clause License
2 |
3 | Copyright (c) 2017-2018 The Apereo Foundation
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | * Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | * Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
17 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
20 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
23 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 |
--------------------------------------------------------------------------------
/lib/data_handling/flavor_matcher.py:
--------------------------------------------------------------------------------
def split_flavor(flavor):
    """
    Split a flavor into type and subtype.

    :param flavor: The flavor to split
    :type flavor: str
    :return: type, subtype
    :rtype: str, str
    :raise: ValueError
    """
    parts = flavor.split("/")
    # A valid flavor has exactly one slash: "<type>/<subtype>".
    if len(parts) != 2:
        raise ValueError("Invalid flavor")
    return parts[0], parts[1]
18 |
19 |
def matches_flavor(asset_flavor, config_flavors):
    """
    Check if a flavor matches the flavors in a list. The latter can contain the placeholder "*".

    :param asset_flavor: The flavor to check
    :type asset_flavor: str
    :param config_flavors: The flavors to match (can contain "*")
    :type config_flavors: list
    :return: Whether the flavor matches.
    :rtype: bool
    """

    asset_parts = asset_flavor.split("/")
    if len(asset_parts) != 2:
        raise ValueError("Invalid flavor")
    asset_type, asset_subtype = asset_parts

    for config_flavor in config_flavors:
        config_parts = config_flavor.split("/")
        if len(config_parts) != 2:
            raise ValueError("Invalid flavor")
        config_type, config_subtype = config_parts

        # "*" acts as a wildcard for the whole type or subtype.
        type_matches = config_type in ("*", asset_type)
        subtype_matches = config_subtype in ("*", asset_subtype)
        if type_matches and subtype_matches:
            return True

    return False
41 |
--------------------------------------------------------------------------------
/create-groups/main.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | sys.path.append(os.path.join(os.path.abspath('..'), "lib"))
5 |
6 | import config
7 | from args.args_error import args_error
8 | from args.args_parser import get_args_parser
9 | from args.digest_login import DigestLogin
10 | from rest_requests.group_requests import create_group
11 | from rest_requests.request_error import RequestError
12 |
13 |
def parse_args():
    """
    Parse the arguments and check them for correctness

    :return: amount of groups
    :rtype: int
    """
    parser, optional_args, required_args = get_args_parser()
    optional_args.add_argument("-a", "--amount", type=int, help="amount of groups")
    parsed = parser.parse_args()

    # Zero or negative group counts make no sense.
    if parsed.amount and parsed.amount <= 0:
        args_error(parser, "Amount has to be positive, non-zero number")

    return parsed.amount
29 |
30 |
def main():
    """Create the configured amount of test groups (default: 100), named 1..amount."""
    digest_login = DigestLogin(user=config.digest_user, password=config.digest_pw)
    amount = parse_args()

    if not amount:
        amount = 100

    for i in range(1, amount + 1):
        name = str(i)
        try:
            create_group(config.admin_url, digest_login, name)
            print("Created group {}".format(name))
        except RequestError as e:
            # Bug fix: name and error were passed as extra print() arguments
            # and never substituted into the '{}' placeholders.
            print("Group {} couldn't be created: {}".format(name, e.error))


if __name__ == '__main__':
    main()
49 |
--------------------------------------------------------------------------------
/migrate-events-to-another-opencast/readme.md:
--------------------------------------------------------------------------------
1 | # Migrate Published Recordings to Another Opencast
2 |
3 | This script lets you quickly migrate your published series and recordings from one to another Opencast.
4 | It will not migrate any asset manager content.
5 |
6 | For the migration, please ensure that static files are served without authentication and without job context evaluation.
7 | The easiest way of doing this is to add a rule like this to your reverse proxy configuration
8 | (this example is for Nginx):
9 |
10 | ```nginx
11 | location ^~ /static/ {
12 | alias /srv/opencast/opencast-dist-allinone/data/opencast/downloads/;
13 | sendfile on;
14 | }
15 | ```
16 |
17 | Next, install a suitable import workflow in your target Opencast.
18 | You can use any workflow you want and even reprocess videos,
19 | but if you just want to import and publish media as they were in the old system,
20 | you may want to use the workflow [import.yaml](import.yaml).
21 |
22 | Update the credentials and the workflow in the `migrate.py` to configure your Opencast source and target systems. If you have a distributed installation of Opencast ensure that the URLs match your admin and presentation node. With an AllInOne installation you'll have the same URL for admin and presentation.
23 |
24 | Finally, start the migration:
25 |
26 | ```
27 | ❯ python migrate.py
28 | Importing ID-wiki-commons
29 | Create series response: 201
30 | …
31 | Importing e85fcb07-6943-4d09-b739-60daa756d769
32 | Ingest response: 200
33 | …
34 | ```
35 |
--------------------------------------------------------------------------------
/create-groups/README.md:
--------------------------------------------------------------------------------
1 | # Script to create groups for testing
2 |
3 | With this script, you can easily create multiple groups for testing. Can be extended to include specific roles,
4 | users and so on.
5 |
6 | ## How to Use
7 |
8 | ### Configuration
9 |
10 | First you need to configure the script in `config.py`:
11 |
12 | | Configuration Key | Description | Default |
13 | | :---------------- | :------------------------------- | :---------------------- |
14 | | `admin_url` | The (tenant-specific) admin URL | http://localhost:8080 |
15 | | `digest_user` | The user name of the digest user | opencast_system_account |
16 | | `digest_pw` | The password of the digest user | CHANGE_ME |
17 |
18 |
19 | ### Usage
20 |
21 | The script can be called with the following parameters (all parameters in brackets are optional):
22 |
23 | `main.py -a AMOUNT_OF_GROUPS`
24 |
25 | | Short Option | Long Option | Description |
26 | | :----------: | :---------- | :-------------------------------------------------------------- |
27 | | `-a` | `--amount` | How many groups to create (default: 100) |
28 |
29 | #### Usage example
30 |
31 | `main.py -a 200`
32 |
33 | ## Requirements
34 |
35 | This script was written for Python 3.8. You can install the necessary packages with
36 |
37 | `pip install -r requirements.txt`
38 |
39 | Additionally, this script uses modules contained in the _lib_ directory.
--------------------------------------------------------------------------------
/check_data_integrity/fix/workflows/workflow_definitions/add_or_update_series_dc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | add-or-update-series-dc
5 | Add or update series Dublin Core catalog
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
18 |
19 | dublincore/series
20 | false
21 |
22 |
23 |
24 |
25 |
28 |
29 | dublincore/series
30 | +archive
31 |
32 |
33 |
34 |
35 |
38 |
39 | archive
40 |
41 |
42 |
43 |
44 |
45 |
46 |
--------------------------------------------------------------------------------
/dev-scripts/mpr:
--------------------------------------------------------------------------------
#!/bin/bash

# Merge a GitHub pull request into the current local branch.
# Usage: mpr PRID [remote]

if [ $# -lt 1 -o $# -gt 2 ]; then
  echo "Usage: $0 PRID [remote name ]: Merge a specific pull request into the local branch, optionally from a specific remote"
  exit 1
fi

PRID=$1
REMOTE="upstream"
if [ $# -eq 2 ]; then
  REMOTE="$2"
fi

# Derive the owner/repo slug from the remote's URL
SLUG=$(git remote -v | grep "$REMOTE" | sed -E 's/.*github.com[:\/]([a-zA-Z0-9]*\/[a-zA-Z0-9\-]*).*/\1/g' | uniq)

# Query the GitHub API for the pull request metadata
JSON=$(curl -s "https://api.github.com/repos/${SLUG}/pulls/${PRID}")
FIXES="$(echo "$JSON" | jq -r .body | grep -Eo 'Closes #[0-9]*|Fixes #[0-9]*|Resolves #[0-9]*')"
BRANCH="$(git status | sed -n 's/^On branch \(.*\)$/\1/p')"
HEAD_REPO_URL="$(echo "$JSON" | jq -r .head.repo.ssh_url)"
HEAD_REPO_NAME="$(echo "$JSON" | jq -r .head.repo.full_name)"
HEAD_REF="$(echo "$JSON" | jq -r .head.ref)"
BASE_REF="$(echo "$JSON" | jq -r .base.ref)"
TITLE="$(echo "$JSON" | jq -r .title)"

if [ "${BRANCH}" != "${BASE_REF}" ]; then
  echo Warning: Target branch does not match current branch
  echo "${BRANCH} != ${BASE_REF}"
fi

# Build the merge commit message
tmp="$(mktemp -d)"
echo "Merge branch '${HEAD_REF}' of ${HEAD_REPO_NAME} into ${BASE_REF}

Pull request #${PRID}
${FIXES}
${TITLE}" > "$tmp/COMMIT_EDITMSG"


# This branch may exist from previous, failed merges. Remove it so it's up to date.
# Bug fix: '2>&1 > /dev/null' still sent stderr to the terminal; the
# redirections must be ordered '> /dev/null 2>&1' to silence both streams.
git branch -D "pr-$PRID" > /dev/null 2>&1
git fetch "$REMOTE" "pull/$PRID/head:pr-$PRID"
git merge --no-ff "pr-$PRID" -F "$tmp/COMMIT_EDITMSG" || exit 1
git branch -D "pr-$PRID"
rm -rf "$tmp"
45 |
--------------------------------------------------------------------------------
/lib/input_output/unique_names.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 |
def make_filename_unique(base_dir, file_name, file_extension):
    """
    Make sure filename is unique by checking if the file exists and appending a number if it does.

    :param base_dir: path to file
    :type base_dir: Path
    :param file_name: file name
    :type file_name: str
    :param file_extension: file extension
    :type file_extension: str
    :return: unique filename
    :rtype: str
    """
    candidate = file_name
    suffix = 2
    # Append "(2)", "(3)", ... until no file with that name exists.
    while os.path.exists(os.path.join(base_dir, '{}.{}'.format(candidate, file_extension))):
        candidate = '{}({})'.format(file_name, suffix)
        suffix += 1
    return candidate
25 |
26 |
def make_dirname_unique(base_dir, dir_name):
    """
    Make sure directory name is unique by checking if the directory exists and appending a number if it does.

    :param base_dir: path to directory
    :type base_dir: Path
    :param dir_name: directory name
    :type dir_name: str
    :return: unique directory name
    :rtype: str
    """
    candidate = dir_name
    suffix = 2
    # Append "(2)", "(3)", ... until no entry with that name exists.
    while os.path.exists(os.path.join(base_dir, candidate)):
        candidate = '{}({})'.format(dir_name, suffix)
        suffix += 1
    return candidate
46 |
--------------------------------------------------------------------------------
/lib/data_handling/elements.py:
--------------------------------------------------------------------------------
1 | """
2 | This module provides utility methods for series and events.
3 | """
4 |
5 |
def get_id(element) -> str:
    """
    Return the ID of an event or series.

    :param element: event or series
    :type element: dict
    :return: ID of element
    :rtype: str
    :raise ValueError:
    """

    # Series use "id", events use "identifier"; "id" takes precedence.
    for key in ("id", "identifier"):
        if key in element:
            return element[key]
    raise ValueError("Element has no ID")
22 | raise ValueError("Element has no ID")
23 |
24 |
def has_series(event) -> bool:
    """
    Check if the event belongs to a series.

    :param event:
    :type event: dict
    :rtype: bool
    """
    # Wrap in bool() so the function honors its annotation instead of leaking
    # the series ID string to the caller.
    return bool("series" in event and event["series"]["id"])
33 | return "series" in event and event["series"]["id"]
34 |
35 |
def published_to_oaipmh(event) -> bool:
    """
    Check if an event was published to at least one OAIPMH repository.

    :param event:
    :type event: dict
    :rtype: bool
    """

    # A publication counts as OAIPMH when its ID contains "oaipmh".
    return any("oaipmh" in publication["id"] for publication in event["publications"])
47 | return False
48 |
49 |
def get_oaipmh_publications(event):
    """
    Get all publications to an OAIPMH repository for an event.

    :param event: The event
    :type event: dict
    :return: OAIPMH publications
    :rtype: list
    """

    oaipmh_publications = []
    for publication in event["publications"]:
        # A publication counts as OAIPMH when its ID contains "oaipmh".
        if "oaipmh" in publication["id"]:
            oaipmh_publications.append((publication["id"], publication["url"]))
    return oaipmh_publications
62 |
--------------------------------------------------------------------------------
/generate-maven-notices/parse-licenses.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | from sys import argv
3 |
4 | if len(argv) < 2:
5 | print("Usage:", "python", argv[0], "")
6 | exit(1)
7 |
8 | if "rest-test-environment" in argv[1] or "hello-world-impl" in argv[1]:
9 | exit(0)
10 |
11 | with Path(argv[1]).open("r") as f:
12 | line = f.readline()
13 | classifier = False
14 | while "" not in line:
15 | line = f.readline()
16 | if "") + 1 : line.rfind("<")]
19 | if gid == "org.opencastproject":
20 | continue
21 | line = f.readline()
22 | if "') + 2 : line.rfind("") + 1 : line.find("")]
26 | line = f.readline()
27 | line = f.readline()
28 | if "Classifier" in line or classifier:
29 | classifier = True
30 | line = f.readline()
31 | line = f.readline()
32 | if "")[:-1]:
35 | start = l.find('">') + 2
36 | if l[start:]:
37 | licenses.append(l[start:])
38 | lic = "-".join(licenses)
39 | else:
40 | lic = line[line.find(">") + 1 : line.find("")]
41 | print(" " + "{:<38}".format(gid) + " " + "{:<49}".format(aid) + lic)
42 |
--------------------------------------------------------------------------------
/lib/rest_requests/tenant_requests.py:
--------------------------------------------------------------------------------
1 | from rest_requests.basic_requests import get_tenants
2 |
3 |
def filter_tenants(chosen_tenants, excluded_tenants, progress_printer, url_builder, digest_login):
    """
    Filter tenants by either choosing or excluding them (both at the same time isn't possible).

    :param chosen_tenants: The chosen tenants
    :type chosen_tenants: list or None
    :param excluded_tenants: The excluded tenants
    :type excluded_tenants: list or None
    :param progress_printer: The progress printer
    :type progress_printer: ProgressPrinter
    :param url_builder: The URL builder
    :type url_builder: URLBuilder
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :return: A sorted list with tenants to check
    :rtype: list
    """

    if chosen_tenants:
        tenants = chosen_tenants
    else:
        # No explicit choice, so fetch the full tenant list from the server.
        progress_printer.print_message("Requesting tenants... ", 0, False, True)
        tenants = get_tenants(url_builder.get_base_url(None), digest_login)
        progress_printer.print_message("{} tenant(s) received.\n".format(len(tenants)), 0, True, False)

    if excluded_tenants:
        progress_printer.print_message("Filtering tenants... ", 0, False, True)
        tenants = [candidate for candidate in tenants if candidate not in excluded_tenants]
        progress_printer.print_message("{} tenant(s) remain.\n".format(len(tenants)), 0, True, False)

    # In-place sort, matching the original behavior.
    tenants.sort()
    return tenants
37 |
--------------------------------------------------------------------------------
/visualize-workflow/prep-workflow.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import json
4 | import glob
5 |
6 |
def main():
    """Convert every workflow JSON dump in the current directory to a .dat file."""
    for file_num, json_file in enumerate(glob.glob("*.json"), start=1):
        prep(json_file, file_num)
12 |
13 |
def prep(wFfile, fileNum):
    """
    Convert one workflow-instance JSON dump into a gnuplot-friendly data file.

    Reads the operations of the workflow in ``wFfile`` and writes one line per
    operation ('workflow<fileNum>.dat'): id, state, duration in seconds,
    percentage of the total runtime and a formatted time.

    :param wFfile: Path of the workflow JSON file to read
    :param fileNum: Number used for the output file name
    """
    with open(wFfile, 'r') as f:
        instance = json.load(f)

    operations = []
    runtime = 0
    for operation in instance.get('workflow', {}).get('operations', {})\
            .get('operation', []):
        op_id = operation.get('id')  # renamed from `id` to avoid shadowing the builtin
        state = operation.get('state')
        started = operation.get('started')
        completed = operation.get('completed')
        try:
            duration = (int(completed) - int(started)) / 1000.0
            runtime += duration
        except TypeError:
            # Operation never ran: 'started'/'completed' are missing or None.
            duration = 0

        operations.append((op_id, state, duration))

    with open('workflow' + str(fileNum) + '.dat', 'w') as f:
        for op in operations:
            # Guard against a workflow where no operation has a duration;
            # previously this raised ZeroDivisionError.
            percent = op[2] * 100.0 / runtime if runtime else 0.0
            seconds = op[2]
            hours = int(seconds / 3600)
            seconds = seconds - hours * 3600
            minutes = int(seconds / 60)
            seconds = seconds - minutes * 60
            if hours:
                time = '%02d:%02d:%02d' % (hours, minutes, seconds)
            else:
                time = '%02d:%02d' % (minutes, seconds)
            f.write('%-20s %-12s %8.03f %8.03f %-10s\n' %
                    (op[0], op[1], op[2], percent, time))
48 |
49 |
# Script entry point: process all *.json workflow dumps in the working directory.
if __name__ == '__main__':
    main()
52 |
--------------------------------------------------------------------------------
/lti-test-consumer/lticonsumer.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, render_template_string
2 | from lti.tool_consumer import ToolConsumer
3 |
4 |
# Credentials and launch endpoint of the LTI tool under test.
CONSUMER_KEY = 'CONSUMERKEY'
CONSUMER_SECRET = 'CONSUMERSECRET'
LAUNCH_URL = 'http://localhost:8080/lti'


app = Flask(__name__)

# NOTE(review): this template looks garbled - the HTML markup (and original
# lines 17-25) appears to have been stripped, probably by an HTML-unaware
# export.  It presumably contained a self-submitting <form> posting
# `launch_data` to `launch_url`; restore it from version control before use.
TPL = '''

LTI Test Consumer




'''
29 |
@app.route('/')
def consumer(name=None):
    """Sign an LTI launch request and render it as a launch form.

    :param name: Unused; kept for backward compatibility of the route handler.
    :return: The rendered HTML page containing the signed launch data.
    """
    consumer = ToolConsumer(
        consumer_key=CONSUMER_KEY,
        consumer_secret=CONSUMER_SECRET,
        launch_url=LAUNCH_URL,
        params={
            'lti_message_type': 'basic-lti-launch-request',
            'lti_version': 'LTI-1p0',
            'lis_person_name_given': 'Lars',
            'lis_person_name_family': 'Kiesow',
            'resource_link_id': 37865823,
            'user_id': 'lkiesow',
            'roles': 'Instructor'
        }
    )
    # Generate the signed launch data only once: each call creates a fresh
    # OAuth signature (nonce/timestamp), so the previously printed data did
    # not match the data actually rendered into the form.
    launch_data = consumer.generate_launch_data()
    print(launch_data)
    return render_template_string(TPL,
                                  launch_data=launch_data,
                                  launch_url=consumer.launch_url)
50 |
51 |
# Run the Flask development server when executed directly.
if __name__ == '__main__':
    app.run()
54 |
--------------------------------------------------------------------------------
/check_data_integrity/check/args/check_settings.py:
--------------------------------------------------------------------------------
1 | """ This module represents the check-argument. """
2 | from utility.enum import enum
3 |
# The data categories the integrity check can run on.  Note that the OAIPMH
# check implicitly enables the DC and ACL checks (see CheckSettings).
Checks = enum(
    DC="dc",
    ACL="acl",
    DC_ACL="dc_acl",
    OAIPMH="oaipmh",
    ALL="all"
)
11 |
12 |
class CheckSettings:
    """
    Encapsulates which data is supposed to be checked: ACLs, Dublin Core
    catalogs, OAIPMH or everything.

    Because checking OAIPMH requires the ACLs and Dublin Core catalogs of
    series and events, those are always requested (and checked) whenever the
    OAIPMH check is enabled.
    """

    def __init__(self, check):
        # Default to checking everything if no explicit choice was made.
        self.check = check if check else Checks.ALL

    def check_acl(self):
        """
        :return: whether ACLs are supposed to be requested and checked for errors.
        :rtype: bool
        """
        return self.check in (Checks.ACL, Checks.ALL, Checks.OAIPMH, Checks.DC_ACL)

    def check_dc(self):
        """
        :return: whether Dublin Core catalogs are supposed to be requested and checked for errors.
        :rtype: bool
        """
        return self.check in (Checks.DC, Checks.ALL, Checks.OAIPMH, Checks.DC_ACL)

    def check_oaipmh(self):
        """
        :return: whether OAIPMH is supposed to be checked for errors.
        :rtype: bool
        """
        return self.check in (Checks.ALL, Checks.OAIPMH)
50 |
--------------------------------------------------------------------------------
/delete_dead_distribution_artefacts/util/count.py:
--------------------------------------------------------------------------------
def count_media_packages(distribution_artefacts):
    """
    Count the media packages over all tenants.

    :param distribution_artefacts: Distribution artefacts mapped to media packages and tenants
    :type distribution_artefacts: dict
    :return: Amount of media packages
    :rtype: int
    """
    return sum(len(media_packages) for media_packages in distribution_artefacts.values())
11 |
12 |
def count_distribution_artefacts(distribution_artefacts):
    """
    Count the distribution artefacts over all media packages of all tenants.

    :param distribution_artefacts: Distribution artefacts mapped to media packages and tenants
    :type distribution_artefacts: dict
    :return: Amount of distribution artefacts
    :rtype: int
    """
    return sum(len(artefacts)
               for media_packages in distribution_artefacts.values()
               for artefacts in media_packages.values())
24 |
25 |
def get_max_path_len(distribution_artefacts):
    """
    Get the maximum length of the paths to the distribution artefacts.

    :param distribution_artefacts: Distribution artefacts mapped to media packages and tenants
    :type distribution_artefacts: dict
    :return: Max path length
    :rtype: int
    :raise ValueError: if there are no artefact paths at all
    """
    # A flat generator over all paths replaces the previous
    # max(max(max([...]))) construction: that compared whole nested *lists*
    # lexicographically, which only considers the first elements and could
    # therefore miss the globally longest path.
    return max(len(path)
               for media_packages in distribution_artefacts.values()
               for paths in media_packages.values()
               for path in paths)
39 |
--------------------------------------------------------------------------------
/lib/rest_requests/assetmanager_requests.py:
--------------------------------------------------------------------------------
1 | from rest_requests.request import get_request, NOT_FOUND
2 | from rest_requests.get_response_content import get_string_content
3 | from rest_requests.request_error import RequestError
4 |
5 |
def get_media_package(base_url, digest_login, mp_id):
    """
    Fetch a media package definition from the asset manager.

    :param base_url: The URL for the request
    :type base_url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :param mp_id: The ID of the media package
    :type mp_id: str
    :return: A media package definition in XML format
    :rtype str:
    :raise RequestError:
    """
    url = '{}/assets/episode/{}'.format(base_url, mp_id)
    response = get_request(url, digest_login, "media package")
    return get_string_content(response)
28 |
29 |
def media_package_exists(base_url, digest_login, mp_id):
    """
    Check whether a media package exists in the asset manager.

    :param base_url: The URL for the request
    :type base_url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :param mp_id: The ID of the media package
    :type mp_id: str
    :return: true if it exists, false otherwise
    :rtype: bool
    :raise RequestError: for any request failure other than 404
    """
    try:
        get_media_package(base_url, digest_login, mp_id)
        return True
    except RequestError as e:
        # A 404 simply means "does not exist"; everything else is re-raised.
        if e.has_status_code() and e.get_status_code() == NOT_FOUND:
            return False
        raise
54 |
--------------------------------------------------------------------------------
/lib/data_handling/transform_acl.py:
--------------------------------------------------------------------------------
1 | from xml.etree import ElementTree
2 |
3 |
# XML namespaces: 'acl' is the XACML policy namespace the input documents use.
namespaces = {'acl': 'urn:oasis:names:tc:xacml:2.0:policy:schema:os', 'root': 'ns0'}


def transform_acl(xacml):
    """
    Transform an XACML document into the shorter XML format understood by the
    series service.

    :param xacml: The XACML
    :type xacml: str
    :return: More minimal ACL in XML format
    :rtype: str
    """
    policy = ElementTree.fromstring(xacml)

    # NOTE(review): this looks like it once held an '<?xml ...?>' declaration
    # that got lost; the empty prefix is kept as-is to preserve behavior.
    xml_declaration = ''

    acl_root = ElementTree.Element("acl")
    acl_root.set('xmlns', namespaces['acl'])

    for rule in policy.findall(".//acl:Rule", namespaces):
        if rule.get("RuleId") == "DenyRule":
            # The global deny rule is not part of the minimal representation.
            continue

        action = rule.find(".//acl:ActionMatch//acl:AttributeValue", namespaces).text
        role = rule.find(".//acl:Condition/acl:Apply/acl:AttributeValue", namespaces).text
        permitted = rule.get("Effect") == "Permit"

        ace = ElementTree.SubElement(acl_root, "ace")
        ElementTree.SubElement(ace, "action").text = action
        ElementTree.SubElement(ace, "allow").text = str(permitted).lower()
        ElementTree.SubElement(ace, "role").text = role

    return xml_declaration + ElementTree.tostring(acl_root, encoding="unicode")
51 |
--------------------------------------------------------------------------------
/lib/rest_requests/stream_security_requests.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from datetime import timedelta
3 |
4 | from rest_requests.get_response_content import get_string_content
5 | from rest_requests.request import get_request
6 |
7 |
def sign_url(digest_login, server_url, url_to_sign):
    """
    Get a URL signed for 2 hours.

    :param digest_login: The login credentials for digest auth
    :type digest_login: DigestLogin
    :param server_url: The server URL
    :type server_url: str
    :param url_to_sign: The url to be signed
    :type url_to_sign: str
    :return: The signed URL
    :rtype: str
    :raise RequestError:
    """
    # The signature is valid until two hours from now (Unix timestamp).
    valid_until = datetime.now() + timedelta(hours=2)
    expiry_timestamp = int(valid_until.timestamp())

    url = '{}/signing/sign?baseUrl={}&validUntil={}'.format(server_url, url_to_sign, expiry_timestamp)
    return get_string_content(get_request(url, digest_login, "signed URL"))
31 |
32 |
def accepts_url(digest_login, server_url, url_to_sign):
    """
    Check whether a URL can be signed by the stream security service.

    :param digest_login: The login credentials for digest auth
    :type digest_login: DigestLogin
    :param server_url: The server URL
    :type server_url: str
    :param url_to_sign: The url to check
    :type url_to_sign: str
    :return: If the URL can be signed.
    :rtype: bool
    :raise RequestError:
    """
    check_url = '{}/signing/accepts?baseUrl={}'.format(server_url, url_to_sign)
    response = get_request(check_url, digest_login, "URL signing check")
    return "true" == get_string_content(response)
51 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/opencast/staticfile.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | from os import unlink, mkdir
4 | from os.path import join, abspath, isfile, isdir
5 |
6 |
def reupload_static_file(source_opencast_presentation_client, target_opencast_presentation_client,
                         file_url, filename, tmp_dir, file_id):
    """
    Download a static file from the source system, upload it to the target
    system and remove the temporary copy again.

    :return: The target system's response for the uploaded file
    :raise Exception: re-raises any download/upload failure after cleanup
    """
    # The temporary copy is stored under the file ID (see download_static_file),
    # so clean-up must use file_id as well - the error path previously looked
    # for `filename` and could leave the downloaded file behind.
    tmp_file = join(abspath(tmp_dir), file_id)
    try:
        download_static_file(source_opencast_presentation_client, file_url, tmp_dir, file_id)
        target_filename = upload_static_file(target_opencast_presentation_client, filename, tmp_dir, file_id)
        unlink(tmp_file)
        return target_filename
    except Exception:
        if isfile(tmp_file):
            unlink(tmp_file)
        raise
18 |
19 |
def download_static_file(opencast_presentation_client, file_url, tmp_dir, file_id):
    """
    Stream a static file into <tmp_dir>/<file_id>, creating tmp_dir if needed.

    If the target file already exists, the download is skipped.
    """
    if tmp_dir and not isdir(abspath(tmp_dir)):
        mkdir(abspath(tmp_dir))

    output_path = join(abspath(tmp_dir), file_id)
    if isfile(output_path):
        print(f'File "{output_path}" exists. Skip download.')
        return

    with open(output_path, mode='wb') as output_file:
        with opencast_presentation_client.stream('GET', url=file_url) as response:
            # Stream in 10MB chunks to keep memory usage bounded.
            for chunk in response.iter_bytes(chunk_size=1024 * 1024 * 10):
                output_file.write(chunk)
32 |
33 |
def upload_static_file(opencast_presentation_client, filename, tmp_dir, file_id):
    """
    Upload the local file <tmp_dir>/<file_id> to the static file endpoint
    under the given filename.

    :return: The response body of the upload request
    :raise HTTPError: if the upload request fails
    """
    url = '/staticfiles'
    source_file_path = join(abspath(tmp_dir), file_id)
    # Use a context manager so the file handle is always closed
    # (it previously leaked).
    with open(source_file_path, 'rb') as source_file:
        files = {'BODY': (filename, source_file)}
        response = opencast_presentation_client.post(url, files=files)
    response.raise_for_status()
    return response.text
41 |
--------------------------------------------------------------------------------
/find-and-delete-empty-series/delete_empty_series.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import time
4 |
5 | import config
6 |
7 | sys.path.append(os.path.join(os.path.abspath('..'), "lib"))
8 |
9 | from args.digest_login import DigestLogin
10 | from input_output.read_file import read_list_from_file, write_list_to_file
11 | from rest_requests.request_error import RequestError
12 | from rest_requests.series_requests import delete_series, series_has_events
13 |
14 |
def delete_empty_series(admin_url, digest_login, empty_series):
    """
    Delete every series from the given list that (still) has no events.

    Prints progress every 100 series and pauses briefly to avoid hammering
    the server.

    :return: The IDs of the series that were actually deleted
    :rtype: list
    """
    deleted_series = []
    for index, series_id in enumerate(empty_series, start=1):
        try:
            if series_has_events(admin_url, digest_login, series_id):
                print("Series {} has events, not deleting!".format(series_id))
                continue
            delete_series(admin_url, digest_login, series_id)
            deleted_series.append(series_id)
        except RequestError as e:
            print("Series {} could not be deleted: {}".format(series_id, e))
        if index % 100 == 0:
            print("Checked {} series.".format(index))
            time.sleep(1)
    return deleted_series
30 |
31 |
def main():
    """Read the previously collected empty series and delete those still empty."""
    digest_login = DigestLogin(user=config.digest_user, password=config.digest_pw)
    empty_series = read_list_from_file("empty_series.txt")
    print("{} empty series found.".format(len(empty_series)))

    deleted = delete_empty_series(config.admin_url, digest_login, empty_series)
    write_list_to_file("deleted_series.txt", deleted)
    print("Deletion finished. {} series were successfully deleted.".format(len(deleted)))
40 |
41 |
# Script entry point: abort gracefully on Ctrl+C.
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print("\nAborting process.")
        sys.exit(0)
47 | sys.exit(0)
48 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/opencast/search.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | from lxml.etree import fromstring
4 |
5 | from opencast.mediapackage import Mediapackage
6 |
7 | namespaces = {
8 | "mp": "http://mediapackage.opencastproject.org",
9 | "ns2": "http://search.opencastproject.org",
10 | "ns3": "http://org.opencastproject.security",
11 | }
12 |
13 |
def get_mediapackage(opencast_presentation_client, mediapackage_id):
    """
    Look up a single media package in the search index.

    :return: A Mediapackage instance, or None if the ID is not found
    """
    response = opencast_presentation_client.get('/search/episode.xml',
                                                params={'id': mediapackage_id})
    response.raise_for_status()
    search_result_element = fromstring(response.text.encode('utf-8'))
    mediapackage_str = search_result_element.findtext('.//ns2:ocMediapackage', namespaces=namespaces)
    if not mediapackage_str:
        return None
    return Mediapackage(mediapackage_str.encode('utf-8'))
25 |
26 |
def get_mediapackages(opencast_presentation_client, sort="MEDIA_PACKAGE_ID"):
    """
    Generator yielding every media package in the search index, fetched in
    batches of 10 until an empty page is returned.
    """
    url = '/search/episode.xml'
    batch_size = 10
    offset = 0
    while True:
        params = {
            'admin': True,
            'sort': sort,
            'limit': batch_size,
            'offset': offset,
        }
        response = opencast_presentation_client.get(url, params=params)
        response.raise_for_status()
        search_results = fromstring(response.text.encode('utf-8'))
        items_found = 0
        for mediapackage in search_results.iterfind('.//ns2:ocMediapackage', namespaces=namespaces):
            items_found += 1
            yield Mediapackage(mediapackage.text.encode('utf-8'))
        if items_found == 0:
            # An empty page means we have paged past the last result.
            break
        offset += items_found
49 |
--------------------------------------------------------------------------------
/lib/rest_requests/oaipmh_requests.py:
--------------------------------------------------------------------------------
1 | from data_handling.elements import get_id
2 | from data_handling.types import ElementDescription
3 | from rest_requests.get_response_content import get_xml_content
4 | from rest_requests.request import get_request
5 |
6 |
def get_oaipmh_record(event, repository_url, repository, digest_login, base_url):
    """
    Get the OAIPMH record for a given event from the given repository.

    :param event: The event
    :type event: dict
    :param repository_url: The URL to the OAIPMH repository
    :type repository_url: str
    :param repository: The OAIPMH repository ID
    :type repository: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :param base_url: The URL of the opencast instance
    :type base_url: str
    :return: The OAIPMH record in XML format
    :rtype: ElementTree.Element
    :raise RequestError:
    """
    # Drop any query part the configured repository URL might already carry.
    repository_path = repository_url.split('?')[0]
    query = '?verb=GetRecord&metadataPrefix=matterhorn-inlined&identifier={}'.format(get_id(event))

    if repository_url.startswith("http"):
        # absolute url
        url = repository_path + query
    else:
        # relative url
        url = base_url + repository_path + query

    response = get_request(url, digest_login, ElementDescription.OAIPMH.format(repository).singular())
    return get_xml_content(response)
42 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | sudo: false
2 |
3 | install: skip
4 |
5 | jobs:
6 | include:
7 | - stage: test
8 | env: name=ingest
9 | script:
10 | - shellcheck ingest/*.sh
11 |
12 | - stage: test
13 | env: name=series
14 | script:
15 | - shellcheck create-series/create-series.sh
16 |
17 | - stage: test
18 | env: name=schedule
19 | script:
20 | - shellcheck schedule-now/*.sh
21 |
22 | - stage: test
23 | env: name=changelog
24 | language: python
25 | python: "3.6"
26 | install:
27 | - pip install flake8
28 | script:
29 | - flake8 release-management/create-changelog/changelog.py
30 |
31 | - stage: test
32 | env: name=csv-export
33 | language: python
34 | python:
35 | - "2.7"
36 | - "3.6"
37 | install:
38 | - pip install flake8
39 | script:
40 | - shellcheck csv-export/getEvents.sh
41 | - flake8 csv-export/events2csv.py
42 |
43 | - stage: test
44 | env: name=visualize-workflow
45 | language: python
46 | python:
47 | - "3.6"
48 | install:
49 | - pip install flake8
50 | script:
51 | - shellcheck visualize-workflow/*.sh
52 | - flake8 visualize-workflow/*.py
53 | - (! grep -rn ' ' visualize-workflow/)
54 | - (! grep -rn ' $' visualize-workflow/)
55 |
56 | - stage: test
57 | env: name=start-workflow-from-archive
58 | language: python
59 | python:
60 | - "3.6"
61 | install:
62 | - pip install flake8
63 | script:
64 | - shellcheck start-workflow-from-archive/*.sh
65 | - flake8 start-workflow-from-archive/*.py
66 |
67 | - stage: test
68 | env: name=translation-progress
69 | script:
70 | - shellcheck release-management/translation-progress/translation-progress.sh
71 |
--------------------------------------------------------------------------------
/lib/data_handling/asset_util.py:
--------------------------------------------------------------------------------
1 | from data_handling.flavor_matcher import matches_flavor
2 | from data_handling.parse_manifest import Asset
3 |
4 |
def asset_with_tags(old_asset, new_tags, keep_tags=None):
    """
    Build a copy of old_asset tagged with new_tags plus any of its old tags
    that are listed in keep_tags.

    :return: The new Asset, or None if the asset could not be rebuilt
    """
    try:
        old_tags = [] if not (old_asset.tags and keep_tags) else [tag for tag in old_asset.tags if tag in keep_tags]
        tags = new_tags + old_tags
        return Asset(id=old_asset.id, flavor=old_asset.flavor, tags=tags, url=old_asset.url,
                     filename=old_asset.filename, mimetype=old_asset.mimetype, path=old_asset.path)
    except Exception:
        # Previously a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; the None-on-failure contract is kept.
        return None
13 |
14 |
def asset_with_flavor(old_asset, flavor):
    """Copy an asset, replacing only its flavor."""
    return Asset(id=old_asset.id, flavor=flavor, tags=old_asset.tags, url=old_asset.url, filename=old_asset.filename,
                 mimetype=old_asset.mimetype, path=old_asset.path)
18 |
19 |
def asset_with_tags_and_flavor(old_asset, tags, flavor):
    """Copy an asset, replacing both its tags and its flavor."""
    return Asset(id=old_asset.id, flavor=flavor, tags=tags, url=old_asset.url, filename=old_asset.filename,
                 mimetype=old_asset.mimetype, path=old_asset.path)
23 |
24 |
def filter_by_flavor(assets, flavor):
    """Keep only the assets whose flavor matches the given flavor."""
    wanted = [flavor]
    return [candidate for candidate in assets if matches_flavor(candidate.flavor, wanted)]
27 |
28 |
def set_tags(assets, tags, keep_tags=None):
    """Return copies of all assets with their tags replaced by `tags` (plus kept old tags)."""
    return [asset_with_tags(candidate, tags, keep_tags=keep_tags) for candidate in assets]
31 |
32 |
def add_tag(assets, tag):
    """Return copies of all assets with `tag` added while keeping their existing tags."""
    return [asset_with_tags(candidate, [tag], keep_tags=candidate.tags) for candidate in assets]
35 |
36 |
def set_flavor(assets, flavor):
    """Return copies of all assets with their flavor replaced."""
    return [asset_with_flavor(candidate, flavor) for candidate in assets]
39 |
40 |
def is_single_stream(tracks):
    """
    Check whether the tracks represent a single stream, i.e. all tracks share
    the same flavor. An empty track list counts as a single stream.
    """
    if len(tracks) == 1:
        return True
    reference_flavor = tracks[0].flavor if tracks else None
    return all(track.flavor == reference_flavor for track in tracks)
45 |
46 |
def matches_mimetype(mimetype_a, mimetype_b):
    # Mimetypes share the "type/subtype" structure of flavors, so flavor
    # matching (including its wildcard handling) is reused here.
    return matches_flavor(mimetype_a, [mimetype_b])
49 |
--------------------------------------------------------------------------------
/lib/data_handling/errors.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 |
class SeriesError(Exception):
    """
    Represents all errors that can hinder the recovery of a series.
    Carries only an error message (and optionally a chained cause).
    """
    pass
10 |
11 |
class MediaPackageError(Exception):
    """
    Represents all errors that can hinder the recovery of a media package.
    Carries only an error message (and optionally a chained cause).
    """
    pass
18 |
19 |
def optional_series_error(error, ignore_errors, exception=None):
    """
    Print and log a warning if errors are ignored, otherwise raise a SeriesError.

    :param error: The message that should be printed as a warning or raised as an error
    :type error: str
    :param ignore_errors: Whether to print a warning instead of raising an error
    :type ignore_errors: bool
    :param exception: The exception that caused the error
    :type exception: Exception
    :raise SeriesError:
    """
    if not ignore_errors:
        raise SeriesError(error) from exception
    print("Warning: {}".format(error))
    logging.error(error, exc_info=True)
37 |
38 |
def optional_mp_error(error, ignore_errors, exception=None):
    """
    Print and log a warning if errors are ignored, otherwise raise a MediaPackageError.

    :param error: The message that should be printed as a warning or raised as an error
    :type error: str
    :param ignore_errors: Whether to print a warning instead of raising an error
    :type ignore_errors: bool
    :param exception: The exception that caused the error
    :type exception: Exception
    :raise MediaPackageError:
    """
    if not ignore_errors:
        raise MediaPackageError(error) from exception
    print("Warning: {}".format(error))
    logging.error(error, exc_info=True)
56 |
--------------------------------------------------------------------------------
/lib/rest_requests/file_requests.py:
--------------------------------------------------------------------------------
1 | from rest_requests.get_response_content import get_string_content
2 | from rest_requests.request import get_request
3 |
4 |
def get_file_as_string(digest_login, url):
    """
    Fetch the content of a file as a string.

    :param url: The url to the file
    :type url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :return: The file content
    :rtype: str
    :raise RequestError:
    """
    return get_string_content(get_request(url, digest_login, "file"))
20 |
21 |
def export_text_file(digest_login, url, target_file):
    """
    Request a text file and write its content to a local file.

    :param url: The url to the file
    :type url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :param target_file: The file to write into
    :type target_file: Path
    :raise RequestError:
    """
    text = get_file_as_string(digest_login, url)
    with open(target_file, 'w') as output_file:
        output_file.write(text)
39 |
40 |
def export_video_file(digest_login, url, target_file):
    """
    Request a video and stream it into a local file.

    :param url: The url to the file
    :type url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :param target_file: The file to write into
    :type target_file: Path
    :raise RequestError:
    """
    response = get_request(url, digest_login, "video", stream=True)

    with open(target_file, 'wb') as video_file:
        for chunk in response.iter_content(chunk_size=1024):
            # Skip keep-alive chunks, which arrive empty.
            if not chunk:
                continue
            video_file.write(chunk)
            video_file.flush()
61 |
--------------------------------------------------------------------------------
/recover_backup/workflow_example.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | recover-mp
7 | Recover a media package from backup
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
20 |
21 | *
22 | true
23 |
24 |
25 |
26 |
27 |
28 |
33 |
34 | false
35 | false
36 |
37 |
38 |
39 |
40 |
43 |
44 | */*
45 | +archive
46 |
47 |
48 |
49 |
50 |
53 |
54 | archive
55 |
56 |
57 |
58 |
59 |
60 |
61 |
--------------------------------------------------------------------------------
/lib/data_handling/types.py:
--------------------------------------------------------------------------------
1 | """
2 | This module defines the description for elements, assets and assettypes, mostly for building the error messages,
3 | as singular, plural and unknown/both.
4 | """
5 | from utility.enum import enum
6 |
7 |
class Description:
    """
    Holds the singular, plural and unknown/both wording for one element or
    asset type, mainly used for building error messages.
    """

    def __init__(self, singular, plural, unknown):
        self.singular_description = singular
        self.plural_description = plural
        self.unknown_description = unknown

    def format(self, *formatting_strings):
        """Apply str.format to all three descriptions in place; returns self for chaining."""
        for attr in ('singular_description', 'plural_description', 'unknown_description'):
            setattr(self, attr, getattr(self, attr).format(*formatting_strings))
        return self

    def singular(self):
        """The description for exactly one element."""
        return self.singular_description

    def plural(self):
        """The description for several elements."""
        return self.plural_description

    def unknown(self):
        """The description for an unknown amount of elements."""
        return self.unknown_description
28 | return self.unknown_description
29 |
30 |
31 | # Elements can be an event, a series or an OAIPMH record.
# Elements can be an event, a series or an OAIPMH record.  The OAIPMH
# descriptions carry a '{}' placeholder for the repository name, filled in
# via Description.format().
ElementDescription = enum(
    SERIES=Description("series", "series", "series"),
    EVENT=Description("event", "events", "event(s)"),
    OAIPMH=Description("OAIPMH record of repository {}", "OAIPMH records of repository {}",
                       "OAIPMH record(s) of repository {}")
)

# Assets belong to elements and can be either Dublin Core catalogs or ACLs.
AssetDescription = enum(
    DC=Description("Dublin Core catalog", "Dublin Core catalogs", "Dublin Core catalog(s)"),
    ACL=Description("ACL", "ACLs", "ACL(s)")
)

# Assets (ACL or Dublin Core catalogs) can either be of type episode, series or both/undefined.
AssetTypeDescription = enum(
    SERIES=Description("series", "series", "series"),
    EPISODE=Description("episode", "episode", "episode"),
    BOTH=Description("", "", "")
)
51 |
--------------------------------------------------------------------------------
/ingest/ingest.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import yaml
3 | import json
4 | import requests
5 | from requests.auth import HTTPBasicAuth
6 |
# Global configuration; populated from media.yml by load_config().
config = {}
8 |
9 |
def print_status(ok, title):
    """Print a colored '[ ok ]' or '[fail]' status line for the given title."""
    if ok:
        color, text = '\033[92m', ' ok '
    else:
        color, text = '\033[91m', 'fail'
    print(f' [{color}{text}\033[0m]: {title}')
14 |
15 |
def post(path, **kwargs):
    """POST to the configured Opencast server path with HTTP Basic auth."""
    server_config = config['server']
    auth = HTTPBasicAuth(server_config['username'], server_config['password'])
    return requests.post(f"{server_config['url']}{path}", auth=auth, **kwargs)
22 |
23 |
def load_config():
    """Load media.yml into the module-level config dict."""
    global config
    with open('media.yml', 'r') as config_file:
        config = yaml.safe_load(config_file)
28 |
29 |
def acl(name="public"):
    """Return the named ACL from the configuration as an ingest-ready JSON string."""
    access_control_entries = config['acl'][name]
    return json.dumps({'acl': {'ace': access_control_entries}})
32 |
33 |
def create_series():
    """Create all series defined in the configuration."""
    print('Creating series…')
    for series in config.get('series', []):
        # Default to the public ACL only when the series defines none.
        # (The previous check `not acl in series` tested for the *function
        # object* `acl` as a dict key, which is never present, so every
        # configured ACL was silently overwritten with "public".)
        if 'acl' not in series:
            series['acl'] = "public"
        series['acl'] = acl(series['acl'])
        r = post('/series/', data=series)
        print_status(r.ok, series["title"])
42 |
43 |
def create_episodes():
    """Ingest all episodes defined in the configuration."""
    print('Ingesting episodes…')
    for media in config.get('media', []):
        fields = []
        for field in media:
            for key, value in field.items():
                # ACL values are names referring to configured ACLs and must
                # be expanded to their JSON representation before ingest.
                payload = acl(value) if key == "acl" else value
                fields.append((key, (None, payload)))
        endpoint = '/ingest/addMediaPackage/' + config['server']['workflow']
        r = post(endpoint, files=fields)
        title = [entry[1][1] for entry in fields if entry[0] == "title"][0]
        print_status(r.ok, title)
        if not r.ok:
            print(r)
            print(r.text)
61 |
62 |
# Script entry point: read the configuration, then create series and episodes.
if __name__ == '__main__':
    load_config()
    create_series()
    create_episodes()
67 |
--------------------------------------------------------------------------------
/find-and-delete-empty-series/find_empty_series.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import time
4 |
5 | sys.path.append(os.path.join(os.path.abspath('..'), "lib"))
6 |
7 | from input_output.read_file import write_list_to_file
8 | from rest_requests.request_error import RequestError
9 | import config
10 | from args.digest_login import DigestLogin
11 | from rest_requests.basic_requests import get_series
12 | from data_handling.elements import get_id
13 | from rest_requests.series_requests import series_has_events
14 |
15 |
def find_empty_series(admin_url, digest_login):
    """
    Find all series without events on the given admin node.

    :param admin_url: URL of the admin node
    :type admin_url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :return: ids of empty series, ids of series that could not be checked
    :rtype: list, list
    """

    empty, failed = [], []
    all_series = get_series(admin_url, digest_login)
    print("Obtained {} series.".format(len(all_series)))

    for index, series in enumerate(all_series, start=1):
        series_id = get_id(series)
        try:
            if not series_has_events(admin_url, digest_login, series_id):
                empty.append(series_id)
        except RequestError as e:
            print("Series {} could not be checked: {}".format(series_id, e))
            failed.append(series_id)
        if index % 100 == 0:
            print("Checked {} series.".format(index))
            # Pause briefly every 100 series so we don't hammer the admin node.
            time.sleep(1)
    print("{} empty series, {} series could not be checked".format(len(empty), len(failed)))
    return empty, failed
36 |
37 |
def main():
    """Find empty series on the configured admin node and write the results to files."""
    login = DigestLogin(user=config.digest_user, password=config.digest_pw)
    empty_series, series_errors = find_empty_series(config.admin_url, login)
    print("Empty series: {}".format(len(empty_series)))

    # Only write result files that would have content.
    for path, ids in (("empty_series.txt", empty_series), ("series_errors.txt", series_errors)):
        if ids:
            write_list_to_file(path, ids)
    print("Check finished.")
48 |
49 |
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # Allow a clean Ctrl+C abort without a traceback.
        print("\nAborting process.")
        sys.exit(0)
56 |
--------------------------------------------------------------------------------
/lib/data_handling/compare_assets.py:
--------------------------------------------------------------------------------
1 | """
2 | This module provides functionality to compare assets (ACLs and Dublin Core catalogs).
3 | """
4 |
5 |
def compare_dc(dc1, dc2):
    """
    Compares two Dublin Core catalogs by comparing the tag and text of every child element.

    :param dc1: First Dublin Core catalog
    :type dc1: ElementTree.Element
    :param dc2: Second Dublin Core catalog
    :type dc2: ElementTree.Element
    :return: true if equal
    :rtype: bool
    """

    # NOTE: an Element without children is falsy, so a childless catalog and a
    # missing one compare as equal here — same truthiness semantics as before.
    if not dc1:
        return not dc2
    if not dc2:
        return False

    children1 = dc1.findall("*")
    children2 = dc2.findall("*")

    if len(children1) != len(children2):
        return False

    # All child elements must agree pairwise in both tag and text.
    return all(c1.tag == c2.tag and c1.text == c2.text
               for c1, c2 in zip(children1, children2))
45 |
46 |
def compare_acl(acl1, acl2):
    """
    Compares two ACLs by comparing the role, action and allow setting for every ACE.

    :param acl1: First ACL
    :type acl1: dict with role, action as key and allow as value
    :param acl2: Second ACL
    :type acl2: dict with role, action as key and allow as value
    :return: true if equal
    :rtype: bool
    """

    # Missing/empty ACLs are equal only to other missing/empty ACLs.
    if not acl1 or not acl2:
        return bool(not acl1 and not acl2)

    if len(acl1) != len(acl2):
        return False

    # Same size, so checking one direction is sufficient: every key of acl1
    # must exist in acl2 with the same allow value.
    return all(key in acl2 and acl1[key] == acl2[key] for key in acl1)
80 |
--------------------------------------------------------------------------------
/delete_dead_distribution_artefacts/input/present_dead_distribution_artefacts.py:
--------------------------------------------------------------------------------
1 | from input_output.input import get_configurable_answer
2 | from util.count import get_max_path_len
3 | from utility.enum import enum
4 |
# Possible user answers when presenting dead distribution artefacts:
# ask per media package, delete everything without asking, or quit.
PresentAnswer = enum(
    ASK="a",
    DELETE="d",
    QUIT="q"
)
10 |
11 |
def present_dead_distribution_artefacts(dead_distribution_artefacts, level=0):
    """
    Present the distribution artefacts that can be deleted to the user.

    :param dead_distribution_artefacts: The distribution artefacts that can be deleted.
    :type dead_distribution_artefacts: dict
    :param level: The level to indent the message to (default: 0)
    :type level: int
    :return: The chosen answer, one of the PresentAnswer values
    :rtype: str
    """

    max_path_len = get_max_path_len(dead_distribution_artefacts)

    print(" Tenant | Media package | Distribution artefacts")
    print(" {}".format("-"*(max_path_len + 56)))
    # Removed a leftover debug expression here (a `max(...)` call whose result
    # was discarded) — it had no effect on the output.

    for tenant in dead_distribution_artefacts.keys():
        for mp_count, media_package in enumerate(dead_distribution_artefacts[tenant]):

            for dist_count, distribution_artefact in enumerate(dead_distribution_artefacts[tenant][media_package]):

                # Print tenant/media package only on their first row so the
                # table reads as grouped output.
                tenant_str = (tenant if (mp_count == 0 and dist_count == 0) else "")
                media_package_str = (media_package if dist_count == 0 else "")

                print("{:>15} | {:>36} | {}".format(tenant_str, media_package_str, distribution_artefact))

    print()

    long_descriptions = ["asking for each media package",
                         "deleting all without asking",
                         "quitting the script"]
    short_descriptions = ["ask", "delete", "quit"]
    # Use the PresentAnswer constants instead of repeating their literals.
    options = [PresentAnswer.ASK, PresentAnswer.DELETE, PresentAnswer.QUIT]

    question = "Do you wish to delete all or be asked for each media package?"

    answer = get_configurable_answer(options, short_descriptions, long_descriptions, question, level)
    return answer
50 |
--------------------------------------------------------------------------------
/schedule-now/schedule-now.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Schedule a short test event on an Opencast server for the configured
# capture agent, starting START_MIN minutes from now and lasting END_MIN minutes.

CAPTURE_AGENT="pyca"
HOST="http://localhost:8080"
USER="opencast_system_account"
PASSWORD="CHANGE_ME"

set -eux

# Either no arguments (defaults: start in 1 minute, 2 minutes long)
# or exactly two: start offset and duration in minutes.
if [ "$#" -eq 0 ]; then
    START_MIN=1
    END_MIN=2
elif [ "$#" -eq 2 ]; then
    START_MIN="${1}"
    END_MIN="${2}"
else
    echo "Usage: ${0} [start_min end_min]"
    # Bug fix: abort on a wrong argument count instead of falling through with
    # START_MIN/END_MIN unset, which `set -u` would turn into a confusing
    # error further down.
    exit 1
fi
19 |
TMP_MP="$(mktemp)"
TMP_DC="$(mktemp)"
# Event start/end as UTC W3C-DTF timestamps relative to now.
START="$(date -d "${START_MIN} min" --utc +%Y-%m-%dT%H:%MZ)"
END_CALC=$((START_MIN + END_MIN)) #We want the end time to be start time + duration
END="$(date -d "${END_CALC} min" --utc +%Y-%m-%dT%H:%MZ)"

# Write the Dublin Core catalog describing the scheduled event.
# NOTE(review): this catalog template appears truncated in this copy of the
# script — verify it against a known-good ingest script before relying on it.
echo '

demo
demo
'"${START}"'
start='"${START}"'; end='"${END}"'; scheme=W3C-DTF;
demo
demo
demo
'${CAPTURE_AGENT}'
Demo event
' > "${TMP_DC}"

# Create media package
curl -f --digest -u ${USER}:${PASSWORD} -H "X-Requested-Auth: Digest" \
    "${HOST}/ingest/createMediaPackage" -o "${TMP_MP}"

# Add DC catalog
curl -f --digest -u ${USER}:${PASSWORD} -H "X-Requested-Auth: Digest" \
    "${HOST}/ingest/addDCCatalog" -F "mediaPackage=<${TMP_MP}" \
    -F "dublinCore=<${TMP_DC}" -o "${TMP_MP}"

# Schedule the event for capture.
curl -v -i --digest -u ${USER}:${PASSWORD} \
    -H "X-Requested-Auth: Digest" \
    "${HOST}/ingest/schedule/fast" \
    -F "mediaPackage=<${TMP_MP}"

# Clean up temporary files.
rm -f "${TMP_MP}" "${TMP_DC}"
56 |
--------------------------------------------------------------------------------
/check_data_integrity/fix/fixer/series_dc_of_event_fixer.py:
--------------------------------------------------------------------------------
1 | from fix.fixer.fixer import Fixer
2 | from fix.io.results_parser import FixableError
3 | from fix.workflows.workflow import get_workflow_definition
4 | from rest_requests.assetmanager_requests import get_media_package
5 | from rest_requests.workflow_requests import start_workflow
6 |
7 |
class SeriesDCOfEventFixer(Fixer):
    """
    Fixer for events whose series Dublin Core catalog is missing or out of date.

    The fix consists of starting a workflow on the event that (re)sets the
    series Dublin Core catalog from the series service.
    """

    def __init__(self):
        """Load the workflow definition used to perform the fix."""
        self.workflow_definition = get_workflow_definition("add_or_update_series_dc.xml")

    def fix(self, opencast_url, digest_login, event_id):
        """
        Fix the given event by starting the fix workflow on its media package.

        :param opencast_url: URL to opencast instance
        :type opencast_url: str
        :param digest_login: User and password for digest authentication
        :type digest_login: DigestLogin
        :param event_id: ID of event to be fixed
        :type event_id: str
        """
        media_package = get_media_package(opencast_url, digest_login, event_id)
        start_workflow(opencast_url, digest_login, self.workflow_definition, media_package)

    @staticmethod
    def get_errors():
        """
        Return which errors this fixer can fix.

        :return: A list of errors this fixer can fix
        :rtype: list
        """
        return [FixableError.EVENTS_MISSING_SERIES_DC, FixableError.EVENTS_NONEQUAL_SERIES_DC]

    @staticmethod
    def get_fix_description():
        """
        Return a description of what this fixer does to fix inconsistent data.

        :return: Description of what this fixer does.
        :rtype: str
        """
        return "(re)setting series Dublin Core catalog of event from series service"
55 |
--------------------------------------------------------------------------------
/start-workflow-from-archive-multitenancy/README.md:
--------------------------------------------------------------------------------
1 | # Script to start a workflow for multiple events belonging to different tenants
2 |
3 | With this script, you can start workflows for multiple events belonging to different tenants, if the tenant-specific
4 | Admin URLs follow a pattern. To use this, you need to put the event ids into files named after the tenant they belong
5 | to, one id per line.
6 | Currently, this script doesn't support workflow parameters. In contrast to start-workflow-from-archive, this script uses
7 | /admin-ng/tasks/new to start the workflow, not workflow/start.
8 |
9 | ## How to Use
10 |
11 | ### Configuration
12 |
13 | First you need to configure the script in `config.py`:
14 |
15 | | Configuration Key | Description | Default/Example |
16 | | :---------------- | :---------------------------------------------------------- | :---------------------- |
17 | | `url_pattern` | The pattern for the tenant-specific URLs to the admin node | https://{}.opencast.com |
18 | | `digest_user` | The user name of the digest user | opencast_system_account |
19 | | `digest_pw` | The password of the digest user | CHANGE_ME |
20 |
21 | ### Usage
22 |
23 | The script can be called with the following parameters:
24 |
25 | `main.py -w WORKFLOW_DEFINITION -d DIRECTORY`
26 |
27 | | Short Option | Long Option | Description |
28 | | :----------: | :------------ | :------------------------------------------------ |
29 | | `-w` | `--workflow` | The workflow to start |
30 | | `-d` | `--directory` | The path to the directory with the event id files |
31 |
32 | #### Usage example
33 |
34 | `main.py -w republish-metadata -d /home/user/Desktop/events`
35 |
36 | ## Requirements
37 |
38 | This script was written for Python 3.8. You can install the necessary packages with
39 |
40 | `pip install -r requirements.txt`
41 |
42 | Additionally, this script uses modules contained in the _lib_ directory.
--------------------------------------------------------------------------------
/simulate-ingests/mediapackages/templates/manifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | {%- if has_presentation_video %}
5 |
10 | {% endif -%}
11 | {%- if has_presenter_video %}
12 |
17 | {% endif -%}
18 | {%- if has_presentation_audio %}
19 |
24 | {% endif -%}
25 | {%- if has_presenter_audio %}
26 |
31 | {% endif %}
32 |
33 |
34 |
35 | text/xml
36 | episode.xml
37 |
38 |
39 | text/xml
40 | series.xml
41 |
42 |
43 |
44 |
45 | org.opencastproject.capture.agent.properties
46 |
47 |
48 |
--------------------------------------------------------------------------------
/lib/data_handling/parse_acl.py:
--------------------------------------------------------------------------------
1 | """
2 | This module provides methods to parse ACLs in json and xml format into another format so they can be easily compared.
3 | """
4 |
5 | from xml.etree import ElementTree
6 |
7 | from data_handling.namespaces import namespaces
8 |
9 |
def parse_acl(acl):
    """
    Parses an ACL in json or xml format into a dict.

    :param acl: The ACL
    :type acl: ElementTree.Element or dict
    :return: dict with (role, action) as key and allow as value
    :rtype: dict
    """

    # Dispatch on representation: Element -> XML parser, anything else -> JSON.
    parser = parse_xml_acl if isinstance(acl, ElementTree.Element) else parse_json_acl
    return parser(acl)
24 |
25 |
def parse_xml_acl(xml_acl):
    """
    Parses an ACL in xml format into a dict.

    :param xml_acl: The ACL in xml format
    :type xml_acl: ElementTree.Element
    :return: dict with (role, action) as key and allow as value
    :rtype: dict
    """

    parsed = {}

    for rule in xml_acl.findall(".//acl:Rule", namespaces):

        # The global deny rule carries no role/action and is skipped.
        if rule.get("RuleId") == "DenyRule":
            continue

        action_value = rule.find(".//acl:ActionMatch//acl:AttributeValue", namespaces)
        role_value = rule.find(".//acl:Condition/acl:Apply/acl:AttributeValue", namespaces)

        # An ACE allows the action exactly when the rule's effect is "Permit".
        parsed[(role_value.text, action_value.text)] = rule.get("Effect") == "Permit"

    return parsed
55 |
56 |
def parse_json_acl(json_acl):
    """
    Parses an ACL in json format into a dict.

    :param json_acl: The ACL in json format
    :type json_acl: dict
    :return: dict with (role, action) as key and allow as value
    :rtype: dict
    """

    # Each access control entry maps its (role, action) pair to its allow flag.
    return {(ace["role"], ace["action"]): ace["allow"] for ace in json_acl["ace"]}
77 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/tests/test_externalapi.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | import unittest
4 |
5 | from httpx import Timeout
6 |
7 | from opencast.client import OpencastClient
8 | from opencast.externalapi import get_info_me, get_series, get_events
9 |
10 |
class ExternalapiTestCase(unittest.TestCase):
    """Smoke tests against the External API of a live Opencast instance.

    NOTE(review): these are integration tests — they require network access to
    develop.opencast.org and the default admin credentials to be valid there.
    """

    def __init__(self, *args, **kwargs):
        super(ExternalapiTestCase, self).__init__(*args, **kwargs)
        # Connection settings for the public Opencast test server.
        self.hostname = 'https://develop.opencast.org'
        self.username = 'admin'
        self.password = 'opencast'

    def test_info_me(self):
        # /api/info/me must report the authenticated user.
        with OpencastClient(self.hostname, auth=(self.username, self.password), timeout=Timeout(5.0)) as opencast_client:
            me = get_info_me(opencast_client)
            self.assertIn('username', me.keys())
            self.assertEqual(me.get('username'), 'admin')

    def test_get_series(self):
        # Every series returned must carry the core metadata fields.
        with OpencastClient(self.hostname, auth=(self.username, self.password), timeout=Timeout(5.0)) as opencast_client:
            series = get_series(opencast_client)
            for s in series:
                self.assertIn('identifier', s.keys())
                self.assertIn('title', s.keys())
                self.assertIn('creator', s.keys())
                self.assertIn('created', s.keys())
                # print('\t'.join([s['created'], s['identifier'], s['title']]))

    def test_get_events(self):
        # Every event returned must carry the core metadata fields.
        with OpencastClient(self.hostname, auth=(self.username, self.password), timeout=Timeout(5.0)) as opencast_client:
            events = get_events(opencast_client)
            for event in events:
                self.assertIn('identifier', event.keys())
                self.assertIn('title', event.keys())
                self.assertIn('creator', event.keys())
                self.assertIn('created', event.keys())
                # print('\t'.join([event['created'], event['identifier'], event['title']]))
44 |
45 |
if __name__ == '__main__':
    # Run the integration tests when executed directly.
    unittest.main()
48 |
--------------------------------------------------------------------------------
/simulate-ingests/mediapackages/tracks/README.md:
--------------------------------------------------------------------------------
1 | # Mediapackage Tracks Directory
2 | Video and audio tracks should be present here that match the tracks listed in the mediapackage
3 | profiles. The filenames must have the form:
4 |
[presentation|presenter]--.[avi|mp3]
6 |
e.g.
8 |
9 | presenter-single-1.avi
10 |
11 | ## Filtering Sample Mediapackage from Database
12 |
13 | Analysing the mediapackage xml in the archive is expensive, therefore create a temporary table (#tablename) for
14 | selecting prospective tracks:
15 |
16 | /* create temp table */
17 | with xmlnamespaces (default 'http://mediapackage.opencastproject.org')
18 | select
19 | id,
20 | mp.value('(/mediapackage/media/track/@type)[1]', 'varchar(256)') as track_1,
21 | left(mp.value('(/mediapackage/media/track/mimetype)[1]', 'varchar(256)'), 5) as mime_1,
22 | mp.value('(/mediapackage/media/track/@type)[2]', 'varchar(256)') as track_2,
23 | left(mp.value('(/mediapackage/media/track/mimetype)[2]', 'varchar(256)'), 5) as mime_2,
24 | mp.value('(/mediapackage/media/track/@type)[3]', 'varchar(256)') as track_3,
25 | left(mp.value('(/mediapackage/media/track/mimetype)[3]', 'varchar(256)'), 5) as mime_3,
26 | mp.value('(/mediapackage/@duration)[1]', 'int')/60000 as duration
27 | into #mp_data
28 | from
29 | /* Cast the xml strings to UTF-16 encoded xml fields */
30 | (select id, cast(replace(cast(mediapackage_xml as nvarchar(max)), 'UTF-8', 'UTF-16') as xml) as mp
31 | from dbo.mh_archive_episode
32 | /* Pre filter the data */
33 | where modification_date > '2017-01-10 00:00:00'
34 | and version = 0 and deleted = 0) as mpxml;
35 |
Once the temporary table #mp_data has been created, queries can be run against it to get the ids of prospective example
mediapackages:
38 |
39 | select id
40 | from #mp_data
41 | where
42 | duration=55
43 | and track_1='presenter/source' and mime_1='video'
44 | and track_2='presenter/source' and mime_2='audio'
45 | and track_3 is null and mime_3 is null;
46 |
47 |
--------------------------------------------------------------------------------
/start-workflow-from-archive-multitenancy/main.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import io
4 | import config
5 |
6 | sys.path.append(os.path.join(os.path.abspath('..'), "lib"))
7 |
8 | from args.digest_login import DigestLogin
9 | from args.args_error import args_error
10 | from args.args_parser import get_args_parser
11 | from rest_requests.request_error import RequestError
12 | from rest_requests.workflow_requests import start_task
13 |
14 |
def parse_args():
    """
    Parse the arguments and check them for correctness

    :return: workflow definition, directory path
    :rtype: str, str
    """
    parser, optional_args, required_args = get_args_parser()
    required_args.add_argument("-w", "--workflow", type=str, help="The workflow to start")
    required_args.add_argument("-d", "--dir", type=str, help="The path to the directory containing the event id files")
    parsed = parser.parse_args()

    # The directory with the per-tenant event id files has to exist.
    if not os.path.isdir(parsed.dir):
        args_error(parser, "Provided directory doesn't exist!")

    return parsed.workflow, parsed.dir
31 |
32 |
def main():
    """Start the configured workflow for all events listed in the per-tenant id files."""
    digest_login = DigestLogin(user=config.digest_user, password=config.digest_pw)
    workflow_definition, directory = parse_args()

    # Each file in the directory is named after a tenant and lists one event id per line.
    _, _, files = next(os.walk(directory))
    for file in files:
        tenant = os.path.splitext(file)[0]
        server_url = config.url_pattern.format(tenant)
        file_path = os.path.join(directory, file)
        print("Starting with tenant {}".format(tenant))

        with io.open(file_path, 'r', newline='\n', encoding='utf8') as f:
            for line in f:
                event_id = line.rstrip('\n')
                try:
                    start_task(server_url, digest_login, workflow_definition, event_id)
                    print("Workflow started for event {}.".format(event_id))
                except RequestError as e:
                    print("Workflow couldn't be started for event: {} {}".format(event_id, e.error))
52 |
53 |
if __name__ == '__main__':
    # Script entry point.
    main()
56 |
--------------------------------------------------------------------------------
/lib/data_handling/get_assets_from_oaipmh.py:
--------------------------------------------------------------------------------
1 | """
2 | This module provides methods to get assets from an oaipmh record in the matterhorn-inlined format.
3 | """
4 | from data_handling.namespaces import namespaces
5 | from data_handling.types import AssetDescription
6 |
7 |
def __get_dcs_from_oaipmh(record):
    """
    Get all Dublin Core catalogs from an oaipmh record.

    :param record: The OAIPMH record
    :type record: ElementTree.Element
    :return: episode Dublin Core catalogs, series Dublin Core catalogs
    :rtype: list, list
    """

    catalogs = record.findall(".//inlined:catalog", namespaces)

    def dcs_of_type(catalog_type):
        # Inlined catalogs carry their flavor in the 'type' attribute.
        return [catalog.find("dc:dublincore", namespaces)
                for catalog in catalogs if catalog.get('type') == catalog_type]

    return dcs_of_type('dublincore/episode'), dcs_of_type('dublincore/series')
24 |
25 |
def __get_acls_from_oaipmh(record):
    """
    Get all ACLs from an oaipmh record.

    :param record: The OAIPMH record
    :type record: ElementTree.Element
    :return: episode ACLs, series ACLs
    :rtype: list, list
    """

    attachments = record.findall(".//inlined:attachment", namespaces)

    def acls_of_type(attachment_type):
        # Inlined attachments carry their flavor in the 'type' attribute.
        return [attachment.find("acl:Policy", namespaces)
                for attachment in attachments if attachment.get('type') == attachment_type]

    return acls_of_type('security/xacml+episode'), acls_of_type('security/xacml+series')
42 |
43 |
def get_assets_from_oaipmh(record, asset_description):
    """
    Get all assets of the specified type from an oaipmh record.

    :param record: The OAIPMH record
    :type record: ElementTree.Element
    :param asset_description: ACLs or Dublin Core catalogs
    :type asset_description: AssetDescription
    :return: Two lists with episode and series assets
    :rtype: list, list
    """

    # Anything that isn't a Dublin Core request is treated as an ACL request.
    getter = __get_dcs_from_oaipmh if asset_description == AssetDescription.DC else __get_acls_from_oaipmh
    return getter(record)
60 |
--------------------------------------------------------------------------------
/ingest/ingest-addtrack.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Ingest a demo media package with a Dublin Core catalog and two video tracks
# into an Opencast server, then start the given workflow.
# Usage: ingest-addtrack.sh [host [workflow]]

set -eux

# Target host and workflow can be overridden via positional arguments.
HOST="https://develop.opencast.org"
if [ $# -ge 1 ]; then
    HOST=$1
fi
USER="opencast_system_account"
PASSWORD="CHANGE_ME"
WORKFLOW='schedule-and-upload'
if [ $# -ge 2 ]; then
    WORKFLOW=$2
fi

TMP_MP="$(mktemp)"
TMP_DC="$(mktemp)"
# Event start/end as UTC timestamps relative to now.
START="$(date -d "1 min" --utc +%Y-%m-%dT%H:%MZ)"
END="$(date -d "2 min" --utc +%Y-%m-%dT%H:%MZ)"

# Write the Dublin Core catalog for the event.
# NOTE(review): this catalog template appears truncated in this copy of the
# script — verify it against a known-good ingest script before relying on it.
echo '

demo
demo
'"${START}"'
start='"${START}"'; end='"${END}"'; scheme=W3C-DTF;
demo
demo
eng
pyca
Demo event
' > "${TMP_DC}"

# Create media package
curl -f --digest -u ${USER}:${PASSWORD} -H "X-Requested-Auth: Digest" \
    "${HOST}/ingest/createMediaPackage" -o "${TMP_MP}"

# Add DC catalog
curl -f --digest -u ${USER}:${PASSWORD} -H "X-Requested-Auth: Digest" \
    "${HOST}/ingest/addDCCatalog" -F "mediaPackage=<${TMP_MP}" \
    -F "dublinCore=<${TMP_DC}" -o "${TMP_MP}"

# Add Track
curl -f --digest -u ${USER}:${PASSWORD} -H "X-Requested-Auth: Digest" \
    "${HOST}/ingest/addTrack" -F flavor=presenter/source \
    -F "mediaPackage=<${TMP_MP}" -F Body=@video.webm -o "${TMP_MP}"

# Add Track
curl -f --digest -u ${USER}:${PASSWORD} -H "X-Requested-Auth: Digest" \
    "${HOST}/ingest/addTrack" -F flavor=presentation/source \
    -F "mediaPackage=<${TMP_MP}" -F Body=@video.webm -o "${TMP_MP}"

# Start the ingest workflow on the assembled media package.
curl -f -v -i --digest -u ${USER}:${PASSWORD} \
    -H "X-Requested-Auth: Digest" \
    "${HOST}/ingest/ingest/${WORKFLOW}" \
    -F "mediaPackage=<${TMP_MP}"

# Clean up temporary files.
rm -f "${TMP_MP}" "${TMP_DC}"
61 |
--------------------------------------------------------------------------------
/start-workflow-from-archive/README.md:
--------------------------------------------------------------------------------
This script allows you to start a workflow on an archived event.
2 |
3 | Install:
4 | ```
5 | $ pip3 install -r requirments.txt
6 | ```
7 |
8 | Usage:
9 | ```
10 | $ python3 start-workflow.py --help
11 | usage: start-workflow.py [-h] -m MEDIAPACKAGE -w WORKFLOW [-W PROPERTIES]
12 | [-o OPENCAST] [-u USER] [-p PASSWORD]
13 |
14 | optional arguments:
15 | -h, --help show this help message and exit
16 | -m MEDIAPACKAGE, --mediapackage MEDIAPACKAGE
17 | media package identifier
18 | -w WORKFLOW, --workflow WORKFLOW
19 | workflow definition identifier
20 | -W PROPERTIES, --properties PROPERTIES
21 | workflow configuration properties (key=value)
22 | -o OPENCAST, --opencast OPENCAST
23 | url of the opencast instance
24 | -u USER, --user USER digest user name
25 | -p PASSWORD, --password PASSWORD
26 | digest password
27 | ```
28 |
29 | Example:
30 | ```
31 | python3 start-workflow.py -o https://develop.opencast.org -m XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX -w republish-metadata \
32 | -W publishToEngage=false -W publishToOaiPmh=true
33 | ```
34 |
35 | An additional script is created to start workflows on a list of media packages.
36 |
37 | Usage:
38 | ```
39 | $ bash start-multiple-workflows.sh --help
40 | usage: start-multiple-workflows.sh -w WORKFLOW [-W PROPERTIES] [-o OPENCAST] [-u USER] [-p PASSWORD] [-h]
41 |
42 | required arguments:
43 | -w WORKFLOW, --workflow WORKFLOW
44 | workflow definition identifier
45 | optional arguments:
46 | -h, --help show this help message and exit
47 | -W PROPERTIES, --properties PROPERTIES
48 | workflow configuration properties (key=value)
49 | -o OPENCAST, --opencast OPENCAST
50 | url of the opencast instance
51 | -u USER, --user USER digest user name
52 | -p PASSWORD, --password PASSWORD
53 | digest password
54 | ```
55 |
56 | Example:
57 | ```
58 | cat mediapackages.txt | bash start-multiple-workflows.sh -o https://develop.opencast.org -w republish-metadata \
59 | -W publishToEngage=false -W publishToOaiPmh=true
60 | ```
61 |
--------------------------------------------------------------------------------
/lib/external_api/group_requests.py:
--------------------------------------------------------------------------------
1 | from rest_requests.get_response_content import get_json_content
2 | from rest_requests.request import post_request, get_request, delete_request, put_request
3 |
4 |
def create_group(base_url, digest_login, group):
    """
    Create group.

    :param base_url: The URL for the request
    :type base_url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :param group: The group to create
    :type group: dict
    :raise RequestError:
    """
    groups_url = '{}/api/groups'.format(base_url)
    post_request(groups_url, digest_login, element_description="/api/groups", data=group)
19 |
20 |
def update_group(base_url, digest_login, group):
    """
    Update group.

    :param base_url: The URL for the request
    :type base_url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :param group: The group to update; its "identifier" selects which group
    :type group: dict
    :raise RequestError:
    """
    url = '{}/api/groups/{}'.format(base_url, group["identifier"])
    put_request(url, digest_login, element_description="/api/groups", data=group)
35 |
36 |
def delete_group(base_url, digest_login, group_id):
    """
    Delete group.

    :param base_url: The URL for the request
    :type base_url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :param group_id: The id of the group to delete
    :type group_id: str
    :raise RequestError:
    """
    group_url = '{}/api/groups/{}'.format(base_url, group_id)
    delete_request(group_url, digest_login, "/api/groups/")
51 |
52 |
def get_all_groups(base_url, digest_login):
    """
    Get all groups.

    :param base_url: The URL for the request
    :type base_url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :return: groups
    :rtype: list
    :raise RequestError:
    """
    groups_url = '{}/api/groups'.format(base_url)
    response = get_request(groups_url, digest_login, "/api/groups")
    return get_json_content(response)
68 |
--------------------------------------------------------------------------------
/check_data_integrity/fix/io/results_parser.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from utility.enum import enum
4 |
5 |
# Errors reported by the check script that this fix script knows how to repair.
# The values match the error descriptions used as result file names
# (with underscores replaced by spaces).
FixableError = enum(

    EVENTS_MISSING_SERIES_DC="events missing the series Dublin Core catalog",
    EVENTS_NONEQUAL_SERIES_DC="events with a series Dublin Core catalog unequal with that of their series"
)
11 |
12 |
class ResultsParser:
    """
    Class for parsing the results found by the check script.
    """

    def __init__(self, results_dir):
        """
        Constructor that parses the results into a dictionary with subdictionaries for each tenant containing lists
        with event ids for the different errors. Errors that currently can't be fixed by this script are ignored.

        :param results_dir: Directory with one subdirectory per tenant, each containing one result file per error
        :type results_dir: str
        """

        self.results = {}

        _, tenant_dirs, _ = next(os.walk(results_dir))

        for tenant_dir in tenant_dirs:

            self.results[tenant_dir] = {}

            _, _, files = next(os.walk(os.path.join(results_dir, tenant_dir)))

            for filename in files:

                # File names encode the error description: strip the 4-char
                # extension, underscores become spaces (matches FixableError).
                error = filename[:-4].replace("_", " ")
                self.results[tenant_dir][error] = []

                filepath = os.path.join(results_dir, tenant_dir, filename)

                with open(filepath, 'r', newline='') as file:

                    for line in file:

                        self.results[tenant_dir][error].append(line.rstrip('\n'))

    def get_events_with_error(self, tenant, error):
        """
        Return the ids of events belonging to the given tenant that encountered the given error.

        :param tenant: Tenant the events should belong to
        :type tenant: str
        :param error: Error the events should have encountered
        :type error: FixableError

        :return: ids of events with the given error, empty list if none were recorded
        :rtype: list
        """

        # Bug fix: the original condition read `error in error in
        # self.results[tenant].keys()`, an accidental chained comparison that
        # only worked because `s in s` is always true for strings. Use a plain
        # lookup with a default instead; a copy of the list is returned so
        # callers can't mutate the parsed results.
        return list(self.results[tenant].get(error, []))

    def get_tenants(self):
        """
        Get all tenants for which results were found.

        :return: tenants contained in results
        :rtype: list
        """
        return list(self.results.keys())
70 |
--------------------------------------------------------------------------------
/multilang-subtitles/ingest-subtitles.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Ingest a test media package with multi-language WebVTT subtitles into a
# local Opencast instance and start the configured workflow.

#set -eux

# Print a random hex string (helper kept for manual testing)
randomString() {
  od -vN 16 -An -tx1 /dev/urandom | tr -d " \\n"; echo
}

HOST="http://localhost:8080"
USER="admin"
PASSWORD="opencast"
WORKFLOW="direct-publication"

# Temporary files holding the media package XML and the Dublin Core catalog
TMP_MP="$(mktemp)"
TMP_DC="$(mktemp)"
START="$(date -d "1 min" --utc +%Y-%m-%dT%H:%MZ)"
END="$(date -d "2 min" --utc +%Y-%m-%dT%H:%MZ)"

echo '

demo
demo
'"${START}"'
start='"${START}"'; end='"${END}"'; scheme=W3C-DTF;
demo
demo
demo
pyca
Multi-Language Captions
' > "${TMP_DC}"


# Create media package
curl -f -u "${USER}:${PASSWORD}" "${HOST}/ingest/createMediaPackage" -o "${TMP_MP}"


# Add DC catalog
curl -f -u "${USER}:${PASSWORD}" \
  "${HOST}/ingest/addDCCatalog" -F "mediaPackage=<${TMP_MP}" \
  -F "dublinCore=<${TMP_DC}" -o "${TMP_MP}"

# Add Track
# Fixed: quote the credentials like every other curl call so the script also
# works when USER or PASSWORD contain shell-special characters.
curl -f -u "${USER}:${PASSWORD}" \
  "${HOST}/ingest/addTrack" -F flavor=presenter/source \
  -F "mediaPackage=<${TMP_MP}" -F Body=@webvtt-example.mp4 -o "${TMP_MP}"

# Add Subtitle
curl -f -u "${USER}:${PASSWORD}" \
  "${HOST}/ingest/addAttachment" -F flavor=captions/vtt+en \
  -F "mediaPackage=<${TMP_MP}" -F "Body=@sample-en.vtt" -o "${TMP_MP}"

# Add Subtitle
curl -f -u "${USER}:${PASSWORD}" \
  "${HOST}/ingest/addAttachment" -F flavor=captions/vtt+de \
  -F "mediaPackage=<${TMP_MP}" -F "Body=@sample-de.vtt" -o "${TMP_MP}"

# Start workflow with editor step
curl -f -i -u "${USER}:${PASSWORD}" \
  "${HOST}/ingest/ingest/${WORKFLOW}" \
  -F "mediaPackage=<${TMP_MP}"

rm -f "${TMP_MP}" "${TMP_DC}"
65 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/opencast/captureadmin.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | import json
4 |
5 |
def get_agents(opencast_admin_client):
    """
    Yield every capture agent known to the capture-admin service.

    :param opencast_admin_client: HTTP client for the Opencast admin node
    :return: generator of dicts with keys 'name', 'state', 'url', 'capabilities'
    :raises TypeError: if an agent's capabilities are neither a list nor a dict
    """
    url = '/capture-admin/agents.json'
    response = opencast_admin_client.get(url)
    response.raise_for_status()
    agents = response.json()
    if not isinstance(agents, dict):
        return
    agent_obj_or_list = agents.get('agents', dict()).get('agent', None)
    if not agent_obj_or_list:
        return
    # A single agent is serialized as one object, multiple agents as a list
    if isinstance(agent_obj_or_list, dict) and 'name' in agent_obj_or_list:
        agents_list = [agent_obj_or_list]
    else:
        agents_list = agent_obj_or_list
    for agent in agents_list:
        name = agent.get('name')
        state = agent.get('state')
        url = agent.get('url')
        # Fixed: initialize capabilities per agent. Previously an agent without
        # capabilities raised a NameError (first iteration) or silently reused
        # the previous agent's capabilities (later iterations).
        capabilities = {}
        if agent.get('capabilities', None):
            agent_capabilities = agent.get('capabilities', dict()).get('item', None)
            if agent_capabilities is not None:
                # Same single-object-vs-list serialization quirk as above
                if isinstance(agent_capabilities, list):
                    capabilities = {item['key']: item['value'] for item in agent_capabilities}
                elif isinstance(agent_capabilities, dict):
                    capabilities = {agent_capabilities['key']: agent_capabilities['value']}
                else:
                    raise TypeError(f'Agent capabilities is in invalid format: {agent_capabilities}')
        yield {
            'name': name,
            'state': state,
            'url': url,
            'capabilities': capabilities,
        }
41 |
42 |
def update_agent_status(opencast_admin_client, agent_name, agent_url='http://127.0.0.1/', agent_state='offline'):
    """
    Register the given state and address for a capture agent.

    :param opencast_admin_client: HTTP client for the Opencast admin node
    :param agent_name: name of the capture agent to update
    :param agent_url: address to register for the agent
    :param agent_state: state to register for the agent
    """
    endpoint = f'/capture-admin/agents/{agent_name}'
    payload = dict(address=agent_url, state=agent_state)
    opencast_admin_client.post(endpoint, data=payload).raise_for_status()
51 |
52 |
def set_agent_capabilities(opencast_admin_client, agent_name, agent_capabilities):
    """
    Replace the stored configuration (capabilities) of a capture agent.

    :param opencast_admin_client: HTTP client for the Opencast admin node
    :param agent_name: name of the capture agent to configure
    :param agent_capabilities: capabilities to store, serialized as JSON for the endpoint
    """
    endpoint = f'/capture-admin/agents/{agent_name}/configuration'
    payload = {'configuration': json.dumps(agent_capabilities)}
    result = opencast_admin_client.post(endpoint, data=payload)
    result.raise_for_status()
60 |
--------------------------------------------------------------------------------
/visualize-workflow/plot-mulit-workflow-3D.gnuplot:
--------------------------------------------------------------------------------
# Opencast Visualize Workflow Help utilities
# This is a Demo of 3D plotting multiple instances of workflows with the same set of operations
# Requires gnuplot 5.5+ for 3D charting
# Based on 3D bar chart demo from http://gnuplot.sourceforge.net/demo_5.3/boxes3d.6.gnu
# One of many future TODOs:
#   Conditionally label large operation times (aka '' using 0:3:5 with labels offset 0,0.5 notitle)

set terminal svg size 1600,900 font "arial,10" fontscale 1.5 #size 1600, 800
set output 'workflows-3D.svg'

set boxwidth 0.5
set boxdepth 0.3
set style fill solid 1.00 border
set grid nopolar

set grid xtics nomxtics ytics nomytics ztics nomztics nortics nomrtics \
nox2tics nomx2tics noy2tics nomy2tics nocbtics nomcbtics

set style fill solid 1.0 border -1
set xlabel "Wf Ops" offset -5, -3
set ylabel "Workflows"
set zlabel "Op Time [s]" offset -6,1

# Expects workflow data files to be named "workflow.dat", incremented 1-N

WF_COUNT = 3
file(n) = sprintf("workflow%d.dat",n)

# Limit one label-key for each workflow file (not one for each wf-op block)
getTitle(col,f) = col == 1 ? file(f) : ""

set xtics offset 0, -0.5
set ytics offset 0, -0.5
set grid vertical layerdefault lt 0 linecolor 0 linewidth 1.000, lt 0 linecolor 0 linewidth 1.000
set wall z0 fc rgb "slategrey" fillstyle transparent solid 0.50 border lt -1
set view 50, 72, 1.1, 1 # rotated to prevent long wf operation names from overlapping
set style data lines
set xyplane at 0
set title "Workflow Operation Times"
set xrange [ * : * ] noreverse writeback
set x2range [ * : * ] noreverse writeback
set yrange [0: WF_COUNT + 1]
set y2range [ * : * ] noreverse writeback
set zrange [ * : * ] noreverse writeback
set cbrange [ * : * ] noreverse writeback
set rrange [ * : * ] noreverse writeback
set pm3d depthorder base
set pm3d interpolate 1,1 flush begin noftriangles border linewidth 1.000 dashtype solid corners2color mean
set pm3d lighting primary 0.5 specular 0.2 spec2 0
NO_ANIMATION = 1

# Plot expects first column of input to be wf-op names, third column to be time in seconds of the operation
# Fixed: removed the dangling trailing comma after the iteration clause, which
# gnuplot rejects as an incomplete plot element.
splot for [f=1:WF_COUNT] for [col=1:3] file(f) using 0:(f):3:xticlabels(1) with boxes lt f+1 title getTitle(col,f)
54 |
55 |
--------------------------------------------------------------------------------
/visualize-workflow/readme.md:
--------------------------------------------------------------------------------
1 | Visualize Workflows
2 | ===================
3 |
4 | This is a simple tool for visualizing workflow operation processing times. It
5 | helps you identify bottlenecks in your workflow by putting the processing time
6 | of single operations into a visual context.
7 |
8 | Usage
9 | -----
10 |
11 | First, set your system's location and digest login credentials in
12 | `./get-workflow.sh`.
13 |
14 | Then, get the identifier for the workflow you want to visualize from the event
15 | details of Opencast's admin interface.
16 |
Finally, run the tool-chain (this example uses the workflow identifier 1666):
18 |
19 | ```bash
20 | ./get-workflow.sh 1666
21 | ./prep-workflow.py
22 | gnuplot plot-workflow.gnuplot
23 | ```
24 |
25 | This will produce a `workflow.svg`.
26 |
27 | Example Visualize Workflows chart:
28 | 
29 |
30 | Compare Workflows 3D Bar Chart
31 | -----
32 | A second plotting file provides visual comparison of multiple workflow instances of the same type of workflow in a 3D bar chart. It helps identify processing differences resulting from variable input file types using the same workflow.
33 |
34 | This plot requires gnuplot v5.5+. The workflows being compared must have the **same** number and ordering of workflow operations. The 3D plotter cannot plot workflows that contain different operations or operations in a different order.
35 |
36 | Usage
37 | -----
38 | Set your system's location and digest login credentials in
39 | `./get-workflow.sh`.
40 |
41 | Then, get the identifier for the workflows you want to visualize from the event details of Opencast's admin interface.
42 |
43 | Then, modify the value of `WF_COUNT` to match the number of workflow instances to compare. The variable is located in both `./plot-mulit-workflow-3D.gnuplot` and `./plot-workflow.gnuplot`.
44 |
Finally, run the tool-chain (substitute with real workflow identifiers):
46 |
47 | ```bash
48 | ./get-workflow.sh
49 | ./get-workflow.sh
50 | ./get-workflow.sh
51 | ./get-workflow.sh
52 | ./get-workflow.sh
53 | ./prep-workflow.py
54 | gnuplot plot-workflow.gnuplot
55 | gnuplot plot-mulit-workflow-3D.gnuplot
56 | ```
57 |
58 | This will produce a `workflow.svg` and a `workflows-3D.svg`
59 |
60 | The HTML helper file can be used to display the new SVG files, `./viewSvg.html`.
61 |
62 |
63 | Example Compare Workflows chart:
64 | 
--------------------------------------------------------------------------------
/opencast-migration-scripts/README.md:
--------------------------------------------------------------------------------
1 | # Opencast Data Migration Scripts
2 |
These scripts help to deal with Opencast endpoints and data structures like mediapackages in the first place.
They were initially used by data migration scripts but also to verify the existence of publications.
5 |
6 | ## Migration scripts
7 | The idea behind the migration scripts is to download all published and archived data from source Opencast
8 | and reproduce the state on a target Opencast without transcoding media files again. The goals are
9 |
10 | - keep series and mediapackage IDs
11 | - reproduce last state/snapshot of series and mediapackage on target system
12 | - will migrate
13 | - themes
14 | - capture agents
15 | - series
16 | - mediapackages
17 | - scheduled events
18 | - last archived version/snapshot only
19 | - engage publication
20 | - external api publication may be created with data from engage publication
21 | - but will not migrate
22 | - users
23 | - streaming formats (HLS, DASH,...)
24 | - older snapshots of mediapackage
  - mediapackages with state other than finished or scheduled
26 | - keep load at minimum
27 | - do not transcode media if possible
28 | - save storage where ever possible
29 | - make use of hard linking
30 | - share media between engage and external api publication
31 | - robustness
32 | - scripts will skip series or mediapackage migration if already exists on target system
33 | - allow script restart
34 | - skip broken mediapackages and print error message
35 | - keep going
36 | - handling of dedicated distribution server
37 | - handling of signed URLs (to be improved)
38 | - filter data to migrate
39 | - by series
40 | - creation date
41 | - etc.
42 |
43 |
44 | # Disclaimer
45 | Please review the scripts before using! No warranty at all.
46 |
47 | # How to get started
48 | First review the code before using!
49 |
50 | ## Requirements
51 | - Python 3.8+
52 | - Python virtual environment (recommended)
53 |
54 | ## Installation
55 |
56 | ```shell
57 | # Create python virtual environment (need only to be done once)
58 | python3 -m venv venv
59 | # Enable python virtual environment
source venv/bin/activate
61 | # Update python package manager pip (need only to be done once)
62 | pip install --upgrade pip
63 | # Install dependencies (need only to be done once)
64 | pip install -r requirements.txt
65 | # Run script
66 | python migrate_opencast_data.py
67 | ```
68 |
69 | # Author
70 | elan e.V.
71 |
72 | # License
73 | [BSD 2-Clause](LICENSE)
74 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/check_search_episodes_in_archive.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | from httpx import BasicAuth, HTTPStatusError
4 |
5 | from opencast.archive import get_mediapackage
6 | from opencast.client import OpencastClient
7 | from opencast.info import get_me_json
8 | from opencast.search import get_mediapackages
9 |
10 |
def main():
    """
    Verify that every episode published to the search service of an Opencast
    system is also present in its archive.

    Connection settings are hard coded below; adjust before running.
    """
    source_opencast_admin_url = 'https://stable.opencast.org'
    source_opencast_presentation_url = 'https://stable.opencast.org'
    source_opencast_username = 'admin'
    source_opencast_password = 'opencast'
    source_auth = BasicAuth(source_opencast_username, source_opencast_password)
    with OpencastClient(source_opencast_admin_url, auth=source_auth,
                        follow_redirects=True) as source_opencast_admin_client:
        with OpencastClient(source_opencast_presentation_url, auth=source_auth,
                            follow_redirects=True) as source_opencast_presentation_client:
            # Sanity check: the credentials must resolve to an admin user
            me_json = get_me_json(source_opencast_presentation_client)
            assert 'roles' in me_json
            assert 'ROLE_ADMIN' in me_json.get('roles', [])
            for mp in get_mediapackages(source_opencast_presentation_client):
                # print(f'Found published episode {mp.get_title()} (ID: {mp.get_identifier()}).')
                try:
                    mp_archive = get_mediapackage(source_opencast_admin_client, mp.get_identifier())
                    assert mp.get_identifier() == mp_archive.get_identifier()
                    print(f'Published episode {mp.get_title()} (ID: {mp.get_identifier()}) is in the archive.')
                except HTTPStatusError as e:
                    if e.response.status_code == 404:
                        print(f'ERROR: Published episode {mp.get_title()} (ID: {mp.get_identifier()}) is not archived.')
                    elif e.response.status_code == 403:
                        print(f'ERROR: Access denied for accessing episode {mp.get_title()} '
                              f'(ID: {mp.get_identifier()}).')
                    else:
                        print(f'ERROR: Unable to read episode {mp.get_title()} (ID: {mp.get_identifier()}) '
                              f'from archive. Http statuscode was {e.response.status_code}')
                # Fixed: a bare `except:` also swallowed SystemExit and
                # KeyboardInterrupt; only catch real errors.
                except Exception:
                    print(f'ERROR: Unable to read episode {mp.get_title()} (ID: {mp.get_identifier()}) '
                          f'from archive.')
42 |
43 |
# Entry point when executed as a standalone script.
if __name__ == '__main__':
    main()
46 |
--------------------------------------------------------------------------------
/delete_dead_distribution_artefacts/delete_artefacts/check_distribution_artefacts.py:
--------------------------------------------------------------------------------
1 | """
2 | This module checks which distribution artefacts belong to media packages that no longer exist.
3 | """
4 | from collections import defaultdict
5 |
6 | from util.count import count_media_packages, count_distribution_artefacts
7 | from rest_requests.assetmanager_requests import media_package_exists
8 | from rest_requests.request_error import RequestError
9 |
10 |
def check_distribution_artefacts(distribution_artefacts, url_builder, digest_login, progress_printer):
    """
    Check distribution artefacts for whether their media packages still exist.

    :param distribution_artefacts: The distribution artefacts to be checked.
    :type distribution_artefacts: dict
    :param url_builder: Object to build the URL for requests
    :type url_builder: URLBuilder
    :param digest_login: The login data_handling for digest authentication
    :type digest_login: DigestLogin
    :param progress_printer: Object to print progress bars.
    :type progress_printer: ProgressPrinter
    :return: The distribution artefacts for which the media package no longer exists.
    :rtype: dict
    """

    mp_count = count_media_packages(distribution_artefacts)
    count = 0

    progress_printer.print_message("Checking {} media package(s)...".format(mp_count))

    # Fixed: the inner default factory was `lambda: list`, which produced the
    # list *class* instead of an empty list on missing keys.
    dead_distribution_artefacts = defaultdict(lambda: defaultdict(list))

    progress_printer.print_progress(count, mp_count)

    for tenant in distribution_artefacts.keys():

        base_url = url_builder.get_base_url(tenant)

        for media_package in distribution_artefacts[tenant].keys():

            try:
                # Everything belonging to a media package missing from the
                # asset manager is considered dead and collected for deletion.
                if not media_package_exists(base_url, digest_login, media_package):
                    dead_distribution_artefacts[tenant][media_package] = distribution_artefacts[tenant][media_package]
            except RequestError as e:
                print("Media package {} could not be checked: {}".format(media_package, e.error))

            # Fixed: advance the progress bar once per checked media package;
            # previously the counter was incremented only once, after all loops.
            count += 1
            progress_printer.print_progress(count, mp_count)

    dead_mp_count = count_media_packages(dead_distribution_artefacts)
    dead_dist_count = count_distribution_artefacts(dead_distribution_artefacts)
    finished_message = "{} distribution artefact(s) of {} media package(s) can be deleted."\
        .format(dead_dist_count, dead_mp_count)

    progress_printer.print_progress(count, mp_count, finished_message)

    return dead_distribution_artefacts
57 |
--------------------------------------------------------------------------------
/multi-node-test-setup/multi-node-test-setup.sh:
--------------------------------------------------------------------------------
#!/bin/sh

# Set up a local three-node Opencast test cluster (admin, presentation,
# worker) from source: build with Maven, unpack the three distributions,
# point them at one shared storage directory and one PostgreSQL database,
# and move presentation/worker to ports 8081/8082.
# NOTE(review): assumes a PostgreSQL database `opencast` reachable at
# 127.0.0.1 with user `opencast` / password `dbpassword` — confirm before use.

set -eux

mvn clean install -DtrimStackTrace=false
cd build
# Interactively remove directories left over from a previous run
find * -maxdepth 0 -type d -exec echo '{}' \; -exec rm -rI '{}' \;

tar xf opencast-dist-admin-*.tar.gz
tar xf opencast-dist-presentation-*.tar.gz
tar xf opencast-dist-worker-*.tar.gz

# All three nodes share one storage directory via symlinks
mkdir -p shared-storage
ln -s ../../shared-storage opencast-dist-admin/data/opencast
ln -s ../../shared-storage opencast-dist-presentation/data/opencast
ln -s ../../shared-storage opencast-dist-worker/data/opencast

# Point each node's own server URL at its port (admin keeps default 8080)
sed -i 's_localhost:8080_localhost:8081_' opencast-dist-presentation/etc/custom.properties
sed -i 's_localhost:8080_localhost:8082_' opencast-dist-worker/etc/custom.properties

# Make each node's HTTP server listen on its own port
sed -i 's_port=8080_port=8081_' opencast-dist-presentation/etc/org.ops4j.pax.web.cfg
sed -i 's_port=8080_port=8082_' opencast-dist-worker/etc/org.ops4j.pax.web.cfg

for node in admin presentation worker; do
  # configure database
  sed -i 's_^.*db.jdbc.driver=.*$_org.opencastproject.db.jdbc.driver=org.postgresql.Driver_' \
    "opencast-dist-${node}/etc/custom.properties"
  sed -i 's_^.*db.jdbc.url=.*$_org.opencastproject.db.jdbc.url=jdbc:postgresql://127.0.0.1/opencast_' \
    "opencast-dist-${node}/etc/custom.properties"
  sed -i 's_^.*db.jdbc.user=.*$_org.opencastproject.db.jdbc.user=opencast_' \
    "opencast-dist-${node}/etc/custom.properties"
  sed -i 's_^.*db.jdbc.pass=.*$_org.opencastproject.db.jdbc.pass=dbpassword_' \
    "opencast-dist-${node}/etc/custom.properties"

  # configure organization
  sed -i 's_^.*admin.ui.url=.*$_prop.org.opencastproject.admin.ui.url=http://localhost:8080_' \
    "opencast-dist-${node}/etc/org.opencastproject.organization-mh_default_org.cfg"
  sed -i 's_^.*engage.ui.url=.*$_prop.org.opencastproject.engage.ui.url=http://localhost:8081_' \
    "opencast-dist-${node}/etc/org.opencastproject.organization-mh_default_org.cfg"
  sed -i 's_^.*file.repo.url=.*$_prop.org.opencastproject.file.repo.url=${prop.org.opencastproject.admin.ui.url}_' \
    "opencast-dist-${node}/etc/org.opencastproject.organization-mh_default_org.cfg"
done

# Only the admin node dispatches jobs; disable dispatching on the others
sed -i 's_^.*dispatch.interval=.*$_dispatch.interval=2_' opencast-dist-admin/etc/org.opencastproject.serviceregistry.impl.ServiceRegistryJpaImpl.cfg
sed -i 's_^.*dispatch.interval=.*$_dispatch.interval=0_' opencast-dist-presentation/etc/org.opencastproject.serviceregistry.impl.ServiceRegistryJpaImpl.cfg
sed -i 's_^.*dispatch.interval=.*$_dispatch.interval=0_' opencast-dist-worker/etc/org.opencastproject.serviceregistry.impl.ServiceRegistryJpaImpl.cfg
47 |
--------------------------------------------------------------------------------
/delete_dead_distribution_artefacts/delete_artefacts/delete_dead_distribution_artefacts.py:
--------------------------------------------------------------------------------
1 | import shutil
2 |
3 | from input.delete_question import DeleteAnswer, delete_question
4 | from input.present_dead_distribution_artefacts import PresentAnswer
5 | from util.count import count_distribution_artefacts
6 |
7 |
def delete_dead_distribution_artefacts(dead_distribution_artefacts, log_writer, progress_printer, present_answer):
    """
    Delete the dead distribution artefacts.

    :param dead_distribution_artefacts: The distribution artefacts to be deleted because their media packages no longer
    exist.
    :type dead_distribution_artefacts: dict
    :param log_writer: Object to write into the log
    :type log_writer: LogWriter
    :param progress_printer: Object to print progress bars.
    :type progress_printer: ProgressPrinter
    :param present_answer: The answer the user gave when all dead distribution artefacts were presented
    :type present_answer: str
    """
    count = 0

    dist_count = count_distribution_artefacts(dead_distribution_artefacts)

    # ASK puts the loop into interactive mode (confirm each media package,
    # state NEXT); any other answer deletes everything without further
    # questions (state ALL).
    state = DeleteAnswer.NEXT if present_answer == PresentAnswer.ASK else DeleteAnswer.ALL

    if present_answer == PresentAnswer.DELETE:
        progress_printer.print_message("Deleting all distribution artefacts...")

    for tenant in dead_distribution_artefacts.keys():

        for media_package in dead_distribution_artefacts[tenant].keys():

            # In interactive mode, ask again for each media package until the
            # user answers ALL or QUIT
            if state == DeleteAnswer.NEXT:
                state = delete_question(media_package)

            if state == DeleteAnswer.QUIT:
                return
            elif state == DeleteAnswer.ALL:
                progress_printer.print_message("Deleting remaining distribution artefacts...")
                # Restart the progress counting for the remaining artefacts
                dist_count -= count
                count = 0

            # Print the progress bar once before the first bulk deletion
            if (present_answer == PresentAnswer.DELETE or state == DeleteAnswer.ALL) and count == 0:
                progress_printer.print_progress(count, dist_count)

            for distribution_artefact in dead_distribution_artefacts[tenant][media_package]:

                # Best effort: missing directories are ignored, successful
                # deletions are recorded in the log
                shutil.rmtree(distribution_artefact, ignore_errors=True)
                log_writer.write_to_log(tenant, media_package, distribution_artefact)
                count += 1

                # Progress bars are only shown during bulk deletion
                if present_answer == PresentAnswer.DELETE or state == DeleteAnswer.ALL:
                    progress_printer.print_progress(count, dist_count)

            # In interactive mode, report completion per media package instead
            if present_answer == PresentAnswer.ASK and not state == DeleteAnswer.ALL:
                progress_printer.print_message("Distribution artefacts of media package {} deleted.\n"
                                               .format(media_package))
60 |
--------------------------------------------------------------------------------
/fake-hls/nginx.conf:
--------------------------------------------------------------------------------
1 | # Defines the number of worker processes. Setting it to the number of
2 | # available CPU cores should be a good start. The value `auto` will try to
3 | # autodetect that.
4 | worker_processes auto;
5 |
6 | # Configures logging to `/var/log/...`. Log level `error` is used by default.
7 | error_log /var/log/nginx/error.log;
8 |
9 | # Defines a file that will store the process ID of the main process. This needs
10 | # to match the Systemd unit file.
11 | pid /run/nginx.pid;
12 |
13 | # Load dynamic modules. See /usr/share/nginx/README.dynamic.
14 | include /usr/share/nginx/modules/*.conf;
15 |
16 | events {
17 | # Sets the maximum number of simultaneous connections that can be opened by
18 | # a worker process.
19 | worker_connections 1024;
20 | }
21 |
22 |
23 | http {
24 | # Include mime types for different file extensions.
25 | include /etc/nginx/mime.types;
26 |
27 | # Defines the default MIME type of a response.
28 | default_type application/octet-stream;
29 |
  # Sendfile copies data between one file descriptor and another from within
  # the kernel. This is more efficient than read() and write() since they
  # require transferring data to and from the user space.
33 | sendfile on;
34 |
35 | # Todo: Write explanation
36 | # https://t37.net/nginx-optimization-understanding-sendfile-tcp_nodelay-and-tcp_nopush.html
37 | tcp_nopush on;
38 | tcp_nodelay on;
39 |
40 | # Disable access log.
41 | #access_log /var/log/nginx/access.log;
42 | access_log off;
43 |
44 | # Enable on-the-fly gzip compression for larger plain text files and for
45 | # proxies applications.
46 | gzip on;
47 | gzip_comp_level 2;
48 | gzip_min_length 1000;
49 | gzip_proxied expired no-cache no-store private auth;
50 | gzip_types
51 | application/javascript
52 | application/json
53 | application/x-javascript
54 | application/xml
55 | image/svg+xml
56 | text/css
57 | text/javascript
58 | text/js
59 | text/plain
60 | text/xml;
61 |
62 | # Do not send the nginx version number in error pages and Server header
63 | server_tokens off;
64 |
65 | server {
66 | listen 80;
67 | server_name _;
68 |
69 | # Basic open CORS for everyone
70 | add_header Access-Control-Allow-Origin $http_origin always;
71 | add_header Access-Control-Allow-Methods 'GET, POST, OPTIONS' always;
72 | add_header Access-Control-Allow-Credentials true always;
73 | add_header Access-Control-Allow-Headers 'Origin,Content-Type,Accept,Authorization' always;
74 |
75 | # Accept large ingests
76 | client_max_body_size 0;
77 |
78 | # Proxy configuration for Opencast
79 | location / {
80 | root /tmp;
81 | }
82 | }
83 |
84 | }
85 |
--------------------------------------------------------------------------------
/fake-hls/fake-hls.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | #Note: this temp dir grows without bounds. Make sure it lives somewhere that can handle that...
4 | temp=`mktemp -d`
5 | #Create the docker container
6 | nginx=`docker run --rm -d -v $temp:/tmp -v \`pwd\`/nginx.conf:/etc/nginx/nginx.conf:ro -p 8888:80 nginx:stable`
7 | #Then start the stream, after making sure everyone can read and execute the temp directory
8 | cd $temp
9 | chmod -R a+rx $temp
10 | ffmpeg -hide_banner \
11 | -re -f lavfi -i "
12 | testsrc2=size=1920x540:rate=25,
13 | drawbox=x=0:y=0:w=700:h=50:c=black@.6:t=fill,
14 | drawtext=x= 5:y=5:fontsize=54:fontcolor=white:text='%{pts\:gmtime\:$(date +%s)\:%Y-%m-%d}',
15 | drawtext=x=345:y=5:fontsize=54:fontcolor=white:timecode='$(date -u '+%H\:%M\:%S')\:00':rate=25:tc24hmax=1,
16 | setparams=field_mode=prog:range=tv:color_primaries=bt709:color_trc=bt709:colorspace=bt709,
17 | format=yuv420p" \
18 | -re -f lavfi -i "
19 | sine=f=1000:r=48000:samples_per_frame='st(0,mod(n,5)); 1602-not(not(eq(ld(0),1)+eq(ld(0),3)))'" \
20 | -shortest \
21 | -fflags genpts \
22 | \
23 | -filter_complex "
24 | [0:v]drawtext=x=(w-text_w)-5:y=5:fontsize=54:fontcolor=white:text='1920x540':box=1:boxcolor=black@.6:boxborderw=5[v540p];
25 | [0:v]drawtext=x=(w-text_w)-5:y=5:fontsize=54:fontcolor=white:text='960x270':box=1:boxcolor=black@.6:boxborderw=5,scale=960x270[v270p]
26 | " \
27 | -map [v540p] \
28 | -map [v270p] \
29 | -map 1:a \
30 | \
31 | -c:v libx264 \
32 | -preset:v veryfast \
33 | -tune zerolatency \
34 | -profile:v main \
35 | -crf:v:0 23 -bufsize:v:0 2250k -maxrate:v:0 2500k \
36 | -crf:v:1 23 -bufsize:v:1 540k -maxrate:v:1 600k \
37 | -g:v 100000 -keyint_min:v 50000 -force_key_frames:v "expr:gte(t,n_forced*2)" \
38 | -x264opts no-open-gop=1 \
39 | -bf 2 -b_strategy 2 -refs 1 \
40 | -rc-lookahead 24 \
41 | -export_side_data prft \
42 | -field_order progressive -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv \
43 | -pix_fmt yuv420p \
44 | -c:a aac \
45 | -b:a 64k \
46 | \
47 | -f hls \
48 | -master_pl_name "master.m3u8" \
49 | -hls_list_size 5 \
50 | -hls_delete_threshold 1 \
51 | -hls_start_number_source epoch \
52 | -hls_fmp4_init_filename "init-%v.mp4" \
53 | -hls_segment_filename "chunk-stream-%v-%010d.mp4" \
54 | -hls_flags "+append_list+delete_segments+discont_start+program_date_time+independent_segments-temp_file" \
55 | -var_stream_map "a:0,name:audio-64k,agroup:audio,default:yes v:0,name:video-720p,agroup:audio v:1,name:video-360p,agroup:audio" \
56 | \
57 | -hls_time 6 \
58 | -hls_segment_type fmp4 \
59 | -hls_segment_options "movflags=+cmaf+dash+delay_moov+skip_sidx+skip_trailer" \
60 | "%v.m3u8"
61 | #Stop the docker container
62 | docker stop $nginx
63 | #And remove the test files we've generated
64 | rm -rf $temp /tmp/hls
65 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/check_search_episodes_in_assetmanager.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | from httpx import BasicAuth, HTTPStatusError
4 |
5 | from opencast import search, assetmanager
6 | from opencast.client import OpencastClient
7 | from opencast.info import get_me_json
8 |
9 |
def main():
    """
    Verify that every episode published to the search service of an Opencast
    system is also present in its asset manager.

    Connection settings are hard coded below; adjust before running.
    """
    source_opencast_admin_url = 'https://stable.opencast.org'
    source_opencast_admin_username = 'admin'
    source_opencast_admin_password = 'opencast'
    source_admin_auth = BasicAuth(source_opencast_admin_username, source_opencast_admin_password)
    source_opencast_presentation_url = 'http://localhost:8080'
    source_opencast_presentation_username = 'admin'
    source_opencast_presentation_password = 'opencast'
    source_presentation_auth = BasicAuth(source_opencast_presentation_username, source_opencast_presentation_password)
    with OpencastClient(source_opencast_admin_url, auth=source_admin_auth,
                        follow_redirects=True) as source_opencast_admin_client:
        with OpencastClient(source_opencast_presentation_url, auth=source_presentation_auth,
                            follow_redirects=True) as source_opencast_presentation_client:
            # Sanity check: the credentials must resolve to an admin user
            me_json = get_me_json(source_opencast_presentation_client)
            assert 'roles' in me_json
            assert 'ROLE_ADMIN' in me_json.get('roles', [])
            for mp in search.get_mediapackages(source_opencast_presentation_client):
                # print(f'Found published episode {mp.get_title()} (ID: {mp.get_identifier()}).')
                try:
                    mp_archive = assetmanager.get_mediapackage(source_opencast_admin_client, mp.get_identifier())
                    assert mp.get_identifier() == mp_archive.get_identifier()
                    print(f'Published episode {mp.get_title()} (ID: {mp.get_identifier()}) is in the assetmanager.')
                except HTTPStatusError as e:
                    if e.response.status_code == 404:
                        print(f'ERROR: Published episode {mp.get_title()} (ID: {mp.get_identifier()}) is not in '
                              f'the assetmanager.')
                    elif e.response.status_code == 403:
                        print(f'ERROR: Access denied for accessing episode {mp.get_title()} '
                              f'(ID: {mp.get_identifier()}).')
                    else:
                        print(f'ERROR: Unable to read episode {mp.get_title()} (ID: {mp.get_identifier()}) '
                              f'from assetmanager. Http statuscode was {e.response.status_code}')
                # Fixed: a bare `except:` also swallowed SystemExit and
                # KeyboardInterrupt; only catch real errors.
                except Exception:
                    print(f'ERROR: Unable to read episode {mp.get_title()} (ID: {mp.get_identifier()}) '
                          f'from assetmanager.')
45 |
46 |
# Entry point when executed as a standalone script.
if __name__ == '__main__':
    main()
49 |
--------------------------------------------------------------------------------
/create-series/create-series-ocomplex.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Bulk-create test series on an Opencast instance, running up to
# PARALLEL_PS curl requests in parallel.

# set the range of series to be created
# note:
#   start value must be less than or equal to end value
SERIES_SEQ_BEGIN="1"
SERIES_SEQ_END="1024"

# set #processes that can run asynchronously
# ideal value is a multiple of CPU cores on your system
# Fixed: variable renamed from the typo'd PARALlEL_PS (lowercase l)
PARALLEL_PS=64

# opencast URL
OC_URL="http://localhost:8080"
#OC_URL="https://octestallinone.virtuos.uos.de"

# opencast digest user name
OC_DIGEST_USER="opencast_system_account"

# opencast digest user password
OC_DIGEST_PASSWORD="CHANGE_ME"

# opencast series ACL to be used for new series
SERIES_ACL='


read
true
ROLE_USER_ADMIN


write
true
ROLE_USER_ADMIN


read
true
ROLE_ANONYMOUS

'

###################################

for i in $(seq "$SERIES_SEQ_BEGIN" "$SERIES_SEQ_END"); do
  SERIES_DATE="$(date -Iseconds)"

  # opencast series dublincore catalog template
  read -r -d '' SERIES_XML << EOM




Test Series #$i


Some Test Series


Test Series #$i Description


Publisher #$((i%10))


$SERIES_DATE




EOM

  curl -f -w "\\n" --digest -u $OC_DIGEST_USER:$OC_DIGEST_PASSWORD -H "X-Requested-Auth: Digest" \
    -X POST "$OC_URL/series/" \
    --data-urlencode "series=$SERIES_XML" \
    --data-urlencode "acl=$SERIES_ACL" &

  # Remember the PID per slot; after each batch of PARALLEL_PS requests,
  # wait for the whole batch before starting the next one
  PIDS[$((i%PARALLEL_PS))]="$!"
  if [ "$((i%PARALLEL_PS))" -eq "0" ]; then
    for waitidx in $(seq 0 $((PARALLEL_PS-1))); do
      wait "${PIDS[$waitidx]}" >/dev/null 2>&1
    done
  fi
done

# wait for running processes
for waitidx in $(seq 0 $((PARALLEL_PS-1))); do
  wait "${PIDS[$waitidx]}" >/dev/null 2>&1
done

# Fixed: the original echoed the undefined variable $SERIES_SEQ
echo "series range $SERIES_SEQ_BEGIN-$SERIES_SEQ_END created"
95 |
--------------------------------------------------------------------------------
/export-workflow-statistics/README.md:
--------------------------------------------------------------------------------
1 | # README
2 |
3 | With this script you can export and plot data on how many workflows ran in a week in a specified time frame.
4 | This script works with multi-tenant systems.
5 |
6 | ### 1. Configure script in config.py
7 |
8 | | Configuration Key | Description | Example |
9 | | :--------------------- | :-------------------------------------------------------------------------------- | :------------------------- |
10 | | `url` | The non-tenant-specific server URL | https://opencast.com |
11 | | `url_pattern` | Pattern for the tenant-specific server URL, leave empty for single-tenant systems | https://{}.opencast.com |
12 | | `digest_user` | The user name of the digest user | opencast_system_account |
13 | | `digest_pw` | The password of the digest user | CHANGE_ME |
| `workflow_definitions` | The ids of the workflow definitions to count                                      | \["ui-import", "api-import"\] |
15 | | `exclude_tenants` | The tenants to skip, if any | \["mh_default_org"\] |
16 | | `start_date` | The date to start with\* | "2020-01-06" |
| `end_date`             | The date to end with\*                                                            | "2020-11-29"               |
18 | | `week_offset` | The offset if the start date is not in the first calendar week | 1 |
19 | | `export_dir` | The path to the directory for the exported data | "data" |
20 |
21 | * Both of these dates should be the first day of the week for the statistics to be accurate.
22 |
23 | ### 2. Execute script
24 |
25 | `python main.py`
26 |
27 | ### 3. Adjust gnuplot script in plot.gp
28 |
29 | ### 4. Plot
30 |
31 | `gnuplot plot.gp`
32 |
33 | 
34 |
35 | ### Optional: Plot tenant statistics
36 |
37 | If you have a multi-tenant system, you can also plot their statistics into a single graph. For this, adjust
38 | `tenant_plot.gp` and then plot with `gnuplot tenant.gp`. Only tenants with at least one processed
39 | recording in the specified time frame will be plotted. The order of tenants is determined by `filenames.txt`
40 | (default: tenants with most processed recordings first).
41 |
42 | For gradual colors, uncomment the last line of `tenant_plot.gp`.
43 |
44 | ## Requirements
45 |
46 | This script was written for Python 3.8. You can install the necessary packages with
47 |
48 | `pip install -r requirements.txt`
49 |
50 | Additionally, this script uses modules contained in the _lib_ directory.
--------------------------------------------------------------------------------
/opencast-migration-scripts/opencast/scheduler.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | from datetime import datetime
4 |
5 | from opencast.mediapackage import Mediapackage
6 |
7 |
def get_mediapackage(opencast_admin_client, mediapackage_id):
    """Fetch a scheduled event's media package XML and wrap it in a Mediapackage.

    Raises an HTTP error via raise_for_status() if the request fails.
    """
    response = opencast_admin_client.get(f'/recordings/{mediapackage_id}/mediapackage.xml')
    response.raise_for_status()
    return Mediapackage(response.text.encode('utf-8'))
13 |
14 |
def get_schedule_technical(opencast_admin_client, mediapackage_id):
    """Return the technical scheduling metadata of an event as parsed JSON."""
    response = opencast_admin_client.get(f'/recordings/{mediapackage_id}/technical.json')
    response.raise_for_status()
    return response.json()
21 |
22 |
def update_schedule_with_technical_data(opencast_admin_client, mediapackage_id, mediapackage_xml=None,
                                        start=None, end=None, agent=None, users=None,
                                        workflow_properties=None, agent_properties=None):
    """
    Update a schedule in Opencast with the given data.

    Parameters
    ----------
    opencast_admin_client: OpencastClient
        Opencast client
    mediapackage_id: str
        Episode identifier
    mediapackage_xml: str, optional
        The mediapackage to update
    start: str, optional
        The schedule start date time in UTC as RFC string.
    end: str, optional
        The schedule end date time in UTC as RFC string.
    agent: str, optional
        The capture agent identifier.
    users: list, optional
        List of user identifiers (speakers/lecturers) for the episode to update.
    workflow_properties: dict, optional
        Workflow properties.
    agent_properties: dict, optional
        Capture agent properties.

    Raises
    ------
    ValueError
        If none of the optional parameters is set.
    """
    def _to_millis(rfc_datetime):
        # The scheduler endpoint expects epoch milliseconds.
        return int(datetime.strptime(rfc_datetime, '%Y-%m-%dT%H:%M:%S%z').timestamp() * 1000)

    def _to_properties(mapping):
        # Java-properties style "key=value" lines.
        return '\n'.join(f'{key}={value}' for key, value in mapping.items())

    params = {}
    if start:
        params['start'] = _to_millis(start)
    if end:
        params['end'] = _to_millis(end)
    if agent:
        params['agent'] = agent
    if users:
        params['users'] = ','.join(users)
    if mediapackage_xml:
        params['mediaPackage'] = mediapackage_xml
    if workflow_properties:
        params['wfproperties'] = _to_properties(workflow_properties)
    if agent_properties:
        params['agentparameters'] = _to_properties(agent_properties)
    if not params:
        raise ValueError('At least one of the optional parameters to update schedule must be set.')
    print(f'Update schedule for media package {mediapackage_id}.')
    response = opencast_admin_client.put(f'/recordings/{mediapackage_id}/', params=params)
    response.raise_for_status()
71 |
--------------------------------------------------------------------------------
/delete_dead_distribution_artefacts/input/parse_args.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from args.args_error import args_error
4 | from args.args_parser import get_args_parser
5 | from args.digest_login import read_digest_password, DigestLogin
6 |
7 |
def parse_args():
    """
    Parse the arguments, check them for correctness, read in the digest password if necessary, and return everything.

    :return: Opencast URL, list of paths to directories with distribution artefacts, whether to use https,
    chosen tenants, excluded tenants, channels, digest user and password, whether to print progress statements,
    whether to print progress bars
    :rtype: str, list, bool, list, list, list, DigestLogin, bool, bool
    """

    parser, optional_args, required_args = get_args_parser()

    required_args.add_argument("-o", "--opencast", type=str, help="url of the opencast instance", required=True)
    required_args.add_argument("-d", "--distribution-dirs", type=str, nargs='+',
                               help="list of distribution directories to check", required=True)
    optional_args.add_argument("-t", "--chosen-tenants", type=str, nargs='+', help="list of tenants to check")
    optional_args.add_argument("-e", "--excluded-tenants", type=str, nargs='+', help="list of tenants to be excluded")
    optional_args.add_argument("-c", "--channels", type=str, nargs='+', help="list of channels to be considered")
    required_args.add_argument("-u", "--user", type=str, help="digest user", required=True)
    optional_args.add_argument("-p", "--password", type=str, help="digest password")
    optional_args.add_argument('-s', "--silent", action='store_true', help="disables progress output")
    optional_args.add_argument('-l', "--https", action='store_true', help="enables https")
    optional_args.add_argument('-n', "--no-fancy-output", action='store_true',
                               help="disables fancy output including the progress bars")

    args = parser.parse_args()

    # BUG FIX: the original check was `not args.opencast and args.user and args.distribution_dirs`,
    # which (due to operator precedence) only triggered when --opencast was missing AND the other
    # two were present. All three required arguments must be present.
    if not (args.opencast and args.user and args.distribution_dirs):
        args_error(parser)

    if args.excluded_tenants and args.chosen_tenants:
        args_error(parser, "The options --chosen-tenants and --excluded-tenants can't both be defined.")

    if args.silent and args.no_fancy_output:
        args_error(parser, "The options --silent and --no-fancy-output can't both be defined.")

    # Every given distribution directory must exist on disk.
    for dir_path in args.distribution_dirs:
        distribution_dir = os.path.abspath(dir_path)
        if not os.path.isdir(distribution_dir):
            args_error(parser, "One directory for distribution artefacts does not exist.")

    # Prompt interactively for the digest password if it was not given on the command line.
    if not args.password:
        digest_pw = read_digest_password()
    else:
        digest_pw = args.password

    return args.opencast, args.distribution_dirs, args.https, args.chosen_tenants, args.excluded_tenants, \
        args.channels, DigestLogin(user=args.user, password=digest_pw), args.silent, args.no_fancy_output
56 |
--------------------------------------------------------------------------------
/opencast-migration-scripts/opencast/series.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 |
def get_all_series(opencast_admin_client, sort='CREATED_DESC', **kwargs):
    """
    Yield every series visible on /series/series.json as a flat dict of
    Dublin Core terms (first value per term), paging through the endpoint.

    Extra keyword arguments are passed through as request parameters and may
    override 'count' and 'sort'.
    """
    dublincore_ns = 'http://purl.org/dc/terms/'
    query = {'count': 10, 'sort': sort, **kwargs}
    page = 0
    while True:
        query['startPage'] = page
        response = opencast_admin_client.get('/series/series.json', params=query)
        response.raise_for_status()
        payload = response.json()
        # A non-positive totalCount means there is nothing to list.
        if int(payload.get('totalCount', -1)) <= 0:
            return
        catalogs = payload.get('catalogs', [])
        for catalog in catalogs:
            for namespace, terms in catalog.items():
                # Only the Dublin Core namespace is of interest.
                if namespace == dublincore_ns:
                    yield {term: values[0]['value'] for term, values in terms.items()}
        if not catalogs:
            # An empty page signals the end of the listing.
            return
        page += 1
34 |
35 |
def get_series(opencast_admin_client, series_id):
    """Return the Dublin Core catalog of a single series as raw XML text."""
    response = opencast_admin_client.get(f'/series/{series_id}.xml')
    response.raise_for_status()
    return response.text
41 |
42 |
def get_series_acl(opencast_admin_client, series_id):
    """Return the access control list of a series as raw XML text."""
    response = opencast_admin_client.get(f'/series/{series_id}/acl.xml')
    response.raise_for_status()
    return response.text
48 |
49 |
def get_series_elements(opencast_admin_client, series_id):
    """Return the elements attached to a series as parsed JSON."""
    response = opencast_admin_client.get(f'/series/{series_id}/elements.json')
    response.raise_for_status()
    return response.json()
55 |
56 |
def get_series_element(opencast_admin_client, series_id, element_type):
    """Return one series element of the given type as raw text."""
    response = opencast_admin_client.get(f'/series/{series_id}/elements/{element_type}')
    response.raise_for_status()
    return response.text
62 |
63 |
def get_series_properties(opencast_admin_client, series_id):
    """Return the properties of a series as parsed JSON."""
    response = opencast_admin_client.get(f'/series/{series_id}/properties.json')
    response.raise_for_status()
    return response.json()
69 |
70 |
def update_series(opencast_admin_client, series_xml, acl_xml):
    """Create or update a series from its Dublin Core XML and ACL XML.

    Returns the response body as returned by the /series endpoint.
    """
    payload = {'series': series_xml, 'acl': acl_xml}
    response = opencast_admin_client.post('/series/', data=payload)
    response.raise_for_status()
    return response.text
80 |
81 |
def update_series_property(opencast_admin_client, series_id, property_name, property_value):
    """Set a single property (name/value pair) on a series."""
    payload = {'name': property_name, 'value': property_value}
    response = opencast_admin_client.post(f'/series/{series_id}/property', data=payload)
    response.raise_for_status()
90 |
--------------------------------------------------------------------------------
/recover_backup/input/parse_args.py:
--------------------------------------------------------------------------------
1 | """ This module parses the arguments for the recover and checks them for errors."""
2 |
3 | import os
4 |
5 | from args.args_error import args_error
6 | from args.args_parser import get_args_parser
7 | from args.digest_login import read_digest_password, DigestLogin
8 |
9 |
def parse_args():
    """
    Read and validate the command line arguments and assemble the digest login.

    Prompts for the digest password when it was not passed via --password.

    :return: Opencast URL, whether to use https, digest login, path to backup of archive, list of
        media packages to be recovered, tenant id, workflow id, whether to use the last version of a
        media package, path to the rsync history, whether to ignore errors
    :rtype: str, bool, DigestLogin, str, list, str, str, bool, str, bool
    """
    parser, optional_args, required_args = get_args_parser()

    required_args.add_argument("-o", "--opencast", type=str, help="url of running opencast instance without protocol",
                               required=True)
    optional_args.add_argument('-s', "--https", action='store_true', help="enables https")

    required_args.add_argument("-u", "--user", type=str, help="digest user", required=True)
    optional_args.add_argument("-p", "--password", type=str, help="digest password")

    optional_args.add_argument("-b", "--backup", type=str, help="path to backup")
    optional_args.add_argument("-m", "--media-packages", type=str, nargs='+',
                               help="list of media package ids to be restored")
    optional_args.add_argument("-t", "--tenant", type=str, help="tenant id")
    optional_args.add_argument('-w', "--workflow-id", type=str, help="id for workflow on ingest")
    optional_args.add_argument('-l', "--last-version", action='store_true',
                               help="always recover last version of media package")
    optional_args.add_argument('-r', "--rsync-history", type=str, help="path to rsync history to be checked as well")
    optional_args.add_argument('-i', "--ignore-errors", action='store_true',
                               help="whether to recover a media package despite errors")

    parsed = parser.parse_args()

    # Both required arguments must be non-empty.
    if not (parsed.opencast and parsed.user):
        args_error(parser)

    # Paths given on the command line must point to existing directories.
    if parsed.backup and not os.path.isdir(parsed.backup):
        args_error(parser, "Backup directory does not exist.")

    if parsed.rsync_history and not os.path.isdir(parsed.rsync_history):
        args_error(parser, "Rsync history directory does not exist.")

    # At least one source of media packages is needed for the recovery.
    if not (parsed.backup or parsed.rsync_history):
        args_error(parser, "Either a path to the archive backup or to the rsync history has to be provided")

    # Fall back to an interactive prompt when no password was supplied.
    digest_pw = parsed.password if parsed.password else read_digest_password()
    digest_login = DigestLogin(user=parsed.user, password=digest_pw)

    return parsed.opencast, parsed.https, digest_login, parsed.backup, parsed.media_packages, parsed.tenant, \
        parsed.workflow_id, parsed.last_version, parsed.rsync_history, parsed.ignore_errors
62 |
--------------------------------------------------------------------------------
/release-management/create-changelog/changelog.py:
--------------------------------------------------------------------------------
1 | import re
2 | import requests
3 | import sys
4 | from datetime import datetime
5 | from dateutil.parser import parse
6 |
7 | URL = 'https://api.github.com/repos/opencast/opencast/pulls' \
8 | '?state=closed&base='
9 | JIRA_TICKET_URL = 'https://opencast.jira.com/browse/'
10 |
11 |
def main(branch, start_date, end_date, pat):
    """
    Print a markdown changelog of pull requests merged into ``branch``
    between ``start_date`` and ``end_date``, listing bot (dependency
    update) pull requests separately at the end.

    :param branch: base branch the pull requests were merged into
    :param start_date: earliest merge date to include (parseable date string)
    :param end_date: latest merge date to include, or None to use "now"
    :param pat: optional GitHub personal access token for authentication
    """
    begin = parse(start_date).replace(tzinfo=None)
    end = parse(end_date).replace(tzinfo=None) if end_date else datetime.now()
    next_url = URL + branch
    pullrequests = []

    # Use a bearer token when a personal access token was supplied
    # (raises the GitHub API rate limit).
    headers = {'Authorization': f'Bearer {pat}'} if pat else {}

    # Get all closed pull requests for the branch, following the "Link"
    # response header for pagination until there is no rel="next" page left.
    while next_url:
        result = requests.get(next_url, headers=headers)
        link_header = result.headers.get('Link')
        next_url = None
        if link_header:
            match = re.search('<([^>]*)>; rel="next"', link_header)
            if match:
                next_url = match.group(1)
        pullrequests += result.json()

    # Filter by merge date; print regular entries as we go.
    bot_pullrequests = []
    for pr in pullrequests:
        merged = pr.get('merged_at')
        if not merged:
            continue  # pull request was closed without being merged
        merged = parse(merged).replace(tzinfo=None)
        if begin <= merged <= end:
            # Collect (dependa-)bot pull requests to print them separately below
            user_type = pr.get('user').get('type')
            if user_type == 'Bot':
                bot_pullrequests.append(pr)
                continue

            link = pr.get('html_url')
            title = pr.get('title').strip()
            nr = pr.get('number')
            pretty_print(title, nr, link)

    if len(bot_pullrequests) > 0:
        # NOTE(review): the string literals in the print() calls below span
        # multiple lines and are not valid Python as written; it looks like
        # HTML markup (e.g. <details>/<summary>/<ul> tags) was stripped from
        # this file — restore the original markup before running.
        print('Dependency updates
\n')
        print('')
        for bot_pr in bot_pullrequests:
            link = bot_pr.get('html_url')
            title = bot_pr.get('title').strip()
            nr = bot_pr.get('number')
            bot_pretty_print(title, nr, link)
        print('
')
        print(' ')
61 |
62 |
def pretty_print(title, pr_number, pr_link):
    """Print one changelog bullet for a merged pull request."""
    cleaned = pretty_print_title(title)
    print(f'- [[#{pr_number}]({pr_link})] -\n {cleaned}')
66 |
def bot_pretty_print(title, pr_number, pr_link):
    """Print one changelog entry for a bot (dependency update) pull request.

    BUG FIX: the original format string had two placeholders but was given
    three values (pr_link, pr_number, title), which raised a TypeError at
    runtime. All three values are now included in the output.
    """
    title = pretty_print_title(title)
    print('- [[#%s](%s)] -\n %s' % (pr_number, pr_link, title))
70 |
def pretty_print_title(title):
    """Strip a leading Jira-style ticket reference (e.g. 'MH-1234: ') from a title."""
    ticket_prefix = re.compile(r'^\S*[mM][hH]-\d{3,5}[,: ]*')
    return ticket_prefix.sub('', title)
73 |
if __name__ == '__main__':
    # Expected call: changelog.py branch start-date [end-date] [github pat]
    cli_args = sys.argv[1:]
    if 2 <= len(cli_args) <= 4:
        branch, start_date = cli_args[0], cli_args[1]
        end_date = cli_args[2] if len(cli_args) >= 3 else None
        pat = cli_args[3] if len(cli_args) == 4 else None
        main(branch, start_date, end_date, pat)
    else:
        print(f'Usage: {sys.argv[0]} branch start-date [end-date] [github pat]')
90 |
--------------------------------------------------------------------------------
/delete_dead_distribution_artefacts/delete_artefacts/find_distribution_artefacts.py:
--------------------------------------------------------------------------------
1 | import os
2 | from collections import defaultdict
3 |
4 | from util.count import count_distribution_artefacts
5 | from util.count import count_media_packages
6 |
7 |
def find_distribution_artefacts(distribution_dirs, tenants, channels, progress_printer):
    """
    Collect all distribution artefact directories for the given tenants and return them
    grouped by tenant and media package.

    :param distribution_dirs: Paths to the directories containing distribution artefacts.
    :type distribution_dirs: list
    :param tenants: The tenants for which distribution artefacts should be collected.
    :type tenants: list
    :param channels: The channels for which distribution artefacts should be collected;
        an empty value means all channels.
    :type channels: list
    :param progress_printer: Object to print progress messages with.
    :type progress_printer: ProgressPrinter
    :return: The distribution artefacts by tenant and media package.
    :rtype: dict
    """
    progress_printer.begin_progress_message("Searching for distribution artefacts...")
    artefacts = defaultdict(lambda: defaultdict(list))

    for dir_index, distribution_dir in enumerate(distribution_dirs):

        # Only the immediate subdirectories (one per tenant) are of interest.
        _, tenant_dirs, _ = next(os.walk(distribution_dir))

        for tenant_index, tenant in enumerate(tenant_dirs):
            if tenant not in tenants:
                continue
            tenant_dir = os.path.join(distribution_dir, tenant)
            _, channel_dirs, _ = next(os.walk(tenant_dir))

            for channel_index, channel in enumerate(channel_dirs):
                if channels and channel not in channels:
                    continue
                channel_dir = os.path.join(tenant_dir, channel)
                _, media_packages, _ = next(os.walk(channel_dir))

                progress_printer.print_progress_message(
                    f"Directory {dir_index + 1}/{len(distribution_dirs)}, "
                    f"tenant {tenant_index + 1}/{len(tenant_dirs)}, "
                    f"channel {channel_index + 1}/{len(channel_dirs)}")

                # One directory per media package inside the channel directory.
                for media_package in media_packages:
                    artefacts[tenant][media_package].append(os.path.join(channel_dir, media_package))

    dist_count = count_distribution_artefacts(artefacts)
    mp_count = count_media_packages(artefacts)

    progress_printer.end_progress_message(
        f"{dist_count} distribution artefact(s) for {mp_count} media package(s) found.\n")

    return artefacts
66 |
--------------------------------------------------------------------------------
/release-management/create-changelog/readme.md:
--------------------------------------------------------------------------------
1 | Generate Changelog for Opencast
2 | ===============================
3 |
This script generates a changelog based on merged pull requests. To generate a
5 | changelog for a given version, run the script with the git branch name, start
6 | date and optionally end date as arguments.
7 |
8 | Example for 14.x
9 | ---------------
10 |
11 | ```
12 | - [[#4945](https://github.com/opencast/opencast/pull/4945)] -
13 | Drop orphan statistics database index
14 | ```
15 |
16 | Changelog for N.x version
17 | -------------------------
18 |
19 | python changelog.py r/N.x
20 |
21 | Changelog for N.0 version
22 | -------------------------
23 |
24 | Since these versions are developed on both `develop` and their specific release
branches, two requests need to be made and merged:
26 |
27 | python changelog.py develop
28 | python changelog.py r/N.x
29 |
30 | Note that the Github API may generate duplicate entries between the two lists depending on dates and timezones.
31 |
32 | Dates
33 | -----
34 | Computing the dates can be annoying. You need to find the earliest commit belonging to various combinations of branches.
35 |
36 | git log --pretty=%as -n1 $(diff -u <(git rev-list --first-parent r/N.x) <(git rev-list --first-parent develop) | sed -ne 's/^ //p' | head -1)
37 |
38 | As an example, to generate the full list for Opencast 14 you need to know
39 |
40 | - The changelog for `develop` between the `r/13.x` branching off, and `r/14.x` being started
41 | - The changelog for `r/14.x` up to `14.0`
42 |
43 | To find the first begin-of-development date
44 |
45 | git log --pretty=%as -n1 $(diff -u <(git rev-list --first-parent r/13.x) <(git rev-list --first-parent develop) | sed -ne 's/^ //p' | head -1)
46 |
47 | To find the 14.x branch date
48 |
49 | git log --pretty=%as -n1 $(diff -u <(git rev-list --first-parent r/14.x) <(git rev-list --first-parent develop) | sed -ne 's/^ //p' | head -1)
50 |
51 | So the final changelog calls would be
52 |
53 | % python changelog.py develop 2022-11-16 2023-05-15
54 | % python changelog.py r/14.x 2023-05-15
55 |
56 | API Limits
57 | ----------
58 |
59 | Github enforces API limits, which this script can easily hit - especially if you run it multiple times when debugging!
60 | In such cases, it is possible to encounter an error when running the script, leading to an incorrect output
61 |
62 | [[#3903](https://github.com/opencast/opencast/pull/3903)] -
63 | Common persistence util classes that also implement transaction retries
64 | Traceback (most recent call last):
65 | File "opencast-helper-scripts/release-management/create-changelog/changelog.py", line 62, in
66 | main(branch, start_date, end_date, pat)
67 | File "opencast-helper-scripts/release-management/create-changelog/changelog.py", line 34, in main
68 | merged = pr.get('merged_at')
69 | ^^^^^^
70 | AttributeError: 'str' object has no attribute 'get'
71 |
72 | To raise this limit, you may need to create a [Personal Access Token](https://github.com/settings/tokens) with appropriate permissions (read only to the
upstream repo), and pass that as the *fourth* parameter. So the call with the token would be
74 |
75 | % python changelog.py develop 2022-11-16 2023-05-15 YOUR_TOKEN
--------------------------------------------------------------------------------
/lib/external_api/event_requests.py:
--------------------------------------------------------------------------------
1 | from rest_requests.get_response_content import get_json_content
2 | from rest_requests.request import get_request, delete_request, NOT_FOUND
3 | from rest_requests.request_error import RequestError
4 |
5 |
def get_events_of_series(base_url, digest_login, series_id):
    """
    Get the events for a series from the API

    :param base_url: The base URL for the request
    :type base_url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :param series_id: The series id
    :type series_id: str
    :return: list of events
    :rtype: list
    :raise RequestError:
    """
    url = f'{base_url}/api/events/?filter=is_part_of:{series_id}'
    return get_json_content(get_request(url, digest_login, "events"))
24 |
25 |
def get_failed_events(base_url, digest_login):
    """
    Get failed events from the API

    :param base_url: The base URL for the request
    :type base_url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :return: list of events
    :rtype: list
    :raise RequestError:
    """
    url = f'{base_url}/api/events/?filter=status:EVENTS.EVENTS.STATUS.PROCESSING_FAILURE'
    return get_json_content(get_request(url, digest_login, "failed events"))
42 |
43 |
def get_event(base_url, digest_login, event_id):
    """
    Get a single event.

    :param base_url: The URL for the request
    :type base_url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :param event_id: The id of the event to get
    :type event_id: str
    :return: the event as parsed from the response
    :raise RequestError:
    """
    url = f'{base_url}/api/events/{event_id}'
    return get_json_content(get_request(url, digest_login, "/api/events/"))
59 |
60 |
def event_exists(base_url, digest_login, event_id):
    """
    Check if an event exists.

    :param base_url: The URL for the request
    :type base_url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :param event_id: The id of the event
    :type event_id: str
    :return: true if it exists, false otherwise
    :rtype: bool
    :raise RequestError: for any error other than a 404 response
    """
    try:
        get_event(base_url, digest_login, event_id)
        return True
    except RequestError as e:
        # A 404 means "does not exist"; everything else is a real error.
        if e.has_status_code() and e.get_status_code() == NOT_FOUND:
            return False
        raise
83 |
84 |
def delete_event(base_url, digest_login, event_id):
    """
    Delete an event.

    :param base_url: The URL for the request
    :type base_url: str
    :param digest_login: The login credentials for digest authentication
    :type digest_login: DigestLogin
    :param event_id: The id of the event to delete
    :type event_id: str
    :raise RequestError:
    """
    delete_request(f'{base_url}/api/events/{event_id}', digest_login, "/api/events/")
99 |
--------------------------------------------------------------------------------