├── .gitignore ├── client.json ├── description.txt ├── src ├── retrieval │ ├── __init__.py │ ├── preprocessing.py │ └── retrieval.py ├── utils.py ├── convert_audio.py ├── query_freesound.py ├── curate_datasets.py ├── download_clips.py ├── retrieve.py ├── create_fsd50k_subset.py └── extern │ └── freesound.py ├── pyproject.toml ├── requirements.txt ├── README.rst ├── metadata └── fsd50k_mids.csv ├── poetry.lock └── LICENSE /.gitignore: -------------------------------------------------------------------------------- 1 | /_* 2 | _*/ 3 | 4 | *.py[cod] 5 | -------------------------------------------------------------------------------- /client.json: -------------------------------------------------------------------------------- 1 | { 2 | "client_id": "", 3 | "client_secret": "", 4 | "access_token": "" 5 | } 6 | -------------------------------------------------------------------------------- /description.txt: -------------------------------------------------------------------------------- 1 | The sound of a household microwave oven as it cooks. There is also the electronic beeping sounds of the oven buttons being pressed to initiate cooking, and then pressed again to stop. -------------------------------------------------------------------------------- /src/retrieval/__init__.py: -------------------------------------------------------------------------------- 1 | from .retrieval import retrieve, Vocabulary 2 | from .preprocessing import ShortestLemmatizer 3 | 4 | 5 | __all__ = [ 6 | 'retrieve', 7 | 'Vocabulary', 8 | 'ShortestLemmatizer', 9 | ] 10 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "arca23k-dataset" 3 | version = "0.1.0" 4 | description = "The code used to create the ARCA23K and ARCA23K-FSD datasets" 5 | authors = ["Turab Iqbal "] 6 | license = "AGPL-3.0-or-later" 7 | 8 | [tool.poetry.dependencies] 9 | python = ">=3.8,<3.11" 10 | jaffadata = "^0.2.1" 11 | nltk = "^3.6.2" 12 | numpy = "^1.21.2" 13 | pandas = "^1.3.2" 14 | requests = "^2.26.0" 15 | 16 | [tool.poetry.dev-dependencies] 17 | flake8 = "^3.9.2" 18 | 19 | [build-system] 20 | requires = ["poetry-core>=1.0.0"] 21 | build-backend = "poetry.core.masonry.api" 22 | -------------------------------------------------------------------------------- /src/utils.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import pandas as pd 4 | from tqdm import tqdm 5 | 6 | 7 | def load_freesound_metadata(metadata_dir): 8 | def _has_tags(entry): 9 | return len(entry['tags']) > 0 10 | 11 | entries = [] 12 | for path in tqdm(sorted(metadata_dir.iterdir())): 13 | with open(path, 'r') as f: 14 | json_dict = json.load(f) 15 | 16 | entries += list(filter(_has_tags, json_dict['results'])) 17 | 18 | # Wrap the entries in a DataFrame object 19 | entries = pd.DataFrame(entries).set_index('id') 20 | # Filter out entries that have a duration outside [0.3, 30] 21 | entries = entries[(entries.duration >= 0.3) & (entries.duration <= 30)] 22 | 23 | return entries 24 | -------------------------------------------------------------------------------- /src/convert_audio.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import subprocess 3 | import sys 4 | from pathlib import Path 5 | 6 | 7 | def convert(args): 8 | # Ensure output directory exists 9 | output_dir = args.work_dir / 'audio' 10 | output_dir.mkdir(parents=True, exist_ok=True) 11 | 12 | downloads_dir = args.work_dir / 'downloads' 13 | for path in downloads_dir.iterdir(): 14 | output_path = output_dir / (path.stem + '.wav') 15 | if not output_path.exists(): 16 | cmd = f'ffmpeg -i {path} -sample_fmt s16 -ar 44100 ' \ 17 | f'-ac 1 -acodec pcm_s16le {output_path}' 18 | subprocess.run(cmd.split()) 19 | 20 | 21 | def parse_args(): 22 | parser = argparse.ArgumentParser() 23 | parser.add_argument('--work_dir', type=Path, default=Path('_output'), 24 | help='path to workspace directory') 25 | return parser.parse_args() 26 | 27 | 28 | if __name__ == '__main__': 29 | sys.exit(convert(parse_args())) 30 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | certifi==2021.5.30; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" 2 | charset-normalizer==2.0.4; python_full_version >= "3.6.0" and python_version >= "3" 3 | click==8.0.1; python_version >= "3.6" 4 | colorama==0.4.4; python_version >= "3.6" and python_full_version < "3.0.0" and platform_system == "Windows" or platform_system == "Windows" and python_version >= "3.6" and python_full_version >= "3.5.0" 5 | idna==3.2; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version >= "3.5" 6 | joblib==1.0.1; python_version >= "3.6" 7 | nltk==3.6.2; python_version >= "3.5" 8 | numpy==1.21.2; python_version >= "3.7" and python_version < "3.11" 9 | pandas==1.3.2; python_full_version >= "3.7.1" 10 | python-dateutil==2.8.2; python_full_version >= "3.7.1" 11 | pytz==2021.1; python_full_version >= "3.7.1" 12 | regex==2021.8.28; python_version >= "3.5" 13 | requests==2.26.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.6.0") 14 | six==1.16.0; python_full_version >= "3.7.1" 15 | tqdm==4.62.2; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.5" 16 | urllib3==1.26.6; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version < "4" 17 | -------------------------------------------------------------------------------- /src/retrieval/preprocessing.py: -------------------------------------------------------------------------------- 1 | import nltk 2 | from nltk.corpus import stopwords, wordnet 3 | 4 | 5 | STOP_WORDS = stopwords.words('english') 6 | 7 | 8 | class ShortestLemmatizer: 9 | def __init__(self, lemmatizer): 10 | self.lemmatizer = lemmatizer 11 | 12 | def lemmatize(self, token): 13 | words = [self.lemmatizer.lemmatize(token, pos) 14 | for pos in [wordnet.ADJ, wordnet.NOUN, 15 | wordnet.VERB, wordnet.ADV]] 16 | return min(words, key=len) 17 | 18 | 19 | def tokenize(text): 20 | return nltk.tokenize.word_tokenize(text.replace('/', ' ')) 21 | 22 | 23 | def preprocess(tokens, lemmatizer=None): 24 | # Filter out tokens that are not words e.g. punctuation 25 | tokens = filter(is_word, tokens) 26 | 27 | # Convert tokens to lowercase while preserving abbreviations 28 | # e.g. 'kHz' would not be converted to lowercase 29 | tokens = map(smart_lowercase, tokens) 30 | 31 | # Normalize tokens using lemmatization if enabled 32 | if lemmatizer is not None: 33 | tokens = map(lemmatizer.lemmatize, tokens) 34 | 35 | # Remove stop words such as 'and', 'is', etc. 36 | tokens = [token for token in tokens if token not in STOP_WORDS] 37 | 38 | # Remove words that are duplicates 39 | tokens = list(dict.fromkeys(tokens)) 40 | 41 | return tokens 42 | 43 | 44 | def smart_lowercase(word): 45 | word_lower = word.lower() 46 | return word_lower if word_lower[1:] == word[1:] else word 47 | 48 | 49 | def is_word(text): 50 | return str.isalpha(text.replace('-', '')) 51 | -------------------------------------------------------------------------------- /src/query_freesound.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import json 3 | import sys 4 | import time 5 | from pathlib import Path 6 | 7 | from extern.freesound import FreesoundClient, FreesoundException 8 | 9 | 10 | def download(args): 11 | with open('client.json') as f: 12 | params = json.load(f) 13 | 14 | client = FreesoundClient() 15 | client.set_token(params['client_secret']) 16 | 17 | # Ensure output directory exists 18 | output_dir = args.work_dir / 'query' 19 | output_dir.mkdir(parents=True, exist_ok=True) 20 | 21 | # Determine the first page to start downloading 22 | # Redownload the last downloaded page in case it has been updated 23 | initial_page = sum(1 for _ in output_dir.glob('page*.json')) or 1 24 | 25 | page = initial_page 26 | while True: 27 | response = text_search(client, page) 28 | output_path = output_dir / f'page{page:04d}.json' 29 | with open(output_path, 'w') as f: 30 | json.dump(response.json_dict, f, indent=2) 31 | 32 | print(f'Page {page} retrieved with {len(response.results)} results') 33 | 34 | if response.next is None: 35 | break 36 | 37 | page += 1 38 | 39 | if page > initial_page: 40 | print(f'Retrieved pages {initial_page}-{page}') 41 | 42 | 43 | def text_search(client, page, query=''): 44 | fields = 'id,name,tags,description,type,channels,channels,' \ 45 | 'bitdepth,duration,samplerate,license,username' 46 | 47 | try: 48 | return client.text_search( 49 | query=query, 50 | fields=fields, 51 | filter='duration:[* TO 30]', 52 | sort='created_asc', 53 | page=str(page), 54 | page_size=150, 55 | ) 56 | except FreesoundException as e: 57 | if e.code == 500: 58 | # Search server could not be reached 59 | # Wait 10 seconds before trying again 60 | time.sleep(10) 61 | 62 | return text_search(client, page, query) 63 | 64 | raise e 65 | 66 | 67 | def parse_args(): 68 | parser = argparse.ArgumentParser() 69 | parser.add_argument('--work_dir', type=Path, default=Path('_output'), 70 | help='path to workspace directory') 71 | return parser.parse_args() 72 | 73 | 74 | if __name__ == '__main__': 75 | sys.exit(download(parse_args())) 76 | -------------------------------------------------------------------------------- /src/curate_datasets.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import sys 3 | from pathlib import Path 4 | 5 | 6 | def curate(args): 7 | import pandas as pd 8 | from tqdm import tqdm 9 | 10 | with open(args.work_dir / 'labels.txt') as f: 11 | label_set = f.read().strip().split('\n') 12 | 13 | # Create and save ARCA23K-FSD annotations 14 | subset_dir = args.work_dir / 'subset' 15 | df_train = pd.read_csv(subset_dir / 'train.csv', index_col=0) 16 | df_val = pd.read_csv(subset_dir / 'val.csv', index_col=0) 17 | df_test = pd.read_csv(subset_dir / 'test.csv', index_col=0) 18 | df_train = df_train[df_train.label.isin(label_set)] 19 | df_val = df_val[df_val.label.isin(label_set)] 20 | df_test = df_test[df_test.label.isin(label_set)] 21 | fsd_dir = args.work_dir / 'final/ARCA23K-FSD.ground_truth' 22 | fsd_dir.mkdir(parents=True, exist_ok=True) 23 | df_train.to_csv(fsd_dir / 'train.csv') 24 | df_val.to_csv(fsd_dir / 'val.csv') 25 | df_test.to_csv(fsd_dir / 'test.csv') 26 | 27 | # Create DataFrame for ARCA23K annotations 28 | df = pd.read_csv(args.work_dir / 'download_list.csv', index_col=0) 29 | mids = pd.read_csv('metadata/fsd50k_mids.csv', index_col=0) 30 | df['mid'] = mids.loc[df.prediction].values 31 | df.columns = ['label', 'mid'] 32 | df.index.name = 'fname' 33 | 34 | # Discard DataFrame entries that correspond to non-existing clips 35 | for clip_id in tqdm(df.index): 36 | path = args.work_dir / f'audio/{clip_id}.wav' 37 | n_bytes = path.stat().st_size 38 | if not path.exists() or n_bytes < 26460 or n_bytes > 2646500: 39 | df.drop([clip_id], inplace=True) 40 | 41 | # Constrain ARCA23K to be the same size as ARCA23K-FSD 42 | df_train = select_subset(df, df_train) 43 | 44 | # Save ARCA23K annotations 45 | arca23k_dir = args.work_dir / 'final/ARCA23K.ground_truth' 46 | df_train.to_csv(arca23k_dir / 'train.csv') 47 | df_val.to_csv(arca23k_dir / 'val.csv') 48 | df_test.to_csv(arca23k_dir / 'test.csv') 49 | 50 | 51 | def select_subset(df, df_train): 52 | # Determine number of clips to download per class 53 | target_sizes = df_train.groupby(df_train.label).size() 54 | 55 | def _sample(x): 56 | n_samples = target_sizes[x.label][0] 57 | return x.sample(n_samples) 58 | 59 | return df.groupby(df.label).apply(_sample).droplevel(0) 60 | 61 | 62 | def parse_args(): 63 | parser = argparse.ArgumentParser() 64 | parser.add_argument('--work_dir', type=Path, default=Path('_output'), 65 | help='path to workspace directory') 66 | return parser.parse_args() 67 | 68 | 69 | if __name__ == '__main__': 70 | sys.exit(curate(parse_args())) 71 | -------------------------------------------------------------------------------- /src/download_clips.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import json 3 | from pathlib import Path 4 | 5 | 6 | def download(args): 7 | import pandas as pd 8 | from tqdm import tqdm 9 | 10 | from extern.freesound import FreesoundClient, FSRequest, URIS 11 | 12 | import utils 13 | 14 | with open('client.json') as f: 15 | params = json.load(f) 16 | 17 | client = FreesoundClient() 18 | client.set_token(params['access_token'], auth_type='oauth') 19 | 20 | # Determine which clips to download 21 | df = pd.read_csv(args.work_dir / 'freesound_matches.csv', index_col=0) 22 | with open(args.work_dir / 'labels.txt') as f: 23 | label_set = f.read().strip().split('\n') 24 | 25 | # Load query results (to determine file extensions) 26 | results = utils.load_freesound_metadata(args.work_dir / 'query') 27 | results = results[results.index.isin(df.index)] 28 | results = results.join(df) 29 | 30 | # Select a subset of the clips to download 31 | # This saves time, space, and bandwidth 32 | results = select_subset(results, label_set, args.work_dir) 33 | 34 | # Ensure output directory exists 35 | download_dir = args.work_dir / 'downloads' 36 | download_dir.mkdir(parents=True, exist_ok=True) 37 | 38 | # Start downloading the selected clips 39 | results[['prediction']].to_csv(args.work_dir / 'download_list.csv') 40 | for index, row in tqdm(results.iterrows(), total=len(results)): 41 | path = Path(download_dir / f'{index}.{row.type}') 42 | if not path.exists(): 43 | uri = URIS.uri(URIS.DOWNLOAD, index) 44 | try: 45 | FSRequest.retrieve(uri, client, path) 46 | except Exception as e: 47 | print(f'Unable to download sound {index}\nReason: {str(e)}') 48 | if 'Unauthorized' in str(e): 49 | break 50 | 51 | 52 | def select_subset(df, label_set, work_dir): 53 | import pandas as pd 54 | 55 | # Determine minimum number of clips to download per class 56 | df_train = pd.read_csv(work_dir / 'subset/train.csv', index_col=0) 57 | df_train = df_train[df_train.label.isin(label_set)] 58 | target_sizes = df_train.groupby(df_train.label).size() 59 | 60 | def _sample(x): 61 | n_samples = target_sizes[x.prediction][0] 62 | return x.sample(min(len(x), n_samples + 10)) 63 | 64 | return df.groupby('prediction').apply(_sample).droplevel(0) 65 | 66 | 67 | def parse_args(): 68 | parser = argparse.ArgumentParser() 69 | parser.add_argument('--work_dir', type=Path, default=Path('_output'), 70 | help='path to workspace directory') 71 | return parser.parse_args() 72 | 73 | 74 | if __name__ == '__main__': 75 | download(parse_args()) 76 | -------------------------------------------------------------------------------- /src/retrieval/retrieval.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | from tqdm import tqdm 4 | 5 | from . import preprocessing 6 | 7 | 8 | class Vocabulary: 9 | def __init__(self, label_set, ontology, lemmatizer): 10 | vocabulary = [] 11 | label_terms = {} 12 | orig_labels = {} 13 | for label in label_set: 14 | label, orig_label = to_audioset_label(label), label 15 | 16 | # Extract search terms from label 17 | # e.g. 'Electric_guitar' -> ['Electric', 'guitar'] 18 | terms = extract_label_terms(label, lemmatizer) 19 | 20 | vocabulary += terms 21 | label_terms[label] = terms 22 | orig_labels[label] = orig_label 23 | 24 | # Extract terms from child labels 25 | ids = ontology[label].child_ids 26 | for child_id in ids: 27 | child_label = ontology[child_id].name 28 | terms = extract_label_terms(child_label, lemmatizer) 29 | vocabulary += terms 30 | label_terms[child_label] = terms 31 | orig_labels[child_label] = orig_label 32 | 33 | self.vocabulary = sorted(set(vocabulary)) 34 | self.label_vecs = self.vectorize(label_terms) 35 | self.orig_labels = list(orig_labels.values()) 36 | 37 | def vectorize(self, terms): 38 | if isinstance(terms, dict): 39 | label_vecs = {label: self.vectorize(term_list) 40 | for label, term_list in terms.items()} 41 | return np.stack(list(label_vecs.values())) 42 | 43 | return np.isin(self.vocabulary, terms).astype(float) 44 | 45 | def match(self, vec): 46 | sim = cosine_similarity(vec, self.label_vecs) 47 | index = np.argmax(sim) 48 | label = self.orig_labels[index] 49 | return sim, index, label 50 | 51 | def __len__(self): 52 | return len(self.vocabulary) 53 | 54 | 55 | def retrieve(entries, vocab, lemmatizer, threshold=0.5): 56 | results = {} 57 | for clip_id, entry in tqdm(entries.iterrows(), total=len(entries)): 58 | # Preprocess tags to simplify retrieval 59 | tags = preprocessing.preprocess(entry.tags, lemmatizer) 60 | 61 | # Tokenize and preprocess clip description 62 | desc_tokens = preprocessing.tokenize(entry.description) 63 | desc_terms = preprocessing.preprocess(desc_tokens, lemmatizer) 64 | 65 | # Vectorize query and description 66 | vec = np.zeros(len(vocab)) 67 | if len(tags) > 0: 68 | vec += vocab.vectorize(tags) 69 | if len(desc_terms) > 0: 70 | vec += vocab.vectorize(desc_terms) 71 | 72 | sim, index, label = vocab.match(vec) 73 | if sim[index] > threshold: 74 | results[clip_id] = (label, sim[index]) 75 | 76 | # Create a DataFrame object for the results 77 | columns = ['prediction', 'score'] 78 | results = pd.DataFrame.from_dict(results, orient='index', columns=columns) 79 | 80 | return results 81 | 82 | 83 | def extract_label_terms(label, lemmatizer): 84 | tokens = preprocessing.tokenize(label) 85 | return preprocessing.preprocess(tokens, lemmatizer) 86 | 87 | 88 | def cosine_similarity(vec, other_vecs): 89 | norm = np.linalg.norm(vec) * np.linalg.norm(other_vecs, axis=1) 90 | return np.inner(vec, other_vecs) / (norm + 1e-8) 91 | 92 | 93 | def to_audioset_label(label): 94 | label = label.replace('_', ' ') 95 | if label == 'Dishes and pots and pans': 96 | return 'Dishes, pots, and pans' 97 | return label.replace(' and', ',') 98 | -------------------------------------------------------------------------------- /src/retrieve.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import sys 3 | from pathlib import Path 4 | 5 | 6 | def main(args): 7 | import nltk 8 | import pandas as pd 9 | from jaffadata.datasets import AudioSetOntology 10 | from nltk.stem import WordNetLemmatizer 11 | 12 | import retrieval 13 | import utils 14 | from retrieval import ShortestLemmatizer 15 | 16 | # Load metadata for FSD50K subset 17 | dataset_dir = args.work_dir / 'fsd50k' 18 | subset = read_metadata(args.work_dir / 'subset') 19 | 20 | # Download NLTK resources 21 | nltk.download('stopwords') 22 | nltk.download('punkt') 23 | nltk.download('wordnet') 24 | 25 | # Build vocabulary of relevant terms based on label set 26 | lemmatizer = ShortestLemmatizer(WordNetLemmatizer()) 27 | ontology = AudioSetOntology('metadata/ontology.json') 28 | label_set = sorted(subset.label.unique()) 29 | vocab = retrieval.Vocabulary(label_set, ontology, lemmatizer) 30 | 31 | # Load Freesound metadata 32 | entries = utils.load_freesound_metadata(args.work_dir / 'query') 33 | mask = entries.index.isin(subset.index) 34 | if args.evaluate: 35 | # Filter out entries that do not belong to FSD50K subset 36 | entries = entries[mask] 37 | else: 38 | # Filter out entries belonging to FSD50K 39 | dev_path = dataset_dir / 'FSD50K.ground_truth/dev.csv' 40 | index = pd.read_csv(dev_path, index_col=0).index 41 | mask = ~(mask | entries.index.isin(index)) 42 | entries = entries[mask] 43 | 44 | # Run retrieval algorithm 45 | results = retrieval.retrieve(entries, vocab, lemmatizer) 46 | 47 | if args.evaluate: 48 | pd.options.display.max_rows = 100 49 | scores = evaluate(results, subset) 50 | scores.to_csv(args.work_dir / 'retrieval_scores.csv') 51 | print(scores) 52 | else: 53 | # Discard classes that lack a sufficient number of matches 54 | sizes = results.groupby('prediction').size() 55 | sizes2 = subset[subset.train].groupby('label').size() 56 | ratios = sizes / (sizes2 + 3) # margin=3 for headroom 57 | labels = ratios.index[ratios >= 1] 58 | results = results[results.prediction.isin(labels)] 59 | 60 | # Write matches to CSV file 61 | results.to_csv(args.work_dir / 'freesound_matches.csv') 62 | with open(args.work_dir / 'labels.txt', 'w') as f: 63 | f.write('\n'.join(sorted(labels))) 64 | 65 | 66 | def read_metadata(metadata_dir): 67 | import pandas as pd 68 | 69 | df_train = pd.read_csv(metadata_dir / 'train.csv', index_col=0) 70 | df_val = pd.read_csv(metadata_dir / 'val.csv', index_col=0) 71 | df_test = pd.read_csv(metadata_dir / 'test.csv', index_col=0) 72 | df = pd.concat([df_train, df_val, df_test]) 73 | df['train'] = df.index.isin(df_train.index) 74 | return df 75 | 76 | 77 | def evaluate(results, subset): 78 | import pandas as pd 79 | 80 | # Combine ground truth and predictions into a single DataFrame 81 | results = subset.join(results) 82 | results = results[pd.notna(results.prediction)] 83 | 84 | accuracy = (results.label == results.prediction).mean() 85 | class_accuracy = results.groupby('label').apply( 86 | lambda x: (x.label == x.prediction).mean()) 87 | df = pd.DataFrame(class_accuracy, columns=['Accuracy']) 88 | df.loc['Macro Average'] = class_accuracy.mean() 89 | df.loc['Micro Average'] = accuracy 90 | 91 | return df 92 | 93 | 94 | def parse_args(): 95 | parser = argparse.ArgumentParser() 96 | parser.add_argument('--work_dir', type=Path, default=Path('_output'), 97 | help='path to workspace directory') 98 | parser.add_argument('--evaluate', type=bool, 99 | help='whether to run in evaluation mode') 100 | return parser.parse_args() 101 | 102 | 103 | if __name__ == '__main__': 104 | sys.exit(main(parse_args())) 105 | -------------------------------------------------------------------------------- /src/create_fsd50k_subset.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import sys 3 | from io import BytesIO 4 | from pathlib import Path 5 | from zipfile import ZipFile 6 | 7 | 8 | def main(args): 9 | from jaffadata.datasets import AudioSetOntology, FSD50K 10 | 11 | dataset_dir = args.work_dir / 'fsd50k' 12 | if not dataset_dir.is_dir(): 13 | download_fsd50k_data(dataset_dir) 14 | dataset = FSD50K(dataset_dir) 15 | ontology = AudioSetOntology('metadata/ontology.json') 16 | 17 | # Filter out audio clips that contain multiple sounds 18 | train_set = to_single_label(dataset['training'], ontology) 19 | val_set = to_single_label(dataset['validation'], ontology) 20 | test_set = to_single_label(dataset['test'], ontology) 21 | 22 | # Filter out classes that have less than n instances 23 | # mids correspond to the AudioSet classes 24 | train_mids = filter_classes_by_size(train_set, n=50) 25 | val_mids = filter_classes_by_size(val_set, n=10) 26 | test_mids = filter_classes_by_size(test_set, n=20) 27 | mids = set(train_mids) & set(val_mids) & set(test_mids) 28 | 29 | # Filter out ancestor classes 30 | mids = filter_classes_by_ancestry(mids, ontology) 31 | 32 | # Create new subsets based on the new class set 33 | train_set = filter_subset_by_class(train_set, mids) 34 | val_set = filter_subset_by_class(val_set, mids) 35 | test_set = filter_subset_by_class(test_set, mids) 36 | 37 | # Save annotations to disk 38 | output_dir = args.work_dir / 'subset' 39 | output_dir.mkdir(parents=True, exist_ok=True) 40 | save_annotations(train_set, output_dir / 'train.csv') 41 | save_annotations(val_set, output_dir / 'val.csv') 42 | save_annotations(test_set, output_dir / 'test.csv') 43 | 44 | 45 | def download_fsd50k_data(dataset_dir): 46 | import requests 47 | 48 | # Download ground truth zip file 49 | url = 'https://zenodo.org/record/4060432/files/FSD50K.ground_truth.zip' 50 | response = requests.get(url) 51 | if not response.ok: 52 | raise RuntimeError(f'Unable to download from Zenodo: ' 53 | f'{response.status_code} {response.reason}') 54 | 55 | # Unzip contents 56 | dataset_dir.mkdir(parents=True) 57 | (dataset_dir / 'FSD50K.dev_audio').mkdir() 58 | (dataset_dir / 'FSD50K.eval_audio').mkdir() 59 | zfile = ZipFile(BytesIO(response.content), 'r') 60 | zfile.extractall(dataset_dir) 61 | zfile.close() 62 | 63 | 64 | def to_single_label(subset, ontology): 65 | import numpy as np 66 | 67 | # Check if any labels are unrelated to the first label, which 68 | # would mean that the clip contains more than one type of sound. 69 | # It is assumed the first label encountered is a leaf node. 70 | def _are_unrelated(mids): 71 | return any(not ontology[mid].is_ancestor(ontology[mids[0]]) 72 | for mid in mids[1:]) 73 | 74 | mask = np.fromiter(map(_are_unrelated, subset.tags.mids), dtype=bool) 75 | 76 | subset = subset[~mask] 77 | subset.tags.insert(0, 'label', subset.tags.labels.str[0]) 78 | subset.tags.insert(1, 'mid', subset.tags.mids.str[0]) 79 | del subset.tags['labels'] 80 | del subset.tags['mids'] 81 | return subset 82 | 83 | 84 | def filter_classes_by_size(subset, n): 85 | mids = subset.tags.mid 86 | sizes = mids.groupby(mids, sort=False).size() 87 | return mids.unique()[sizes >= n] 88 | 89 | 90 | def filter_classes_by_ancestry(mids, ontology): 91 | def _item(mid): 92 | return ontology[mid] 93 | 94 | new_mids = [] 95 | for mid in mids: 96 | # Determine whether the class is an ancestor of any other class 97 | is_ancestor = any(_item(mid).is_ancestor(_item(other_mid)) 98 | for other_mid in mids) 99 | # Include the class if it is not an ancestor 100 | if not is_ancestor: 101 | new_mids.append(mid) 102 | 103 | return new_mids 104 | 105 | 106 | def filter_subset_by_class(subset, mids): 107 | return subset[subset.tags.mid.isin(mids)] 108 | 109 | 110 | def save_annotations(subset, output_path): 111 | import pandas as pd 112 | 113 | tags = subset.tags 114 | names = [fname[:-4] for fname in tags.index] 115 | tags.index = pd.Index(names, name=tags.index.name) 116 | tags = tags[['label', 'mid']] 117 | tags.to_csv(output_path) 118 | 119 | 120 | def parse_args(): 121 | parser = argparse.ArgumentParser() 122 | parser.add_argument('--work_dir', type=Path, default=Path('_output'), 123 | help='path to workspace directory') 124 | return parser.parse_args() 125 | 126 | 127 | if __name__ == '__main__': 128 | sys.exit(main(parse_args())) 129 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ARCA23K 2 | ======= 3 | 4 | This is the software used to create the ARCA23K and ARCA23K-FSD 5 | datasets. A description of these datasets, along with download links, can be 6 | found on the `Zenodo page`__. Details of how the datasets were created can be 7 | found in our `DCASE2021 paper`__ [1]_. 8 | 9 | Due to the mutable nature of the Freesound database (the source of the 10 | audio data), this software is unlikely to reproduce ARCA23K and 11 | ARCA23K-FSD exactly. Nevertheless, we hope this code can serve as a 12 | useful reference. 13 | 14 | The source code for the baseline system can be found `here`__. 15 | 16 | __ https://zenodo.org/record/5117901 17 | __ https://arxiv.org/abs/2109.09227 18 | __ https://github.com/tqbl/arca23k-baseline 19 | 20 | 21 | .. contents:: 22 | 23 | 24 | Requirements 25 | ------------ 26 | 27 | This software requires Python >=3.8. To install the dependencies, run:: 28 | 29 | poetry install 30 | 31 | or:: 32 | 33 | pip install -r requirements.txt 34 | 35 | You are also free to use another package manager (e.g. Conda). 36 | 37 | `FFmpeg`__ is required too for converting the audio files. 38 | 39 | __ https://www.ffmpeg.org 40 | 41 | 42 | Configuration 43 | ------------- 44 | 45 | Some of the scripts require access to the Freesound API. To use the API, 46 | access credentials are required, which can be applied for `here`__. Once 47 | a client ID and a client secret key are obtained, they need to be added 48 | to the `client.json`__ file. An access token is also needed to download 49 | clips from Freesound. To obtain an access token, follow the instructions 50 | given `here`__. Note that an access token is only valid for 24 hours. To 51 | use the API without request limitations, you may need to contact the 52 | Freesound developers. 53 | 54 | __ https://freesound.org/apiv2/apply 55 | __ client.json 56 | __ https://freesound.org/docs/api/authentication.html#oauth-authentication 57 | 58 | 59 | Usage 60 | ----- 61 | 62 | Each Python script has the following usage:: 63 | 64 | python SCRIPT [--work_dir DIR] [other-options...] 65 | 66 | The ``--work_dir`` option is used to specify the directory in which the 67 | output files are to be written. By default, it is set to ``_output/``. 68 | As some scripts depend on the output files of other scripts, please 69 | ensure that this option is set to the same value across scripts. 70 | 71 | For details of the other arguments and options, use ``--help``. 72 | 73 | This software provides six scripts: 74 | 75 | 1. ``src/create_fsd50k_subset.py``: Creates a tentative version of 76 | ARCA23K-FSD. It selects a single-label subset of FSD50K and saves the 77 | ground truth data of the subset. 78 | 2. ``src/query_freesound.py``: Uses the Freesound API to search the 79 | database for all clips that are up to 30 seconds in duration. The 80 | search results, which include various metadata, are saved to disk. 81 | 3. ``src/retrieve.py``: Based on the search results of the previous 82 | script, the results are narrowed down to clips that can be assigned a 83 | label, which is determined using an automated procedure. 84 | 4. ``src/download_clips.py``: Uses the Freesound API to download clips 85 | from Freesound. The clips that are downloaded depend on the results 86 | of the previous script. 87 | 5. ``src/convert_audio.py``: Converts the downloaded Freesound clips to 88 | mono 16-bit 44.1 kHz WAV files. 89 | 6. ``src/curate_datasets.py``: Creates the final ground truth data for 90 | ARCA23K and ARCA23K-FSD. 91 | 92 | Ensure that the scripts are run in the given order. 93 | 94 | 95 | Attribution 96 | ----------- 97 | 98 | `src/extern/freesound.py`__ is from `MTG/freesound-datasets`__. 99 | 100 | `metadata/ontology.json`__ is from `audioset/ontology`__. 101 | 102 | __ src/extern/freesound.py 103 | __ https://github.com/MTG/freesound-datasets 104 | __ metadata/ontology.json 105 | __ https://github.com/audioset/ontology 106 | 107 | 108 | Citing 109 | ------ 110 | 111 | If you wish to cite this work, please cite the following paper: 112 | 113 | .. [1] \T. Iqbal, Y. Cao, A. Bailey, M. D. Plumbley, and W. Wang, 114 | “ARCA23K: An audio dataset for investigating open-set label 115 | noise”, in Proceedings of the Detection and Classification of 116 | Acoustic Scenes and Events 2021 Workshop (DCASE2021), 2021, 117 | Barcelona, Spain, pp. 201–205. 118 | 119 | BibTeX:: 120 | 121 | @inproceedings{Iqbal2021, 122 | author = {Iqbal, T. and Cao, Y. and Bailey, A. and Plumbley, M. D. and Wang, W.}, 123 | title = {{ARCA23K}: An audio dataset for investigating open-set label noise}, 124 | booktitle = {Proceedings of the Detection and Classification of Acoustic Scenes and Events 2021 Workshop (DCASE2021)}, 125 | pages = {201--205}, 126 | year = {2021}, 127 | address = {Barcelona, Spain}, 128 | } 129 | -------------------------------------------------------------------------------- /metadata/fsd50k_mids.csv: -------------------------------------------------------------------------------- 1 | label,mid 2 | Accelerating_and_revving_and_vroom,/m/07q2z82 3 | Accordion,/m/0mkg 4 | Acoustic_guitar,/m/042v_gx 5 | Aircraft,/m/0k5j 6 | Alarm,/m/07pp_mv 7 | Animal,/m/0jbk 8 | Applause,/m/028ght 9 | Bark,/m/05tny_ 10 | Bass_drum,/m/0bm02 11 | Bass_guitar,/m/018vs 12 | Bathtub_(filling_or_washing),/m/03dnzn 13 | Bell,/m/0395lw 14 | Bicycle,/m/0199g 15 | Bird,/m/015p6 16 | Bird_vocalization_and_bird_call_and_bird_song,/m/020bb7 17 | Boat_and_Water_vehicle,/m/019jd 18 | Boiling,/m/0dv3j 19 | Boom,/m/07qqyl4 20 | Bowed_string_instrument,/m/0l14_3 21 | Brass_instrument,/m/01kcd 22 | Breathing,/m/0lyf6 23 | Burping_and_eructation,/m/03q5_w 24 | Bus,/m/01bjv 25 | Buzz,/m/07pjwq1 26 | Camera,/m/0dv5r 27 | Car,/m/0k4j 28 | Car_passing_by,/t/dd00134 29 | Cat,/m/01yrx 30 | Chatter,/m/07rkbfh 31 | Cheering,/m/053hz1 32 | Chewing_and_mastication,/m/03cczk 33 | Chicken_and_rooster,/m/09b5t 34 | Child_speech_and_kid_speaking,/m/0ytgt 35 | Chime,/m/0f8s22 36 | Chink_and_clink,/m/07q7njn 37 | Chirp_and_tweet,/m/07pggtn 38 | Chuckle_and_chortle,/m/07rgt08 39 | Church_bell,/m/03w41f 40 | Clapping,/m/0l15bq 41 | Clock,/m/01x3z 42 | Coin_(dropping),/m/0242l 43 | Computer_keyboard,/m/01m2v 44 | Conversation,/m/01h8n0 45 | Cough,/m/01b_21 46 | Crack,/m/07qs1cx 47 | Crackle,/m/07pzfmf 48 | Crash_cymbal,/m/0bm0k 49 | Cricket,/m/09xqv 50 | Crow,/m/04s8yn 51 | Crowd,/m/03qtwd 52 | Crumpling_and_crinkling,/t/dd00112 53 | Crushing,/m/07plct2 54 | Crying_and_sobbing,/m/0463cq4 55 | Cupboard_open_or_close,/m/0642b4 56 | Cutlery_and_silverware,/m/023pjk 57 | Cymbal,/m/01qbl 58 | Dishes_and_pots_and_pans,/m/04brg2 59 | Dog,/m/0bt9lr 60 | Domestic_sounds_and_home_sounds,/t/dd00071 61 | Door,/m/02dgv 62 | Drawer_open_or_close,/m/0fqfqc 63 | Drill,/m/01d380 64 | Drip,/m/07r5v4s 65 | Drum,/m/026t6 66 | Drum_kit,/m/02hnl 67 | Electric_guitar,/m/02sgy 68 | Engine,/m/02mk9 69 | Engine_starting,/t/dd00130 70 | Explosion,/m/014zdl 71 | Fart,/m/02_nn 72 | Female_singing,/t/dd00004 73 | Female_speech_and_woman_speaking,/m/02zsn 74 | Fill_(with_liquid),/m/07p7b8y 75 | Finger_snapping,/m/025_jnm 76 | Fire,/m/02_41 77 | Fireworks,/m/0g6b5 78 | Fixed-wing_aircraft_and_airplane,/m/0cmf2 79 | Fowl,/m/025rv6n 80 | Frog,/m/09ld4 81 | Frying_(food),/m/0dxrf 82 | Gasp,/m/07s0dtb 83 | Giggle,/m/07r660_ 84 | Glass,/m/039jq 85 | Glockenspiel,/m/0dwtp 86 | Gong,/m/0mbct 87 | Growling,/m/0ghcn6 88 | Guitar,/m/0342h 89 | Gull_and_seagull,/m/01dwxx 90 | Gunshot_and_gunfire,/m/032s66 91 | Gurgling,/m/07swgks 92 | Hammer,/m/03l9g 93 | Hands,/m/0k65p 94 | Harmonica,/m/03qjg 95 | Harp,/m/03m5k 96 | Hi-hat,/m/03qtq 97 | Hiss,/m/07rjwbb 98 | Human_group_actions,/t/dd00012 99 | Human_voice,/m/09l8g 100 | Idling,/m/07pb8fc 101 | Insect,/m/03vt0 102 | Keyboard_(musical),/m/05148p4 103 | Keys_jangling,/m/03v3yw 104 | Knock,/m/07r4wb8 105 | Laughter,/m/01j3sz 106 | Liquid,/m/04k94 107 | Livestock_and_farm_animals_and_working_animals,/m/0ch8v 108 | Male_singing,/t/dd00003 109 | Male_speech_and_man_speaking,/m/05zppz 110 | Mallet_percussion,/m/0j45pbj 111 | Marimba_and_xylophone,/m/0dwsp 112 | Mechanical_fan,/m/02x984l 113 | Mechanisms,/t/dd00077 114 | Meow,/m/07qrkrw 115 | Microwave_oven,/m/0fx9l 116 | Motor_vehicle_(road),/m/012f08 117 | Motorcycle,/m/04_sv 118 | Musical_instrument,/m/04szw 119 | Ocean,/m/05kq4 120 | Organ,/m/013y1f 121 | Packing_tape_and_duct_tape,/m/05mxj0q 122 | Percussion,/m/0l14md 123 | Piano,/m/05r5c 124 | Plucked_string_instrument,/m/0fx80y 125 | Power_tool,/m/0_ksk 126 | Printer,/m/01m4t 127 | Purr,/m/02yds9 128 | Race_car_and_auto_racing,/m/0ltv 129 | Rain,/m/06mb1 130 | Raindrop,/m/07r10fb 131 | Ratchet_and_pawl,/m/02bm9n 132 | Rattle,/m/07qn4z3 133 | Rattle_(instrument),/m/05r5wn 134 | Respiratory_sounds,/m/09hlz4 135 | Ringtone,/m/01hnzm 136 | Run,/m/06h7j 137 | Sawing,/m/01b82r 138 | Scissors,/m/01lsmm 139 | Scratching_(performance_technique),/m/01hgjl 140 | Screaming,/m/03qc9zr 141 | Screech,/m/07q8k13 142 | Shatter,/m/07rn7sz 143 | Shout,/m/07p6fty 144 | Sigh,/m/07plz5l 145 | Singing,/m/015lz1 146 | Sink_(filling_or_washing),/m/0130jx 147 | Siren,/m/03kmc9 148 | Skateboard,/m/06_fw 149 | Slam,/m/07rjzl8 150 | Sliding_door,/m/02y_763 151 | Snare_drum,/m/06rvn 152 | Sneeze,/m/01hsr_ 153 | Speech,/m/09x0r 154 | Speech_synthesizer,/m/0brhx 155 | Splash_and_splatter,/m/07rrlb6 156 | Squeak,/m/07q6cd_ 157 | Stream,/m/0j6m2 158 | Strum,/m/07s0s5r 159 | Subway_and_metro_and_underground,/m/0195fx 160 | Tabla,/m/01p970 161 | Tambourine,/m/07brj 162 | Tap,/m/07qcpgn 163 | Tearing,/m/07qcx4z 164 | Telephone,/m/07cx4 165 | Thump_and_thud,/m/07qnq_y 166 | Thunder,/m/0ngt1 167 | Thunderstorm,/m/0jb2l 168 | Tick,/m/07qjznt 169 | Tick-tock,/m/07qjznl 170 | Toilet_flush,/m/01jt3m 171 | Tools,/m/07k1x 172 | Traffic_noise_and_roadway_noise,/m/0btp2 173 | Train,/m/07jdr 174 | Trickle_and_dribble,/m/07pqc89 175 | Truck,/m/07r04 176 | Trumpet,/m/07gql 177 | Typewriter,/m/0c2wf 178 | Typing,/m/0316dw 179 | Vehicle,/m/07yv9 180 | Walk_and_footsteps,/m/07pbtc8 181 | Water,/m/0838f 182 | Water_tap_and_faucet,/m/02jz0l 183 | Waves_and_surf,/m/034srq 184 | Whispering,/m/02rtxlg 185 | Whoosh_and_swoosh_and_swish,/m/07rqsjt 186 | Wild_animals,/m/01280g 187 | Wind,/m/03m9d0z 188 | Wind_chime,/m/026fgl 189 | Wind_instrument_and_woodwind_instrument,/m/085jw 190 | Wood,/m/083vt 191 | Writing,/m/081rb 192 | Yell,/m/07sr1lc 193 | Zipper_(clothing),/m/01s0vc 194 | -------------------------------------------------------------------------------- /src/extern/freesound.py: -------------------------------------------------------------------------------- 1 | """ 2 | A python client for the Freesound API. 3 | 4 | Find the API documentation at http://www.freesound.org/docs/api/. 5 | 6 | Apply for an API key at http://www.freesound.org/api/apply/. 7 | 8 | The client automatically maps function arguments to http parameters of the API. 9 | JSON results are converted to python objects. The main object types (Sound, 10 | User, Pack) are augmented with the corresponding API calls. 11 | 12 | Note that POST resources are not supported. Downloading full quality sounds 13 | requires Oauth2 authentication 14 | (see http://freesound.org/docs/api/authentication.html). Oauth2 authentication 15 | is supported, but you are expected to implement the workflow. 16 | """ 17 | import os 18 | import re 19 | import json 20 | 21 | try: # python 3 22 | from urllib.request import urlopen, FancyURLopener, Request # noqa 23 | from urllib.parse import urlencode, quote 24 | from urllib.error import HTTPError 25 | py3 = True 26 | except ImportError: # python 2.7 27 | from urllib import urlencode, FancyURLopener, quote 28 | from urllib2 import HTTPError, urlopen, Request 29 | py3 = False 30 | 31 | 32 | class URIS(): 33 | HOST = 'freesound.org' 34 | BASE = 'https://' + HOST + '/apiv2' 35 | TEXT_SEARCH = '/search/text/' 36 | CONTENT_SEARCH = '/search/content/' 37 | COMBINED_SEARCH = '/search/combined/' 38 | SOUND = '/sounds//' 39 | SOUND_ANALYSIS = '/sounds//analysis/' 40 | SIMILAR_SOUNDS = '/sounds//similar/' 41 | COMMENTS = '/sounds//comments/' 42 | DOWNLOAD = '/sounds//download/' 43 | UPLOAD = '/sounds/upload/' 44 | DESCRIBE = '/sounds//describe/' 45 | PENDING = '/sounds/pending_uploads/' 46 | BOOKMARK = '/sounds//bookmark/' 47 | RATE = '/sounds//rate/' 48 | COMMENT = '/sounds//comment/' 49 | AUTHORIZE = '/oauth2/authorize/' 50 | LOGOUT = '/api-auth/logout/' 51 | LOGOUT_AUTHORIZE = '/oauth2/logout_and_authorize/' 52 | ME = '/me/' 53 | USER = '/users//' 54 | USER_SOUNDS = '/users//sounds/' 55 | USER_PACKS = '/users//packs/' 56 | USER_BOOKMARK_CATEGORIES = '/users//bookmark_categories/' 57 | USER_BOOKMARK_CATEGORY_SOUNDS = '/users//bookmark_categories//sounds/' # noqa 58 | PACK = '/packs//' 59 | PACK_SOUNDS = '/packs//sounds/' 60 | PACK_DOWNLOAD = '/packs//download/' 61 | 62 | @classmethod 63 | def uri(cls, uri, *args): 64 | for a in args: 65 | uri = re.sub('<[\w_]+>', quote(str(a)), uri, 1) 66 | return cls.BASE + uri 67 | 68 | 69 | class FreesoundClient(): 70 | """ 71 | Start here, create a FreesoundClient and set an authentication token using 72 | set_token 73 | >>> c = FreesoundClient() 74 | >>> c.set_token("") 75 | """ 76 | client_secret = "" 77 | client_id = "" 78 | token = "" 79 | header = "" 80 | 81 | def get_sound(self, sound_id, **params): 82 | """ 83 | Get a sound object by id 84 | Relevant params: descriptors, fields, normalized 85 | http://freesound.org/docs/api/resources_apiv2.html#sound-resources 86 | 87 | >>> sound = c.get_sound(6) 88 | """ 89 | uri = URIS.uri(URIS.SOUND, sound_id) 90 | return FSRequest.request(uri, params, self, Sound) 91 | 92 | def text_search(self, **params): 93 | """ 94 | Search sounds using a text query and/or filter. Returns an iterable 95 | Pager object. The fields parameter allows you to specify the 96 | information you want in the results list 97 | http://freesound.org/docs/api/resources_apiv2.html#text-search 98 | 99 | >>> sounds = c.text_search( 100 | >>> query="dubstep", filter="tag:loop", fields="id,name,url" 101 | >>> ) 102 | >>> for snd in sounds: print snd.name 103 | """ 104 | uri = URIS.uri(URIS.TEXT_SEARCH) 105 | return FSRequest.request(uri, params, self, Pager) 106 | 107 | def content_based_search(self, **params): 108 | """ 109 | Search sounds using a content-based descriptor target and/or filter 110 | See essentia_example.py for an example using essentia 111 | http://freesound.org/docs/api/resources_apiv2.html#content-search 112 | 113 | >>> sounds = c.content_based_search( 114 | >>> target="lowlevel.pitch.mean:220", 115 | >>> descriptors_filter="lowlevel.pitch_instantaneous_confidence.mean:[0.8 TO 1]", # noqa 116 | >>> fields="id,name,url") 117 | >>> for snd in sounds: print snd.name 118 | """ 119 | uri = URIS.uri(URIS.CONTENT_SEARCH) 120 | return FSRequest.request(uri, params, self, Pager) 121 | 122 | def combined_search(self, **params): 123 | """ 124 | Combine both text and content-based queries. 125 | http://freesound.org/docs/api/resources_apiv2.html#combined-search 126 | 127 | >>> sounds = c.combined_search( 128 | >>> target="lowlevel.pitch.mean:220", 129 | >>> filter="single-note" 130 | >>> ) 131 | """ 132 | uri = URIS.uri(URIS.COMBINED_SEARCH) 133 | return FSRequest.request(uri, params, self, CombinedSearchPager) 134 | 135 | def get_user(self, username): 136 | """ 137 | Get a user object by username 138 | http://freesound.org/docs/api/resources_apiv2.html#combined-search 139 | 140 | >>> u=c.get_user("xserra") 141 | """ 142 | uri = URIS.uri(URIS.USER, username) 143 | return FSRequest.request(uri, {}, self, User) 144 | 145 | def get_pack(self, pack_id): 146 | """ 147 | Get a user object by username 148 | http://freesound.org/docs/api/resources_apiv2.html#combined-search 149 | 150 | >>> p = c.get_pack(3416) 151 | """ 152 | uri = URIS.uri(URIS.PACK, pack_id) 153 | return FSRequest.request(uri, {}, self, Pack) 154 | 155 | def set_token(self, token, auth_type="token"): 156 | """ 157 | Set your API key or Oauth2 token 158 | http://freesound.org/docs/api/authentication.html 159 | http://freesound.org/docs/api/resources_apiv2.html#combined-search 160 | 161 | >>> c.set_token("") 162 | """ 163 | self.token = token # TODO 164 | if auth_type == 'oauth': 165 | self.header = 'Bearer ' + token 166 | else: 167 | self.header = 'Token ' + token 168 | 169 | 170 | class FreesoundObject: 171 | """ 172 | Base object, automatically populated from parsed json dictionary 173 | """ 174 | def __init__(self, json_dict, client): 175 | self.client = client 176 | self.json_dict = json_dict 177 | 178 | def replace_dashes(d): 179 | for k, v in d.items(): 180 | if "-" in k: 181 | d[k.replace("-", "_")] = d[k] 182 | del d[k] 183 | if isinstance(v, dict): 184 | replace_dashes(v) 185 | 186 | replace_dashes(json_dict) 187 | self.__dict__.update(json_dict) 188 | for k, v in json_dict.items(): 189 | if isinstance(v, dict): 190 | self.__dict__[k] = FreesoundObject(v, client) 191 | 192 | def as_dict(self): 193 | return self.json_dict 194 | 195 | 196 | class FreesoundException(Exception): 197 | """ 198 | Freesound API exception 199 | """ 200 | def __init__(self, http_code, detail): 201 | self.code = http_code 202 | self.detail = detail 203 | 204 | def __str__(self): 205 | return '' % \ 206 | (self.code, self.detail) 207 | 208 | 209 | class Retriever(FancyURLopener): 210 | """ 211 | Downloads previews and original sound files to disk. 212 | """ 213 | def http_error_default(self, url, fp, errcode, errmsg, headers): 214 | resp = fp.read() 215 | try: 216 | error = json.loads(resp) 217 | raise FreesoundException(errcode, error.detail) 218 | except: 219 | raise Exception(resp) 220 | 221 | 222 | class FSRequest: 223 | """ 224 | Makes requests to the freesound API. Should not be used directly. 225 | """ 226 | @classmethod 227 | def request( 228 | cls, 229 | uri, 230 | params={}, 231 | client=None, 232 | wrapper=FreesoundObject, 233 | method='GET', 234 | data=False 235 | ): 236 | p = params if params else {} 237 | url = '%s?%s' % (uri, urlencode(p)) if params else uri 238 | d = urlencode(data) if data else None 239 | headers = {'Authorization': client.header} 240 | req = Request(url, d, headers) 241 | try: 242 | f = urlopen(req) 243 | except HTTPError as e: 244 | resp = e.read() 245 | if e.code >= 200 and e.code < 300: 246 | return resp 247 | else: 248 | raise FreesoundException(e.code, json.loads(resp)) 249 | if py3: 250 | resp = f.read().decode("utf-8") 251 | else: 252 | resp = f.read() 253 | f.close() 254 | result = None 255 | try: 256 | result = json.loads(resp) 257 | except: 258 | raise FreesoundException(0, "Couldn't parse response") 259 | if wrapper: 260 | return wrapper(result, client) 261 | return result 262 | 263 | @classmethod 264 | def retrieve(cls, url, client, path): 265 | r = Retriever() 266 | r.addheader('Authorization', client.header) 267 | return r.retrieve(url, path) 268 | 269 | 270 | class Pager(FreesoundObject): 271 | """ 272 | Paginates search results. Can be used in for loops to iterate its results 273 | array. 274 | """ 275 | def __getitem__(self, key): 276 | return Sound(self.results[key], self.client) 277 | 278 | def next_page(self): 279 | """ 280 | Get a Pager with the next results page. 281 | """ 282 | return FSRequest.request(self.next, {}, self.client, Pager) 283 | 284 | def previous_page(self): 285 | """ 286 | Get a Pager with the previous results page. 287 | """ 288 | return FSRequest.request(self.previous, {}, self.client, Pager) 289 | 290 | 291 | class GenericPager(Pager): 292 | """ 293 | Paginates results for objects different than Sound. 294 | """ 295 | def __getitem__(self, key): 296 | return FreesoundObject(self.results[key], self.client) 297 | 298 | 299 | class CombinedSearchPager(FreesoundObject): 300 | """ 301 | Combined search uses a different pagination style. 302 | The total amount of results is not available, and the size of the page is 303 | not guaranteed. 304 | Use :py:meth:`~freesound.CombinedSearchPager.more` to get more results if 305 | available. 306 | """ 307 | def __getitem__(self, key): 308 | return Sound(self.results[key], None) 309 | 310 | def more(self): 311 | """ 312 | Get more results 313 | """ 314 | return FSRequest.request( 315 | self.more, {}, self.client, CombinedSearchPager 316 | ) 317 | 318 | 319 | class Sound(FreesoundObject): 320 | """ 321 | Freesound Sound resources 322 | 323 | >>> sound = c.get_sound(6) 324 | """ 325 | def retrieve(self, directory, name=False): 326 | """ 327 | Download the original sound file (requires Oauth2 authentication). 328 | http://freesound.org/docs/api/resources_apiv2.html#download-sound-oauth2-required 329 | 330 | >>> sound.retrieve("/tmp") 331 | """ 332 | path = os.path.join(directory, name if name else self.name) 333 | uri = URIS.uri(URIS.DOWNLOAD, self.id) 334 | return FSRequest.retrieve(uri, self.client, path) 335 | 336 | def retrieve_preview(self, directory, name=False): 337 | """ 338 | Download the low quality mp3 preview. 339 | 340 | >>> sound.retrieve_preview("/tmp") 341 | """ 342 | try: 343 | path = os.path.join( 344 | directory, 345 | name if name else self.previews.preview_lq_mp3.split("/")[-1]) 346 | except AttributeError: 347 | raise FreesoundException( 348 | '-', 349 | 'Preview uris are not present in your sound object. Please add' 350 | ' them using the fields parameter in your request. See ' 351 | ' http://www.freesound.org/docs/api/resources_apiv2.html#response-sound-list.' # noqa 352 | ) 353 | return FSRequest.retrieve( 354 | self.previews.preview_lq_mp3, 355 | self.client, 356 | path 357 | ) 358 | 359 | def get_analysis(self, descriptors=None, normalized=0): 360 | """ 361 | Get content-based descriptors. 362 | http://freesound.org/docs/api/resources_apiv2.html#sound-analysis 363 | 364 | >>> a = sound.get_analysis(descriptors="lowlevel.pitch.mean") 365 | >>> print(a.lowlevel.pitch.mean) 366 | """ 367 | uri = URIS.uri(URIS.SOUND_ANALYSIS, self.id) 368 | params = {} 369 | if descriptors: 370 | params['descriptors'] = descriptors 371 | if normalized: 372 | params['normalized'] = normalized 373 | return FSRequest.request(uri, params, self.client, FreesoundObject) 374 | 375 | def get_similar(self, **params): 376 | """ 377 | Get similar sounds based on content-based descriptors. 378 | Relevant params: page, page_size, fields, descriptors, normalized, 379 | descriptors_filter 380 | http://freesound.org/docs/api/resources_apiv2.html#similar-sounds 381 | 382 | >>> s = sound.get_similar() 383 | """ 384 | uri = URIS.uri(URIS.SIMILAR_SOUNDS, self.id) 385 | return FSRequest.request(uri, params, self.client, Pager) 386 | 387 | def get_comments(self, **params): 388 | """ 389 | Get user comments. 390 | Relevant params: page, page_size 391 | http://freesound.org/docs/api/resources_apiv2.html#sound-comments 392 | 393 | >>> comments = sound.get_comments() 394 | """ 395 | uri = URIS.uri(URIS.COMMENTS, self.id) 396 | return FSRequest.request(uri, params, self.client, GenericPager) 397 | 398 | def __repr__(self): 399 | return '' % (self.id, self.name) 400 | 401 | 402 | class User(FreesoundObject): 403 | """ 404 | Freesound User resources. 405 | 406 | >>> u=c.get_user("xserra") 407 | """ 408 | def get_sounds(self, **params): 409 | """ 410 | Get user sounds. 411 | Relevant params: page, page_size, fields, descriptors, normalized 412 | http://freesound.org/docs/api/resources_apiv2.html#user-sounds 413 | 414 | >>> u.get_sounds() 415 | """ 416 | uri = URIS.uri(URIS.USER_SOUNDS, self.username) 417 | return FSRequest.request(uri, params, self.client, Pager) 418 | 419 | def get_packs(self, **params): 420 | """ 421 | Get user packs. 422 | Relevant params: page, page_size 423 | http://freesound.org/docs/api/resources_apiv2.html#user-packs 424 | 425 | >>> u.get_packs() 426 | """ 427 | uri = URIS.uri(URIS.USER_PACKS, self.username) 428 | return FSRequest.request(uri, params, self.client, GenericPager) 429 | 430 | def get_bookmark_categories(self, **params): 431 | """ 432 | Get user bookmark categories. 433 | Relevant params: page, page_size 434 | http://freesound.org/docs/api/resources_apiv2.html#user-bookmark-categories 435 | 436 | >>> u.get_bookmark_categories() 437 | """ 438 | uri = URIS.uri(URIS.USER_BOOKMARK_CATEGORIES, self.username) 439 | return FSRequest.request(uri, params, self.client, GenericPager) 440 | 441 | def get_bookmark_category_sounds(self, category_id, **params): 442 | """ 443 | Get user bookmarks. 444 | Relevant params: page, page_size, fields, descriptors, normalized 445 | http://freesound.org/docs/api/resources_apiv2.html#user-bookmark-category-sounds 446 | 447 | >>> p=u.get_bookmark_category_sounds(0) 448 | """ 449 | uri = URIS.uri( 450 | URIS.USER_BOOKMARK_CATEGORY_SOUNDS, self.username, category_id 451 | ) 452 | return FSRequest.request(uri, params, self.client, Pager) 453 | 454 | def __repr__(self): 455 | return '' % self.username 456 | 457 | 458 | class Pack(FreesoundObject): 459 | """ 460 | Freesound Pack resources. 461 | 462 | >>> p = c.get_pack(3416) 463 | """ 464 | def get_sounds(self, **params): 465 | """ 466 | Get pack sounds 467 | Relevant params: page, page_size, fields, descriptors, normalized 468 | http://freesound.org/docs/api/resources_apiv2.html#pack-sounds 469 | 470 | >>> sounds = p.get_sounds() 471 | """ 472 | uri = URIS.uri(URIS.PACK_SOUNDS, self.id) 473 | return FSRequest.request(uri, params, self.client, Pager) 474 | 475 | def __repr__(self): 476 | return '' % self.name 477 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "certifi" 3 | version = "2021.5.30" 4 | description = "Python package for providing Mozilla's CA Bundle." 5 | category = "main" 6 | optional = false 7 | python-versions = "*" 8 | 9 | [[package]] 10 | name = "charset-normalizer" 11 | version = "2.0.4" 12 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 13 | category = "main" 14 | optional = false 15 | python-versions = ">=3.5.0" 16 | 17 | [package.extras] 18 | unicode_backport = ["unicodedata2"] 19 | 20 | [[package]] 21 | name = "click" 22 | version = "8.0.1" 23 | description = "Composable command line interface toolkit" 24 | category = "main" 25 | optional = false 26 | python-versions = ">=3.6" 27 | 28 | [package.dependencies] 29 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 30 | 31 | [[package]] 32 | name = "colorama" 33 | version = "0.4.4" 34 | description = "Cross-platform colored terminal text." 35 | category = "main" 36 | optional = false 37 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 38 | 39 | [[package]] 40 | name = "flake8" 41 | version = "3.9.2" 42 | description = "the modular source code checker: pep8 pyflakes and co" 43 | category = "dev" 44 | optional = false 45 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 46 | 47 | [package.dependencies] 48 | mccabe = ">=0.6.0,<0.7.0" 49 | pycodestyle = ">=2.7.0,<2.8.0" 50 | pyflakes = ">=2.3.0,<2.4.0" 51 | 52 | [[package]] 53 | name = "idna" 54 | version = "3.2" 55 | description = "Internationalized Domain Names in Applications (IDNA)" 56 | category = "main" 57 | optional = false 58 | python-versions = ">=3.5" 59 | 60 | [[package]] 61 | name = "jaffadata" 62 | version = "0.2.1" 63 | description = "A library for working with audio datasets in ML" 64 | category = "main" 65 | optional = false 66 | python-versions = ">=3.8,<3.11" 67 | 68 | [package.dependencies] 69 | numpy = ">=1.21.3,<2.0.0" 70 | pandas = ">=1.3.4,<2.0.0" 71 | 72 | [[package]] 73 | name = "joblib" 74 | version = "1.0.1" 75 | description = "Lightweight pipelining with Python functions" 76 | category = "main" 77 | optional = false 78 | python-versions = ">=3.6" 79 | 80 | [[package]] 81 | name = "mccabe" 82 | version = "0.6.1" 83 | description = "McCabe checker, plugin for flake8" 84 | category = "dev" 85 | optional = false 86 | python-versions = "*" 87 | 88 | [[package]] 89 | name = "nltk" 90 | version = "3.6.2" 91 | description = "Natural Language Toolkit" 92 | category = "main" 93 | optional = false 94 | python-versions = ">=3.5.*" 95 | 96 | [package.dependencies] 97 | click = "*" 98 | joblib = "*" 99 | regex = "*" 100 | tqdm = "*" 101 | 102 | [package.extras] 103 | all = ["matplotlib", "twython", "scipy", "numpy", "gensim (<4.0.0)", "python-crfsuite", "pyparsing", "scikit-learn", "requests"] 104 | corenlp = ["requests"] 105 | machine_learning = ["gensim (<4.0.0)", "numpy", "python-crfsuite", "scikit-learn", "scipy"] 106 | plot = ["matplotlib"] 107 | tgrep = ["pyparsing"] 108 | twitter = ["twython"] 109 | 110 | [[package]] 111 | name = "numpy" 112 | version = "1.21.3" 113 | description = "NumPy is the fundamental package for array computing with Python." 114 | category = "main" 115 | optional = false 116 | python-versions = ">=3.7,<3.11" 117 | 118 | [[package]] 119 | name = "pandas" 120 | version = "1.3.4" 121 | description = "Powerful data structures for data analysis, time series, and statistics" 122 | category = "main" 123 | optional = false 124 | python-versions = ">=3.7.1" 125 | 126 | [package.dependencies] 127 | numpy = [ 128 | {version = ">=1.17.3", markers = "platform_machine != \"aarch64\" and platform_machine != \"arm64\" and python_version < \"3.10\""}, 129 | {version = ">=1.19.2", markers = "platform_machine == \"aarch64\" and python_version < \"3.10\""}, 130 | {version = ">=1.20.0", markers = "platform_machine == \"arm64\" and python_version < \"3.10\""}, 131 | {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, 132 | ] 133 | python-dateutil = ">=2.7.3" 134 | pytz = ">=2017.3" 135 | 136 | [package.extras] 137 | test = ["hypothesis (>=3.58)", "pytest (>=6.0)", "pytest-xdist"] 138 | 139 | [[package]] 140 | name = "pycodestyle" 141 | version = "2.7.0" 142 | description = "Python style guide checker" 143 | category = "dev" 144 | optional = false 145 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 146 | 147 | [[package]] 148 | name = "pyflakes" 149 | version = "2.3.1" 150 | description = "passive checker of Python programs" 151 | category = "dev" 152 | optional = false 153 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 154 | 155 | [[package]] 156 | name = "python-dateutil" 157 | version = "2.8.2" 158 | description = "Extensions to the standard Python datetime module" 159 | category = "main" 160 | optional = false 161 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" 162 | 163 | [package.dependencies] 164 | six = ">=1.5" 165 | 166 | [[package]] 167 | name = "pytz" 168 | version = "2021.1" 169 | description = "World timezone definitions, modern and historical" 170 | category = "main" 171 | optional = false 172 | python-versions = "*" 173 | 174 | [[package]] 175 | name = "regex" 176 | version = "2021.8.28" 177 | description = "Alternative regular expression module, to replace re." 178 | category = "main" 179 | optional = false 180 | python-versions = "*" 181 | 182 | [[package]] 183 | name = "requests" 184 | version = "2.26.0" 185 | description = "Python HTTP for Humans." 186 | category = "main" 187 | optional = false 188 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" 189 | 190 | [package.dependencies] 191 | certifi = ">=2017.4.17" 192 | charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} 193 | idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} 194 | urllib3 = ">=1.21.1,<1.27" 195 | 196 | [package.extras] 197 | socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] 198 | use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] 199 | 200 | [[package]] 201 | name = "six" 202 | version = "1.16.0" 203 | description = "Python 2 and 3 compatibility utilities" 204 | category = "main" 205 | optional = false 206 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 207 | 208 | [[package]] 209 | name = "tqdm" 210 | version = "4.62.2" 211 | description = "Fast, Extensible Progress Meter" 212 | category = "main" 213 | optional = false 214 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" 215 | 216 | [package.dependencies] 217 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 218 | 219 | [package.extras] 220 | dev = ["py-make (>=0.1.0)", "twine", "wheel"] 221 | notebook = ["ipywidgets (>=6)"] 222 | telegram = ["requests"] 223 | 224 | [[package]] 225 | name = "urllib3" 226 | version = "1.26.6" 227 | description = "HTTP library with thread-safe connection pooling, file post, and more." 228 | category = "main" 229 | optional = false 230 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 231 | 232 | [package.extras] 233 | brotli = ["brotlipy (>=0.6.0)"] 234 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] 235 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 236 | 237 | [metadata] 238 | lock-version = "1.1" 239 | python-versions = ">=3.8,<3.11" 240 | content-hash = "4432276a1aa03bb1c36fd5ccaed0aca76d769d60a21981b80d1a8fd71aa45392" 241 | 242 | [metadata.files] 243 | certifi = [ 244 | {file = "certifi-2021.5.30-py2.py3-none-any.whl", hash = "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"}, 245 | {file = "certifi-2021.5.30.tar.gz", hash = "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"}, 246 | ] 247 | charset-normalizer = [ 248 | {file = "charset-normalizer-2.0.4.tar.gz", hash = "sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"}, 249 | {file = "charset_normalizer-2.0.4-py3-none-any.whl", hash = "sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b"}, 250 | ] 251 | click = [ 252 | {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, 253 | {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"}, 254 | ] 255 | colorama = [ 256 | {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, 257 | {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, 258 | ] 259 | flake8 = [ 260 | {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, 261 | {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, 262 | ] 263 | idna = [ 264 | {file = "idna-3.2-py3-none-any.whl", hash = "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a"}, 265 | {file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"}, 266 | ] 267 | jaffadata = [ 268 | {file = "jaffadata-0.2.1-py3-none-any.whl", hash = "sha256:59bdf83bcdc4e9b6fc025c9151ea848c3155177856061815353128822b610102"}, 269 | {file = "jaffadata-0.2.1.tar.gz", hash = "sha256:753f18e8ccff4a4a13202f9d5271de429d971a0cf0972647636a429a80b9b9c3"}, 270 | ] 271 | joblib = [ 272 | {file = "joblib-1.0.1-py3-none-any.whl", hash = "sha256:feeb1ec69c4d45129954f1b7034954241eedfd6ba39b5e9e4b6883be3332d5e5"}, 273 | {file = "joblib-1.0.1.tar.gz", hash = "sha256:9c17567692206d2f3fb9ecf5e991084254fe631665c450b443761c4186a613f7"}, 274 | ] 275 | mccabe = [ 276 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, 277 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, 278 | ] 279 | nltk = [ 280 | {file = "nltk-3.6.2-py3-none-any.whl", hash = "sha256:240e23ab1ab159ef9940777d30c7c72d7e76d91877099218a7585370c11f6b9e"}, 281 | {file = "nltk-3.6.2.zip", hash = "sha256:57d556abed621ab9be225cc6d2df1edce17572efb67a3d754630c9f8381503eb"}, 282 | ] 283 | numpy = [ 284 | {file = "numpy-1.21.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:508b0b513fa1266875524ba8a9ecc27b02ad771fe1704a16314dc1a816a68737"}, 285 | {file = "numpy-1.21.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5dfe9d6a4c39b8b6edd7990091fea4f852888e41919d0e6722fe78dd421db0eb"}, 286 | {file = "numpy-1.21.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a10968963640e75cc0193e1847616ab4c718e83b6938ae74dea44953950f6b7"}, 287 | {file = "numpy-1.21.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c6249260890e05b8111ebfc391ed58b3cb4b33e63197b2ec7f776e45330721"}, 288 | {file = "numpy-1.21.3-cp310-cp310-win_amd64.whl", hash = "sha256:f8f4625536926a155b80ad2bbff44f8cc59e9f2ad14cdda7acf4c135b4dc8ff2"}, 289 | {file = "numpy-1.21.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e54af82d68ef8255535a6cdb353f55d6b8cf418a83e2be3569243787a4f4866f"}, 290 | {file = "numpy-1.21.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f41b018f126aac18583956c54544db437f25c7ee4794bcb23eb38bef8e5e192a"}, 291 | {file = "numpy-1.21.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:50cd26b0cf6664cb3b3dd161ba0a09c9c1343db064e7c69f9f8b551f5104d654"}, 292 | {file = "numpy-1.21.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cc9b512e9fb590797474f58b7f6d1f1b654b3a94f4fa8558b48ca8b3cfc97cf"}, 293 | {file = "numpy-1.21.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:88a5d6b268e9ad18f3533e184744acdaa2e913b13148160b1152300c949bbb5f"}, 294 | {file = "numpy-1.21.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3c09418a14471c7ae69ba682e2428cae5b4420a766659605566c0fa6987f6b7e"}, 295 | {file = "numpy-1.21.3-cp37-cp37m-win32.whl", hash = "sha256:90bec6a86b348b4559b6482e2b684db4a9a7eed1fa054b86115a48d58fbbf62a"}, 296 | {file = "numpy-1.21.3-cp37-cp37m-win_amd64.whl", hash = "sha256:043e83bfc274649c82a6f09836943e4a4aebe5e33656271c7dbf9621dd58b8ec"}, 297 | {file = "numpy-1.21.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:75621882d2230ab77fb6a03d4cbccd2038511491076e7964ef87306623aa5272"}, 298 | {file = "numpy-1.21.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:188031f833bbb623637e66006cf75e933e00e7231f67e2b45cf8189612bb5dc3"}, 299 | {file = "numpy-1.21.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:160ccc1bed3a8371bf0d760971f09bfe80a3e18646620e9ded0ad159d9749baa"}, 300 | {file = "numpy-1.21.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:29fb3dcd0468b7715f8ce2c0c2d9bbbaf5ae686334951343a41bd8d155c6ea27"}, 301 | {file = "numpy-1.21.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:32437f0b275c1d09d9c3add782516413e98cd7c09e6baf4715cbce781fc29912"}, 302 | {file = "numpy-1.21.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e606e6316911471c8d9b4618e082635cfe98876007556e89ce03d52ff5e8fcf0"}, 303 | {file = "numpy-1.21.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a99a6b067e5190ac6d12005a4d85aa6227c5606fa93211f86b1dafb16233e57d"}, 304 | {file = "numpy-1.21.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dde972a1e11bb7b702ed0e447953e7617723760f420decb97305e66fb4afc54f"}, 305 | {file = "numpy-1.21.3-cp38-cp38-win32.whl", hash = "sha256:fe52dbe47d9deb69b05084abd4b0df7abb39a3c51957c09f635520abd49b29dd"}, 306 | {file = "numpy-1.21.3-cp38-cp38-win_amd64.whl", hash = "sha256:75eb7cadc8da49302f5b659d40ba4f6d94d5045fbd9569c9d058e77b0514c9e4"}, 307 | {file = "numpy-1.21.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2a6ee9620061b2a722749b391c0d80a0e2ae97290f1b32e28d5a362e21941ee4"}, 308 | {file = "numpy-1.21.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c4193f70f8069550a1788bd0cd3268ab7d3a2b70583dfe3b2e7f421e9aace06"}, 309 | {file = "numpy-1.21.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28f15209fb535dd4c504a7762d3bc440779b0e37d50ed810ced209e5cea60d96"}, 310 | {file = "numpy-1.21.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c6c2d535a7beb1f8790aaa98fd089ceab2e3dd7ca48aca0af7dc60e6ef93ffe1"}, 311 | {file = "numpy-1.21.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bffa2eee3b87376cc6b31eee36d05349571c236d1de1175b804b348dc0941e3f"}, 312 | {file = "numpy-1.21.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14e7519fab2a4ed87d31f99c31a3796e4e1fe63a86ebdd1c5a1ea78ebd5896"}, 313 | {file = "numpy-1.21.3-cp39-cp39-win32.whl", hash = "sha256:dd0482f3fc547f1b1b5d6a8b8e08f63fdc250c58ce688dedd8851e6e26cff0f3"}, 314 | {file = "numpy-1.21.3-cp39-cp39-win_amd64.whl", hash = "sha256:300321e3985c968e3ae7fbda187237b225f3ffe6528395a5b7a5407f73cf093e"}, 315 | {file = "numpy-1.21.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98339aa9911853f131de11010f6dd94c8cec254d3d1f7261528c3b3e3219f139"}, 316 | {file = "numpy-1.21.3.zip", hash = "sha256:63571bb7897a584ca3249c86dd01c10bcb5fe4296e3568b2e9c1a55356b6410e"}, 317 | ] 318 | pandas = [ 319 | {file = "pandas-1.3.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9707bdc1ea9639c886b4d3be6e2a45812c1ac0c2080f94c31b71c9fa35556f9b"}, 320 | {file = "pandas-1.3.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c2f44425594ae85e119459bb5abb0748d76ef01d9c08583a667e3339e134218e"}, 321 | {file = "pandas-1.3.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:372d72a3d8a5f2dbaf566a5fa5fa7f230842ac80f29a931fb4b071502cf86b9a"}, 322 | {file = "pandas-1.3.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d99d2350adb7b6c3f7f8f0e5dfb7d34ff8dd4bc0a53e62c445b7e43e163fce63"}, 323 | {file = "pandas-1.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:4acc28364863127bca1029fb72228e6f473bb50c32e77155e80b410e2068eeac"}, 324 | {file = "pandas-1.3.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c2646458e1dce44df9f71a01dc65f7e8fa4307f29e5c0f2f92c97f47a5bf22f5"}, 325 | {file = "pandas-1.3.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5298a733e5bfbb761181fd4672c36d0c627320eb999c59c65156c6a90c7e1b4f"}, 326 | {file = "pandas-1.3.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22808afb8f96e2269dcc5b846decacb2f526dd0b47baebc63d913bf847317c8f"}, 327 | {file = "pandas-1.3.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b528e126c13816a4374e56b7b18bfe91f7a7f6576d1aadba5dee6a87a7f479ae"}, 328 | {file = "pandas-1.3.4-cp37-cp37m-win32.whl", hash = "sha256:fe48e4925455c964db914b958f6e7032d285848b7538a5e1b19aeb26ffaea3ec"}, 329 | {file = "pandas-1.3.4-cp37-cp37m-win_amd64.whl", hash = "sha256:eaca36a80acaacb8183930e2e5ad7f71539a66805d6204ea88736570b2876a7b"}, 330 | {file = "pandas-1.3.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:42493f8ae67918bf129869abea8204df899902287a7f5eaf596c8e54e0ac7ff4"}, 331 | {file = "pandas-1.3.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a388960f979665b447f0847626e40f99af8cf191bce9dc571d716433130cb3a7"}, 332 | {file = "pandas-1.3.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ba0aac1397e1d7b654fccf263a4798a9e84ef749866060d19e577e927d66e1b"}, 333 | {file = "pandas-1.3.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f567e972dce3bbc3a8076e0b675273b4a9e8576ac629149cf8286ee13c259ae5"}, 334 | {file = "pandas-1.3.4-cp38-cp38-win32.whl", hash = "sha256:c1aa4de4919358c5ef119f6377bc5964b3a7023c23e845d9db7d9016fa0c5b1c"}, 335 | {file = "pandas-1.3.4-cp38-cp38-win_amd64.whl", hash = "sha256:dd324f8ee05925ee85de0ea3f0d66e1362e8c80799eb4eb04927d32335a3e44a"}, 336 | {file = "pandas-1.3.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d47750cf07dee6b55d8423471be70d627314277976ff2edd1381f02d52dbadf9"}, 337 | {file = "pandas-1.3.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d1dc09c0013d8faa7474574d61b575f9af6257ab95c93dcf33a14fd8d2c1bab"}, 338 | {file = "pandas-1.3.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10e10a2527db79af6e830c3d5842a4d60383b162885270f8cffc15abca4ba4a9"}, 339 | {file = "pandas-1.3.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35c77609acd2e4d517da41bae0c11c70d31c87aae8dd1aabd2670906c6d2c143"}, 340 | {file = "pandas-1.3.4-cp39-cp39-win32.whl", hash = "sha256:003ba92db58b71a5f8add604a17a059f3068ef4e8c0c365b088468d0d64935fd"}, 341 | {file = "pandas-1.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:a51528192755f7429c5bcc9e80832c517340317c861318fea9cea081b57c9afd"}, 342 | {file = "pandas-1.3.4.tar.gz", hash = "sha256:a2aa18d3f0b7d538e21932f637fbfe8518d085238b429e4790a35e1e44a96ffc"}, 343 | ] 344 | pycodestyle = [ 345 | {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, 346 | {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, 347 | ] 348 | pyflakes = [ 349 | {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, 350 | {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, 351 | ] 352 | python-dateutil = [ 353 | {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, 354 | {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, 355 | ] 356 | pytz = [ 357 | {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, 358 | {file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, 359 | ] 360 | regex = [ 361 | {file = "regex-2021.8.28-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9d05ad5367c90814099000442b2125535e9d77581855b9bee8780f1b41f2b1a2"}, 362 | {file = "regex-2021.8.28-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3bf1bc02bc421047bfec3343729c4bbbea42605bcfd6d6bfe2c07ade8b12d2a"}, 363 | {file = "regex-2021.8.28-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f6a808044faae658f546dd5f525e921de9fa409de7a5570865467f03a626fc0"}, 364 | {file = "regex-2021.8.28-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a617593aeacc7a691cc4af4a4410031654f2909053bd8c8e7db837f179a630eb"}, 365 | {file = "regex-2021.8.28-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79aef6b5cd41feff359acaf98e040844613ff5298d0d19c455b3d9ae0bc8c35a"}, 366 | {file = "regex-2021.8.28-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0fc1f8f06977c2d4f5e3d3f0d4a08089be783973fc6b6e278bde01f0544ff308"}, 367 | {file = "regex-2021.8.28-cp310-cp310-win32.whl", hash = "sha256:6eebf512aa90751d5ef6a7c2ac9d60113f32e86e5687326a50d7686e309f66ed"}, 368 | {file = "regex-2021.8.28-cp310-cp310-win_amd64.whl", hash = "sha256:ac88856a8cbccfc14f1b2d0b829af354cc1743cb375e7f04251ae73b2af6adf8"}, 369 | {file = "regex-2021.8.28-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c206587c83e795d417ed3adc8453a791f6d36b67c81416676cad053b4104152c"}, 370 | {file = "regex-2021.8.28-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8690ed94481f219a7a967c118abaf71ccc440f69acd583cab721b90eeedb77c"}, 371 | {file = "regex-2021.8.28-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:328a1fad67445550b982caa2a2a850da5989fd6595e858f02d04636e7f8b0b13"}, 372 | {file = "regex-2021.8.28-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c7cb4c512d2d3b0870e00fbbac2f291d4b4bf2634d59a31176a87afe2777c6f0"}, 373 | {file = "regex-2021.8.28-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66256b6391c057305e5ae9209941ef63c33a476b73772ca967d4a2df70520ec1"}, 374 | {file = "regex-2021.8.28-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8e44769068d33e0ea6ccdf4b84d80c5afffe5207aa4d1881a629cf0ef3ec398f"}, 375 | {file = "regex-2021.8.28-cp36-cp36m-win32.whl", hash = "sha256:08d74bfaa4c7731b8dac0a992c63673a2782758f7cfad34cf9c1b9184f911354"}, 376 | {file = "regex-2021.8.28-cp36-cp36m-win_amd64.whl", hash = "sha256:abb48494d88e8a82601af905143e0de838c776c1241d92021e9256d5515b3645"}, 377 | {file = "regex-2021.8.28-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b4c220a1fe0d2c622493b0a1fd48f8f991998fb447d3cd368033a4b86cf1127a"}, 378 | {file = "regex-2021.8.28-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4a332404baa6665b54e5d283b4262f41f2103c255897084ec8f5487ce7b9e8e"}, 379 | {file = "regex-2021.8.28-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c61dcc1cf9fd165127a2853e2c31eb4fb961a4f26b394ac9fe5669c7a6592892"}, 380 | {file = "regex-2021.8.28-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ee329d0387b5b41a5dddbb6243a21cb7896587a651bebb957e2d2bb8b63c0791"}, 381 | {file = "regex-2021.8.28-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60667673ff9c249709160529ab39667d1ae9fd38634e006bec95611f632e759"}, 382 | {file = "regex-2021.8.28-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b844fb09bd9936ed158ff9df0ab601e2045b316b17aa8b931857365ea8586906"}, 383 | {file = "regex-2021.8.28-cp37-cp37m-win32.whl", hash = "sha256:4cde065ab33bcaab774d84096fae266d9301d1a2f5519d7bd58fc55274afbf7a"}, 384 | {file = "regex-2021.8.28-cp37-cp37m-win_amd64.whl", hash = "sha256:1413b5022ed6ac0d504ba425ef02549a57d0f4276de58e3ab7e82437892704fc"}, 385 | {file = "regex-2021.8.28-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ed4b50355b066796dacdd1cf538f2ce57275d001838f9b132fab80b75e8c84dd"}, 386 | {file = "regex-2021.8.28-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28fc475f560d8f67cc8767b94db4c9440210f6958495aeae70fac8faec631797"}, 387 | {file = "regex-2021.8.28-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdc178caebd0f338d57ae445ef8e9b737ddf8fbc3ea187603f65aec5b041248f"}, 388 | {file = "regex-2021.8.28-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:999ad08220467b6ad4bd3dd34e65329dd5d0df9b31e47106105e407954965256"}, 389 | {file = "regex-2021.8.28-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:808ee5834e06f57978da3e003ad9d6292de69d2bf6263662a1a8ae30788e080b"}, 390 | {file = "regex-2021.8.28-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d5111d4c843d80202e62b4fdbb4920db1dcee4f9366d6b03294f45ed7b18b42e"}, 391 | {file = "regex-2021.8.28-cp38-cp38-win32.whl", hash = "sha256:473858730ef6d6ff7f7d5f19452184cd0caa062a20047f6d6f3e135a4648865d"}, 392 | {file = "regex-2021.8.28-cp38-cp38-win_amd64.whl", hash = "sha256:31a99a4796bf5aefc8351e98507b09e1b09115574f7c9dbb9cf2111f7220d2e2"}, 393 | {file = "regex-2021.8.28-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:04f6b9749e335bb0d2f68c707f23bb1773c3fb6ecd10edf0f04df12a8920d468"}, 394 | {file = "regex-2021.8.28-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b006628fe43aa69259ec04ca258d88ed19b64791693df59c422b607b6ece8bb"}, 395 | {file = "regex-2021.8.28-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:121f4b3185feaade3f85f70294aef3f777199e9b5c0c0245c774ae884b110a2d"}, 396 | {file = "regex-2021.8.28-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a577a21de2ef8059b58f79ff76a4da81c45a75fe0bfb09bc8b7bb4293fa18983"}, 397 | {file = "regex-2021.8.28-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1743345e30917e8c574f273f51679c294effba6ad372db1967852f12c76759d8"}, 398 | {file = "regex-2021.8.28-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e1e8406b895aba6caa63d9fd1b6b1700d7e4825f78ccb1e5260551d168db38ed"}, 399 | {file = "regex-2021.8.28-cp39-cp39-win32.whl", hash = "sha256:ed283ab3a01d8b53de3a05bfdf4473ae24e43caee7dcb5584e86f3f3e5ab4374"}, 400 | {file = "regex-2021.8.28-cp39-cp39-win_amd64.whl", hash = "sha256:610b690b406653c84b7cb6091facb3033500ee81089867ee7d59e675f9ca2b73"}, 401 | {file = "regex-2021.8.28.tar.gz", hash = "sha256:f585cbbeecb35f35609edccb95efd95a3e35824cd7752b586503f7e6087303f1"}, 402 | ] 403 | requests = [ 404 | {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, 405 | {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, 406 | ] 407 | six = [ 408 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 409 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 410 | ] 411 | tqdm = [ 412 | {file = "tqdm-4.62.2-py2.py3-none-any.whl", hash = "sha256:80aead664e6c1672c4ae20dc50e1cdc5e20eeff9b14aa23ecd426375b28be588"}, 413 | {file = "tqdm-4.62.2.tar.gz", hash = "sha256:a4d6d112e507ef98513ac119ead1159d286deab17dffedd96921412c2d236ff5"}, 414 | ] 415 | urllib3 = [ 416 | {file = "urllib3-1.26.6-py2.py3-none-any.whl", hash = "sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4"}, 417 | {file = "urllib3-1.26.6.tar.gz", hash = "sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"}, 418 | ] 419 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU AFFERO GENERAL PUBLIC LICENSE 2 | Version 3, 19 November 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU Affero General Public License is a free, copyleft license for 11 | software and other kinds of works, specifically designed to ensure 12 | cooperation with the community in the case of network server software. 13 | 14 | The licenses for most software and other practical works are designed 15 | to take away your freedom to share and change the works. By contrast, 16 | our General Public Licenses are intended to guarantee your freedom to 17 | share and change all versions of a program--to make sure it remains free 18 | software for all its users. 19 | 20 | When we speak of free software, we are referring to freedom, not 21 | price. Our General Public Licenses are designed to make sure that you 22 | have the freedom to distribute copies of free software (and charge for 23 | them if you wish), that you receive source code or can get it if you 24 | want it, that you can change the software or use pieces of it in new 25 | free programs, and that you know you can do these things. 26 | 27 | Developers that use our General Public Licenses protect your rights 28 | with two steps: (1) assert copyright on the software, and (2) offer 29 | you this License which gives you legal permission to copy, distribute 30 | and/or modify the software. 31 | 32 | A secondary benefit of defending all users' freedom is that 33 | improvements made in alternate versions of the program, if they 34 | receive widespread use, become available for other developers to 35 | incorporate. Many developers of free software are heartened and 36 | encouraged by the resulting cooperation. However, in the case of 37 | software used on network servers, this result may fail to come about. 38 | The GNU General Public License permits making a modified version and 39 | letting the public access it on a server without ever releasing its 40 | source code to the public. 41 | 42 | The GNU Affero General Public License is designed specifically to 43 | ensure that, in such cases, the modified source code becomes available 44 | to the community. It requires the operator of a network server to 45 | provide the source code of the modified version running there to the 46 | users of that server. Therefore, public use of a modified version, on 47 | a publicly accessible server, gives the public access to the source 48 | code of the modified version. 49 | 50 | An older license, called the Affero General Public License and 51 | published by Affero, was designed to accomplish similar goals. This is 52 | a different license, not a version of the Affero GPL, but Affero has 53 | released a new version of the Affero GPL which permits relicensing under 54 | this license. 55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | TERMS AND CONDITIONS 60 | 61 | 0. Definitions. 62 | 63 | "This License" refers to version 3 of the GNU Affero General Public License. 64 | 65 | "Copyright" also means copyright-like laws that apply to other kinds of 66 | works, such as semiconductor masks. 67 | 68 | "The Program" refers to any copyrightable work licensed under this 69 | License. Each licensee is addressed as "you". "Licensees" and 70 | "recipients" may be individuals or organizations. 71 | 72 | To "modify" a work means to copy from or adapt all or part of the work 73 | in a fashion requiring copyright permission, other than the making of an 74 | exact copy. The resulting work is called a "modified version" of the 75 | earlier work or a work "based on" the earlier work. 76 | 77 | A "covered work" means either the unmodified Program or a work based 78 | on the Program. 79 | 80 | To "propagate" a work means to do anything with it that, without 81 | permission, would make you directly or secondarily liable for 82 | infringement under applicable copyright law, except executing it on a 83 | computer or modifying a private copy. Propagation includes copying, 84 | distribution (with or without modification), making available to the 85 | public, and in some countries other activities as well. 86 | 87 | To "convey" a work means any kind of propagation that enables other 88 | parties to make or receive copies. Mere interaction with a user through 89 | a computer network, with no transfer of a copy, is not conveying. 90 | 91 | An interactive user interface displays "Appropriate Legal Notices" 92 | to the extent that it includes a convenient and prominently visible 93 | feature that (1) displays an appropriate copyright notice, and (2) 94 | tells the user that there is no warranty for the work (except to the 95 | extent that warranties are provided), that licensees may convey the 96 | work under this License, and how to view a copy of this License. If 97 | the interface presents a list of user commands or options, such as a 98 | menu, a prominent item in the list meets this criterion. 99 | 100 | 1. Source Code. 101 | 102 | The "source code" for a work means the preferred form of the work 103 | for making modifications to it. "Object code" means any non-source 104 | form of a work. 105 | 106 | A "Standard Interface" means an interface that either is an official 107 | standard defined by a recognized standards body, or, in the case of 108 | interfaces specified for a particular programming language, one that 109 | is widely used among developers working in that language. 110 | 111 | The "System Libraries" of an executable work include anything, other 112 | than the work as a whole, that (a) is included in the normal form of 113 | packaging a Major Component, but which is not part of that Major 114 | Component, and (b) serves only to enable use of the work with that 115 | Major Component, or to implement a Standard Interface for which an 116 | implementation is available to the public in source code form. A 117 | "Major Component", in this context, means a major essential component 118 | (kernel, window system, and so on) of the specific operating system 119 | (if any) on which the executable work runs, or a compiler used to 120 | produce the work, or an object code interpreter used to run it. 121 | 122 | The "Corresponding Source" for a work in object code form means all 123 | the source code needed to generate, install, and (for an executable 124 | work) run the object code and to modify the work, including scripts to 125 | control those activities. However, it does not include the work's 126 | System Libraries, or general-purpose tools or generally available free 127 | programs which are used unmodified in performing those activities but 128 | which are not part of the work. For example, Corresponding Source 129 | includes interface definition files associated with source files for 130 | the work, and the source code for shared libraries and dynamically 131 | linked subprograms that the work is specifically designed to require, 132 | such as by intimate data communication or control flow between those 133 | subprograms and other parts of the work. 134 | 135 | The Corresponding Source need not include anything that users 136 | can regenerate automatically from other parts of the Corresponding 137 | Source. 138 | 139 | The Corresponding Source for a work in source code form is that 140 | same work. 141 | 142 | 2. Basic Permissions. 143 | 144 | All rights granted under this License are granted for the term of 145 | copyright on the Program, and are irrevocable provided the stated 146 | conditions are met. This License explicitly affirms your unlimited 147 | permission to run the unmodified Program. The output from running a 148 | covered work is covered by this License only if the output, given its 149 | content, constitutes a covered work. This License acknowledges your 150 | rights of fair use or other equivalent, as provided by copyright law. 151 | 152 | You may make, run and propagate covered works that you do not 153 | convey, without conditions so long as your license otherwise remains 154 | in force. You may convey covered works to others for the sole purpose 155 | of having them make modifications exclusively for you, or provide you 156 | with facilities for running those works, provided that you comply with 157 | the terms of this License in conveying all material for which you do 158 | not control copyright. Those thus making or running the covered works 159 | for you must do so exclusively on your behalf, under your direction 160 | and control, on terms that prohibit them from making any copies of 161 | your copyrighted material outside their relationship with you. 162 | 163 | Conveying under any other circumstances is permitted solely under 164 | the conditions stated below. Sublicensing is not allowed; section 10 165 | makes it unnecessary. 166 | 167 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 168 | 169 | No covered work shall be deemed part of an effective technological 170 | measure under any applicable law fulfilling obligations under article 171 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 172 | similar laws prohibiting or restricting circumvention of such 173 | measures. 174 | 175 | When you convey a covered work, you waive any legal power to forbid 176 | circumvention of technological measures to the extent such circumvention 177 | is effected by exercising rights under this License with respect to 178 | the covered work, and you disclaim any intention to limit operation or 179 | modification of the work as a means of enforcing, against the work's 180 | users, your or third parties' legal rights to forbid circumvention of 181 | technological measures. 182 | 183 | 4. Conveying Verbatim Copies. 184 | 185 | You may convey verbatim copies of the Program's source code as you 186 | receive it, in any medium, provided that you conspicuously and 187 | appropriately publish on each copy an appropriate copyright notice; 188 | keep intact all notices stating that this License and any 189 | non-permissive terms added in accord with section 7 apply to the code; 190 | keep intact all notices of the absence of any warranty; and give all 191 | recipients a copy of this License along with the Program. 192 | 193 | You may charge any price or no price for each copy that you convey, 194 | and you may offer support or warranty protection for a fee. 195 | 196 | 5. Conveying Modified Source Versions. 197 | 198 | You may convey a work based on the Program, or the modifications to 199 | produce it from the Program, in the form of source code under the 200 | terms of section 4, provided that you also meet all of these conditions: 201 | 202 | a) The work must carry prominent notices stating that you modified 203 | it, and giving a relevant date. 204 | 205 | b) The work must carry prominent notices stating that it is 206 | released under this License and any conditions added under section 207 | 7. This requirement modifies the requirement in section 4 to 208 | "keep intact all notices". 209 | 210 | c) You must license the entire work, as a whole, under this 211 | License to anyone who comes into possession of a copy. This 212 | License will therefore apply, along with any applicable section 7 213 | additional terms, to the whole of the work, and all its parts, 214 | regardless of how they are packaged. This License gives no 215 | permission to license the work in any other way, but it does not 216 | invalidate such permission if you have separately received it. 217 | 218 | d) If the work has interactive user interfaces, each must display 219 | Appropriate Legal Notices; however, if the Program has interactive 220 | interfaces that do not display Appropriate Legal Notices, your 221 | work need not make them do so. 222 | 223 | A compilation of a covered work with other separate and independent 224 | works, which are not by their nature extensions of the covered work, 225 | and which are not combined with it such as to form a larger program, 226 | in or on a volume of a storage or distribution medium, is called an 227 | "aggregate" if the compilation and its resulting copyright are not 228 | used to limit the access or legal rights of the compilation's users 229 | beyond what the individual works permit. Inclusion of a covered work 230 | in an aggregate does not cause this License to apply to the other 231 | parts of the aggregate. 232 | 233 | 6. Conveying Non-Source Forms. 234 | 235 | You may convey a covered work in object code form under the terms 236 | of sections 4 and 5, provided that you also convey the 237 | machine-readable Corresponding Source under the terms of this License, 238 | in one of these ways: 239 | 240 | a) Convey the object code in, or embodied in, a physical product 241 | (including a physical distribution medium), accompanied by the 242 | Corresponding Source fixed on a durable physical medium 243 | customarily used for software interchange. 244 | 245 | b) Convey the object code in, or embodied in, a physical product 246 | (including a physical distribution medium), accompanied by a 247 | written offer, valid for at least three years and valid for as 248 | long as you offer spare parts or customer support for that product 249 | model, to give anyone who possesses the object code either (1) a 250 | copy of the Corresponding Source for all the software in the 251 | product that is covered by this License, on a durable physical 252 | medium customarily used for software interchange, for a price no 253 | more than your reasonable cost of physically performing this 254 | conveying of source, or (2) access to copy the 255 | Corresponding Source from a network server at no charge. 256 | 257 | c) Convey individual copies of the object code with a copy of the 258 | written offer to provide the Corresponding Source. This 259 | alternative is allowed only occasionally and noncommercially, and 260 | only if you received the object code with such an offer, in accord 261 | with subsection 6b. 262 | 263 | d) Convey the object code by offering access from a designated 264 | place (gratis or for a charge), and offer equivalent access to the 265 | Corresponding Source in the same way through the same place at no 266 | further charge. You need not require recipients to copy the 267 | Corresponding Source along with the object code. If the place to 268 | copy the object code is a network server, the Corresponding Source 269 | may be on a different server (operated by you or a third party) 270 | that supports equivalent copying facilities, provided you maintain 271 | clear directions next to the object code saying where to find the 272 | Corresponding Source. Regardless of what server hosts the 273 | Corresponding Source, you remain obligated to ensure that it is 274 | available for as long as needed to satisfy these requirements. 275 | 276 | e) Convey the object code using peer-to-peer transmission, provided 277 | you inform other peers where the object code and Corresponding 278 | Source of the work are being offered to the general public at no 279 | charge under subsection 6d. 280 | 281 | A separable portion of the object code, whose source code is excluded 282 | from the Corresponding Source as a System Library, need not be 283 | included in conveying the object code work. 284 | 285 | A "User Product" is either (1) a "consumer product", which means any 286 | tangible personal property which is normally used for personal, family, 287 | or household purposes, or (2) anything designed or sold for incorporation 288 | into a dwelling. In determining whether a product is a consumer product, 289 | doubtful cases shall be resolved in favor of coverage. For a particular 290 | product received by a particular user, "normally used" refers to a 291 | typical or common use of that class of product, regardless of the status 292 | of the particular user or of the way in which the particular user 293 | actually uses, or expects or is expected to use, the product. A product 294 | is a consumer product regardless of whether the product has substantial 295 | commercial, industrial or non-consumer uses, unless such uses represent 296 | the only significant mode of use of the product. 297 | 298 | "Installation Information" for a User Product means any methods, 299 | procedures, authorization keys, or other information required to install 300 | and execute modified versions of a covered work in that User Product from 301 | a modified version of its Corresponding Source. The information must 302 | suffice to ensure that the continued functioning of the modified object 303 | code is in no case prevented or interfered with solely because 304 | modification has been made. 305 | 306 | If you convey an object code work under this section in, or with, or 307 | specifically for use in, a User Product, and the conveying occurs as 308 | part of a transaction in which the right of possession and use of the 309 | User Product is transferred to the recipient in perpetuity or for a 310 | fixed term (regardless of how the transaction is characterized), the 311 | Corresponding Source conveyed under this section must be accompanied 312 | by the Installation Information. But this requirement does not apply 313 | if neither you nor any third party retains the ability to install 314 | modified object code on the User Product (for example, the work has 315 | been installed in ROM). 316 | 317 | The requirement to provide Installation Information does not include a 318 | requirement to continue to provide support service, warranty, or updates 319 | for a work that has been modified or installed by the recipient, or for 320 | the User Product in which it has been modified or installed. Access to a 321 | network may be denied when the modification itself materially and 322 | adversely affects the operation of the network or violates the rules and 323 | protocols for communication across the network. 324 | 325 | Corresponding Source conveyed, and Installation Information provided, 326 | in accord with this section must be in a format that is publicly 327 | documented (and with an implementation available to the public in 328 | source code form), and must require no special password or key for 329 | unpacking, reading or copying. 330 | 331 | 7. Additional Terms. 332 | 333 | "Additional permissions" are terms that supplement the terms of this 334 | License by making exceptions from one or more of its conditions. 335 | Additional permissions that are applicable to the entire Program shall 336 | be treated as though they were included in this License, to the extent 337 | that they are valid under applicable law. If additional permissions 338 | apply only to part of the Program, that part may be used separately 339 | under those permissions, but the entire Program remains governed by 340 | this License without regard to the additional permissions. 341 | 342 | When you convey a copy of a covered work, you may at your option 343 | remove any additional permissions from that copy, or from any part of 344 | it. (Additional permissions may be written to require their own 345 | removal in certain cases when you modify the work.) You may place 346 | additional permissions on material, added by you to a covered work, 347 | for which you have or can give appropriate copyright permission. 348 | 349 | Notwithstanding any other provision of this License, for material you 350 | add to a covered work, you may (if authorized by the copyright holders of 351 | that material) supplement the terms of this License with terms: 352 | 353 | a) Disclaiming warranty or limiting liability differently from the 354 | terms of sections 15 and 16 of this License; or 355 | 356 | b) Requiring preservation of specified reasonable legal notices or 357 | author attributions in that material or in the Appropriate Legal 358 | Notices displayed by works containing it; or 359 | 360 | c) Prohibiting misrepresentation of the origin of that material, or 361 | requiring that modified versions of such material be marked in 362 | reasonable ways as different from the original version; or 363 | 364 | d) Limiting the use for publicity purposes of names of licensors or 365 | authors of the material; or 366 | 367 | e) Declining to grant rights under trademark law for use of some 368 | trade names, trademarks, or service marks; or 369 | 370 | f) Requiring indemnification of licensors and authors of that 371 | material by anyone who conveys the material (or modified versions of 372 | it) with contractual assumptions of liability to the recipient, for 373 | any liability that these contractual assumptions directly impose on 374 | those licensors and authors. 375 | 376 | All other non-permissive additional terms are considered "further 377 | restrictions" within the meaning of section 10. If the Program as you 378 | received it, or any part of it, contains a notice stating that it is 379 | governed by this License along with a term that is a further 380 | restriction, you may remove that term. If a license document contains 381 | a further restriction but permits relicensing or conveying under this 382 | License, you may add to a covered work material governed by the terms 383 | of that license document, provided that the further restriction does 384 | not survive such relicensing or conveying. 385 | 386 | If you add terms to a covered work in accord with this section, you 387 | must place, in the relevant source files, a statement of the 388 | additional terms that apply to those files, or a notice indicating 389 | where to find the applicable terms. 390 | 391 | Additional terms, permissive or non-permissive, may be stated in the 392 | form of a separately written license, or stated as exceptions; 393 | the above requirements apply either way. 394 | 395 | 8. Termination. 396 | 397 | You may not propagate or modify a covered work except as expressly 398 | provided under this License. Any attempt otherwise to propagate or 399 | modify it is void, and will automatically terminate your rights under 400 | this License (including any patent licenses granted under the third 401 | paragraph of section 11). 402 | 403 | However, if you cease all violation of this License, then your 404 | license from a particular copyright holder is reinstated (a) 405 | provisionally, unless and until the copyright holder explicitly and 406 | finally terminates your license, and (b) permanently, if the copyright 407 | holder fails to notify you of the violation by some reasonable means 408 | prior to 60 days after the cessation. 409 | 410 | Moreover, your license from a particular copyright holder is 411 | reinstated permanently if the copyright holder notifies you of the 412 | violation by some reasonable means, this is the first time you have 413 | received notice of violation of this License (for any work) from that 414 | copyright holder, and you cure the violation prior to 30 days after 415 | your receipt of the notice. 416 | 417 | Termination of your rights under this section does not terminate the 418 | licenses of parties who have received copies or rights from you under 419 | this License. If your rights have been terminated and not permanently 420 | reinstated, you do not qualify to receive new licenses for the same 421 | material under section 10. 422 | 423 | 9. Acceptance Not Required for Having Copies. 424 | 425 | You are not required to accept this License in order to receive or 426 | run a copy of the Program. Ancillary propagation of a covered work 427 | occurring solely as a consequence of using peer-to-peer transmission 428 | to receive a copy likewise does not require acceptance. However, 429 | nothing other than this License grants you permission to propagate or 430 | modify any covered work. These actions infringe copyright if you do 431 | not accept this License. Therefore, by modifying or propagating a 432 | covered work, you indicate your acceptance of this License to do so. 433 | 434 | 10. Automatic Licensing of Downstream Recipients. 435 | 436 | Each time you convey a covered work, the recipient automatically 437 | receives a license from the original licensors, to run, modify and 438 | propagate that work, subject to this License. You are not responsible 439 | for enforcing compliance by third parties with this License. 440 | 441 | An "entity transaction" is a transaction transferring control of an 442 | organization, or substantially all assets of one, or subdividing an 443 | organization, or merging organizations. If propagation of a covered 444 | work results from an entity transaction, each party to that 445 | transaction who receives a copy of the work also receives whatever 446 | licenses to the work the party's predecessor in interest had or could 447 | give under the previous paragraph, plus a right to possession of the 448 | Corresponding Source of the work from the predecessor in interest, if 449 | the predecessor has it or can get it with reasonable efforts. 450 | 451 | You may not impose any further restrictions on the exercise of the 452 | rights granted or affirmed under this License. For example, you may 453 | not impose a license fee, royalty, or other charge for exercise of 454 | rights granted under this License, and you may not initiate litigation 455 | (including a cross-claim or counterclaim in a lawsuit) alleging that 456 | any patent claim is infringed by making, using, selling, offering for 457 | sale, or importing the Program or any portion of it. 458 | 459 | 11. Patents. 460 | 461 | A "contributor" is a copyright holder who authorizes use under this 462 | License of the Program or a work on which the Program is based. The 463 | work thus licensed is called the contributor's "contributor version". 464 | 465 | A contributor's "essential patent claims" are all patent claims 466 | owned or controlled by the contributor, whether already acquired or 467 | hereafter acquired, that would be infringed by some manner, permitted 468 | by this License, of making, using, or selling its contributor version, 469 | but do not include claims that would be infringed only as a 470 | consequence of further modification of the contributor version. For 471 | purposes of this definition, "control" includes the right to grant 472 | patent sublicenses in a manner consistent with the requirements of 473 | this License. 474 | 475 | Each contributor grants you a non-exclusive, worldwide, royalty-free 476 | patent license under the contributor's essential patent claims, to 477 | make, use, sell, offer for sale, import and otherwise run, modify and 478 | propagate the contents of its contributor version. 479 | 480 | In the following three paragraphs, a "patent license" is any express 481 | agreement or commitment, however denominated, not to enforce a patent 482 | (such as an express permission to practice a patent or covenant not to 483 | sue for patent infringement). To "grant" such a patent license to a 484 | party means to make such an agreement or commitment not to enforce a 485 | patent against the party. 486 | 487 | If you convey a covered work, knowingly relying on a patent license, 488 | and the Corresponding Source of the work is not available for anyone 489 | to copy, free of charge and under the terms of this License, through a 490 | publicly available network server or other readily accessible means, 491 | then you must either (1) cause the Corresponding Source to be so 492 | available, or (2) arrange to deprive yourself of the benefit of the 493 | patent license for this particular work, or (3) arrange, in a manner 494 | consistent with the requirements of this License, to extend the patent 495 | license to downstream recipients. "Knowingly relying" means you have 496 | actual knowledge that, but for the patent license, your conveying the 497 | covered work in a country, or your recipient's use of the covered work 498 | in a country, would infringe one or more identifiable patents in that 499 | country that you have reason to believe are valid. 500 | 501 | If, pursuant to or in connection with a single transaction or 502 | arrangement, you convey, or propagate by procuring conveyance of, a 503 | covered work, and grant a patent license to some of the parties 504 | receiving the covered work authorizing them to use, propagate, modify 505 | or convey a specific copy of the covered work, then the patent license 506 | you grant is automatically extended to all recipients of the covered 507 | work and works based on it. 508 | 509 | A patent license is "discriminatory" if it does not include within 510 | the scope of its coverage, prohibits the exercise of, or is 511 | conditioned on the non-exercise of one or more of the rights that are 512 | specifically granted under this License. You may not convey a covered 513 | work if you are a party to an arrangement with a third party that is 514 | in the business of distributing software, under which you make payment 515 | to the third party based on the extent of your activity of conveying 516 | the work, and under which the third party grants, to any of the 517 | parties who would receive the covered work from you, a discriminatory 518 | patent license (a) in connection with copies of the covered work 519 | conveyed by you (or copies made from those copies), or (b) primarily 520 | for and in connection with specific products or compilations that 521 | contain the covered work, unless you entered into that arrangement, 522 | or that patent license was granted, prior to 28 March 2007. 523 | 524 | Nothing in this License shall be construed as excluding or limiting 525 | any implied license or other defenses to infringement that may 526 | otherwise be available to you under applicable patent law. 527 | 528 | 12. No Surrender of Others' Freedom. 529 | 530 | If conditions are imposed on you (whether by court order, agreement or 531 | otherwise) that contradict the conditions of this License, they do not 532 | excuse you from the conditions of this License. If you cannot convey a 533 | covered work so as to satisfy simultaneously your obligations under this 534 | License and any other pertinent obligations, then as a consequence you may 535 | not convey it at all. For example, if you agree to terms that obligate you 536 | to collect a royalty for further conveying from those to whom you convey 537 | the Program, the only way you could satisfy both those terms and this 538 | License would be to refrain entirely from conveying the Program. 539 | 540 | 13. Remote Network Interaction; Use with the GNU General Public License. 541 | 542 | Notwithstanding any other provision of this License, if you modify the 543 | Program, your modified version must prominently offer all users 544 | interacting with it remotely through a computer network (if your version 545 | supports such interaction) an opportunity to receive the Corresponding 546 | Source of your version by providing access to the Corresponding Source 547 | from a network server at no charge, through some standard or customary 548 | means of facilitating copying of software. This Corresponding Source 549 | shall include the Corresponding Source for any work covered by version 3 550 | of the GNU General Public License that is incorporated pursuant to the 551 | following paragraph. 552 | 553 | Notwithstanding any other provision of this License, you have 554 | permission to link or combine any covered work with a work licensed 555 | under version 3 of the GNU General Public License into a single 556 | combined work, and to convey the resulting work. The terms of this 557 | License will continue to apply to the part which is the covered work, 558 | but the work with which it is combined will remain governed by version 559 | 3 of the GNU General Public License. 560 | 561 | 14. Revised Versions of this License. 562 | 563 | The Free Software Foundation may publish revised and/or new versions of 564 | the GNU Affero General Public License from time to time. Such new versions 565 | will be similar in spirit to the present version, but may differ in detail to 566 | address new problems or concerns. 567 | 568 | Each version is given a distinguishing version number. If the 569 | Program specifies that a certain numbered version of the GNU Affero General 570 | Public License "or any later version" applies to it, you have the 571 | option of following the terms and conditions either of that numbered 572 | version or of any later version published by the Free Software 573 | Foundation. If the Program does not specify a version number of the 574 | GNU Affero General Public License, you may choose any version ever published 575 | by the Free Software Foundation. 576 | 577 | If the Program specifies that a proxy can decide which future 578 | versions of the GNU Affero General Public License can be used, that proxy's 579 | public statement of acceptance of a version permanently authorizes you 580 | to choose that version for the Program. 581 | 582 | Later license versions may give you additional or different 583 | permissions. However, no additional obligations are imposed on any 584 | author or copyright holder as a result of your choosing to follow a 585 | later version. 586 | 587 | 15. Disclaimer of Warranty. 588 | 589 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 590 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 591 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 592 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 593 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 594 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 595 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 596 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 597 | 598 | 16. Limitation of Liability. 599 | 600 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 601 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 602 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 603 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 604 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 605 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 606 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 607 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 608 | SUCH DAMAGES. 609 | 610 | 17. Interpretation of Sections 15 and 16. 611 | 612 | If the disclaimer of warranty and limitation of liability provided 613 | above cannot be given local legal effect according to their terms, 614 | reviewing courts shall apply local law that most closely approximates 615 | an absolute waiver of all civil liability in connection with the 616 | Program, unless a warranty or assumption of liability accompanies a 617 | copy of the Program in return for a fee. 618 | 619 | END OF TERMS AND CONDITIONS 620 | 621 | How to Apply These Terms to Your New Programs 622 | 623 | If you develop a new program, and you want it to be of the greatest 624 | possible use to the public, the best way to achieve this is to make it 625 | free software which everyone can redistribute and change under these terms. 626 | 627 | To do so, attach the following notices to the program. It is safest 628 | to attach them to the start of each source file to most effectively 629 | state the exclusion of warranty; and each file should have at least 630 | the "copyright" line and a pointer to where the full notice is found. 631 | 632 | 633 | Copyright (C) 634 | 635 | This program is free software: you can redistribute it and/or modify 636 | it under the terms of the GNU Affero General Public License as published by 637 | the Free Software Foundation, either version 3 of the License, or 638 | (at your option) any later version. 639 | 640 | This program is distributed in the hope that it will be useful, 641 | but WITHOUT ANY WARRANTY; without even the implied warranty of 642 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 643 | GNU Affero General Public License for more details. 644 | 645 | You should have received a copy of the GNU Affero General Public License 646 | along with this program. If not, see . 647 | 648 | Also add information on how to contact you by electronic and paper mail. 649 | 650 | If your software can interact with users remotely through a computer 651 | network, you should also make sure that it provides a way for users to 652 | get its source. For example, if your program is a web application, its 653 | interface could display a "Source" link that leads users to an archive 654 | of the code. There are many ways you could offer source, and different 655 | solutions will be better for different programs; see section 13 for the 656 | specific requirements. 657 | 658 | You should also get your employer (if you work as a programmer) or school, 659 | if any, to sign a "copyright disclaimer" for the program, if necessary. 660 | For more information on this, and how to apply and follow the GNU AGPL, see 661 | . 662 | --------------------------------------------------------------------------------