├── .github
├── FUNDING.yml
└── workflows
│ ├── main.yml
│ └── docker-hub.yml
├── frontend
├── __pycache__
│ └── main.cpython-38.pyc
├── components
│ ├── __pycache__
│ │ ├── auth.cpython-38.pyc
│ │ ├── base.cpython-38.pyc
│ │ ├── core.cpython-38.pyc
│ │ ├── header.cpython-38.pyc
│ │ ├── header.cpython-39.pyc
│ │ ├── ranker.cpython-38.pyc
│ │ ├── ranker.cpython-39.pyc
│ │ ├── custodian.cpython-38.pyc
│ │ ├── inspector.cpython-38.pyc
│ │ ├── inspector.cpython-39.pyc
│ │ ├── knowledge.cpython-38.pyc
│ │ ├── knowledge.cpython-39.pyc
│ │ ├── metadata.cpython-38.pyc
│ │ ├── navigator.cpython-38.pyc
│ │ ├── navigator.cpython-39.pyc
│ │ ├── viewport.cpython-38.pyc
│ │ ├── viewport.cpython-39.pyc
│ │ ├── bibliography.cpython-38.pyc
│ │ ├── microverses.cpython-38.pyc
│ │ └── microverses.cpython-39.pyc
│ ├── header.py
│ ├── ranker.py
│ ├── navigator.py
│ ├── viewport.py
│ ├── knowledge.py
│ ├── bibliography.py
│ ├── inspector.py
│ └── microverses.py
├── .streamlit
│ └── config.toml
├── Dockerfile
├── requirements.txt
└── main.py
├── .gitignore
├── requirements.txt
├── backend
├── requirements.txt
├── Dockerfile
├── bibliography.py
├── security.py
├── microverses.py
├── main.py
└── util.py
├── docker-compose.yml
├── docker-compose-build.yml
├── README.md
├── scripts
└── util.py
└── LICENSE
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | github: paulbricman
2 |
--------------------------------------------------------------------------------
/frontend/__pycache__/main.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/__pycache__/main.cpython-38.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/auth.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/auth.cpython-38.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/base.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/base.cpython-38.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/core.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/core.cpython-38.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/header.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/header.cpython-38.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/header.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/header.cpython-39.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/ranker.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/ranker.cpython-38.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/ranker.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/ranker.cpython-39.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/custodian.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/custodian.cpython-38.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/inspector.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/inspector.cpython-38.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/inspector.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/inspector.cpython-39.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/knowledge.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/knowledge.cpython-38.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/knowledge.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/knowledge.cpython-39.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/metadata.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/metadata.cpython-38.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/navigator.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/navigator.cpython-38.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/navigator.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/navigator.cpython-39.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/viewport.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/viewport.cpython-38.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/viewport.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/viewport.cpython-39.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/bibliography.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/bibliography.cpython-38.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/microverses.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/microverses.cpython-38.pyc
--------------------------------------------------------------------------------
/frontend/components/__pycache__/microverses.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paulbricman/conceptarium/HEAD/frontend/components/__pycache__/microverses.cpython-39.pyc
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__/*
2 | backend/__pycache__/*
3 | knowledge/*
4 | backend/records.json
5 | backend/microverses.json
6 | *.zip
7 | frontend/__pycache__/*
8 | frontend/components/__pycache__/*
--------------------------------------------------------------------------------
/frontend/.streamlit/config.toml:
--------------------------------------------------------------------------------
1 | [theme]
2 | base="light"
3 | primaryColor="#228b22"
4 | font="monospace"
5 |
6 | [server]
7 | enableCORS = false
8 | enableXsrfProtection = false
9 | headless = true
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | torch==1.9.0
2 | sentence_transformers==2.0.0
3 | requests==2.22.0
4 | numpy==1.17.4
5 | streamlit==1.4.0
6 | Pillow==9.0.0
7 | extra-streamlit-components==0.1.53
8 | arxiv2bib==1.0.8
--------------------------------------------------------------------------------
/frontend/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.9
2 | COPY requirements.txt app/requirements.txt
3 | WORKDIR /app
4 | RUN pip install -r requirements.txt
5 | COPY . /app
6 | EXPOSE 8501
7 | ENTRYPOINT ["streamlit","run"]
8 | CMD ["main.py"]
--------------------------------------------------------------------------------
/frontend/requirements.txt:
--------------------------------------------------------------------------------
1 | --find-links https://download.pytorch.org/whl/torch_stable.html
2 |
3 | torch==1.9.0+cpu
4 | sentence_transformers==2.0.0
5 | requests==2.22.0
6 | numpy==1.17.4
7 | streamlit==1.4.0
8 | Pillow==9.0.0
9 | extra-streamlit-components==0.1.53
10 | arxiv2bib==1.0.8
11 | click==8
--------------------------------------------------------------------------------
/backend/requirements.txt:
--------------------------------------------------------------------------------
1 | --find-links https://download.pytorch.org/whl/torch_stable.html
2 |
3 | torch==1.9.0+cpu
4 | numpy==1.17.4
5 | sentence_transformers==2.1.0
6 | fastapi==0.73.0
7 | Pillow==9.0.0
8 | # NOTE: 'secrets' is part of the Python standard library; the unrelated PyPI 'secrets' package must not be installed.
9 | slowapi==0.1.5
10 | python-multipart==0.0.5
11 | orjson==3.6.6
12 | feedgen==0.9.0
13 | ics==0.7
--------------------------------------------------------------------------------
/frontend/components/header.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 |
3 |
4 | def paint():
5 | st.markdown('### 💡 conceptarium')
6 |
7 | hide_streamlit_style = '''
8 |
12 | '''
13 | st.markdown(hide_streamlit_style, unsafe_allow_html=True)
--------------------------------------------------------------------------------
/backend/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu
2 | EXPOSE 8000
3 | WORKDIR /app
4 | RUN apt update
5 | RUN apt install -y git python3 python3-pip
6 | RUN apt install -y libsasl2-dev python-dev libldap2-dev libssl-dev
7 | COPY requirements.txt ./requirements.txt
8 | RUN pip3 install pyOpenSSL uvicorn[standard]
9 | RUN pip3 install -r requirements.txt
10 | RUN pip3 install transformers -U
11 | COPY . .
12 | CMD python3 -m uvicorn --host 0.0.0.0 main:app --reload
--------------------------------------------------------------------------------
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
1 | name: Sync to Hugging Face hub
2 | on:
3 | push:
4 | branches: [main]
5 |
6 | # to run this workflow manually from the Actions tab
7 | workflow_dispatch:
8 |
9 | jobs:
10 | sync-to-hub:
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@v2
14 | with:
15 | fetch-depth: 0
16 | - name: Push to hub
17 | env:
18 | HF_TOKEN: ${{ secrets.HF_TOKEN }}
19 | run: git push https://paulbricman:$HF_TOKEN@huggingface.co/spaces/paulbricman/conceptarium main
20 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.7"
2 |
3 | networks:
4 | local:
5 |
6 | services:
7 | frontend:
8 | image: paulbricman/conceptarium_frontend:latest
9 | ports:
10 | - 8501:8501
11 | networks:
12 | local:
13 | aliases:
14 | - frontend.docker
15 | depends_on:
16 | - backend
17 | volumes:
18 | - ./knowledge:/knowledge
19 |
20 | backend:
21 | image: paulbricman/conceptarium_backend:latest
22 | ports:
23 | - 8000:8000
24 | networks:
25 | local:
26 | aliases:
27 | - backend.docker
28 | volumes:
29 | - ./knowledge:/knowledge
--------------------------------------------------------------------------------
/frontend/components/ranker.py:
--------------------------------------------------------------------------------
import streamlit as st


def paint():
    """Render the ranking-weight sliders and store their values in session state."""
    # (state key, label, min, max, default, step, help text)
    slider_specs = [
        ('ranker_relatedness', 'relatedness', -1., 1., 0.8, 0.01,
         'Specify the weight of semantic similarity of thoughts to the query in ranking the search results.'),
        ('ranker_activation', 'activation', -1., 1., 0., 0.01,
         'Specify the weight of thought activation in ranking the search results.'),
        ('ranker_noise', 'noise', 0., 0.1, 0.01, 0.001,
         'Specify the desired amount of randomness in ranking the search results.'),
    ]
    for key, label, lower, upper, default, step, description in slider_specs:
        st.session_state[key] = st.slider(
            label, lower, upper, default, step, help=description)
11 |
--------------------------------------------------------------------------------
/docker-compose-build.yml:
--------------------------------------------------------------------------------
1 | version: "3.7"
2 |
3 | networks:
4 | local:
5 |
6 | services:
7 | frontend:
8 | build: frontend
9 | image: paulbricman/conceptarium_frontend:latest
10 | ports:
11 | - 8501:8501
12 | networks:
13 | local:
14 | aliases:
15 | - frontend.docker
16 | depends_on:
17 | - backend
18 | volumes:
19 | - ./knowledge:/knowledge
20 |
21 | backend:
22 | build: backend
23 | image: paulbricman/conceptarium_backend:latest
24 | ports:
25 | - 8000:8000
26 | networks:
27 | local:
28 | aliases:
29 | - backend.docker
30 | volumes:
31 | - ./knowledge:/knowledge
--------------------------------------------------------------------------------
/.github/workflows/docker-hub.yml:
--------------------------------------------------------------------------------
1 | name: Docker Image CI
2 |
3 | on:
4 | push:
5 | branches: [ main ]
6 | pull_request:
7 | branches: [ main ]
8 |
9 | jobs:
10 | build:
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@v2
14 | - name: Build Docker images
15 | run: docker-compose -f docker-compose-build.yml build
16 | - name: Login to Docker Hub
17 | uses: docker/login-action@v1
18 | with:
19 | username: ${{ secrets.DOCKER_USERNAME }}
20 | password: ${{ secrets.DOCKER_TOKEN }}
21 | - name: Push frontend image
22 | run: docker push paulbricman/conceptarium_frontend:latest
23 | - name: Push backend image
24 | run: docker push paulbricman/conceptarium_backend:latest
25 |
26 |
--------------------------------------------------------------------------------
/frontend/components/navigator.py:
--------------------------------------------------------------------------------
import streamlit as st
from . import knowledge


def paint():
    """Render the query controls and, on 'jump', search across microverses.

    Results and the query itself are stored in session state for the viewport
    and inspector components to pick up.
    """
    modality = st.selectbox('modality', ['text', 'image'],
                            ['text', 'image'].index(st.session_state.get('navigator_modality', 'text')), help='Select the type of query you want to search with.')

    # Renamed from `input`, which shadowed the Python builtin.
    if modality == 'text':
        query = st.text_area('input', height=100,
                             help='Enter the actual contents of your query.')
    elif modality == 'image':
        query = st.file_uploader(
            'input', help='Enter the actual contents of your query.')

    if st.button('jump', help='Click to search for thoughts based on the specified query.'):
        st.session_state['authorized_thoughts'] = knowledge.load(
            modality, query)
        st.session_state['navigator_modality'] = modality
        st.session_state['navigator_input'] = query
        st.session_state['navigator_thought'] = None
22 |
--------------------------------------------------------------------------------
/frontend/main.py:
--------------------------------------------------------------------------------
# Entry point of the Streamlit frontend: paints the header and lays out the
# configured components around the central viewport.
import importlib
import streamlit as st
from components import header, viewport
from components import microverses
import json


st.set_page_config(
    page_title='💡 conceptarium',
    layout='wide')

# NOTE(review): st.session_state['layout'] is read below, so it is presumably
# populated by microverses.paint() — confirm in components/microverses.py.
microverses.paint()
header.paint()

layout = st.session_state['layout']

# One column per viewport column, plus one on each side that has components.
col_count = layout['viewportCols'] + \
    int(len(layout['leftColumn']) > 0) + int(len(layout['rightColumn']) > 0)
cols = st.columns(col_count)

# Side-panel components are referenced by module name and imported dynamically.
for component in layout['leftColumn']:
    with cols[0]:
        m = importlib.import_module('components.' + component)
        with st.expander(component, True):
            m.paint()

for component in layout['rightColumn']:
    with cols[-1]:
        m = importlib.import_module('components.' + component)
        with st.expander(component, True):
            m.paint()

# The viewport occupies the middle columns, between the side panels (if any).
start_viewport_col = int(len(layout['leftColumn']) > 0)
end_viewport_col = start_viewport_col + layout['viewportCols']
viewport.paint(cols[start_viewport_col:end_viewport_col])
36 |
--------------------------------------------------------------------------------
/backend/bibliography.py:
--------------------------------------------------------------------------------
1 | import json
2 | from pathlib import Path
3 | from ics import Calendar, Event
4 | import requests
5 |
6 |
7 | def set_ical(ical_url, auth):
8 | if not auth['custodian']:
9 | return {
10 | 'message': 'Only the conceptarium\'s custodian can set its bibliography ical.'
11 | }
12 |
13 | records_path = Path('..') / 'knowledge' / 'records.json'
14 | records = json.load(open(records_path))
15 | records['bibliography_ical'] = ical_url
16 | records = json.dump(records, open(records_path, 'w'))
17 |
18 | return {
19 | 'message': 'Successfully set bibliography ical.'
20 | }
21 |
22 |
def get_ical_events():
    """Fetch the configured bibliography calendar and return its events.

    Each event is reduced to a dict with 'name' (quotes stripped) and
    'timestamp' (midpoint of begin/end); the list is sorted by timestamp.
    Returns [] when no iCal URL has been configured.
    """
    records_path = Path('..') / 'knowledge' / 'records.json'
    ical_url = json.load(open(records_path)).get('bibliography_ical')

    if not ical_url:
        return []

    calendar = Calendar(requests.get(ical_url).text)
    simplified = [
        {
            'name': event.name.replace('"', ''),
            'timestamp': (event.begin.timestamp + event.end.timestamp) // 2,
        }
        for event in calendar.events
    ]
    return sorted(simplified, key=lambda event: event['timestamp'])
40 |
--------------------------------------------------------------------------------
/backend/security.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import json
3 | import os
4 |
5 |
def auth(token, compact=False):
    """Authenticate a token against the conceptarium's records.

    The first token ever presented becomes the custodian token. Later calls
    return {'custodian': True} for that token; any other token is checked
    against the shared microverses, returning the ones it is authorized for.

    :param token: bearer token presented by the caller (falsy → not custodian).
    :param compact: when True, strip the bulky 'embeddings' field from the
        matched microverse before returning it.
    """
    import hmac  # local import: constant-time comparison, module imports untouched

    if not token:
        return {
            'custodian': False
        }

    knowledge_base_path = Path('..') / 'knowledge'
    records_path = knowledge_base_path / 'records.json'

    if not records_path.exists():
        # First run: bootstrap the knowledge base and crown this token custodian.
        if not knowledge_base_path.exists():
            os.mkdir(knowledge_base_path)

        records = {
            'custodian_token': token
        }
        with open(records_path, 'w') as records_file:
            json.dump(records, records_file)

        return {
            'custodian': True
        }

    with open(records_path) as records_file:
        records = json.load(records_file)

    # compare_digest avoids leaking the custodian token via timing differences.
    if hmac.compare_digest(str(records['custodian_token']), str(token)):
        return {
            'custodian': True
        }

    microverses_path = knowledge_base_path / 'microverses.json'
    if not microverses_path.exists():
        with open(microverses_path, 'w') as microverses_file:
            json.dump([], microverses_file)

    with open(microverses_path) as microverses_file:
        microverses = json.load(microverses_file)
    authorized_microverse = [
        e for e in microverses if e['token'] == token]

    if compact and len(authorized_microverse) > 0:
        # Embeddings are large and irrelevant to an auth check.
        authorized_microverse[0].pop('embeddings')

    return {
        'custodian': False,
        'authorized_microverse': authorized_microverse
    }
51 |
--------------------------------------------------------------------------------
/frontend/components/viewport.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | from . import knowledge
3 |
4 |
def get_name():
    # Display name for this component in the UI.
    return '🪟 viewport'
7 |
8 |
def paint(cols):
    # Render search results ("thoughts") round-robin across the given
    # Streamlit columns; does nothing until a search has populated session state.
    if st.session_state.get('authorized_thoughts', None) is not None:
        authorized_thoughts = st.session_state['authorized_thoughts']
        # Hide weakly related results below a fixed relatedness cutoff.
        similarity_threshold = 0.3
        authorized_thoughts = [
            e for e in authorized_thoughts if e['relatedness'] > similarity_threshold]

        for e_idx, e in enumerate(authorized_thoughts):
            with cols[e_idx % len(cols)]:
                if e['modality'] == 'text':
                    content = e['content']
                    st.success(e['content'])
                elif e['modality'] == 'image':
                    # Image thoughts store a filename; fetch via the origin
                    # conceptarium's /static endpoint with that thought's token.
                    url = e['conceptarium_url'] + '/static?filename=' + \
                        e['content']
                    content = knowledge.fetch_image(url, e['access_token'])
                    st.image(content)

                for event in e.get('events', []):
                    st.markdown('- ' + event['name'])

                # Second positional arg is the widget key; NOTE(review): two
                # thoughts with identical 'content' would collide — confirm.
                if st.button('jump (' + str(round(e['relatedness'], 2)) + ')', e['content'], help='Use this as the basis of a new search query.'):
                    # Re-run the search using this thought as the new query.
                    st.session_state['navigator_input'] = content
                    st.session_state['navigator_modality'] = e['modality']
                    st.session_state['navigator_thought'] = e
                    st.session_state['authorized_thoughts'] = knowledge.load(
                        e['modality'], content)
                    st.experimental_rerun()
37 |
--------------------------------------------------------------------------------
/frontend/components/knowledge.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | from streamlit.uploaded_file_manager import UploadedFile
3 | import requests
4 | import json
5 | import io
6 | from PIL import Image
7 |
8 |
def load(modality, query):
    """Search every connected microverse for thoughts matching the query.

    Fans the query out to each microverse's /find endpoint (GET for text,
    multipart POST for images), tags each returned thought with its origin
    (URL, access token, auth info), and returns the aggregated list.
    """
    thoughts = []

    for microverse in st.session_state.get('microverses', []):
        url = microverse['url'] + '/find'
        headers = {'Authorization': f"Bearer {microverse['token']}"}
        ranker_params = {
            'relatedness': st.session_state.get('ranker_relatedness', 0.8),
            'activation': st.session_state.get('ranker_activation', 0.),
            'noise': st.session_state.get('ranker_noise', 0.01),
            'return_embeddings': False
        }

        if modality == 'text':
            response = requests.get(
                url, params={'query': query, **ranker_params}, headers=headers)
        elif modality == 'image':
            if isinstance(query, UploadedFile):
                query = Image.open(io.BytesIO(query.getvalue()))

            # Re-encode as JPEG bytes before uploading.
            img_io = io.BytesIO()
            query.convert('RGB').save(img_io, 'jpeg')
            img_io.seek(0)
            query = img_io.read()

            response = requests.post(url, data=ranker_params, files={
                'query': query}, headers=headers)

        content = json.loads(response.content)

        # Error responses are dicts without 'authorized_thoughts'; the original
        # indexed that key unconditionally and crashed on such responses.
        if isinstance(content, dict) and 'authorized_thoughts' in content:
            new_thoughts = content['authorized_thoughts']
            for thought in new_thoughts:
                thought['conceptarium_url'] = microverse['url']
                thought['access_token'] = microverse['token']
                thought['auth'] = microverse['auth']
            thoughts += new_thoughts

    return thoughts
52 |
53 |
@ st.cache()
def fetch_image(url, token):
    """Download an image from a microverse's static endpoint.

    Cached by Streamlit so repeated repaints do not refetch. A timeout is set
    so a stalled backend cannot hang the UI thread indefinitely.
    """
    response = requests.get(
        url, headers={'Authorization': f"Bearer {token}"}, timeout=10)
    return Image.open(io.BytesIO(response.content))
59 |
--------------------------------------------------------------------------------
/frontend/components/bibliography.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | import requests
3 | from arxiv2bib import Cli
4 |
5 |
def paint():
    # Build a deduplicated bibliography from the calendar events attached to
    # the current search results (or to the single inspected thought).
    if st.session_state.get('authorized_thoughts') is not None:
        thought = st.session_state.get('navigator_thought')

        if thought:
            events = [thought.get('events', [])]
        else:
            # Only consider sufficiently related results.
            events = [e.get('events', [])
                      for e in st.session_state['authorized_thoughts'] if e['relatedness'] > 0.5]

        # Flatten, then deduplicate by event name (last occurrence wins).
        events = [f for e in events for f in e]
        events = list({e['name']: e for e in events}.values())

        # Extract paper identifiers from event names that mention DOI or arXiv
        # (e.g. .../abs/<id> or .../pdf/<id>.pdf). NOTE(review): the full event
        # name is stored as the 'doi' — this works only if the name is exactly
        # the DOI string; confirm against how events are named upstream.
        for e_idx, e in enumerate(events):
            if 'doi' in e['name']:
                events[e_idx]['doi'] = e['name']
            elif 'arxiv' in e['name']:
                if 'abs' in e['name']:
                    events[e_idx]['arxiv_id'] = e['name'].split(
                        'abs')[-1].replace('/', '')
                elif 'pdf' in e['name']:
                    events[e_idx]['arxiv_id'] = e['name'].split(
                        '/pdf/')[-1].replace('/', '').replace('.pdf', '')

        # List recognized papers and remember whether there is at least one.
        min_one_paper = False
        for e in events:
            if 'doi' in e.keys() or 'arxiv_id' in e.keys():
                min_one_paper = True
                st.markdown('- ' + e['name'])

        if min_one_paper:
            st.markdown('')
            if st.button('show bibtex'):
                # Resolve each identifier to BibTeX and concatenate the entries.
                compiled_bibtex = ''
                for e in events:
                    if 'doi' in e.keys():
                        bibtex = doi_to_bibtex(e['doi'])
                        if bibtex:
                            compiled_bibtex += bibtex + '\n\n'
                    elif 'arxiv_id' in e.keys():
                        bibtex = arxiv_to_bibtex(
                            e['arxiv_id'])
                        if bibtex:
                            compiled_bibtex += bibtex + '\n\n'

                st.code(compiled_bibtex)
        elif events != []:
            st.markdown('')
54 |
55 |
def doi_to_bibtex(doi):
    """Resolve a DOI to a BibTeX entry via content negotiation on doi.org.

    Returns the BibTeX string, or None when resolution fails. Uses HTTPS and
    the canonical doi.org host (dx.doi.org is a deprecated plain-HTTP alias),
    and a timeout so a slow resolver cannot hang the UI.
    """
    try:
        response = requests.get('https://doi.org/' + doi, headers={
            'Accept': 'application/x-bibtex'
        }, timeout=10)
    except requests.RequestException:
        return None
    if response.status_code == 200:
        return response.content.decode('utf-8')
62 |
63 |
def arxiv_to_bibtex(arxiv_id):
    """Fetch the BibTeX entry for an arXiv identifier via arxiv2bib.

    Returns None when the lookup yields nothing; the original indexed
    cli.output[0] unconditionally and raised IndexError on empty results
    (the caller already treats a falsy return as 'no entry').
    """
    cli = Cli([arxiv_id])
    cli.run()
    if cli.output:
        return cli.output[0]
    return None
68 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: conceptarium
3 | emoji: 💡
4 | colorFrom: green
5 | colorTo: gray
6 | sdk: streamlit
7 | app_file: frontend/main.py
8 | pinned: false
9 | ---
10 |
11 | | screenshot 1 | screenshot 2 |
12 | | --------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- |
13 | |  |  |
14 |
15 | # 💡 Conceptarium
16 |
17 | The conceptarium is an **experimental** personal knowledge base designed to weave AI capabilities into knowledge work. Its main features include:
18 |
19 | - powerful multi-modal search across ideas
20 | - sharing [microverses of knowledge](https://paulbricman.com/reflections/sharing-searches) with peers
21 | - ranking items by Anki-like activation, so as to promote serendipity
22 |
23 | ## Installation
24 |
25 | #### Docker
26 |
27 | After installing `docker` and `docker-compose`, run:
28 |
29 | ```
30 | # install with:
31 | curl -fsS https://raw.githubusercontent.com/paulbricman/conceptarium/main/docker-compose.yml -o docker-compose.yml
32 | mkdir knowledge
33 | docker-compose up -d
34 |
35 | # stop with:
36 | docker-compose stop
37 |
38 | # update with:
39 | docker-compose stop
40 | docker-compose rm -f
41 | docker-compose pull
42 | docker-compose up -d
43 | ```
44 |
45 | Note that you'll have to wait a bit initially for the models to be downloaded in the docker container. Use `docker logs <container>` or watch the process's memory usage for feedback on that. Alternatively, just keep trying to use it via the API or UI until it works (see usage).
46 |
47 | #### Source
48 |
49 | After pulling this repo run:
50 |
51 | ```
52 | python3 -m pip install -r frontend/requirements.txt
53 | python3 -m pip install -r backend/requirements.txt
54 | streamlit run frontend/main.py
55 |
56 | # in a separate session:
57 | cd backend
58 | python3 -m uvicorn main:app --reload
59 |
60 | # update by pulling from repo again
61 | ```
62 |
63 | Missing dependencies? Please have a look at `frontend/Dockerfile` and `backend/Dockerfile`. ARM architecture (e.g. Raspberry Pi)? Remove the `torch` entries from `requirements.txt`, and install a [custom-built version](https://github.com/ljk53/pytorch-rpi).
64 |
65 | ## Usage
66 |
67 | The web app should then be available at `localhost:8501`, while the API at `localhost:8000` (with docs at `localhost:8000/docs`). The backend component takes a few minutes to get the ML models at first.
68 |
69 | To access your local instance, enter the conceptarium URL (i.e. `localhost:8000` if you ran from source, `backend.docker:8000` if you used docker), and your desired token. Remember your token, as you'll have to use it to authenticate in future sessions.
70 |
--------------------------------------------------------------------------------
/backend/microverses.py:
--------------------------------------------------------------------------------
1 | import json
2 | from pathlib import Path
3 | from util import encode, get_content
4 | import secrets
5 | import time
6 | from PIL import Image
7 | import io
8 | import os
9 |
10 |
def create_microverse(modality, query, auth_result, text_encoder, text_image_encoder):
    """Persist a shared 'microverse' (a saved query) and mint an access token.

    Text queries are written to a .md file, image queries to a .jpg; the
    record (filename, modality, timestamp, token, embeddings) is appended to
    microverses.json. Custodian only.
    """
    knowledge_base_path = Path('..') / 'knowledge'
    microverses_path = knowledge_base_path / 'microverses.json'

    if auth_result['custodian'] == False:
        return {
            'message': 'Only the conceptarium\'s custodian can create microverses in it.'
        }

    if not microverses_path.exists():
        json.dump([], open(microverses_path, 'w'))

    query_embedding = encode(
        modality, query, text_encoder, text_image_encoder)
    token = secrets.token_urlsafe(16)

    # Persist the query contents; the record layout is identical for both
    # modalities, so only the file-writing step differs (the original
    # duplicated the whole append/dump sequence per branch).
    filename = None
    if modality == 'text':
        filename = secrets.token_urlsafe(16) + '.md'
        open(knowledge_base_path / filename, 'w').write(query)
    elif modality == 'image':
        filename = secrets.token_urlsafe(16) + '.jpg'
        image = Image.open(io.BytesIO(query)).convert('RGB')
        image.save(knowledge_base_path / filename, quality=50)

    if filename is not None:
        # Unknown modalities record nothing (matching the original behavior).
        microverses = json.load(open(microverses_path))
        microverses += [{
            "filename": filename,
            "modality": modality,
            "timestamp": time.time(),
            "token": token,
            "embeddings": query_embedding
        }]
        json.dump(microverses, open(microverses_path, 'w'))

    return {
        "token": token
    }
58 |
59 |
def remove_microverse(auth_result, microverse_token):
    """Delete the microverse identified by its token, plus its backing file.

    Custodian only. Fixes two defects in the original: the removal target was
    searched in the ALREADY-FILTERED list (always empty, so the backing file
    was never deleted), and the error message wrongly said 'create'.
    """
    knowledge_base_path = Path('..') / 'knowledge'
    microverses_path = knowledge_base_path / 'microverses.json'

    if auth_result['custodian'] == False:
        return {
            'message': 'Only the conceptarium\'s custodian can remove microverses from it.'
        }

    microverses = json.load(open(microverses_path))
    # Locate the target BEFORE filtering it out of the list.
    removal_target = [
        e for e in microverses if e['token'] == microverse_token]
    microverses = [
        e for e in microverses if e['token'] != microverse_token]
    json.dump(microverses, open(microverses_path, 'w'))
    if len(removal_target) > 0:
        os.remove(knowledge_base_path / removal_target[0]['filename'])
77 |
78 |
def list_microverses(auth_result):
    """Return every shared microverse with its content attached.

    Custodian-only; other callers receive a refusal message dict. An empty
    microverses.json is created on first use.
    """
    microverses_path = Path('..') / 'knowledge' / 'microverses.json'

    if auth_result['custodian'] == False:
        return {
            'message': 'Only the conceptarium\'s custodian can list all microverses in it.'
        }

    if not microverses_path.exists():
        json.dump([], open(microverses_path, 'w'))

    entries = json.load(open(microverses_path))
    for entry in entries:
        entry['content'] = get_content(entry, True)
    return entries
96 |
--------------------------------------------------------------------------------
/frontend/components/inspector.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | from streamlit.uploaded_file_manager import UploadedFile
3 | from datetime import datetime
4 | import numpy as np
5 | import time
6 | import requests
7 | import json
8 | from PIL import Image
9 | import io
10 | from . import knowledge
11 |
12 |
def paint():
    """Render the inspector panel.

    Shows details for the currently selected thought (a past entry), or —
    when the navigator input is a custom query — a preview plus custodian-only
    save/share actions against the custodian's conceptarium.
    """
    if st.session_state.get('authorized_thoughts') is not None:
        thought = st.session_state.get('navigator_thought')
        if thought:
            # Past entry: content, metadata, and custodian-only removal.
            st.markdown('**type**: past entry')
            if thought['modality'] == 'text':
                st.success(thought['content'])
            elif thought['modality'] == 'image':
                url = thought['conceptarium_url'] + '/static?filename=' + \
                    thought['content']
                image = knowledge.fetch_image(url, thought['access_token'])
                st.image(image)

            st.markdown('**modality**: ' + thought['modality'])
            st.markdown('**timestamp**: ' + datetime.utcfromtimestamp(
                int(thought['timestamp'])).strftime("%d.%m.%Y"))
            st.markdown('**interest**: ' + str(round(thought['interest'], 2)))
            # Activation mirrors the backend's recency-decayed interest score.
            st.markdown('**activation**: ' + str(round(np.log(thought['interest'] / (1 - 0.9)) -
                                                       0.9 * np.log((time.time() - thought['timestamp']) / (3600 * 24) + 0.1), 2)))
            st.markdown('**custodian**: ' + str(thought['auth']['custodian']))
            st.markdown('**conceptarium**: ' + thought['conceptarium_url'])

            if thought['auth']['custodian']:
                if st.button('remove', help='Delete this thought from your conceptarium. Only available for custodians.'):
                    requests.get(thought['conceptarium_url'] + '/remove', params={
                        'filename': thought['filename']
                    }, headers={'Authorization': f"Bearer {thought['access_token']}"})
                    st.info(
                        'The thought has been removed, which should be reflected in future navigator jumps.')
        else:
            # Custom query: preview plus custodian save/share actions.
            st.markdown('**type**: custom query')
            if st.session_state['navigator_modality'] == 'text':
                st.success(st.session_state['navigator_input'])
            elif st.session_state['navigator_modality'] == 'image':
                st.image(st.session_state['navigator_input'])

            custodian_microverse = [
                e for e in st.session_state['microverses'] if e['auth']['custodian'] == True]
            if len(custodian_microverse) > 0:
                conceptarium_url = custodian_microverse[0]['url']
                auth_headers = {
                    'Authorization': f"Bearer {custodian_microverse[0]['token']}"}

                if st.button('save', help='Persist this content as a new thought in your conceptarium. Only available for custodians.'):
                    if st.session_state['navigator_modality'] == 'text':
                        requests.get(conceptarium_url + '/save', params={
                            'query': st.session_state['navigator_input']
                        }, headers=auth_headers)
                    elif st.session_state['navigator_modality'] == 'image':
                        requests.post(conceptarium_url + '/save',
                                      files={'query': _query_as_jpeg_bytes(
                                          st.session_state['navigator_input'])},
                                      headers=auth_headers)
                    st.info(
                        'The thought has been saved, which should be reflected in future navigator jumps.')
                if st.button('share microverse', help='Grant access to the past and future search results of this query through a microverse token.'):
                    if st.session_state['navigator_modality'] == 'text':
                        response = requests.get(conceptarium_url + '/microverse/create', params={
                            'query': st.session_state['navigator_input']
                        }, headers=auth_headers)
                    elif st.session_state['navigator_modality'] == 'image':
                        response = requests.post(conceptarium_url + '/microverse/create',
                                                 files={'query': _query_as_jpeg_bytes(
                                                     st.session_state['navigator_input'])},
                                                 headers=auth_headers)

                    response = json.loads(response.content)['token']
                    st.info(response)
                    st.experimental_rerun()


def _query_as_jpeg_bytes(query):
    # Normalize an image query (UploadedFile or PIL.Image) to raw JPEG bytes
    # for a multipart upload. This logic was previously duplicated verbatim
    # in the 'save' and 'share microverse' branches.
    if isinstance(query, UploadedFile):
        query = Image.open(io.BytesIO(query.getvalue()))

    img_io = io.BytesIO()
    rgb_image = query.convert('RGB')
    rgb_image.save(img_io, 'jpeg')
    img_io.seek(0)
    return img_io.read()
94 |
--------------------------------------------------------------------------------
/backend/main.py:
--------------------------------------------------------------------------------
1 | from security import auth
2 | from util import find, rank, save, get_authorized_thoughts, remove, dump, compile_rss
3 | from bibliography import set_ical
4 | from microverses import create_microverse, remove_microverse, list_microverses
5 |
6 | from sentence_transformers import SentenceTransformer
7 | from fastapi import Depends, FastAPI, Request, Response
8 | from fastapi.datastructures import UploadFile
9 | from fastapi import FastAPI, File, Form
10 | from fastapi.responses import FileResponse, ORJSONResponse
11 | from fastapi.security import HTTPBearer, HTTPBasicCredentials
12 | from pathlib import Path
13 | from slowapi import Limiter, _rate_limit_exceeded_handler
14 | from slowapi.util import get_remote_address
15 | from slowapi.middleware import SlowAPIMiddleware
16 | from slowapi.errors import RateLimitExceeded
17 |
18 |
# Bearer-token scheme; each handler validates the token via `auth`.
security = HTTPBearer()
# IP-keyed rate limiting applied app-wide through SlowAPI middleware.
limiter = Limiter(key_func=get_remote_address, default_limits=['30/minute'])
app = FastAPI()
app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
app.add_middleware(SlowAPIMiddleware)

# Embedding models loaded once at startup: CLIP for cross-modal (text<->image)
# matching, plus a text-only retrieval model for text-text similarity.
text_image_encoder = SentenceTransformer('clip-ViT-B-32')
text_encoder = SentenceTransformer(
    'sentence-transformers/multi-qa-mpnet-base-cos-v1')
29 |
30 |
@app.get('/find', response_class=ORJSONResponse)
async def find_text_handler(
    query: str,
    relatedness: float = 0.8,
    activation: float = 0.,
    noise: float = 0.1,
    return_embeddings: bool = False,
    silent: bool = False,
    request: Request = None,
    authorization: HTTPBasicCredentials = Depends(security)
):
    """Semantic search over authorized thoughts with a text query."""
    auth_result = auth(authorization.credentials)
    return find('text', query, relatedness, activation, noise,
                return_embeddings, auth_result, text_encoder,
                text_image_encoder, silent)
54 |
55 |
@app.post('/find', response_class=ORJSONResponse)
async def find_image_handler(
    query: UploadFile = File(...),
    relatedness: float = Form(0.8),
    activation: float = Form(0.),
    noise: float = Form(0.1),
    return_embeddings: bool = Form(False),
    silent: bool = Form(False),
    request: Request = None,
    authorization: HTTPBasicCredentials = Depends(security)
):
    """Semantic search over authorized thoughts with an uploaded image query."""
    payload = await query.read()
    auth_result = auth(authorization.credentials)
    return find('image', payload, relatedness, activation, noise,
                return_embeddings, auth_result, text_encoder,
                text_image_encoder, silent)
80 |
81 |
@app.get('/rss')
async def rss_handler(
    authorization: str,
    request: Request = None
):
    """Expose authorized thoughts as an RSS feed.

    The token travels as a query parameter (not a header) so feed readers
    can subscribe with a plain URL.
    """
    auth_result = auth(authorization)
    items = find('text', '', 0, 0, 0, False, auth_result,
                 text_encoder, text_image_encoder, False)
    return Response(content=compile_rss(items), media_type="application/xml")
100 |
101 |
@app.get('/save')
async def save_text_handler(query: str, request: Request, authorization: HTTPBasicCredentials = Depends(security)):
    """Persist a text thought (custodian only)."""
    auth_result = auth(authorization.credentials)
    return save('text', query, auth_result, text_encoder, text_image_encoder)
106 |
107 |
@app.post('/save')
async def save_image_handler(query: UploadFile = File(...), request: Request = None, authorization: HTTPBasicCredentials = Depends(security)):
    """Persist an uploaded image thought (custodian only)."""
    payload = await query.read()
    auth_result = auth(authorization.credentials)
    return save('image', payload, auth_result, text_encoder, text_image_encoder)
114 |
115 |
@app.get('/remove')
async def remove_handler(filename: str, request: Request, authorization: HTTPBasicCredentials = Depends(security)):
    """Delete a thought by filename (custodian only)."""
    auth_result = auth(authorization.credentials)
    return remove(auth_result, filename)
119 |
120 |
@app.get('/dump')
async def dump_handler(request: Request, authorization: HTTPBasicCredentials = Depends(security)):
    """Download the full knowledge base as a zip archive (custodian only).

    Renamed from `save_text_handler`, which shadowed the GET /save handler's
    function name at module level.
    """
    return dump(auth(authorization.credentials))
124 |
125 |
@app.get('/static')
@limiter.limit("200/minute")
async def static_handler(filename: str, request: Request, authorization: HTTPBasicCredentials = Depends(security)):
    # Serve a stored thought file only when the caller's token authorizes that
    # exact thought — the filename must match an authorized metadata entry,
    # which also blocks path-traversal style requests.
    knowledge_base_path = Path('..') / 'knowledge'
    thoughts = get_authorized_thoughts(auth(authorization.credentials))
    if filename in [e['filename'] for e in thoughts]:
        return FileResponse(knowledge_base_path / filename)
    # NOTE(review): unauthorized/unknown filenames fall through and yield a
    # 200 response with a null body rather than a 404 — confirm clients
    # depend on this before changing it.
133 |
134 |
@app.get('/microverse/create')
async def microverse_create_handler(query: str, request: Request, authorization: HTTPBasicCredentials = Depends(security)):
    """Share the results of a text query through a new microverse token."""
    auth_result = auth(authorization.credentials)
    return create_microverse('text', query, auth_result, text_encoder, text_image_encoder)
138 |
139 |
@app.post('/microverse/create')
async def microverse_create_image_handler(query: UploadFile = File(...), request: Request = None, authorization: HTTPBasicCredentials = Depends(security)):
    """Share the results of an image query through a new microverse token.

    Renamed from `microverse_create_handler`, which shadowed the GET
    handler's function name at module level.
    """
    payload = await query.read()
    return create_microverse('image', payload, auth(authorization.credentials), text_encoder, text_image_encoder)
144 |
145 |
@app.get('/microverse/remove')
async def microverse_remove_handler(microverse: str, request: Request, authorization: HTTPBasicCredentials = Depends(security)):
    """Revoke a previously shared microverse token (custodian only)."""
    auth_result = auth(authorization.credentials)
    return remove_microverse(auth_result, microverse)
149 |
150 |
@app.get('/microverse/list')
async def microverse_list_handler(request: Request, authorization: HTTPBasicCredentials = Depends(security)):
    """List all shared microverses with their contents (custodian only)."""
    auth_result = auth(authorization.credentials)
    return list_microverses(auth_result)
154 |
155 |
@app.get('/custodian/check')
async def check_custodian(request: Request, authorization: HTTPBasicCredentials = Depends(security)):
    """Report whether the presented token is the custodian token."""
    token = authorization.credentials
    return auth(token, True)
159 |
160 |
@app.get('/bibliography/set')
async def set_bibliography_ical(ical_url: str, request: Request, authorization: HTTPBasicCredentials = Depends(security)):
    """Attach an iCal URL used to annotate thoughts with nearby calendar events."""
    auth_result = auth(authorization.credentials)
    return set_ical(ical_url, auth_result)
164 |
--------------------------------------------------------------------------------
/frontend/components/microverses.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import streamlit as st
3 | import requests
4 | import json
5 | import extra_streamlit_components as stx
6 | from time import sleep
7 | import os
8 | from pathlib import Path
9 |
10 |
def paint():
    """Render the sidebar: layout controls, connected microverses, the
    connect-to-new-microverse form, and (for custodians) the list of
    microverses they have shared with others. User state is persisted in a
    30-day browser cookie via extra_streamlit_components.CookieManager."""
    # Short delay so the CookieManager component has time to mount before the
    # first read.
    sleep(0.15)
    cookie_manager = stx.CookieManager()
    user_state = cookie_manager.get('user_state')

    if not user_state:
        # Cookies can arrive late on first render; retry once before
        # assuming a fresh user with empty state.
        with st.spinner('retrieving user data...'):
            sleep(2.)
            user_state = cookie_manager.get('user_state')
            if not user_state:
                user_state = {}

    # Backfill defaults and mirror cookie state into the Streamlit session so
    # sibling components can read it.
    user_state['layout'] = user_state.get('layout', default_layout())
    user_state['microverses'] = user_state.get('microverses', [])
    st.session_state['microverses'] = user_state['microverses']
    st.session_state['layout'] = user_state['layout']

    with st.sidebar:
        with st.expander('💻 layout', expanded=True):
            user_state['layout']['viewportCols'] = int(st.number_input(
                'viewport cols', 1, 5, user_state['layout'].get('viewportCols', 3), 1))

            # Fixed-chrome components the user cannot place into a column.
            faux_components = ['header', 'knowledge',
                               'microverses', 'viewport']

            # Resolve the components directory whether the app runs from the
            # frontend folder or from the repository root.
            components_path = Path('components')
            if not components_path.exists():
                components_path = Path('frontend') / 'components'

            components = [e.split('.')[0] for e in os.listdir(components_path) if e.endswith(
                '.py') and e.split('.')[0] not in faux_components]
            user_state['layout']['leftColumn'] = st.multiselect(
                'left column', components, user_state['layout'].get('leftColumn', ['navigator', 'ranker']))
            user_state['layout']['rightColumn'] = st.multiselect(
                'right column', components, user_state['layout'].get('rightColumn', ['inspector']))
            st.session_state['layout'] = user_state['layout']
            cookie_manager.set('user_state', user_state, expires_at=datetime.datetime.now(
            ) + datetime.timedelta(days=30))

        if len(user_state['microverses']) > 0:
            with st.expander('🔌 connected microverses', expanded=True):
                for e_idx, e in enumerate(user_state['microverses']):
                    # Custodian connections are marked with a key icon.
                    if e['auth']['custodian']:
                        display_text = '🗝️ ' + e['url']
                    else:
                        display_text = e['url']
                    st.code(display_text)

                    # NOTE(review): both buttons below reuse `key=e` (a dict)
                    # for the same widget type — verify this doesn't collide
                    # under newer Streamlit versions, which require unique
                    # user-provided keys.
                    if e['auth']['custodian']:
                        if st.button('create archive', key=e):
                            archive = requests.get(e['url'] + '/dump',
                                                   headers={'Authorization': f"Bearer {e['token']}"}).content
                            st.download_button(
                                'download archive', data=archive, file_name='knowledge.zip')

                    if st.button('remove', key=e, help='Remove this source of thoughts.'):
                        # Rewrite the cookie without this connection.
                        user_state['microverses'].remove(e)
                        cookie_manager.delete('user_state')
                        cookie_manager.set(
                            'user_state', user_state, expires_at=datetime.datetime.now() + datetime.timedelta(days=30), key='remove')

        with st.expander('🆕 connect to new microverse', expanded=True):
            url = st.text_input('conceptarium url',
                                key=user_state['microverses'], help='Specify the base URL of the conceptarium you wish to access thoughts from. If you\'re trying to connect to your local instance, enter `localhost`.')
            token = st.text_input(
                'access token', key=user_state['microverses'], help='Specify the token to be used in authorizing access to this conceptarium. If you\'re the custodian of this conceptarium, enter your custodian token. If this is someone else\'s instance, please use the microverse token they provided you with.', type='password')

            if st.button('add', help='Add this conceptarium as a source of thoughts to be explored.'):
                # Normalize the URL: default to http:// and strip a trailing slash.
                if '://' not in url:
                    url = 'http://' + url
                if url[-1] == '/':
                    url = url[:-1]

                # Probe the instance to learn whether this token is the
                # custodian token or a shared microverse token.
                custodian_check = json.loads(
                    requests.get(url + '/custodian/check',
                                 headers={'Authorization': f"Bearer {token}"}).content)
                # Skip duplicates: one connection per conceptarium URL.
                if len([e for e in user_state['microverses'] if e['url'] == url]) == 0:
                    user_state['microverses'] += [{
                        'url': url,
                        'token': token,
                        'auth': custodian_check
                    }]
                    cookie_manager.set(
                        'user_state', user_state, expires_at=datetime.datetime.now() + datetime.timedelta(days=30), key='add')
                    st.session_state['microverses'] = user_state['microverses']

        # Custodians additionally see the microverses they have shared out of
        # their own conceptarium, with the option to revoke each one.
        custodian_microverse = [
            e for e in user_state['microverses'] if e['auth']['custodian'] == True]
        if len(custodian_microverse) > 0:
            shared_microverses = json.loads(requests.get(custodian_microverse[0]['url'] + '/microverse/list',
                                                         headers={'Authorization': f"Bearer {custodian_microverse[0]['token']}"}).content)
            if len(shared_microverses) > 0:
                with st.expander('🗝️ shared microverses', expanded=True):
                    for e_idx, e in enumerate(shared_microverses):
                        if isinstance(e, dict):
                            st.code(e['token'])
                            if e['modality'] == 'text':
                                st.success(e['content'])

                            if st.button('disable', help='Disable the access to this microverse.', key=e):
                                requests.get(custodian_microverse[0]['url'] + '/microverse/remove', params={
                                    'microverse': e['token']
                                }, headers={'Authorization': f"Bearer {custodian_microverse[0]['token']}"})
                                st.info(
                                    'The microverse has been removed.')
117 |
def default_layout():
    """Initial layout used when no saved user preferences exist."""
    layout = {}
    layout['viewportCols'] = 3
    layout['leftColumn'] = ['navigator', 'ranker']
    layout['rightColumn'] = ['inspector']
    return layout
124 |
--------------------------------------------------------------------------------
/scripts/util.py:
--------------------------------------------------------------------------------
1 | from sentence_transformers import SentenceTransformer, util
2 | from PIL import Image
3 | import io
4 | import pickle
5 | import os
6 | import time
7 | import numpy as np
8 | import pprint
9 |
# Pickled list of Thought objects, relative to the script's working directory.
metadata_path = 'conceptarium/metadata.pickle'
11 |
12 |
def init():
    """Create the conceptarium directory and an empty metadata store if absent."""
    if not os.path.exists(metadata_path):
        # makedirs(..., exist_ok=True) also covers the case where the
        # directory survives but the pickle was deleted — os.mkdir would
        # raise FileExistsError there.
        os.makedirs(os.path.dirname(metadata_path), exist_ok=True)
        pickle.dump(list(), open(metadata_path, 'wb'))
17 |
18 |
def save(thought):
    """Append a new Thought, reinforcing the interest of related ones.

    Every stored thought's interest grows by its similarity score against
    the new thought; the new thought itself is only appended when no stored
    file has byte-identical contents (content-level deduplication).
    """
    conceptarium = pickle.load(open(metadata_path, 'rb'))

    if len(conceptarium) > 0:
        modality_match = [e.modality == thought.modality for e in conceptarium]
        corpus_embeddings = [e.embedding for e in conceptarium]

        # Score every stored thought against the new one; cross-modality
        # pairs get compensated (see compensate_modality_mismatch).
        results = util.semantic_search(
            [thought.embedding], corpus_embeddings, top_k=len(corpus_embeddings), score_function=util.dot_score)[0]
        results = [e if modality_match[e['corpus_id']]
                   else compensate_modality_mismatch(e) for e in results]

        for result in results:
            conceptarium[result['corpus_id']
                         ].interest += result['score']

    # NOTE(review): the interest updates above are only persisted when the
    # thought is new — a duplicate leaves the pickle untouched. Confirm that
    # is intended.
    if len(list(filter(lambda x: open(x.filename, 'rb').read() == open(thought.filename, 'rb').read(), conceptarium))) == 0:
        conceptarium += [thought]
        pickle.dump(conceptarium, open(metadata_path, 'wb'))
38 |
39 |
def find(query, model, relatedness, serendipity, noise, silent, top_k):
    """Retrieve the top_k stored thoughts for a query.

    Scores mix raw similarity (weighted by `relatedness`) against an
    activation penalty (weighted by `serendipity`), then jitter the result
    with multiplicative Gaussian noise. Unless `silent`, each lookup also
    reinforces the interest of every matched thought and persists it.
    """
    conceptarium = pickle.load(open(metadata_path, 'rb'))

    query_embedding = embed(query, model)
    query_modality = get_modality(query)

    modality_match = [e.modality == query_modality for e in conceptarium]
    corpus_embeddings = [e.embedding for e in conceptarium]

    # Score the whole corpus; cross-modality hits get compensated.
    results = util.semantic_search(
        [query_embedding], corpus_embeddings, top_k=len(corpus_embeddings), score_function=util.dot_score)[0]
    results = [e if modality_match[e['corpus_id']]
               else compensate_modality_mismatch(e) for e in results]

    if not silent:
        # Reading is reinforcing: bump interest and persist immediately.
        for result in results:
            conceptarium[result['corpus_id']
                         ].interest += result['score']
        pickle.dump(conceptarium, open(metadata_path, 'wb'))

    # Final score: relatedness minus a recency-decayed activation term,
    # multiplied by noise ~ N(1, `noise`).
    for idx, result in enumerate(results):
        results[idx]['score'] = (relatedness * result['score']
                                 - serendipity *
                                 (np.log(conceptarium[result['corpus_id']].interest / (1 - 0.9)) - 0.9 * np.log((time.time() - conceptarium[result['corpus_id']].timestamp) / (3600 * 24) + 0.1))) \
            * np.random.normal(1, noise)

    results = sorted(
        results, key=lambda result: result['score'], reverse=True)
    memories = [conceptarium[e['corpus_id']] for e in results][:top_k]
    return memories
70 |
71 |
def get_doc_paths(directory):
    """Return the full path of every file found below `directory`, recursively."""
    return [os.path.join(root, name)
            for root, _dirs, names in os.walk(directory)
            for name in names]
81 |
82 |
def load_model():
    """Load the CLIP model used for both text and image embeddings."""
    return SentenceTransformer('clip-ViT-B-32')
85 |
86 |
def embed(content, model):
    """Encode text (str) or raw image bytes into a normalized embedding tensor."""
    if get_modality(content) == 'language':
        payload = content
    else:
        payload = Image.open(io.BytesIO(content))
    return model.encode(payload, convert_to_tensor=True, normalize_embeddings=True)
92 |
93 |
def reset_embeddings(model):
    """Recompute every stored thought's embedding with `model` and persist.

    Needed after switching embedding models, since stored vectors would
    otherwise live in the previous model's vector space.
    """
    conceptarium = pickle.load(open(metadata_path, 'rb'))
    for thought_idx, thought in enumerate(conceptarium):
        # Text thoughts are re-read as str, everything else as raw bytes.
        if thought.modality == 'language':
            content = open(thought.filename, 'r').read()
        else:
            content = open(thought.filename, 'rb').read()
        conceptarium[thought_idx].embedding = embed(content, model)

    pickle.dump(conceptarium, open(metadata_path, 'wb'))
104 |
105 |
def get_modality(content):
    """Classify content as 'language' (str) or 'imagery' (anything else)."""
    return 'language' if isinstance(content, str) else 'imagery'
111 |
112 |
def compensate_modality_mismatch(result):
    """Boost a cross-modal search hit's score in place by a fixed 2.5 factor.

    Mutates and returns the same result dict.
    """
    boosted = result['score'] * 2.5
    result['score'] = boosted
    return result
116 |
117 |
class Thought:
    """A single stored memory: a file on disk plus retrieval metadata."""

    def __init__(self, filename, content, model):
        self.filename = filename
        # 'language' for str content, 'imagery' otherwise (see get_modality).
        self.modality = get_modality(content)
        # Creation time in epoch seconds; find() decays activation by it.
        self.timestamp = time.time()
        # Accumulated relevance mass; grows as related queries match it.
        self.interest = 1
        self.embedding = embed(content, model)

    def get_content(self):
        """Read the backing file: str for language thoughts, bytes for imagery."""
        if self.modality == 'language':
            return open(self.filename).read()
        elif self.modality == 'imagery':
            return open(self.filename, 'rb').read()
131 |
132 |
133 | '''
134 | import json
135 | thoughts = json.load(open('knowledge/base/metadata.json', 'rb'))
136 |
137 | from datetime import datetime
138 | new_thoughts = []
139 | for thought in thoughts:
140 | new_thought = {}
141 | new_thought['filename'] = thought.filename
142 | new_thought['modality'] = thought.modality
143 | new_thought['timestamp'] = thought.timestamp
144 | new_thought['interest'] = thought.interest
145 | new_thought['embedding'] = thought.embedding
146 | new_thoughts += [new_thought]
147 |
148 | for e_idx, e in enumerate(new_thoughts):
149 | if e['modality'] == 'language':
150 | new_thoughts[e_idx]['modality'] = 'text'
151 | elif e['modality'] == 'imagery':
152 | new_thoughts[e_idx]['modality'] = 'image'
153 | else:
154 | print(e['modality'])
155 |
156 | for e_idx, e in enumerate(new_thoughts):
157 | new_thoughts[e_idx]['embedding'] = e['embedding'].tolist()
158 |
159 | for e_idx, e in enumerate(new_thoughts):
160 | new_thoughts[e_idx]['embedding'] = [round(f, 5) for f in e['embedding']]
161 |
162 | for e_idx, e in enumerate(new_thoughts):
163 | new_thoughts[e_idx]['filename'] = e['filename'].split('/')[-1]
164 |
165 | def get_content(thought, json_friendly=False):
166 | knowledge_base_path = Path('conceptarium')
167 | if thought['modality'] == 'text':
168 | content = open(knowledge_base_path / thought['filename']).read()
169 | elif thought['modality'] == 'image':
170 | content = open(knowledge_base_path / thought['filename'], 'rb').read()
171 | if json_friendly:
172 | content = thought['filename']
173 | return content
174 |
175 | from sentence_transformers import SentenceTransformer, util
176 | from pathlib import Path
177 | from PIL import Image
178 | import io
179 |
180 | text_image_encoder = SentenceTransformer('clip-ViT-B-32')
181 | text_encoder = SentenceTransformer(
182 | 'sentence-transformers/multi-qa-mpnet-base-cos-v1')
183 |
184 | for e_idx, e in enumerate(new_thoughts):
185 | if 'embedding' in e.keys():
186 | new_thoughts[e_idx].pop('embedding')
187 | embs = encode(new_thoughts[e_idx]['modality'], get_content(new_thoughts[e_idx]), text_encoder, text_image_encoder)
188 | new_thoughts[e_idx]['embeddings'] = embs
189 |
190 | new_thoughts[0]
191 | json.dump(new_thoughts, open('conceptarium/metadata.json', 'w'))
192 | '''
193 |
--------------------------------------------------------------------------------
/backend/util.py:
--------------------------------------------------------------------------------
1 | import json
2 | from pathlib import Path
3 | from PIL import Image
4 | import io
5 | import secrets
6 | import time
7 | import numpy as np
8 | from numpy.linalg import norm
9 | import os
10 | import time
11 | import shutil
12 | from fastapi.responses import FileResponse
13 | from feedgen.feed import FeedGenerator
14 | import datetime
15 | from bibliography import get_ical_events
16 |
17 |
def find(modality, query, relatedness, activation, noise, return_embeddings, auth_result, text_encoder, text_image_encoder, silent=False):
    """Rank the caller's authorized thoughts against a text or image query.

    Computes cosine similarity between the query and each authorized thought
    (text-text via the text model, anything involving an image via CLIP with
    a 0.4 down-scaling for image-image pairs), optionally reinforces interest,
    attaches content and nearby calendar events, and returns the thoughts
    ranked by `rank`. Embeddings are stripped unless `return_embeddings`.
    """
    authorized_thoughts = get_authorized_thoughts(auth_result)
    knowledge_base_path = Path('..') / 'knowledge'
    query_embeddings = encode(
        modality, query, text_encoder, text_image_encoder)

    if len(authorized_thoughts) == 0:
        return {
            'authorized_thoughts': [],
            'query_embeddings': query_embeddings
        }

    # Cosine similarity per thought; the scaling constants re-weight
    # cross-modal and image-image CLIP scores.
    sims = []
    text_image_scaling = 1
    image_image_scaling = 0.4
    for e in authorized_thoughts:
        if modality == 'text':
            if e['modality'] == 'text':
                sims += [np.dot(e['embeddings']['text'], query_embeddings['text']) / (
                    norm(e['embeddings']['text']) * norm(query_embeddings['text']))]
            elif e['modality'] == 'image':
                sims += [np.dot(e['embeddings']['text_image'], query_embeddings['text_image']) / (
                    norm(e['embeddings']['text_image']) * norm(query_embeddings['text_image'])) * text_image_scaling]
        elif modality == 'image':
            sims += [np.dot(e['embeddings']['text_image'], query_embeddings['text_image']) / (
                norm(e['embeddings']['text_image']) * norm(query_embeddings['text_image'])) * image_image_scaling]

    # Searching is reinforcing: custodian (non-silent) lookups bump interest
    # and rewrite metadata.json. For the custodian, authorized_thoughts is
    # the full thought list, so this rewrite is lossless.
    if not silent and auth_result['custodian']:
        for e_idx, e in enumerate(sims):
            authorized_thoughts[e_idx]['interest'] += e
        open(knowledge_base_path / 'metadata.json',
             'w').write(json.dumps(authorized_thoughts))

    events = get_ical_events()

    # Decorate each thought with its score inputs, JSON-friendly content,
    # and any calendar events within an hour of its creation.
    for e_idx, e in enumerate(sims):
        authorized_thoughts[e_idx]['relatedness'] = float(e)
        authorized_thoughts[e_idx]['interest'] = float(
            authorized_thoughts[e_idx]['interest'])
        authorized_thoughts[e_idx]['content'] = get_content(
            authorized_thoughts[e_idx], True)
        authorized_thoughts[e_idx]['events'] = [
            f for f in events if abs(f['timestamp'] - authorized_thoughts[e_idx]['timestamp']) < 60 * 60]

        if not return_embeddings:
            if 'embeddings' in authorized_thoughts[e_idx]:
                authorized_thoughts[e_idx].pop('embeddings')

    authorized_thoughts = rank(
        authorized_thoughts, relatedness, activation, noise)

    response = {
        'authorized_thoughts': authorized_thoughts
    }

    if return_embeddings:
        response['query_embeddings'] = query_embeddings

    return response
77 |
78 |
def rank(authorized_thoughts, relatedness, activation, noise):
    """Score thoughts and return them sorted best-first.

    score = relatedness * thought.relatedness
          + activation * (interest term - recency decay) * N(1, noise)

    Each thought dict gains a 'score' key; the input list itself is not
    reordered — a newly sorted list is returned.
    """
    for thought in authorized_thoughts:
        recency_days = (time.time() - thought['timestamp']) / (3600 * 24)
        activation_term = (np.log(max(1, thought['interest'] / (1 - 0.9)))
                           - 0.9 * np.log(max(1, recency_days)))
        jitter = np.random.normal(1, noise)
        thought['score'] = float(relatedness * thought['relatedness']
                                 + activation * activation_term * jitter)

    ordered = sorted(authorized_thoughts,
                     key=lambda thought: thought['score'], reverse=True)
    return ordered
89 |
90 |
def save(modality, query, auth_result, text_encoder, text_image_encoder, silent=False):
    """Persist a new thought (custodian only), reinforcing related ones.

    Text is stored as a .md file, images as re-encoded .jpg. Byte-identical
    duplicates are rejected with a message dict; otherwise the new thought's
    metadata entry is returned. `encode` raises for any other modality
    before the branches below run.
    """
    knowledge_base_path = Path('..') / 'knowledge'

    if auth_result['custodian'] == False:
        return {
            'message': 'Only the conceptarium\'s custodian can save thoughts in it.'
        }
    else:
        if not (knowledge_base_path / 'metadata.json').exists():
            open(knowledge_base_path / 'metadata.json', 'w').write(json.dumps([]))

        query_embeddings = encode(
            modality, query, text_encoder, text_image_encoder)
        thoughts = json.load(open(knowledge_base_path / 'metadata.json'))

        # Deduplicate on exact file contents, then write the new file.
        if modality == 'text':
            duplicates = [e for e in thoughts if e['modality'] ==
                          'text' and open(knowledge_base_path / e['filename']).read() == query]

            if len(duplicates) == 0:
                filename = secrets.token_urlsafe(16) + '.md'
                open(knowledge_base_path / filename, 'w').write(query)
        elif modality == 'image':
            duplicates = [e for e in thoughts if e['modality'] ==
                          'image' and open(knowledge_base_path / e['filename'], 'rb').read() == query]

            if len(duplicates) == 0:
                filename = secrets.token_urlsafe(16) + '.jpg'
                query = Image.open(io.BytesIO(query)).convert('RGB')
                query.save(knowledge_base_path / filename, quality=50)

        # Cosine similarity of the new thought against every stored one;
        # same scaling scheme as in find().
        sims = []
        text_image_scaling = 1
        image_image_scaling = 0.4
        for e in thoughts:
            if modality == 'text':
                if e['modality'] == 'text':
                    sims += [np.dot(e['embeddings']['text'], query_embeddings['text']) / (
                        norm(e['embeddings']['text']) * norm(query_embeddings['text']))]
                elif e['modality'] == 'image':
                    sims += [np.dot(e['embeddings']['text_image'], query_embeddings['text_image']) / (
                        norm(e['embeddings']['text_image']) * norm(query_embeddings['text_image'])) * text_image_scaling]
            elif modality == 'image':
                sims += [np.dot(e['embeddings']['text_image'], query_embeddings['text_image']) / (
                    norm(e['embeddings']['text_image']) * norm(query_embeddings['text_image'])) * image_image_scaling]

        if not silent:
            for e_idx, e in enumerate(sims):
                thoughts[e_idx]['interest'] += e

        # NOTE(review): the interest bumps above are only written to disk in
        # the non-duplicate branch below — a duplicate save leaves
        # metadata.json untouched. Confirm that is intended.
        if len(duplicates) == 0:
            new_thought = {
                'filename': filename,
                'modality': modality,
                'timestamp': time.time(),
                'interest': 1,
                'embeddings': query_embeddings
            }

            thoughts += [new_thought]
            open(knowledge_base_path / 'metadata.json',
                 'w').write(json.dumps(thoughts))

            return new_thought
        else:
            return {
                'message': 'Duplicate thought found.'
            }
159 |
160 |
def remove(auth_result, filename):
    """Delete a thought's file and metadata entry (custodian only).

    Unknown filenames are a silent no-op; non-custodians receive a refusal
    message dict.
    """
    knowledge_base_path = Path('..') / 'knowledge'
    metadata_file = knowledge_base_path / 'metadata.json'

    if auth_result['custodian'] == False:
        return {
            'message': 'Only the conceptarium\'s custodian can remove thoughts from it.'
        }
    else:
        if not metadata_file.exists():
            open(metadata_file, 'w').write(json.dumps([]))

        thoughts = json.load(open(metadata_file))
        matches = [e for e in thoughts if e['filename'] == filename]

        if matches:
            os.remove(knowledge_base_path / filename)
            thoughts.remove(matches[0])
            open(metadata_file, 'w').write(json.dumps(thoughts))
180 |
181 |
def get_authorized_thoughts(auth_result):
    """Return the thoughts the caller's token may see.

    Custodians get everything. Microverse-token holders get only thoughts
    whose (scaled) cosine similarity to the microverse's query embedding
    exceeds a fixed threshold of 0.3.
    """
    metadata_path = Path('..') / 'knowledge' / 'metadata.json'

    if not (metadata_path).exists():
        open(metadata_path, 'w').write(json.dumps([]))

    thoughts = json.load(open(metadata_path))

    if auth_result['custodian'] == True:
        return thoughts
    else:
        similarity_threshold = 0.3
        authorized_microverse = auth_result['authorized_microverse']

        # An unknown token authorizes nothing.
        if authorized_microverse == []:
            return []

        # Score every thought against the microverse query embedding, using
        # the same scaling scheme as find().
        query_embeddings = authorized_microverse[0]['embeddings']
        text_image_scaling = 1
        image_image_scaling = 0.4
        sims = []
        for e in thoughts:
            if authorized_microverse[0]['modality'] == 'text':
                if e['modality'] == 'text':
                    sims += [np.dot(e['embeddings']['text'], query_embeddings['text']) / (
                        norm(e['embeddings']['text']) * norm(query_embeddings['text']))]
                elif e['modality'] == 'image':
                    sims += [np.dot(e['embeddings']['text_image'], query_embeddings['text_image']) / (
                        norm(e['embeddings']['text_image']) * norm(query_embeddings['text_image'])) * text_image_scaling]
            elif authorized_microverse[0]['modality'] == 'image':
                sims += [np.dot(e['embeddings']['text_image'], query_embeddings['text_image']) / (
                    norm(e['embeddings']['text_image']) * norm(query_embeddings['text_image'])) * image_image_scaling]

        scored_thoughts = zip(thoughts, sims)
        authorized_thoughts = [e[0]
                               for e in scored_thoughts if e[1] > similarity_threshold]

        return authorized_thoughts
220 |
221 |
def encode(modality, content, text_encoder, text_image_encoder):
    """Embed content with the encoder(s) appropriate for its modality.

    Args:
        modality: 'text' or 'image'.
        content: the text string, or raw image bytes for 'image'.
        text_encoder: model exposing `encode(content)` -> array, used for
            text-to-text similarity.
        text_image_encoder: model exposing `encode(content)` -> array, used
            for cross-modal (CLIP-style) similarity.

    Returns:
        dict: model identifiers plus embedding vectors rounded to 5 decimal
        places to keep the stored metadata compact.

    Raises:
        Exception: if the modality is not recognized.
    """
    if modality == 'text':
        return {
            'text_model': 'sentence-transformers/multi-qa-mpnet-base-cos-v1',
            'text_image_model': 'clip-ViT-B-32',
            'text': [round(e, 5) for e in text_encoder.encode(content).tolist()],
            'text_image': [round(e, 5) for e in text_image_encoder.encode(content).tolist()]
        }
    elif modality == 'image':
        # Round-trip the raw bytes through an RGB JPEG so the embedding is
        # computed on a normalized representation regardless of the
        # uploaded format. (The previous dead `img_io.read()` that
        # exhausted the buffer before reopening has been removed.)
        content = Image.open(io.BytesIO(content))
        img_io = io.BytesIO()
        content.convert('RGB').save(img_io, 'jpeg')
        img_io.seek(0)
        content = Image.open(img_io)

        return {
            'text_image_model': 'clip-ViT-B-32',
            'text_image': [round(e, 5) for e in text_image_encoder.encode(content).tolist()]
        }
    else:
        raise Exception('Can\'t encode content of modality "' + modality + '"')
245 |
246 |
def get_content(thought, json_friendly=False):
    """Load a thought's raw content from the knowledge base.

    Args:
        thought: metadata dict with at least 'modality' and 'filename'.
        json_friendly: when True, return the filename instead of the raw
            payload so the result can be embedded in a JSON response.

    Returns:
        str for text thoughts, bytes for image thoughts, or the filename
        when json_friendly is True.

    Raises:
        ValueError: if the thought has an unknown modality.
    """
    knowledge_base_path = Path('..') / 'knowledge'

    # The filename stands in for the payload in JSON responses, so skip
    # disk access entirely in that case.
    if json_friendly:
        return thought['filename']

    if thought['modality'] == 'text':
        with open(knowledge_base_path / thought['filename']) as f:
            return f.read()
    elif thought['modality'] == 'image':
        with open(knowledge_base_path / thought['filename'], 'rb') as f:
            return f.read()
    else:
        raise ValueError(
            'Can\'t get content of modality "' + thought['modality'] + '"')
259 |
260 |
def dump(auth_result):
    """Export the entire knowledge base as a zip archive.

    Only the custodian may download the full contents; anyone else gets a
    refusal message instead of the archive.

    Args:
        auth_result: dict with a boolean 'custodian' key.

    Returns:
        A FileResponse streaming the archive, or a message dict on refusal.
    """
    knowledge_base_path = Path('..') / 'knowledge'
    archive_path = Path('..') / 'knowledge.zip'

    # Guard clause: refuse non-custodian callers up front.
    if auth_result['custodian'] == False:
        return {
            'message': 'Only the conceptarium\'s custodian can download its full contents as an archive.'
        }

    # Rebuild the archive from the live knowledge directory on every call.
    shutil.make_archive(knowledge_base_path, 'zip', knowledge_base_path)
    return FileResponse(archive_path, filename='knowledge.zip')
272 |
273 |
def compile_rss(items):
    """Render a microverse's authorized thoughts as an RSS feed.

    Only text thoughts become feed entries; other modalities are skipped.

    Args:
        items: dict with an 'authorized_thoughts' list of thought dicts
            carrying 'modality', 'filename', 'content', and 'timestamp'.

    Returns:
        bytes: the pretty-printed RSS document.
    """
    feed = FeedGenerator()
    feed.title('microverse')
    feed.description(
        'This microverse of knowledge contains a cluster of ideas centered around a certain topic.')
    feed.link(href='https://paulbricman.com/thoughtware/conceptarium')

    for thought in items['authorized_thoughts']:
        # Non-text thoughts can't be rendered as feed entries.
        if thought['modality'] != 'text':
            continue

        entry = feed.add_entry()
        entry.title(thought['filename'])
        entry.content(thought['content'])
        # Feed dates must be timezone-aware; normalize to UTC.
        when = datetime.datetime.fromtimestamp(thought['timestamp'])
        entry.published(when.astimezone(datetime.timezone.utc))

    return feed.rss_str(pretty=True)
291 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Mozilla Public License Version 2.0
2 | ==================================
3 |
4 | 1. Definitions
5 | --------------
6 |
7 | 1.1. "Contributor"
8 | means each individual or legal entity that creates, contributes to
9 | the creation of, or owns Covered Software.
10 |
11 | 1.2. "Contributor Version"
12 | means the combination of the Contributions of others (if any) used
13 | by a Contributor and that particular Contributor's Contribution.
14 |
15 | 1.3. "Contribution"
16 | means Covered Software of a particular Contributor.
17 |
18 | 1.4. "Covered Software"
19 | means Source Code Form to which the initial Contributor has attached
20 | the notice in Exhibit A, the Executable Form of such Source Code
21 | Form, and Modifications of such Source Code Form, in each case
22 | including portions thereof.
23 |
24 | 1.5. "Incompatible With Secondary Licenses"
25 | means
26 |
27 | (a) that the initial Contributor has attached the notice described
28 | in Exhibit B to the Covered Software; or
29 |
30 | (b) that the Covered Software was made available under the terms of
31 | version 1.1 or earlier of the License, but not also under the
32 | terms of a Secondary License.
33 |
34 | 1.6. "Executable Form"
35 | means any form of the work other than Source Code Form.
36 |
37 | 1.7. "Larger Work"
38 | means a work that combines Covered Software with other material, in
39 | a separate file or files, that is not Covered Software.
40 |
41 | 1.8. "License"
42 | means this document.
43 |
44 | 1.9. "Licensable"
45 | means having the right to grant, to the maximum extent possible,
46 | whether at the time of the initial grant or subsequently, any and
47 | all of the rights conveyed by this License.
48 |
49 | 1.10. "Modifications"
50 | means any of the following:
51 |
52 | (a) any file in Source Code Form that results from an addition to,
53 | deletion from, or modification of the contents of Covered
54 | Software; or
55 |
56 | (b) any new file in Source Code Form that contains any Covered
57 | Software.
58 |
59 | 1.11. "Patent Claims" of a Contributor
60 | means any patent claim(s), including without limitation, method,
61 | process, and apparatus claims, in any patent Licensable by such
62 | Contributor that would be infringed, but for the grant of the
63 | License, by the making, using, selling, offering for sale, having
64 | made, import, or transfer of either its Contributions or its
65 | Contributor Version.
66 |
67 | 1.12. "Secondary License"
68 | means either the GNU General Public License, Version 2.0, the GNU
69 | Lesser General Public License, Version 2.1, the GNU Affero General
70 | Public License, Version 3.0, or any later versions of those
71 | licenses.
72 |
73 | 1.13. "Source Code Form"
74 | means the form of the work preferred for making modifications.
75 |
76 | 1.14. "You" (or "Your")
77 | means an individual or a legal entity exercising rights under this
78 | License. For legal entities, "You" includes any entity that
79 | controls, is controlled by, or is under common control with You. For
80 | purposes of this definition, "control" means (a) the power, direct
81 | or indirect, to cause the direction or management of such entity,
82 | whether by contract or otherwise, or (b) ownership of more than
83 | fifty percent (50%) of the outstanding shares or beneficial
84 | ownership of such entity.
85 |
86 | 2. License Grants and Conditions
87 | --------------------------------
88 |
89 | 2.1. Grants
90 |
91 | Each Contributor hereby grants You a world-wide, royalty-free,
92 | non-exclusive license:
93 |
94 | (a) under intellectual property rights (other than patent or trademark)
95 | Licensable by such Contributor to use, reproduce, make available,
96 | modify, display, perform, distribute, and otherwise exploit its
97 | Contributions, either on an unmodified basis, with Modifications, or
98 | as part of a Larger Work; and
99 |
100 | (b) under Patent Claims of such Contributor to make, use, sell, offer
101 | for sale, have made, import, and otherwise transfer either its
102 | Contributions or its Contributor Version.
103 |
104 | 2.2. Effective Date
105 |
106 | The licenses granted in Section 2.1 with respect to any Contribution
107 | become effective for each Contribution on the date the Contributor first
108 | distributes such Contribution.
109 |
110 | 2.3. Limitations on Grant Scope
111 |
112 | The licenses granted in this Section 2 are the only rights granted under
113 | this License. No additional rights or licenses will be implied from the
114 | distribution or licensing of Covered Software under this License.
115 | Notwithstanding Section 2.1(b) above, no patent license is granted by a
116 | Contributor:
117 |
118 | (a) for any code that a Contributor has removed from Covered Software;
119 | or
120 |
121 | (b) for infringements caused by: (i) Your and any other third party's
122 | modifications of Covered Software, or (ii) the combination of its
123 | Contributions with other software (except as part of its Contributor
124 | Version); or
125 |
126 | (c) under Patent Claims infringed by Covered Software in the absence of
127 | its Contributions.
128 |
129 | This License does not grant any rights in the trademarks, service marks,
130 | or logos of any Contributor (except as may be necessary to comply with
131 | the notice requirements in Section 3.4).
132 |
133 | 2.4. Subsequent Licenses
134 |
135 | No Contributor makes additional grants as a result of Your choice to
136 | distribute the Covered Software under a subsequent version of this
137 | License (see Section 10.2) or under the terms of a Secondary License (if
138 | permitted under the terms of Section 3.3).
139 |
140 | 2.5. Representation
141 |
142 | Each Contributor represents that the Contributor believes its
143 | Contributions are its original creation(s) or it has sufficient rights
144 | to grant the rights to its Contributions conveyed by this License.
145 |
146 | 2.6. Fair Use
147 |
148 | This License is not intended to limit any rights You have under
149 | applicable copyright doctrines of fair use, fair dealing, or other
150 | equivalents.
151 |
152 | 2.7. Conditions
153 |
154 | Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
155 | in Section 2.1.
156 |
157 | 3. Responsibilities
158 | -------------------
159 |
160 | 3.1. Distribution of Source Form
161 |
162 | All distribution of Covered Software in Source Code Form, including any
163 | Modifications that You create or to which You contribute, must be under
164 | the terms of this License. You must inform recipients that the Source
165 | Code Form of the Covered Software is governed by the terms of this
166 | License, and how they can obtain a copy of this License. You may not
167 | attempt to alter or restrict the recipients' rights in the Source Code
168 | Form.
169 |
170 | 3.2. Distribution of Executable Form
171 |
172 | If You distribute Covered Software in Executable Form then:
173 |
174 | (a) such Covered Software must also be made available in Source Code
175 | Form, as described in Section 3.1, and You must inform recipients of
176 | the Executable Form how they can obtain a copy of such Source Code
177 | Form by reasonable means in a timely manner, at a charge no more
178 | than the cost of distribution to the recipient; and
179 |
180 | (b) You may distribute such Executable Form under the terms of this
181 | License, or sublicense it under different terms, provided that the
182 | license for the Executable Form does not attempt to limit or alter
183 | the recipients' rights in the Source Code Form under this License.
184 |
185 | 3.3. Distribution of a Larger Work
186 |
187 | You may create and distribute a Larger Work under terms of Your choice,
188 | provided that You also comply with the requirements of this License for
189 | the Covered Software. If the Larger Work is a combination of Covered
190 | Software with a work governed by one or more Secondary Licenses, and the
191 | Covered Software is not Incompatible With Secondary Licenses, this
192 | License permits You to additionally distribute such Covered Software
193 | under the terms of such Secondary License(s), so that the recipient of
194 | the Larger Work may, at their option, further distribute the Covered
195 | Software under the terms of either this License or such Secondary
196 | License(s).
197 |
198 | 3.4. Notices
199 |
200 | You may not remove or alter the substance of any license notices
201 | (including copyright notices, patent notices, disclaimers of warranty,
202 | or limitations of liability) contained within the Source Code Form of
203 | the Covered Software, except that You may alter any license notices to
204 | the extent required to remedy known factual inaccuracies.
205 |
206 | 3.5. Application of Additional Terms
207 |
208 | You may choose to offer, and to charge a fee for, warranty, support,
209 | indemnity or liability obligations to one or more recipients of Covered
210 | Software. However, You may do so only on Your own behalf, and not on
211 | behalf of any Contributor. You must make it absolutely clear that any
212 | such warranty, support, indemnity, or liability obligation is offered by
213 | You alone, and You hereby agree to indemnify every Contributor for any
214 | liability incurred by such Contributor as a result of warranty, support,
215 | indemnity or liability terms You offer. You may include additional
216 | disclaimers of warranty and limitations of liability specific to any
217 | jurisdiction.
218 |
219 | 4. Inability to Comply Due to Statute or Regulation
220 | ---------------------------------------------------
221 |
222 | If it is impossible for You to comply with any of the terms of this
223 | License with respect to some or all of the Covered Software due to
224 | statute, judicial order, or regulation then You must: (a) comply with
225 | the terms of this License to the maximum extent possible; and (b)
226 | describe the limitations and the code they affect. Such description must
227 | be placed in a text file included with all distributions of the Covered
228 | Software under this License. Except to the extent prohibited by statute
229 | or regulation, such description must be sufficiently detailed for a
230 | recipient of ordinary skill to be able to understand it.
231 |
232 | 5. Termination
233 | --------------
234 |
235 | 5.1. The rights granted under this License will terminate automatically
236 | if You fail to comply with any of its terms. However, if You become
237 | compliant, then the rights granted under this License from a particular
238 | Contributor are reinstated (a) provisionally, unless and until such
239 | Contributor explicitly and finally terminates Your grants, and (b) on an
240 | ongoing basis, if such Contributor fails to notify You of the
241 | non-compliance by some reasonable means prior to 60 days after You have
242 | come back into compliance. Moreover, Your grants from a particular
243 | Contributor are reinstated on an ongoing basis if such Contributor
244 | notifies You of the non-compliance by some reasonable means, this is the
245 | first time You have received notice of non-compliance with this License
246 | from such Contributor, and You become compliant prior to 30 days after
247 | Your receipt of the notice.
248 |
249 | 5.2. If You initiate litigation against any entity by asserting a patent
250 | infringement claim (excluding declaratory judgment actions,
251 | counter-claims, and cross-claims) alleging that a Contributor Version
252 | directly or indirectly infringes any patent, then the rights granted to
253 | You by any and all Contributors for the Covered Software under Section
254 | 2.1 of this License shall terminate.
255 |
256 | 5.3. In the event of termination under Sections 5.1 or 5.2 above, all
257 | end user license agreements (excluding distributors and resellers) which
258 | have been validly granted by You or Your distributors under this License
259 | prior to termination shall survive termination.
260 |
261 | ************************************************************************
262 | * *
263 | * 6. Disclaimer of Warranty *
264 | * ------------------------- *
265 | * *
266 | * Covered Software is provided under this License on an "as is" *
267 | * basis, without warranty of any kind, either expressed, implied, or *
268 | * statutory, including, without limitation, warranties that the *
269 | * Covered Software is free of defects, merchantable, fit for a *
270 | * particular purpose or non-infringing. The entire risk as to the *
271 | * quality and performance of the Covered Software is with You. *
272 | * Should any Covered Software prove defective in any respect, You *
273 | * (not any Contributor) assume the cost of any necessary servicing, *
274 | * repair, or correction. This disclaimer of warranty constitutes an *
275 | * essential part of this License. No use of any Covered Software is *
276 | * authorized under this License except under this disclaimer. *
277 | * *
278 | ************************************************************************
279 |
280 | ************************************************************************
281 | * *
282 | * 7. Limitation of Liability *
283 | * -------------------------- *
284 | * *
285 | * Under no circumstances and under no legal theory, whether tort *
286 | * (including negligence), contract, or otherwise, shall any *
287 | * Contributor, or anyone who distributes Covered Software as *
288 | * permitted above, be liable to You for any direct, indirect, *
289 | * special, incidental, or consequential damages of any character *
290 | * including, without limitation, damages for lost profits, loss of *
291 | * goodwill, work stoppage, computer failure or malfunction, or any *
292 | * and all other commercial damages or losses, even if such party *
293 | * shall have been informed of the possibility of such damages. This *
294 | * limitation of liability shall not apply to liability for death or *
295 | * personal injury resulting from such party's negligence to the *
296 | * extent applicable law prohibits such limitation. Some *
297 | * jurisdictions do not allow the exclusion or limitation of *
298 | * incidental or consequential damages, so this exclusion and *
299 | * limitation may not apply to You. *
300 | * *
301 | ************************************************************************
302 |
303 | 8. Litigation
304 | -------------
305 |
306 | Any litigation relating to this License may be brought only in the
307 | courts of a jurisdiction where the defendant maintains its principal
308 | place of business and such litigation shall be governed by laws of that
309 | jurisdiction, without reference to its conflict-of-law provisions.
310 | Nothing in this Section shall prevent a party's ability to bring
311 | cross-claims or counter-claims.
312 |
313 | 9. Miscellaneous
314 | ----------------
315 |
316 | This License represents the complete agreement concerning the subject
317 | matter hereof. If any provision of this License is held to be
318 | unenforceable, such provision shall be reformed only to the extent
319 | necessary to make it enforceable. Any law or regulation which provides
320 | that the language of a contract shall be construed against the drafter
321 | shall not be used to construe this License against a Contributor.
322 |
323 | 10. Versions of the License
324 | ---------------------------
325 |
326 | 10.1. New Versions
327 |
328 | Mozilla Foundation is the license steward. Except as provided in Section
329 | 10.3, no one other than the license steward has the right to modify or
330 | publish new versions of this License. Each version will be given a
331 | distinguishing version number.
332 |
333 | 10.2. Effect of New Versions
334 |
335 | You may distribute the Covered Software under the terms of the version
336 | of the License under which You originally received the Covered Software,
337 | or under the terms of any subsequent version published by the license
338 | steward.
339 |
340 | 10.3. Modified Versions
341 |
342 | If you create software not governed by this License, and you want to
343 | create a new license for such software, you may create and use a
344 | modified version of this License if you rename the license and remove
345 | any references to the name of the license steward (except to note that
346 | such modified license differs from this License).
347 |
348 | 10.4. Distributing Source Code Form that is Incompatible With Secondary
349 | Licenses
350 |
351 | If You choose to distribute Source Code Form that is Incompatible With
352 | Secondary Licenses under the terms of this version of the License, the
353 | notice described in Exhibit B of this License must be attached.
354 |
355 | Exhibit A - Source Code Form License Notice
356 | -------------------------------------------
357 |
358 | This Source Code Form is subject to the terms of the Mozilla Public
359 | License, v. 2.0. If a copy of the MPL was not distributed with this
360 | file, You can obtain one at http://mozilla.org/MPL/2.0/.
361 |
362 | If it is not possible or desirable to put the notice in a particular
363 | file, then You may include the notice in a location (such as a LICENSE
364 | file in a relevant directory) where a recipient would be likely to look
365 | for such a notice.
366 |
367 | You may add additional accurate notices of copyright ownership.
368 |
369 | Exhibit B - "Incompatible With Secondary Licenses" Notice
370 | ---------------------------------------------------------
371 |
372 | This Source Code Form is "Incompatible With Secondary Licenses", as
373 | defined by the Mozilla Public License, v. 2.0.
374 |
--------------------------------------------------------------------------------