├── ssb ├── __init__.py ├── adt │ ├── __init__.py │ ├── tangle.py │ └── lfs.py ├── app │ ├── __init__.py │ └── drive.py ├── local │ ├── __init__.py │ ├── util.py │ ├── config.py │ └── worm.py ├── peer │ ├── __init__.py │ └── session.py ├── rpc │ ├── __init__.py │ ├── tests │ │ ├── __init__.py │ │ ├── test_util.py │ │ ├── test_feed.py │ │ └── test_packet_stream.py │ ├── feed │ │ ├── __init__.py │ │ └── models.py │ ├── muxrpc.py │ └── packet_stream.py ├── README.md └── shs │ ├── __init__.py │ ├── boxstream.py │ ├── test_boxstream.py │ ├── util.py │ ├── test_network.py │ ├── network.py │ ├── test_crypto.py │ └── crypto.py ├── doc ├── tangle-1.png ├── tangle-2.png ├── tangle-3.png ├── tangle-4.png ├── tangle-5.png ├── tangle-6.png ├── OR-Set-algo.png ├── demo-20180831.gif ├── tangle-2.gliffy ├── tangle.md └── tangle-3.gliffy ├── requirements.txt ├── AUTHORS ├── LICENSE ├── COPYRIGHT ├── .gitignore ├── README.md └── ssb-drive.py /ssb/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ssb/adt/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ssb/app/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ssb/local/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ssb/peer/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ssb/rpc/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ssb/rpc/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /doc/tangle-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cn-uofbasel/ssbdrv/HEAD/doc/tangle-1.png -------------------------------------------------------------------------------- /doc/tangle-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cn-uofbasel/ssbdrv/HEAD/doc/tangle-2.png -------------------------------------------------------------------------------- /doc/tangle-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cn-uofbasel/ssbdrv/HEAD/doc/tangle-3.png -------------------------------------------------------------------------------- /doc/tangle-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cn-uofbasel/ssbdrv/HEAD/doc/tangle-4.png -------------------------------------------------------------------------------- /doc/tangle-5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cn-uofbasel/ssbdrv/HEAD/doc/tangle-5.png -------------------------------------------------------------------------------- 
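Read together, the tree above exposes the following importable entry points. The sketch below is orientation only; every name is taken from the `__init__.py` files and modules reproduced further on in this dump, and it adds nothing beyond what those files already export.

```python
# Orientation only: public names exported by the packages in the tree above.
from ssb.shs import SHSClient, SHSServer              # secret-handshake transport
from ssb.rpc.packet_stream import PacketStream        # muxrpc packet framing
from ssb.rpc.muxrpc import MuxRPCAPI                  # RPC method registry / dispatch
from ssb.rpc.feed import Feed, LocalFeed, Message, LocalMessage
from ssb.adt.tangle import SSB_TANGLE                 # tangle (DAG) abstract data type
from ssb.local.util import username2dir, is_locked    # helpers for the local ~/.ssb dir
```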
/doc/tangle-6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cn-uofbasel/ssbdrv/HEAD/doc/tangle-6.png -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | PyNaCl == 1.2.1 2 | prompt_toolkit == 2.0.4 3 | psutil == 5.4.7 4 | -------------------------------------------------------------------------------- /doc/OR-Set-algo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cn-uofbasel/ssbdrv/HEAD/doc/OR-Set-algo.png -------------------------------------------------------------------------------- /doc/demo-20180831.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cn-uofbasel/ssbdrv/HEAD/doc/demo-20180831.gif -------------------------------------------------------------------------------- /ssb/rpc/feed/__init__.py: -------------------------------------------------------------------------------- 1 | from .models import Feed, LocalFeed, Message, LocalMessage, NoPrivateKeyException 2 | 3 | __all__ = ('Feed', 'LocalFeed', 'Message', 'LocalMessage', 'NoPrivateKeyException') 4 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | # AUTHORS 2 | 3 | ssbdrv copyright (c) 4 | pyssb copyright (c) Pedro Ferreira 5 | PySecretHandshake copyright (c) Pedro Ferreira 6 | 7 | This repo: 8 | https://github.com/cn-uofbasel/ssbdrv 9 | 10 | includes code from: 11 | https://github.com/pferreir/pyssb 12 | https://github.com/pferreir/PySecretHandshake 13 | -------------------------------------------------------------------------------- /ssb/local/util.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # ssb/local/util.py 4 | 5 | import base64 6 | import os 7 | import psutil 8 | 9 | def username2dir(n): 10 | ssb_home = os.path.expanduser('~/.ssb') 11 | if not n: 12 | return ssb_home 13 | return os.path.join(ssb_home, 'user.' + n) 14 | 15 | def is_locked(username): 16 | logname = username2dir(username) + '/flume/log.offset' 17 | for p in psutil.process_iter(): 18 | try: 19 | for f in p.open_files(): 20 | if logname in str(f): 21 | print(f) 22 | return p 23 | except: 24 | pass 25 | return None 26 | 27 | def id2bytes(id): 28 | return base64.b64decode(id.split('.')[0][1:]) 29 | 30 | # eof 31 | -------------------------------------------------------------------------------- /ssb/README.md: -------------------------------------------------------------------------------- 1 | # Content of directory ssb/ 2 | 3 | dir | content 4 | ---: | --- 5 | adt | abstract data types 6 | app | application logic 7 | cmd | app main programs with UI 8 | local | access to local files (log, indices) 9 | peer | peer connection 10 | rpc | SSB RPC protocol 11 | shs | SSB secure handshake protocol 12 | 13 | and their dependencies: 14 | 15 | ```txt 16 | .-----------------------------. 17 | | cmd | 18 | + .------------------. | 19 | | | app | | 20 | +-----+-----+-------. | | 21 | | peer | adt | | | 22 | +-----. 
+-------+----+----+ 23 | | rpc | | | 24 | +-----+-----+ local | 25 | | shs | | 26 | `-----------+-----------------' 27 | ``` 28 | 29 | --- 30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /COPYRIGHT: -------------------------------------------------------------------------------- 1 | # COPYRIGHT 2 | 3 | Copyright (c) 2018 ssbdrv contributors (see AUTHORS for more details) 4 | 5 | Copyright (c) 2017 pyssb contributors (see AUTHORS for more details) 6 | 7 | Permission is hereby granted, free of charge, to any person obtaining a copy 8 | of this software and associated documentation files (the "Software"), to deal 9 | in the Software without restriction, including without limitation the rights 10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | copies of the Software, and to permit persons to whom the Software is 12 | furnished to do so, subject to the following conditions: 13 | 14 | The above copyright notice and this permission notice shall be included in all 15 | copies or substantial portions of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | SOFTWARE. 
24 | -------------------------------------------------------------------------------- /ssb/shs/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 PySecretHandshake contributors (see AUTHORS for more details) 2 | # 3 | # Permission is hereby granted, free of charge, to any person obtaining a copy 4 | # of this software and associated documentation files (the "Software"), to deal 5 | # in the Software without restriction, including without limitation the rights 6 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | # copies of the Software, and to permit persons to whom the Software is 8 | # furnished to do so, subject to the following conditions: 9 | # 10 | # The above copyright notice and this permission notice shall be included in all 11 | # copies or substantial portions of the Software. 12 | # 13 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | # SOFTWARE. 20 | 21 | 22 | from .network import SHSClient, SHSServer 23 | 24 | __all__ = ('SHSClient', 'SHSServer') 25 | -------------------------------------------------------------------------------- /ssb/rpc/tests/test_util.py: -------------------------------------------------------------------------------- 1 | from base64 import b64decode 2 | from unittest.mock import mock_open, patch 3 | 4 | import pytest 5 | 6 | from ssb.util import load_ssb_secret, ConfigException 7 | 8 | 9 | CONFIG_FILE = """ 10 | ## Comments should be supported too 11 | { 12 | "curve": "ed25519", 13 | "public": "rsYpBIcXsxjQAf0JNes+MHqT2DL+EfopWKAp4rGeEPQ=ed25519", 14 | "private": "/bqDBI/vGLD5qy3GxMsgHFgYIrrY08JfTzUaCYT6x0GuxikEhxezGNAB/Qk16z4wepPYMv4R+ilYoCnisZ4Q9A==", 15 | "id": "@rsYpBIcXsxjQAf0JNes+MHqT2DL+EfopWKAp4rGeEPQ=.ed25519" 16 | } 17 | """ 18 | 19 | CONFIG_FILE_INVALID = CONFIG_FILE.replace('ed25519', 'foo') 20 | 21 | 22 | def test_load_secret(): 23 | with patch('ssb.util.open', mock_open(read_data=CONFIG_FILE), create=True): 24 | secret = load_ssb_secret() 25 | 26 | priv_key = b'\xfd\xba\x83\x04\x8f\xef\x18\xb0\xf9\xab-\xc6\xc4\xcb \x1cX\x18"\xba\xd8\xd3\xc2_O5\x1a\t\x84\xfa\xc7A' 27 | 28 | assert secret['id'] == '@rsYpBIcXsxjQAf0JNes+MHqT2DL+EfopWKAp4rGeEPQ=.ed25519' 29 | assert bytes(secret['keypair']) == priv_key 30 | assert bytes(secret['keypair'].verify_key) == b64decode('rsYpBIcXsxjQAf0JNes+MHqT2DL+EfopWKAp4rGeEPQ=') 31 | 32 | 33 | def test_load_exception(): 34 | with pytest.raises(ConfigException): 35 | with patch('ssb.util.open', mock_open(read_data=CONFIG_FILE_INVALID), create=True): 36 | load_ssb_secret() 37 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 
| .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # IPython 76 | profile_default/ 77 | ipython_config.py 78 | 79 | # pyenv 80 | .python-version 81 | 82 | # celery beat schedule file 83 | celerybeat-schedule 84 | 85 | # SageMath parsed files 86 | *.sage.py 87 | 88 | # Environments 89 | .env 90 | .venv 91 | env/ 92 | venv/ 93 | ENV/ 94 | env.bak/ 95 | venv.bak/ 96 | 97 | # Spyder project settings 98 | .spyderproject 99 | .spyproject 100 | 101 | # Rope project settings 102 | .ropeproject 103 | 104 | # mkdocs documentation 105 | /site 106 | 107 | # mypy 108 | .mypy_cache/ 109 | .dmypy.json 110 | dmypy.json 111 | 112 | # emacs backups: 113 | *~ 114 | 115 | # eof 116 | -------------------------------------------------------------------------------- /ssb/shs/boxstream.py: -------------------------------------------------------------------------------- 1 | # ssb/shs/boxtream.py 2 | 3 | # June 2017 (c) Pedro Ferreira 4 | # https://github.com/pferreir/pyssb 5 | 6 | import struct 7 | from asyncio import IncompleteReadError 8 | 9 | from async_generator import async_generator, yield_ 10 | from nacl.secret import SecretBox 11 | 12 | from .util import inc_nonce, split_chunks 13 | 14 | HEADER_LENGTH = 2 + 16 + 16 15 | MAX_SEGMENT_SIZE = 4 * 1024 16 | TERMINATION_HEADER = (b'\x00' * 18) 17 | 18 | 19 | def get_stream_pair(reader, writer, **kwargs): 20 | """Return a tuple with `(unbox_stream, box_stream)` (reader/writer). 
21 | 22 | :return: (:class:`secret_handshake.boxstream.UnboxStream`, 23 | :class:`secret_handshake.boxstream.BoxStream`) """ 24 | box_args = { 25 | 'key': kwargs['encrypt_key'], 26 | 'nonce': kwargs['encrypt_nonce'], 27 | } 28 | unbox_args = { 29 | 'key': kwargs['decrypt_key'], 30 | 'nonce': kwargs['decrypt_nonce'], 31 | } 32 | return UnboxStream(reader, **unbox_args), BoxStream(writer, **box_args) 33 | 34 | 35 | class UnboxStream(object): 36 | def __init__(self, reader, key, nonce): 37 | self.reader = reader 38 | self.key = key 39 | self.nonce = nonce 40 | self.closed = False 41 | 42 | async def read(self): 43 | try: 44 | data = await self.reader.readexactly(HEADER_LENGTH) 45 | except IncompleteReadError: 46 | self.closed = True 47 | return None 48 | 49 | box = SecretBox(self.key) 50 | 51 | header = box.decrypt(data, self.nonce) 52 | 53 | if header == TERMINATION_HEADER: 54 | self.closed = True 55 | return None 56 | 57 | length = struct.unpack('>H', header[:2])[0] 58 | mac = header[2:] 59 | 60 | data = await self.reader.readexactly(length) 61 | 62 | body = box.decrypt(mac + data, inc_nonce(self.nonce)) 63 | 64 | self.nonce = inc_nonce(inc_nonce(self.nonce)) 65 | return body 66 | 67 | @async_generator 68 | async def __aiter__(self): 69 | while True: 70 | data = await self.read() 71 | if data is None: 72 | return 73 | await yield_(data) 74 | 75 | 76 | class BoxStream(object): 77 | def __init__(self, writer, key, nonce): 78 | self.writer = writer 79 | self.key = key 80 | self.box = SecretBox(self.key) 81 | self.nonce = nonce 82 | 83 | def write(self, data): 84 | for chunk in split_chunks(data, MAX_SEGMENT_SIZE): 85 | body = self.box.encrypt(chunk, inc_nonce(self.nonce))[24:] 86 | header = struct.pack('>H', len(body) - 16) + body[:16] 87 | 88 | hdrbox = self.box.encrypt(header, self.nonce)[24:] 89 | self.writer.write(hdrbox) 90 | 91 | self.nonce = inc_nonce(inc_nonce(self.nonce)) 92 | self.writer.write(body[16:]) 93 | 94 | def close(self): 95 | self.writer.write(self.box.encrypt(b'\x00' * 18, self.nonce)[24:]) 96 | -------------------------------------------------------------------------------- /ssb/shs/test_boxstream.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 PySecretHandshake contributors (see AUTHORS for more details) 2 | # 3 | # Permission is hereby granted, free of charge, to any person obtaining a copy 4 | # of this software and associated documentation files (the "Software"), to deal 5 | # in the Software without restriction, including without limitation the rights 6 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | # copies of the Software, and to permit persons to whom the Software is 8 | # furnished to do so, subject to the following conditions: 9 | # 10 | # The above copyright notice and this permission notice shall be included in all 11 | # copies or substantial portions of the Software. 12 | # 13 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | # SOFTWARE. 
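# The box stream implemented in ssb/shs/boxstream.py frames each segment as an
# encrypted 34-byte header (2-byte big-endian body length plus the body's
# 16-byte MAC) followed by the body ciphertext; the nonce is used for the
# header, incremented once for the body, and advanced by two per segment, and
# a header of 18 zero bytes signals end of stream.  The sketch below is an
# illustration only, independent of the tests that follow: one self-contained
# round trip of a single segment, mirroring BoxStream.write / UnboxStream.read,
# with placeholder key and nonce values.

def _boxstream_framing_sketch():
    import struct
    from nacl.secret import SecretBox
    from ssb.shs.util import inc_nonce

    key, nonce = b'k' * 32, b'n' * 24                 # placeholder values
    box = SecretBox(key)

    # sender side: encrypt the body with nonce + 1, then the header with nonce
    body = box.encrypt(b'hello', inc_nonce(nonce))[24:]      # MAC || ciphertext
    header = struct.pack('>H', len(body) - 16) + body[:16]   # length || MAC
    wire = box.encrypt(header, nonce)[24:] + body[16:]

    # receiver side: decrypt the 34-byte header, then MAC + body ciphertext
    hdr = box.decrypt(wire[:34], nonce)
    length, mac = struct.unpack('>H', hdr[:2])[0], hdr[2:]
    assert box.decrypt(mac + wire[34:34 + length], inc_nonce(nonce)) == b'hello'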
20 | 21 | 22 | import pytest 23 | 24 | from secret_handshake.boxstream import HEADER_LENGTH, BoxStream, UnboxStream 25 | from secret_handshake.util import AsyncBuffer, async_comprehend 26 | 27 | from .test_crypto import CLIENT_ENCRYPT_KEY, CLIENT_ENCRYPT_NONCE 28 | 29 | MESSAGE_1 = (b'\xcev\xedE\x06l\x02\x13\xc8\x17V\xfa\x8bZ?\x88B%O\xb0L\x9f\x8e\x8c0y\x1dv\xc0\xc9\xf6\x9d\xc2\xdf\xdb' 30 | b'\xee\x9d') 31 | MESSAGE_2 = b"\x141\xd63\x13d\xd1\xecZ\x9b\xd0\xd4\x03\xcdR?'\xaa.\x89I\x92I\xf9guL\xaa\x06?\xea\xca/}\x88*\xb2" 32 | MESSAGE_3 = (b'\xcbYY\xf1\x0f\xa5O\x13r\xa6"\x15\xc5\x9d\r.*\x0b\x92\x10m\xa6(\x0c\x0c\xc61\x80j\x81)\x800\xed\xda' 33 | b'\xad\xa1') 34 | MESSAGE_CLOSED = b'\xb1\x14hU\'\xb5M\xa6"\x03\x9duy\xa1\xd4evW,\xdcE\x18\xe4+ C4\xe8h\x96\xed\xc5\x94\x80' 35 | 36 | 37 | @pytest.mark.asyncio 38 | async def test_boxstream(): 39 | buffer = AsyncBuffer() 40 | box_stream = BoxStream(buffer, CLIENT_ENCRYPT_KEY, CLIENT_ENCRYPT_NONCE) 41 | box_stream.write(b'foo') 42 | buffer.seek(0) 43 | assert await buffer.read() == MESSAGE_1 44 | 45 | pos = buffer.tell() 46 | box_stream.write(b'foo') 47 | buffer.seek(pos) 48 | assert await buffer.read() == MESSAGE_2 49 | 50 | pos = buffer.tell() 51 | box_stream.write(b'bar') 52 | buffer.seek(pos) 53 | assert await buffer.read() == MESSAGE_3 54 | 55 | pos = buffer.tell() 56 | box_stream.close() 57 | buffer.seek(pos) 58 | assert await buffer.read() == MESSAGE_CLOSED 59 | 60 | 61 | @pytest.mark.asyncio 62 | async def test_unboxstream(): 63 | buffer = AsyncBuffer(MESSAGE_1 + MESSAGE_2 + MESSAGE_3 + MESSAGE_CLOSED) 64 | buffer.seek(0) 65 | 66 | unbox_stream = UnboxStream(buffer, CLIENT_ENCRYPT_KEY, CLIENT_ENCRYPT_NONCE) 67 | assert not unbox_stream.closed 68 | assert (await async_comprehend(unbox_stream)) == [b'foo', b'foo', b'bar'] 69 | assert unbox_stream.closed 70 | 71 | 72 | @pytest.mark.asyncio 73 | async def test_long_packets(): 74 | data_size = 6 * 1024 75 | data = bytes(n % 256 for n in range(data_size)) 76 | 77 | # box 6K buffer 78 | buffer = AsyncBuffer() 79 | box_stream = BoxStream(buffer, CLIENT_ENCRYPT_KEY, CLIENT_ENCRYPT_NONCE) 80 | box_stream.write(data) 81 | # the size overhead corresponds to the two packet headers 82 | assert buffer.tell() == data_size + (HEADER_LENGTH * 2) 83 | buffer.seek(0) 84 | 85 | # now let's unbox it and check whether it's OK 86 | unbox_stream = UnboxStream(buffer, CLIENT_ENCRYPT_KEY, CLIENT_ENCRYPT_NONCE) 87 | first_packet = await unbox_stream.read() 88 | assert first_packet == data[:4096] 89 | second_packet = await unbox_stream.read() 90 | assert second_packet == data[4096:] 91 | -------------------------------------------------------------------------------- /ssb/rpc/feed/models.py: -------------------------------------------------------------------------------- 1 | # ssb/rpc/feed/models.py 2 | 3 | # June 2017 (c) Pedro Ferreira 4 | # https://github.com/pferreir/pyssb 5 | 6 | import datetime 7 | from base64 import b64encode 8 | from collections import namedtuple, OrderedDict 9 | from hashlib import sha256 10 | 11 | from json import dumps, loads 12 | 13 | 14 | OrderedMsg = namedtuple('OrderedMsg', ('previous', 'author', 'sequence', 'timestamp', 'hash', 'content')) 15 | 16 | 17 | class NoPrivateKeyException(Exception): 18 | pass 19 | 20 | 21 | def to_ordered(data): 22 | smsg = OrderedMsg(**data) 23 | return OrderedDict((k, getattr(smsg, k)) for k in smsg._fields) 24 | 25 | 26 | def get_millis_1970(): 27 | return int(datetime.datetime.utcnow().timestamp() * 1000) 28 | 29 | 30 | class Feed(object): 31 | def 
__init__(self, public_key): 32 | self.public_key = public_key 33 | 34 | @property 35 | def id(self): 36 | return (b'@' + b64encode(bytes(self.public_key)) + \ 37 | b'.ed25519').decode('ascii') 38 | 39 | def sign(self, msg): 40 | raise NoPrivateKeyException('Cannot use remote identity to sign (no private key!)') 41 | 42 | 43 | class LocalFeed(Feed): 44 | def __init__(self, private_key): 45 | self.private_key = private_key 46 | 47 | @property 48 | def public_key(self): 49 | return self.private_key.verify_key 50 | 51 | def sign(self, msg): 52 | return self.private_key.sign(msg).signature 53 | 54 | 55 | class Message(object): 56 | def __init__(self, feed, content, signature, sequence=1, timestamp=None, previous=None): 57 | self.feed = feed 58 | self.content = content 59 | 60 | if signature is None: 61 | raise ValueError("signature can't be None") 62 | self.signature = signature 63 | 64 | self.previous = previous 65 | if self.previous: 66 | self.sequence = self.previous.sequence + 1 67 | else: 68 | self.sequence = sequence 69 | 70 | self.timestamp = get_millis_1970() if timestamp is None else timestamp 71 | 72 | @classmethod 73 | def parse(cls, data, feed): 74 | obj = loads(data, object_pairs_hook=OrderedDict) 75 | msg = cls(feed, obj['content'], timestamp=obj['timestamp']) 76 | return msg 77 | 78 | def serialize(self, add_signature=True): 79 | return dumps(self.to_dict(add_signature=add_signature), indent=2).encode('utf-8') 80 | 81 | def to_dict(self, add_signature=True): 82 | obj = to_ordered({ 83 | 'previous': self.previous.key if self.previous else None, 84 | 'author': self.feed.id, 85 | 'sequence': self.sequence, 86 | 'timestamp': self.timestamp, 87 | 'hash': 'sha256', 88 | 'content': self.content 89 | }) 90 | 91 | if add_signature: 92 | obj['signature'] = self.signature 93 | return obj 94 | 95 | def verify(self, signature): 96 | return self.signature == signature 97 | 98 | @property 99 | def hash(self): 100 | hash = sha256(self.serialize()).digest() 101 | return b64encode(hash).decode('ascii') + '.sha256' 102 | 103 | @property 104 | def key(self): 105 | return '%' + self.hash 106 | 107 | 108 | class LocalMessage(Message): 109 | def __init__(self, feed, content, signature=None, sequence=1, timestamp=None, previous=None): 110 | self.feed = feed 111 | self.content = content 112 | 113 | self.previous = previous 114 | if self.previous: 115 | self.sequence = self.previous.sequence + 1 116 | else: 117 | self.sequence = sequence 118 | 119 | self.timestamp = get_millis_1970() if timestamp is None else timestamp 120 | 121 | if signature is None: 122 | self.signature = self._sign() 123 | else: 124 | self.signature = signature 125 | 126 | def _sign(self): 127 | # ensure ordering of keys and indentation of 2 characters, like ssb-keys 128 | data = self.serialize(add_signature=False) 129 | return (b64encode(bytes(self.feed.sign(data))) + b'.sig.ed25519').decode('ascii') 130 | -------------------------------------------------------------------------------- /ssb/shs/util.py: -------------------------------------------------------------------------------- 1 | # ssb/shs/util.py 2 | 3 | # Copyright (c) 2017 PySecretHandshake contributors (see AUTHORS for more details) 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy 6 | # of this software and associated documentation files (the "Software"), to deal 7 | # in the Software without restriction, including without limitation the rights 8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | # 
copies of the Software, and to permit persons to whom the Software is 10 | # furnished to do so, subject to the following conditions: 11 | # 12 | # The above copyright notice and this permission notice shall be included in all 13 | # copies or substantial portions of the Software. 14 | # 15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | # SOFTWARE. 22 | 23 | 24 | import struct 25 | from io import BytesIO 26 | 27 | NONCE_SIZE = 24 28 | MAX_NONCE = (8 * NONCE_SIZE) 29 | 30 | 31 | class AsyncBuffer(BytesIO): 32 | """Just a BytesIO with an async read method.""" 33 | async def read(self, n=None): 34 | v = super(AsyncBuffer, self).read(n) 35 | return v 36 | readexactly = read 37 | 38 | def append(self, data): 39 | """Append data to the buffer without changing the current position.""" 40 | pos = self.tell() 41 | self.write(data) 42 | self.seek(pos) 43 | 44 | 45 | async def async_comprehend(generator): 46 | """Emulate ``[elem async for elem in generator]``.""" 47 | results = [] 48 | async for msg in generator: 49 | results.append(msg) 50 | return results 51 | 52 | 53 | def inc_nonce(nonce): 54 | num = bytes_to_long(nonce) + 1 55 | if num > 2 ** MAX_NONCE: 56 | num = 0 57 | bnum = long_to_bytes(num) 58 | bnum = b'\x00' * (NONCE_SIZE - len(bnum)) + bnum 59 | return bnum 60 | 61 | 62 | def split_chunks(seq, n): 63 | """Split sequence in equal-sized chunks. 64 | The last chunk is not padded.""" 65 | while seq: 66 | yield seq[:n] 67 | seq = seq[n:] 68 | 69 | 70 | # Stolen from PyCypto (Public Domain) 71 | def b(s): 72 | return s.encode("latin-1") # utf-8 would cause some side-effects we don't want 73 | 74 | 75 | def long_to_bytes(n, blocksize=0): 76 | """long_to_bytes(n:long, blocksize:int) : string 77 | Convert a long integer to a byte string. 78 | If optional blocksize is given and greater than zero, pad the front of the 79 | byte string with binary zeros so that the length is a multiple of 80 | blocksize. 81 | """ 82 | # after much testing, this algorithm was deemed to be the fastest 83 | s = b('') 84 | pack = struct.pack 85 | while n > 0: 86 | s = pack('>I', n & 0xffffffff) + s 87 | n = n >> 32 88 | # strip off leading zeros 89 | for i in range(len(s)): 90 | if s[i] != b('\000')[0]: 91 | break 92 | else: 93 | # only happens when n == 0 94 | s = b('\000') 95 | i = 0 96 | s = s[i:] 97 | # add back some pad bytes. this could be done more efficiently w.r.t. the 98 | # de-padding being done above, but sigh... 99 | if blocksize > 0 and len(s) % blocksize: 100 | s = (blocksize - len(s) % blocksize) * b('\000') + s 101 | return s 102 | 103 | 104 | def bytes_to_long(s): 105 | """bytes_to_long(string) : long 106 | Convert a byte string to a long integer. 107 | This is (essentially) the inverse of long_to_bytes(). 
108 | """ 109 | acc = 0 110 | unpack = struct.unpack 111 | length = len(s) 112 | if length % 4: 113 | extra = (4 - length % 4) 114 | s = b('\000') * extra + s 115 | length = length + extra 116 | for i in range(0, length, 4): 117 | acc = (acc << 32) + unpack('>I', s[i:i+4])[0] 118 | return acc 119 | -------------------------------------------------------------------------------- /ssb/shs/test_network.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 PySecretHandshake contributors (see AUTHORS for more details) 2 | # 3 | # Permission is hereby granted, free of charge, to any person obtaining a copy 4 | # of this software and associated documentation files (the "Software"), to deal 5 | # in the Software without restriction, including without limitation the rights 6 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | # copies of the Software, and to permit persons to whom the Software is 8 | # furnished to do so, subject to the following conditions: 9 | # 10 | # The above copyright notice and this permission notice shall be included in all 11 | # copies or substantial portions of the Software. 12 | # 13 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | # SOFTWARE. 20 | 21 | import os 22 | from asyncio import Event, wait_for 23 | 24 | import pytest 25 | from nacl.signing import SigningKey 26 | 27 | from secret_handshake.util import AsyncBuffer 28 | 29 | 30 | class DummyCrypto(object): 31 | """Dummy crypto module, pretends everything is fine.""" 32 | def verify_server_challenge(self, data): 33 | return True 34 | 35 | def verify_challenge(self, data): 36 | return True 37 | 38 | def verify_server_accept(self, data): 39 | return True 40 | 41 | def generate_challenge(self): 42 | return b'CHALLENGE' 43 | 44 | def generate_client_auth(self): 45 | return b'AUTH' 46 | 47 | def verify_client_auth(self, data): 48 | return True 49 | 50 | def generate_accept(self): 51 | return b'ACCEPT' 52 | 53 | def get_box_keys(self): 54 | return { 55 | 'encrypt_key': b'x' * 32, 56 | 'encrypt_nonce': b'x' * 32, 57 | 'decrypt_key': b'x' * 32, 58 | 'decrypt_nonce': b'x' * 32 59 | } 60 | 61 | def clean(self): 62 | return 63 | 64 | 65 | def _dummy_boxstream(stream, **kwargs): 66 | """Identity boxstream, no tansformation.""" 67 | return stream 68 | 69 | 70 | def _client_stream_mocker(): 71 | reader = AsyncBuffer(b'xxx') 72 | writer = AsyncBuffer(b'xxx') 73 | 74 | async def _create_mock_streams(host, port): 75 | return reader, writer 76 | 77 | return reader, writer, _create_mock_streams 78 | 79 | 80 | def _server_stream_mocker(): 81 | reader = AsyncBuffer(b'xxx') 82 | writer = AsyncBuffer(b'xxx') 83 | 84 | async def _create_mock_server(cb, host, port): 85 | await cb(reader, writer) 86 | 87 | return reader, writer, _create_mock_server 88 | 89 | 90 | @pytest.mark.asyncio 91 | async def test_client(mocker): 92 | reader, writer, _create_mock_streams = _client_stream_mocker() 93 | mocker.patch('asyncio.open_connection', new=_create_mock_streams) 94 | mocker.patch('secret_handshake.boxstream.BoxStream', 
new=_dummy_boxstream) 95 | mocker.patch('secret_handshake.boxstream.UnboxStream', new=_dummy_boxstream) 96 | 97 | from secret_handshake import SHSClient 98 | 99 | client = SHSClient('shop.local', 1111, SigningKey.generate(), os.urandom(32)) 100 | client.crypto = DummyCrypto() 101 | 102 | await client.open() 103 | reader.append(b'TEST') 104 | assert (await client.read()) == b'TEST' 105 | client.disconnect() 106 | 107 | 108 | @pytest.mark.asyncio 109 | async def test_server(mocker): 110 | from secret_handshake import SHSServer 111 | 112 | resolve = Event() 113 | 114 | async def _on_connect(conn): 115 | server.disconnect() 116 | resolve.set() 117 | 118 | reader, writer, _create_mock_server = _server_stream_mocker() 119 | mocker.patch('asyncio.start_server', new=_create_mock_server) 120 | mocker.patch('secret_handshake.boxstream.BoxStream', new=_dummy_boxstream) 121 | mocker.patch('secret_handshake.boxstream.UnboxStream', new=_dummy_boxstream) 122 | 123 | server = SHSServer('shop.local', 1111, SigningKey.generate(), os.urandom(32)) 124 | server.crypto = DummyCrypto() 125 | 126 | server.on_connect(_on_connect) 127 | 128 | await server.listen() 129 | await wait_for(resolve.wait(), 5) 130 | -------------------------------------------------------------------------------- /ssb/rpc/muxrpc.py: -------------------------------------------------------------------------------- 1 | # ssb/rpc/muxrpc.py 2 | 3 | # June 2017 (c) Pedro Ferreira 4 | # https://github.com/pferreir/pyssb 5 | 6 | from functools import wraps 7 | 8 | from async_generator import async_generator, yield_ 9 | 10 | from ssb.rpc.packet_stream import PSMessageType 11 | 12 | 13 | class MuxRPCAPIException(Exception): 14 | pass 15 | 16 | 17 | class MuxRPCHandler(object): 18 | def check_message(self, msg): 19 | body = msg.body 20 | if isinstance(body, dict) and 'name' in body and body['name'] == 'Error': 21 | raise MuxRPCAPIException(body['message']) 22 | 23 | 24 | class MuxRPCRequestHandler(MuxRPCHandler): 25 | def __init__(self, ps_handler): 26 | self.ps_handler = ps_handler 27 | 28 | def __await__(self): 29 | msg = (yield from self.ps_handler.__await__()) 30 | self.check_message(msg) 31 | return msg 32 | 33 | 34 | class MuxRPCSourceHandler(MuxRPCHandler): 35 | def __init__(self, ps_handler): 36 | self.ps_handler = ps_handler 37 | 38 | @async_generator 39 | async def __aiter__(self): 40 | async for msg in self.ps_handler: 41 | try: 42 | self.check_message(msg) 43 | await yield_(msg) 44 | except MuxRPCAPIException: 45 | raise 46 | 47 | 48 | class MuxRPCSinkHandlerMixin(object): 49 | 50 | def send(self, msg, msg_type=PSMessageType.JSON, end=False): 51 | self.connection.send(msg, stream=True, msg_type=msg_type, req=self.req, end_err=end) 52 | 53 | 54 | class MuxRPCDuplexHandler(MuxRPCSinkHandlerMixin, MuxRPCSourceHandler): 55 | def __init__(self, ps_handler, connection, req): 56 | super(MuxRPCDuplexHandler, self).__init__(ps_handler) 57 | self.connection = connection 58 | self.req = req 59 | 60 | 61 | class MuxRPCSinkHandler(MuxRPCHandler, MuxRPCSinkHandlerMixin): 62 | def __init__(self, connection, req): 63 | self.connection = connection 64 | self.req = req 65 | 66 | 67 | def _get_appropriate_api_handler(type_, connection, ps_handler, req): 68 | if type_ in {'sync', 'async'}: 69 | return MuxRPCRequestHandler(ps_handler) 70 | elif type_ == 'source': 71 | return MuxRPCSourceHandler(ps_handler) 72 | elif type_ == 'sink': 73 | return MuxRPCSinkHandler(connection, req) 74 | elif type_ == 'duplex': 75 | return MuxRPCDuplexHandler(ps_handler, 
connection, req) 76 | 77 | 78 | class MuxRPCRequest(object): 79 | @classmethod 80 | def from_message(cls, message): 81 | body = message.body 82 | return cls('.'.join(body['name']), body['args']) 83 | 84 | def __init__(self, name, args): 85 | self.name = name 86 | self.args = args 87 | 88 | def __repr__(self): 89 | return ''.format(self) 90 | 91 | 92 | class MuxRPCMessage(object): 93 | @classmethod 94 | def from_message(cls, message): 95 | return cls(message.body) 96 | 97 | def __init__(self, body): 98 | self.body = body 99 | 100 | def __repr__(self): 101 | return ''.format(self) 102 | 103 | 104 | class MuxRPCAPI(object): 105 | def __init__(self): 106 | self.handlers = {} 107 | self.connection = None 108 | 109 | async def __await__(self): 110 | async for req_message in self.connection: 111 | if req_message is None or req_message.body is None: 112 | return 113 | body = req_message.body 114 | # if isinstance(body, dict) and body.get('name'): 115 | # self.process(self.connection, MuxRPCRequest.from_message(req_message)) 116 | self.process(self.connection, req_message) 117 | 118 | def add_connection(self, connection, aux = None): 119 | self.connection = connection 120 | self.aux = aux 121 | 122 | def define(self, name): 123 | def _handle(f): 124 | self.handlers[name] = f 125 | 126 | @wraps(f) 127 | def _f(*args, **kwargs): 128 | return f(*args, **kwargs) 129 | return f 130 | return _handle 131 | 132 | #def process(self, connection, request): 133 | # handler = self.handlers.get(request.name) 134 | # if not handler: 135 | # raise MuxRPCAPIException('Method {} not found!'.format(request.name)) 136 | # handler(connection, request, self.aux) 137 | 138 | def process(self, connection, req_message): 139 | # print('.'.join(req_message.body['name'])) 140 | handler = self.handlers.get('.'.join(req_message.body['name'])) 141 | if not handler: 142 | raise MuxRPCAPIException('Method {} not found!'.format(req_message.body['name'][0])) 143 | handler(connection, req_message, self.aux) 144 | 145 | 146 | def call(self, name, args, type_='sync'): 147 | # if not self.connection.is_connected: 148 | # raise Exception('not connected') 149 | old_counter = self.connection.req_counter 150 | ps_handler = self.connection.send({ 151 | 'name': name.split('.'), 152 | 'args': args, 153 | 'type': type_ 154 | }, stream=type_ in {'sink', 'source', 'duplex'}) 155 | return _get_appropriate_api_handler(type_, self.connection, ps_handler, old_counter) 156 | -------------------------------------------------------------------------------- /ssb/rpc/tests/test_feed.py: -------------------------------------------------------------------------------- 1 | from base64 import b64decode 2 | from collections import OrderedDict 3 | 4 | import pytest 5 | from nacl.signing import SigningKey, VerifyKey 6 | 7 | from ssb.feed import LocalMessage, LocalFeed, Feed, Message, NoPrivateKeyException 8 | 9 | 10 | SERIALIZED_M1 = b"""{ 11 | "previous": null, 12 | "author": "@I/4cyN/jPBbDsikbHzAEvmaYlaJK33lW3UhWjNXjyrU=.ed25519", 13 | "sequence": 1, 14 | "timestamp": 1495706260190, 15 | "hash": "sha256", 16 | "content": { 17 | "type": "about", 18 | "about": "@I/4cyN/jPBbDsikbHzAEvmaYlaJK33lW3UhWjNXjyrU=.ed25519", 19 | "name": "neo", 20 | "description": "The Chosen One" 21 | }, 22 | "signature": "lPsQ9P10OgeyH6u0unFgiI2wV/RQ7Q2x2ebxnXYCzsJ055TBMXphRADTKhOMS2EkUxXQ9k3amj5fnWPudGxwBQ==.sig.ed25519" 23 | }""" 24 | 25 | 26 | @pytest.fixture() 27 | def local_feed(): 28 | secret = b64decode('Mz2qkNOP2K6upnqibWrR+z8pVUI1ReA1MLc7QMtF2qQ=') 29 | return 
LocalFeed(SigningKey(secret)) 30 | 31 | 32 | @pytest.fixture() 33 | def remote_feed(): 34 | public = b64decode('I/4cyN/jPBbDsikbHzAEvmaYlaJK33lW3UhWjNXjyrU=') 35 | return Feed(VerifyKey(public)) 36 | 37 | 38 | def test_local_feed(): 39 | secret = b64decode('Mz2qkNOP2K6upnqibWrR+z8pVUI1ReA1MLc7QMtF2qQ=') 40 | feed = LocalFeed(SigningKey(secret)) 41 | assert bytes(feed.private_key) == secret 42 | assert bytes(feed.public_key) == b64decode('I/4cyN/jPBbDsikbHzAEvmaYlaJK33lW3UhWjNXjyrU=') 43 | assert feed.id == '@I/4cyN/jPBbDsikbHzAEvmaYlaJK33lW3UhWjNXjyrU=.ed25519' 44 | 45 | 46 | def test_remote_feed(): 47 | public = b64decode('I/4cyN/jPBbDsikbHzAEvmaYlaJK33lW3UhWjNXjyrU=') 48 | feed = Feed(VerifyKey(public)) 49 | assert bytes(feed.public_key) == public 50 | assert feed.id == '@I/4cyN/jPBbDsikbHzAEvmaYlaJK33lW3UhWjNXjyrU=.ed25519' 51 | 52 | m1 = Message(feed, OrderedDict([ 53 | ('type', 'about'), 54 | ('about', feed.id), 55 | ('name', 'neo'), 56 | ('description', 'The Chosen One') 57 | ]), 'foo', timestamp=1495706260190) 58 | 59 | with pytest.raises(NoPrivateKeyException): 60 | feed.sign(m1) 61 | 62 | 63 | def test_local_message(local_feed): 64 | m1 = LocalMessage(local_feed, OrderedDict([ 65 | ('type', 'about'), 66 | ('about', local_feed.id), 67 | ('name', 'neo'), 68 | ('description', 'The Chosen One') 69 | ]), timestamp=1495706260190) 70 | assert m1.timestamp == 1495706260190 71 | assert m1.previous is None 72 | assert m1.sequence == 1 73 | assert m1.signature == \ 74 | 'lPsQ9P10OgeyH6u0unFgiI2wV/RQ7Q2x2ebxnXYCzsJ055TBMXphRADTKhOMS2EkUxXQ9k3amj5fnWPudGxwBQ==.sig.ed25519' 75 | assert m1.key == '%xRDqws/TrQmOd4aEwZ32jdLhP873ZKjIgHlggPR0eoo=.sha256' 76 | 77 | m2 = LocalMessage(local_feed, OrderedDict([ 78 | ('type', 'about'), 79 | ('about', local_feed.id), 80 | ('name', 'morpheus'), 81 | ('description', 'Dude with big jaw') 82 | ]), previous=m1, timestamp=1495706447426) 83 | assert m2.timestamp == 1495706447426 84 | assert m2.previous is m1 85 | assert m2.sequence == 2 86 | assert m2.signature == \ 87 | '3SY85LX6/ppOfP4SbfwZbKfd6DccbLRiB13pwpzbSK0nU52OEJxOqcJ2Uensr6RkrWztWLIq90sNOn1zRAoOAw==.sig.ed25519' 88 | assert m2.key == '%nx13uks5GUwuKJC49PfYGMS/1pgGTtwwdWT7kbVaroM=.sha256' 89 | 90 | 91 | def test_remote_message(remote_feed): 92 | signature = 'lPsQ9P10OgeyH6u0unFgiI2wV/RQ7Q2x2ebxnXYCzsJ055TBMXphRADTKhOMS2EkUxXQ9k3amj5fnWPudGxwBQ==.sig.ed25519' 93 | m1 = Message(remote_feed, OrderedDict([ 94 | ('type', 'about'), 95 | ('about', remote_feed.id), 96 | ('name', 'neo'), 97 | ('description', 'The Chosen One') 98 | ]), signature, timestamp=1495706260190) 99 | assert m1.timestamp == 1495706260190 100 | assert m1.previous is None 101 | assert m1.sequence == 1 102 | assert m1.signature == signature 103 | assert m1.key == '%xRDqws/TrQmOd4aEwZ32jdLhP873ZKjIgHlggPR0eoo=.sha256' 104 | 105 | signature = '3SY85LX6/ppOfP4SbfwZbKfd6DccbLRiB13pwpzbSK0nU52OEJxOqcJ2Uensr6RkrWztWLIq90sNOn1zRAoOAw==.sig.ed25519' 106 | m2 = Message(remote_feed, OrderedDict([ 107 | ('type', 'about'), 108 | ('about', remote_feed.id), 109 | ('name', 'morpheus'), 110 | ('description', 'Dude with big jaw') 111 | ]), signature, previous=m1, timestamp=1495706447426) 112 | assert m2.timestamp == 1495706447426 113 | assert m2.previous is m1 114 | assert m2.sequence == 2 115 | assert m2.signature == signature 116 | m2.verify(signature) 117 | assert m2.key == '%nx13uks5GUwuKJC49PfYGMS/1pgGTtwwdWT7kbVaroM=.sha256' 118 | 119 | 120 | def test_remote_no_signature(remote_feed): 121 | with pytest.raises(ValueError): 122 | Message(remote_feed, 
OrderedDict([ 123 | ('type', 'about'), 124 | ('about', remote_feed.id), 125 | ('name', 'neo'), 126 | ('description', 'The Chosen One') 127 | ]), None, timestamp=1495706260190) 128 | 129 | 130 | def test_serialize(local_feed): 131 | m1 = LocalMessage(local_feed, OrderedDict([ 132 | ('type', 'about'), 133 | ('about', local_feed.id), 134 | ('name', 'neo'), 135 | ('description', 'The Chosen One') 136 | ]), timestamp=1495706260190) 137 | 138 | assert m1.serialize() == SERIALIZED_M1 139 | 140 | 141 | def test_parse(local_feed): 142 | m1 = LocalMessage.parse(SERIALIZED_M1, local_feed) 143 | assert m1.content == { 144 | 'type': 'about', 145 | 'about': local_feed.id, 146 | 'name': 'neo', 147 | 'description': 'The Chosen One' 148 | } 149 | assert m1.timestamp == 1495706260190 150 | -------------------------------------------------------------------------------- /ssb/adt/tangle.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # ssb/adt/tangle.py 4 | 5 | import copy 6 | 7 | import ssb.local.worm 8 | 9 | class SSB_TANGLE: 10 | 11 | def __init__(self, worm, baseName=None, use=None, salt=None, drv=None): 12 | self.worm = worm 13 | if not baseName: # create a new tangle 14 | m = {'type': 'tangle', 'height': 0} 15 | if use: 16 | m['use'] = use 17 | # FIXME: should these entries be in the tangle record *content*? 18 | if salt: 19 | m['salt'] = salt 20 | if drv: 21 | m['drvref'] = drv 22 | key = worm.writeMsg(m) 23 | worm.flush() 24 | self.base = [worm.id, key] 25 | else: 26 | self.base = baseName 27 | self.tips, self.height = self._getTips() 28 | 29 | def _getTips(self, stop=None): 30 | # print("searching", self.base[1]) 31 | # search the log backwards for any tangle msg for 'base' 32 | allx = [] 33 | for k in self.worm: 34 | if k == self.base[1]: 35 | allx.append(k) 36 | continue 37 | msg = self.worm.readMsg(k) 38 | if msg == None: 39 | # print("no msg for", key) 40 | continue 41 | tan = msg['value']['content'] 42 | if not isinstance(tan,dict) or tan['type'] != 'tangle': 43 | continue 44 | if 'base' in tan: 45 | # print(k, tan['base']) 46 | if tan['base'][1] == self.base[1]: 47 | allx.append(k) 48 | tips = copy.copy(allx) 49 | for k in allx: 50 | msg = self.worm.readMsg(k) 51 | if msg == None: 52 | # print("no msg for", key) 53 | continue 54 | tan = msg['value']['content'] 55 | if 'base' in tan and tan['base'][1] in tips: 56 | tips.remove(tan['base'][1]) 57 | if 'previous' in tan: 58 | for p in tan['previous']: 59 | if p[1] in tips: 60 | tips.remove(p[1]) 61 | maxH = 0 62 | for k in tips: 63 | msg = self.worm.readMsg(k) 64 | h = msg['value']['content']['height'] 65 | if h > maxH: 66 | maxH = h 67 | allx = tips 68 | tips = [] 69 | for k in allx: 70 | msg = self.worm.readMsg(k) 71 | tan = msg['value']['content'] 72 | tips.append( (msg['value']['author'], k, tan['height']) ) 73 | # print(tips, maxH) 74 | return (tips, maxH) 75 | 76 | def getBaseRef(self): 77 | return self.base 78 | 79 | def append(self, content, previous=None): 80 | if self.tips is None: 81 | raise Exception("can't find tangle") 82 | # print("append, #tips is", len(self.tips), "/ height", type(self.height)) 83 | msg = { 84 | 'type' : 'tangle', 85 | 'base' : self.base, 86 | # 'height' : self.height + 1, 87 | 'content' : content 88 | } 89 | if previous is None: 90 | previous = self.tips[:3] 91 | msg['previous'] = previous # merge up to three branches 92 | msg['height' ] = self.height + 1 93 | else: 94 | msg['previous'] = [previous] # only point to one branch 95 | 
msg['height' ] = self.worm.readMsg(previous[1])['value']['content']['height'] + 1 96 | previous = [] 97 | ref = [self.worm.id, self.worm.writeMsg(msg)] 98 | self.tips = self.tips[len(previous):] 99 | self.tips.append(ref) 100 | self.height += 1 101 | # print(" #tips now is", len(self.tips), "/ height", self.height) 102 | # for t in self.tips: 103 | # print(" ", t[1]) 104 | return ref 105 | 106 | def __iter__(self): 107 | return SSB_TANGLE_ITER(self.worm, self.tips) 108 | 109 | def refresh(self): 110 | self.worm.flush() 111 | self.worm.refresh() 112 | self.tips, self.height = self._getTips(self.tips) 113 | 114 | 115 | class SSB_TANGLE_ITER: 116 | 117 | def __init__(self, worm, tips): 118 | self.worm = worm 119 | self.front = [ (k[1], self.worm.readMsg(k[1])['value']['content']) 120 | for k in tips ] 121 | self.expanded = [] 122 | 123 | def __iter__(self): 124 | return self 125 | 126 | def __next__(self): 127 | while len(self.front) > 0: 128 | # find highest element 129 | self.front = sorted(self.front, 130 | key=lambda e: float("%d.%d" % \ 131 | (e[1]['height'], 132 | ssb.local.worm._hthash(e[0])))) 133 | k, m = self.front.pop() 134 | self.expanded.append(k) 135 | if 'previous' in m: # don't return the genesis node 136 | for p in m['previous']: 137 | if p[1] in self.expanded: 138 | continue 139 | m2 = self.worm.readMsg(p[1]) 140 | if not m2: 141 | continue 142 | e = (p[1], m2['value']['content']) 143 | if not e in self.front: 144 | self.front.append(e) 145 | return k 146 | raise(StopIteration) 147 | 148 | 149 | # --------------------------------------------------------------------------- 150 | if __name__ == '__main__' : 151 | 152 | pass 153 | 154 | # eof 155 | 156 | -------------------------------------------------------------------------------- /ssb/shs/network.py: -------------------------------------------------------------------------------- 1 | # ssb/shs/network.py 2 | 3 | # Copyright (c) 2017 PySecretHandshake contributors (see AUTHORS for more details) 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy 6 | # of this software and associated documentation files (the "Software"), to deal 7 | # in the Software without restriction, including without limitation the rights 8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | # copies of the Software, and to permit persons to whom the Software is 10 | # furnished to do so, subject to the following conditions: 11 | # 12 | # The above copyright notice and this permission notice shall be included in all 13 | # copies or substantial portions of the Software. 14 | # 15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | # SOFTWARE. 
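# The tangle ADT in ssb/adt/tangle.py (above) builds a DAG on top of an
# append-only log ("worm"): the constructor writes a genesis record of height
# 0, append() links a new record to up to three current tips and sets its
# height to the highest referenced tip plus one, and iteration walks the DAG
# from the tips downward by height, skipping the genesis record.  The sketch
# below is a minimal usage illustration; _FakeWorm is hypothetical and exposes
# only the calls SSB_TANGLE actually makes, whereas a real log object comes
# from ssb.local.worm.

def _tangle_usage_sketch():
    class _FakeWorm:
        """Hypothetical in-memory stand-in for the append-only log."""
        def __init__(self, author='@demo.ed25519'):
            self.id, self._log, self._n = author, {}, 0
        def writeMsg(self, content):
            self._n += 1
            key = '%demo-{}.sha256'.format(self._n)
            self._log[key] = {'value': {'author': self.id, 'content': content}}
            return key
        def readMsg(self, key):
            return self._log.get(key)
        def flush(self):
            pass
        def refresh(self):
            pass
        def __iter__(self):
            return iter(self._log)

    from ssb.adt.tangle import SSB_TANGLE
    tangle = SSB_TANGLE(_FakeWorm())        # writes the genesis record (height 0)
    a = tangle.append({'note': 'first'})    # height 1, previous = [genesis tip]
    b = tangle.append({'note': 'second'})   # height 2, previous = [a]
    return tangle.getBaseRef(), a, b, tangle.height   # height is now 2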
22 | 23 | 24 | import asyncio 25 | 26 | from async_generator import async_generator, yield_ 27 | 28 | from .boxstream import get_stream_pair 29 | from .crypto import SHSClientCrypto, SHSServerCrypto 30 | 31 | 32 | class SHSClientException(Exception): 33 | pass 34 | 35 | 36 | class SHSDuplexStream(object): 37 | def __init__(self): 38 | self.write_stream = None 39 | self.read_stream = None 40 | self.is_connected = False 41 | 42 | def write(self, data): 43 | self.write_stream.write(data) 44 | 45 | async def read(self): 46 | return await self.read_stream.read() 47 | 48 | def close(self): 49 | self.write_stream.close() 50 | self.read_stream.close() 51 | self.is_connected = False 52 | 53 | @async_generator 54 | async def __aiter__(self): 55 | async for msg in self.read_stream: 56 | await yield_(msg) 57 | 58 | 59 | class SHSEndpoint(object): 60 | def __init__(self): 61 | self._on_connect = None 62 | self.crypto = None 63 | 64 | def on_connect(self, cb): 65 | self._on_connect = cb 66 | 67 | def disconnect(self): 68 | raise NotImplementedError 69 | 70 | 71 | class SHSServer(SHSEndpoint): 72 | def __init__(self, host, port, server_kp, application_key=None, sess=None): 73 | super(SHSServer, self).__init__() 74 | self.host = host 75 | self.port = port 76 | self.sess = sess 77 | self.crypto = SHSServerCrypto(server_kp, application_key=application_key) 78 | self.connections = [] 79 | 80 | async def _handshake(self, reader, writer): 81 | data = await reader.readexactly(64) 82 | if not self.crypto.verify_challenge(data): 83 | raise SHSClientException('Client challenge is not valid') 84 | 85 | writer.write(self.crypto.generate_challenge()) 86 | 87 | data = await reader.readexactly(112) 88 | if not self.crypto.verify_client_auth(data): 89 | raise SHSClientException('Client auth is not valid') 90 | 91 | writer.write(self.crypto.generate_accept()) 92 | 93 | async def handle_connection(self, reader, writer): 94 | self.crypto.clean() 95 | await self._handshake(reader, writer) 96 | keys = self.crypto.get_box_keys() 97 | self.crypto.clean() 98 | 99 | conn = SHSServerConnection.from_byte_streams(reader, writer, **keys) 100 | self.connections.append(conn) 101 | 102 | if self._on_connect: 103 | asyncio.ensure_future(self._on_connect(conn, self.sess)) 104 | 105 | async def listen(self): 106 | await asyncio.start_server(self.handle_connection, self.host, self.port) 107 | 108 | def disconnect(self): 109 | for connection in self.connections: 110 | connection.close() 111 | 112 | 113 | class SHSServerConnection(SHSDuplexStream): 114 | def __init__(self, read_stream, write_stream): 115 | super(SHSServerConnection, self).__init__() 116 | self.read_stream = read_stream 117 | self.write_stream = write_stream 118 | 119 | @classmethod 120 | def from_byte_streams(cls, reader, writer, **keys): 121 | reader, writer = get_stream_pair(reader, writer, **keys) 122 | return cls(reader, writer) 123 | 124 | 125 | class SHSClient(SHSDuplexStream, SHSEndpoint): 126 | def __init__(self, host, port, client_kp, server_pub_key, ephemeral_key=None, application_key=None): 127 | SHSDuplexStream.__init__(self) 128 | SHSEndpoint.__init__(self) 129 | self.host = host 130 | self.port = port 131 | self.crypto = SHSClientCrypto(client_kp, server_pub_key, ephemeral_key=ephemeral_key, 132 | application_key=application_key) 133 | 134 | async def _handshake(self, reader, writer): 135 | writer.write(self.crypto.generate_challenge()) 136 | 137 | data = await reader.readexactly(64) 138 | if not self.crypto.verify_server_challenge(data): 139 | raise 
SHSClientException('Server challenge is not valid') 140 | 141 | writer.write(self.crypto.generate_client_auth()) 142 | 143 | data = await reader.readexactly(80) 144 | if not self.crypto.verify_server_accept(data): 145 | raise SHSClientException('Server accept is not valid') 146 | 147 | async def open(self): 148 | reader, writer = await asyncio.open_connection(self.host, self.port) 149 | await self._handshake(reader, writer) 150 | 151 | keys = self.crypto.get_box_keys() 152 | self.crypto.clean() 153 | 154 | self.read_stream, self.write_stream = get_stream_pair(reader, writer, **keys) 155 | self.writer = writer 156 | self.is_connected = True 157 | if self._on_connect: 158 | await self._on_connect() 159 | 160 | def disconnect(self): 161 | self.close() 162 | -------------------------------------------------------------------------------- /ssb/shs/test_crypto.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 PySecretHandshake contributors (see AUTHORS for more details) 2 | # 3 | # Permission is hereby granted, free of charge, to any person obtaining a copy 4 | # of this software and associated documentation files (the "Software"), to deal 5 | # in the Software without restriction, including without limitation the rights 6 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | # copies of the Software, and to permit persons to whom the Software is 8 | # furnished to do so, subject to the following conditions: 9 | # 10 | # The above copyright notice and this permission notice shall be included in all 11 | # copies or substantial portions of the Software. 12 | # 13 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | # SOFTWARE. 
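# Wire-level shape of the handshake driven by ssb/shs/network.py (above): the
# client sends a 64-byte challenge, the server verifies it and answers with
# its own 64-byte challenge, the client then sends a 112-byte authentication
# message, and the server finishes with an 80-byte accept; both sides then
# derive the box-stream keys and nonces via get_box_keys() and wrap the TCP
# streams in BoxStream/UnboxStream.  A minimal client-side sketch follows;
# host, port and keys are placeholders, client_key is a nacl SigningKey and
# server_pub_key the server's 32-byte public key, as in the SHSClient
# constructor.

async def _shs_client_sketch(host, port, client_key, server_pub_key):
    from ssb.shs import SHSClient

    client = SHSClient(host, port, client_key, server_pub_key)
    await client.open()           # runs the challenge / auth / accept exchange
    client.write(b'ping')         # transparently boxed on the way out
    reply = await client.read()   # and unboxed on the way in
    return reply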
20 | 21 | 22 | import hashlib 23 | 24 | import pytest 25 | from nacl.public import PrivateKey 26 | from nacl.signing import SigningKey 27 | 28 | from secret_handshake.crypto import SHSClientCrypto, SHSServerCrypto 29 | 30 | APP_KEY = hashlib.sha256(b'app_key').digest() 31 | SERVER_KEY_SEED = b'\xcaw\x01\xc2cQ\xfd\x94\x9f\x14\x84\x0c0\x12\\\x96\xcd\x9b\x0c\x02z&\x96!\xe0\xa2' 32 | CLIENT_KEY_SEED = b'\xbf\x02<\xd3e\x9d\xac-\xd1\x9e-{\xe5q\x90\x03\x11\xba\x8cSQ\xa0\xc3p~\x89\xe6\xeeb\xaa\x1c\x17' 33 | SERVER_EPH_KEY_SEED = b"ed\x1c\x01\x03s\x04\xdc\x8e`\xd6Z\xd0u;\xcbX\x91\xd8ZO\xf8\xf0\xd6'\xd5\xb1Yy\x13yH" 34 | CLIENT_EPH_KEY_SEED = b'u8\xd0\xe3\x85d_Pz\x0c\xf5\xfd\x15\xce2p#\xb0\xf0\x9f\xe6!\xe1\xcb\xf6\x93\t\xebr{1\x8b' 35 | 36 | 37 | @pytest.fixture() 38 | def server(): 39 | server_key = SigningKey(SERVER_KEY_SEED) 40 | server_eph_key = PrivateKey(SERVER_EPH_KEY_SEED) 41 | return SHSServerCrypto(server_key, server_eph_key, application_key=APP_KEY) 42 | 43 | 44 | @pytest.fixture() 45 | def client(): 46 | client_key = SigningKey(CLIENT_KEY_SEED) 47 | server_key = SigningKey(SERVER_KEY_SEED) 48 | client_eph_key = PrivateKey(CLIENT_EPH_KEY_SEED) 49 | return SHSClientCrypto(client_key, bytes(server_key.verify_key), client_eph_key, application_key=APP_KEY) 50 | 51 | 52 | CLIENT_CHALLENGE = (b'd\xe8\xccD\xec\xb9E\xbb\xaa\xa7\x7f\xe38\x15\x16\xef\xca\xd22u\x1d\xfe<\xe7j' 53 | b'\xd7\xf0uc\xf0r\xf3\x7f\t\x18\xec\x8c\xf7\xff\x8e\xa9\xc83\x13\x18R\x16\x1d' 54 | b'\xe5\xc6K\xae\x94\xdbVt\x84\xdc\x1c@+D\x1c%') 55 | CLIENT_AUTH = (b'\xf2\xaf?z\x15\x10\xd0\xf0\xdf\xe3\x91\xfe\x14\x1c}z\xab\xeey\xf5\xef\xfc\xa1EdV\xf2T\x95s[!$z' 56 | b'\xeb\x8f\x1b\x96JP\x17^\x92\xc8\x9e\xb4*5`\xf2\x8fI.\x93\xb9\x14:\xca@\x06\xff\xd1\xf1J\xc8t\xc4' 57 | b'\xd8\xc3$[\xc5\x94je\x83\x00%\x99\x10\x16\xb1\xa2\xb2\xb7\xbf\xc9\x88\x14\xb9\xbb^\tzq\xa4\xef\xc5' 58 | b'\xf5\x1f7#\xed\x92X\xb2\xe3\xe5\x8b[t3') 59 | SERVER_CHALLENGE = (b'S\\\x06\x8d\xe5\xeb&*\xb8\x0bp\xb3Z\x8e\\\x85\x14\xaa\x1c\x8di\x9d\x7f\xa9\xeawl\xb9}\x85\xc3ik' 60 | b'\x0c ($E\xb4\x8ax\xc4)t<\xd7\x8b\xd6\x07\xb7\xecw\x84\r\xe1-Iz`\xeb\x04\x89\xd6{') 61 | SERVER_ACCEPT = (b'\xb4\xd0\xea\xfb\xfb\xf6s\xcc\x10\xc4\x99\x95"\x13 y\xa6\xea.G\xeed\x8d=t9\x88|\x94\xd1\xbcK\xd47' 62 | b'\xd8\xbcG1h\xac\xd0\xeb*\x1f\x8d\xae\x0b\x91G\xa1\xe6\x96b\xf2\xda90u\xeb_\xab\xdb\xcb%d7}\xb5\xce' 63 | b'(k\x15\xe3L\x9d)\xd5\xa1|:') 64 | INTER_SHARED_SECRET = (b'vf\xd82\xaeU\xda]\x08\x9eZ\xd6\x06\xcc\xd3\x99\xfd\xce\xc5\x16e8n\x9a\x04\x04\x84\xc5\x1a' 65 | b'\x8f\xf2M') 66 | BOX_SECRET = b'\x03\xfe\xe3\x8c u\xbcl^\x17eD\x96\xa3\xa6\x880f\x11\x7f\x85\xf2:\xa3[`\x06[#l\xbcr' 67 | 68 | SHARED_SECRET = b'UV\xad*\x8e\xce\x88\xf2\x87l\x13iZ\x12\xd7\xa6\xd1\x9c-\x9d\x07\xf5\xa96\x03w\x11\xe5\x96$m\x1d' 69 | CLIENT_ENCRYPT_KEY = (b'\xec\x1f,\x82\x9f\xedA\xc0\xda\x87[\xf9u\xbf\xac\x9cI\xa5T\xd1\x91\xff\xa8.\xd0 \xfbU\xc7\x14' 70 | b')\xc7') 71 | CLIENT_DECRYPT_KEY = b'\xf9e\xa0As\xb2=\xb7P~\xf3\xf9(\xfd\x7f\xfe\xb7TZhn\xd7\x8c=\xea.o\x9e\x8c9)\x10' 72 | CLIENT_ENCRYPT_NONCE = b'S\\\x06\x8d\xe5\xeb&*\xb8\x0bp\xb3Z\x8e\\\x85\x14\xaa\x1c\x8di\x9d\x7f\xa9' 73 | CLIENT_DECRYPT_NONCE = b'd\xe8\xccD\xec\xb9E\xbb\xaa\xa7\x7f\xe38\x15\x16\xef\xca\xd22u\x1d\xfe<\xe7' 74 | 75 | 76 | def test_handshake(client, server): 77 | client_challenge = client.generate_challenge() 78 | assert client_challenge == CLIENT_CHALLENGE 79 | assert server.verify_challenge(client_challenge) 80 | 81 | server_challenge = server.generate_challenge() 82 | assert server_challenge == SERVER_CHALLENGE 83 | assert 
client.verify_server_challenge(server_challenge) 84 | 85 | assert client.shared_secret == INTER_SHARED_SECRET 86 | 87 | client_auth = client.generate_client_auth() 88 | assert client_auth == CLIENT_AUTH 89 | assert server.verify_client_auth(client_auth) 90 | 91 | assert server.shared_secret == client.shared_secret 92 | 93 | server_accept = server.generate_accept() 94 | assert server_accept == SERVER_ACCEPT 95 | assert client.verify_server_accept(server_accept) 96 | 97 | assert client.box_secret == BOX_SECRET 98 | assert client.box_secret == server.box_secret 99 | 100 | client_keys = client.get_box_keys() 101 | server_keys = server.get_box_keys() 102 | 103 | assert client_keys['shared_secret'] == SHARED_SECRET 104 | assert client_keys['encrypt_key'] == CLIENT_ENCRYPT_KEY 105 | assert client_keys['decrypt_key'] == CLIENT_DECRYPT_KEY 106 | assert client_keys['encrypt_nonce'] == CLIENT_ENCRYPT_NONCE 107 | assert client_keys['decrypt_nonce'] == CLIENT_DECRYPT_NONCE 108 | 109 | assert client_keys['shared_secret'] == server_keys['shared_secret'] 110 | assert client_keys['encrypt_key'] == server_keys['decrypt_key'] 111 | assert client_keys['encrypt_nonce'] == server_keys['decrypt_nonce'] 112 | -------------------------------------------------------------------------------- /ssb/rpc/packet_stream.py: -------------------------------------------------------------------------------- 1 | # ssb/rpc/packet_stream.py 2 | 3 | # June 2017 (c) Pedro Ferreira 4 | # https://github.com/pferreir/pyssb 5 | 6 | from asyncio import Event, Queue 7 | from enum import Enum 8 | import logging 9 | import struct 10 | from time import time 11 | 12 | import json 13 | from async_generator import async_generator, yield_ 14 | 15 | from ssb.shs import SHSClient, SHSServer 16 | 17 | 18 | logger = logging.getLogger('packet_stream') 19 | 20 | 21 | class PSMessageType(Enum): 22 | BUFFER = 0 23 | TEXT = 1 24 | JSON = 2 25 | 26 | 27 | class PSStreamHandler(object): 28 | def __init__(self, req): 29 | super(PSStreamHandler).__init__() 30 | self.req = req 31 | self.queue = Queue() 32 | 33 | async def process(self, msg): 34 | await self.queue.put(msg) 35 | 36 | async def stop(self): 37 | await self.queue.put(None) 38 | 39 | @async_generator 40 | async def __aiter__(self): 41 | while True: 42 | elem = await self.queue.get() 43 | if not elem: 44 | return 45 | await yield_(elem) 46 | 47 | 48 | class PSRequestHandler(object): 49 | def __init__(self, req): 50 | super(PSRequestHandler).__init__() 51 | self.req = req 52 | self.event = Event() 53 | self._msg = None 54 | 55 | async def process(self, msg): 56 | self._msg = msg 57 | self.event.set() 58 | 59 | async def stop(self): 60 | if not self.event.is_set(): 61 | self.event.set() 62 | 63 | def __await__(self): 64 | # wait until 'process' is called 65 | yield from self.event.wait() 66 | return self._msg 67 | 68 | 69 | class PSMessage(object): 70 | 71 | @classmethod 72 | def from_header_body(cls, flags, req, body): 73 | type_ = PSMessageType(flags & 0x03) 74 | 75 | if type_ == PSMessageType.TEXT: 76 | body = body.decode('utf-8') 77 | elif type_ == PSMessageType.JSON: 78 | body = json.loads(body) 79 | 80 | return cls(type_, body, bool(flags & 0x08), bool(flags & 0x04), req=req) 81 | 82 | @property 83 | def data(self): 84 | if self.type == PSMessageType.TEXT: 85 | return self.body.encode('utf-8') 86 | elif self.type == PSMessageType.JSON: 87 | return json.dumps(self.body, ensure_ascii=False).encode('utf-8') 88 | return self.body 89 | 90 | def __init__(self, type_, body, stream, end_err, 
req=None): 91 | self.stream = stream 92 | self.end_err = end_err 93 | self.type = type_ 94 | self.body = body 95 | self.req = req 96 | 97 | def __repr__(self): 98 | if self.type == PSMessageType.BUFFER: 99 | body = '{} bytes'.format(len(self.body)) 100 | else: 101 | body = self.body 102 | return ''.format(self.type.name, body, 103 | '' if self.req is None else ' [{}]'.format(self.req), 104 | '~' if self.stream else '', '!' if self.end_err else '') 105 | 106 | 107 | class PacketStream(object): 108 | def __init__(self, connection): 109 | self.connection = connection 110 | self.req_counter = 1 111 | self._event_map = {} 112 | 113 | def register_handler(self, handler): 114 | self._event_map[handler.req] = (time(), handler) 115 | 116 | @property 117 | def is_connected(self): 118 | return self.connection.is_connected 119 | 120 | @async_generator 121 | async def __aiter__(self): 122 | while True: 123 | msg = await self.read() 124 | if not msg: 125 | return 126 | # filter out replies 127 | if msg.req >= 0: 128 | await yield_(msg) 129 | 130 | async def __await__(self): 131 | async for data in self: 132 | logger.info('RECV: %r', data) 133 | if data is None: 134 | return 135 | 136 | async def _read(self): 137 | try: 138 | header = await self.connection.read() 139 | if not header: 140 | return 141 | flags, length, req = struct.unpack('>BIi', header) 142 | 143 | n_packets = length // 4096 + 1 144 | 145 | body = b'' 146 | for n in range(n_packets): 147 | body += await self.connection.read() 148 | 149 | logger.debug('READ %s %s', header, len(body)) 150 | return PSMessage.from_header_body(flags, req, body) 151 | except StopAsyncIteration: 152 | logger.debug('DISCONNECT') 153 | self.connection.disconnect() 154 | return None 155 | 156 | async def read(self): 157 | msg = await self._read() 158 | if not msg: 159 | return None 160 | # check whether it's a reply and handle accordingly 161 | if msg.req < 0: 162 | # print(msg) 163 | t, handler = self._event_map[-msg.req] 164 | await handler.process(msg) 165 | logger.info('RESPONSE [%d]: %r', -msg.req, msg) 166 | if msg.end_err: 167 | await handler.stop() 168 | del self._event_map[-msg.req] 169 | logger.debug('RESPONSE [%d]: EOS', -msg.req) 170 | return msg 171 | 172 | def _write(self, msg): 173 | logger.info('SEND [%d]: %r', msg.req, msg) 174 | header = struct.pack('>BIi', (int(msg.stream) << 3) | (int(msg.end_err) << 2) | msg.type.value, len(msg.data), 175 | msg.req) 176 | self.connection.write(header) 177 | self.connection.write(msg.data) 178 | logger.debug('WRITE HDR: %s', header) 179 | logger.debug('WRITE DATA: %s', msg.data) 180 | 181 | def send(self, data, msg_type=PSMessageType.JSON, stream=False, end_err=False, req=None): 182 | update_counter = False 183 | if req is None: 184 | update_counter = True 185 | req = self.req_counter 186 | 187 | msg = PSMessage(msg_type, data, stream=stream, end_err=end_err, req=req) 188 | 189 | # send request 190 | self._write(msg) 191 | 192 | if stream: 193 | handler = PSStreamHandler(self.req_counter) 194 | else: 195 | handler = PSRequestHandler(self.req_counter) 196 | self.register_handler(handler) 197 | 198 | if update_counter: 199 | self.req_counter += 1 200 | return handler 201 | 202 | def disconnect(self): 203 | self._connected = False 204 | self.connection.disconnect() 205 | -------------------------------------------------------------------------------- /ssb/local/config.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # ssb_local/config.py 4 | 5 | 
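# This module manages the local SSB identity: it loads (or creates) the JSON
# 'secret' file holding the ed25519 keypair, exposes signing through the
# SSB_SECRET class, and implements the private-box scheme used for encrypted
# messages (boxPrivateData / unboxPrivateData). Run it directly to create
# local test users (-new), list them (-list), or make two users follow each
# other (-friends), as shown in the README's local demo steps.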
import base64 6 | import json 7 | import nacl.signing 8 | import os 9 | import sys 10 | 11 | def username2dir(n): 12 | ssb_home = os.path.expanduser('~/.ssb') 13 | if not n: 14 | return ssb_home 15 | return os.path.join(ssb_home, 'user.' + n) 16 | 17 | def id2bytes(id): 18 | return base64.b64decode(id.split('.')[0][1:]) 19 | 20 | def verify_signature(id, data, sig): 21 | if type(data) == str: 22 | data = data.encode('utf8') 23 | vk = nacl.signing.VerifyKey(base64.b64decode(id[1:-8])) 24 | try: 25 | vk.verify(data, sig) 26 | return True 27 | except: 28 | return False 29 | 30 | def load_ssb_secret(fname=None): 31 | if not fname: 32 | fname = os.path.expanduser('~/.ssb/secret') 33 | with open(fname, 'r') as f: 34 | s = json.loads('\n'.join([l for l in f.read().split('\n') \ 35 | if len(l) > 0 and l[0] != '#'])) 36 | return s 37 | 38 | # ---------------------------------------------------------------------- 39 | 40 | secret_prologue = """# this is your SECRET name. 41 | # this name gives you magical powers. 42 | # with it you can mark your messages so that your friends can verify 43 | # that they really did come from you. 44 | # 45 | # if any one learns this name, they can use it to destroy your identity 46 | # NEVER show this to anyone!!! 47 | 48 | """ 49 | 50 | secret_epilogue = """ 51 | 52 | # WARNING! It's vital that you DO NOT edit OR share your secret name 53 | # instead, share your public name 54 | # your public name: """ 55 | 56 | # ---------------------------------------------------------------------- 57 | 58 | def create_new_user_secret(path): 59 | sik = nacl.signing.SigningKey.generate() 60 | s = { 61 | 'curve' : 'ed25519', 62 | 'private' : base64.b64encode(sik._signing_key).decode('ascii') \ 63 | + '.ed25519', 64 | 'public' : base64.b64encode(sik.verify_key._key).decode('ascii') \ 65 | + '.ed25519' 66 | } 67 | s['id'] = '@' + s['public'] 68 | with open(path, "w") as f: 69 | f.write(secret_prologue) 70 | f.write(json.dumps(s, indent=2)) 71 | f.write(secret_epilogue + (s['id'] + '\n')) 72 | 73 | class SSB_SECRET: 74 | 75 | def __init__(self, username=None, create=False): 76 | dirname = username2dir(username) 77 | fname = os.path.join(dirname, 'secret') 78 | if not os.path.isfile(fname): 79 | if not create: 80 | raise Exception("no file with secret") 81 | create_new_user_secret(fname) 82 | s = load_ssb_secret(fname) 83 | if s['curve'] != 'ed25519': 84 | raise Exception("unknown curve %s in %s" % \ 85 | (s['curve'], fname)) 86 | self._secr = s 87 | self.id = self._secr['id'] 88 | self.pk = base64.b64decode(self._secr['public'][:-8]) 89 | self.pkc = nacl.bindings.crypto_sign_ed25519_pk_to_curve25519(self.pk) 90 | self.sk = base64.b64decode(self._secr['private'][:-8]) 91 | self.skc = nacl.bindings.crypto_sign_ed25519_sk_to_curve25519(self.sk) 92 | self.keypair = nacl.signing.SigningKey(base64.b64decode(self._secr['private'][:-8])[:32]) 93 | 94 | def sign(self, data): 95 | return nacl.bindings.crypto_sign(data, self.sk)[:64] 96 | 97 | def _sbox_open(self, data, nonce, key): 98 | return nacl.bindings.crypto_secretbox_open(data, nonce, key) 99 | 100 | def boxPrivateData(self, data, rcpts): 101 | # returns the ciphertext (bytes) 102 | if len(rcpts) > 8: 103 | return None 104 | kp = nacl.bindings.crypto_box_keypair() 105 | keks = [] # key encryption keys 106 | for r in rcpts: 107 | r = nacl.bindings.crypto_sign_ed25519_pk_to_curve25519(id2bytes(r)) 108 | keks.append(nacl.bindings.crypto_scalarmult(kp[1], r)) 109 | nonce = nacl.bindings.randombytes(24) 110 | dek = nacl.bindings.randombytes(32) 
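# the ciphertext assembled below has the following layout:
#   nonce (24 B) | ephemeral curve25519 public key (32 B) |
#   one 49-byte secretbox per recipient wrapping (recipient count | dek) |
#   secretbox(data, nonce, dek)
# unboxPrivateData() below tries each 49-byte slot until one of them opens
# with the reader's key-encryption key.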
111 | ndek = bytes([len(rcpts)]) + dek 112 | c = nonce + kp[0] # nonce followed by public key 113 | for k in keks: # append wrapped deks for all recpts 114 | c += nacl.bindings.crypto_secretbox(ndek, nonce, k) 115 | return c + nacl.bindings.crypto_secretbox(data, nonce, dek) 116 | 117 | def unboxPrivateData(self, data): # ciphertext 118 | # returns decoded data (bytes) 119 | nonce = data[:24] 120 | mykek = nacl.bindings.crypto_scalarmult(self.skc, data[24:56]) 121 | rcpts = data[56:] 122 | for i in range(8): 123 | if len(rcpts) < 49: 124 | return None 125 | try: 126 | dek = self._sbox_open(rcpts[:49], nonce, mykek) 127 | return self._sbox_open(data[56+dek[0]*49:], nonce, dek[1:]) 128 | except: 129 | pass 130 | rcpts = rcpts[49:] 131 | return None 132 | 133 | # --------------------------------------------------------------------------- 134 | 135 | if __name__ == '__main__': 136 | 137 | import argparse 138 | 139 | parser = argparse.ArgumentParser(description='SSB-Drive configurator') 140 | parser.add_argument('-new', type=str, metavar='USERNAME', 141 | help="create new user") 142 | parser.add_argument('-friends', nargs=2, metavar='NAME', 143 | help="make two users follow each other") 144 | parser.add_argument('-list', action='store_true', 145 | help='list all users') 146 | args = parser.parse_args() 147 | ssb_home = os.path.expanduser('~/.ssb') 148 | 149 | if args.list: 150 | fname = os.path.join(ssb_home, 'secret') 151 | s = load_ssb_secret(fname) 152 | print("default user:\n %s" % s['id']) 153 | print("local users:") 154 | for e in os.listdir(ssb_home): 155 | fname = os.path.join(ssb_home, e, 'secret') 156 | e = e.split('.') 157 | if e[0] == 'user': 158 | s = load_ssb_secret(fname) 159 | print(" %s %s" % (s['id'], e[1])) 160 | elif args.friends: 161 | fn = ['', ''] 162 | s = [None, None] 163 | fr = [None, None] 164 | for i in range(2): 165 | fn[i] = username2dir(args.friends[i]) 166 | s[i] = load_ssb_secret(os.path.join(fn[i], 'secret')) 167 | fn[i] = os.path.join(fn[i], 'flume') 168 | if not os.path.isdir(fn[i]): 169 | os.mkdir(fn[i]) 170 | fn[i] = os.path.join(fn[i], 'friends.json') 171 | if not os.path.isfile(fn[i]): 172 | with open(fn[i], 'w') as f: 173 | f.write(json.dumps({ 174 | 'seq': 0, 175 | 'version': 2, 176 | 'value': { 177 | s[i]['id'] : {} 178 | } 179 | })) 180 | with open(fn[i], 'r') as f: 181 | fr[i] = json.load(f) 182 | if not s[0]['id'] in fr[1]['value'][s[1]['id']]: 183 | fr[1]['value'][s[1]['id']][s[0]['id']] = True 184 | if not s[1]['id'] in fr[0]['value'][s[0]['id']]: 185 | fr[0]['value'][s[0]['id']][s[1]['id']] = True 186 | for i in range(2): 187 | with open(fn[i], 'w') as f: 188 | f.write(json.dumps(fr[i])) 189 | print("** friend records updated") 190 | elif args.new: 191 | dname = username2dir(args.new) 192 | if os.path.isdir(dname) or os.path.isfile(dname): 193 | print("** user already exists, aborting") 194 | sys.exit(0) 195 | os.mkdir(dname) 196 | sname = os.path.join(dname, 'secret') 197 | create_new_user_secret(sname) 198 | os.mkdir(os.path.join(dname, 'flume')) 199 | s = load_ssb_secret(sname) 200 | print('** new user %s (%s)' % (args.new, s['id'])) 201 | else: 202 | print("** ?") 203 | 204 | # eof 205 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SSB Drive 2 | 3 | This is a proof of concept for a **decentralized file system for Secure 4 | Scuttlebutt** (see [SSB](https://www.scuttlebutt.nz/)). 
It includes a 4 | partial but interoperable implementation of the SSB peer protocol written 5 | in Python. 6 | 7 | 8 | With _SSB Drive_ you can create as many file systems ("drives") as you 9 | like, share and organize files with your friends without requiring a 10 | central repository or server. When you work on your files while 11 | offline, the _SSB Drive_ file system will merge automatically with the 12 | rest of the world once you rejoin the grid. Name conflicts are handled 13 | with "Observed Removed Sets" (OR-Sets), a CRDT (conflict-free replicated data type). 14 | 15 | Main usage: 16 | 17 | ```txt 18 | $ ./ssb-drive.py [-u USERNAME] [-l] [-peer ip:port:id] [UUID] 19 | ``` 20 | 21 | where UUID identifies the drive to work on. Use `-l` to list all 22 | available drives. The `-peer` option selects a specific SSB pub (peer 23 | node); the default behavior is to connect to the locally running SSB 24 | program at port 8008 (e.g. patchwork or sbot). 25 | 26 | __Code status:__ This is a proof-of-concept, not a well-curated 27 | Python package; it also lacks testing routines. __DON'T RUN THIS CODE 28 | ON YOUR LOG__: It's very hard --and often impossible-- to correct a 29 | corrupted hash chain, especially if it has leaked into the global SSB 30 | system. Please read the section "Experimenting with SSB Drive" for 31 | safe ways to work locally - you have been warned. 32 | 33 | The best approach is to unpack this Git repo and run directly from the `ssbdrv` 34 | directory after installing the dependencies (see 35 | `requirements.txt`). In the future, when the internal interfaces of 36 | `ssbdrv` have stabilized, a full Python package will be provided, 37 | probably also factoring out the SSB peer-to-peer component as an 38 | independent package. We acknowledge the use of pferreir's 39 | [`pyssb`](https://github.com/pferreir/pyssb) package, which had to be 40 | made more complete: the modified `pyssb` code is included for 41 | convenience, making this `ssbdrv` repo self-contained. 42 | 43 | __Doc status__: Read the source, Luke. Moreover, there is a draft document 44 | on tangles for SSB in this Git repo, see 45 | ['The Tangle data structure and its use in SSB Drive'](doc/tangle.md). 46 | 47 | ## Demo 48 | 49 | _SSB Drive_ behaves like a classic FTP client except that you don't have 50 | to connect to a server. The following demo shows the terminal-based user 51 | interface of this PoC. 52 | 53 | ![demo-20180831.gif](doc/demo-20180831.gif) 54 | 55 | What can be seen in the 30-second animated GIF: 56 | 57 | ```txt 58 | Alice: help // list available commands 59 | Alice: ls -l // show dir content 60 | Bob: ls -l // Bob sees same content 61 | Bob: put b.txt // Bob uploads a file 62 | Alice: ls -l // Alice finds it in her directory 63 | Alice: cat b.txt // and views it 64 | Alice: put x.txt first.txt // Alice races to upload first 65 | Bob: put y.txt first.txt // Bob races to upload first 66 | Bob: ls -l // surprise: no race condition, but two files 67 | Bob: tree // just another view 68 | Bob: ls -li // UNIX -i option: show inode (cypherlink in our case) 69 | // i.e., the two files differ, can be removed individually 70 | ``` 71 | 72 | ## Pragmatics 73 | 74 | Without UUID argument, the app first scans your SSB log and uses the 75 | most recent "root entry" it can find as the work drive -- beware that 76 | this drive could have been created by a friend. If you know the drive 77 | that you want to use (e.g. if you created several drives and/or want 78 | to be sure to work on a specific one), you should pass that drive's 79 | UUID as an argument.
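For example, you can list the drives that are visible in a user's log and then open a specific one by passing its UUID (the UUID shown here is the one created in the demo steps further below; yours will differ):

```txt
$ ./ssb-drive.py -user Alice -list
$ ./ssb-drive.py -user Alice 9dfc8124-6a6b-5730-9c04-5eed67ac770e
```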
80 | 81 | You can request the creation of a new drive with the `-n` option and list 82 | all available drives with `-l`. 83 | 84 | Because SSB is based on append-only logs, all changes ever made to a 85 | drive are preserved for as long as the log or copies of it exist. 86 | If one of your friends deletes a file from a drive that you shared, 87 | the file is still there and can be recovered: _SSB Drive_ is "time 88 | machine-ready" in the sense that all information is available, just 89 | that this PoC does not yet implement a method to browse a drive's 90 | history (and a method to resurrect old entries). 91 | 92 | Have fun and handle your friends' SSB drives with respect! 93 | 94 | cft, Aug 2018 95 | 96 | --- 97 | 98 | ## Example CLI sessions 99 | 100 | The full signature of the _SSB Drive_ app is: 101 | 102 | ```txt 103 | $ ./ssb-drive.py [options] [UUID] 104 | where options are: 105 | -h, --help show this help message and exit 106 | -del delete the given drive 107 | -list list all active drives 108 | -new create new drive 109 | -peer IP:PORT:ID remote's ip:port:id (default is localhost:8008:default_id 110 | -port PORT local port (i.e. become a server) 111 | -tty run in line mode (instead of fullscreen terminal) 112 | -user USERNAME username (default is ~/.ssb user, or ~/.ssb/user.USERNAME) 113 | -udel undelete the given drive 114 | UUID ssb-drive's uuid (default is youngest drive) 115 | ``` 116 | 117 | ### Experimenting with SSB Drive and/or testing local developments 118 | 119 | In order to perform local experiments with the SSB Drive Protocol, it 120 | is possible **and advised** to run with local SSB users rather than 121 | your own ID. To this end, for each user USERNAME, we keep a 122 | subdirectory with the following format: 123 | 124 | ```txt 125 | ~/.ssb/user.USERNAME 126 | ``` 127 | 128 | and populate it with the standard SSB data. The _SSB Drive_ software 129 | offers an easy way to create new users as follows: 130 | 131 | ```txt 132 | # LOCAL DEMO STEPS 1 133 | 134 | $ ./ssb/local/config.py -list 135 | default user: 136 | @AiBJDta+4boyh2USNGwIagH/wKjeruTcDX2Aj1r/haM=.ed25519 137 | local users: 138 | 139 | $ ./ssb/local/config.py -new Alice 140 | ** new user Alice (@C8pPydEHuGxCjFUYBLmBOGTIPkYQeZ3FnKvQTvT0MDk=.ed25519) 141 | $ ./ssb/local/config.py -new Bob 142 | ** new user Bob (@ihS4TZa55eMjjWOC5oN+oF9GTvc23GQcGyt0xqJ1XD0=.ed25519) 143 | 144 | $ ./ssb/local/config.py -list 145 | default user: 146 | @AiBJDta+4boyh2USNGwIagH/wKjeruTcDX2Aj1r/haM=.ed25519 147 | local users: 148 | @C8pPydEHuGxCjFUYBLmBOGTIPkYQeZ3FnKvQTvT0MDk=.ed25519 Alice 149 | @ihS4TZa55eMjjWOC5oN+oF9GTvc23GQcGyt0xqJ1XD0=.ed25519 Bob 150 | ``` 151 | 152 | Because peers only retrieve each other's logs if they follow each 153 | other, we have to populate the `friends.json` file for both, see below 154 | how this is done. Once this is established, we will (i) create a 155 | drive on Alice's side, (ii) let Bob sync with Alice's content, and 156 | (iii) start also Bob's _SSB Drive_ client: 157 | 158 | ```txt 159 | # LOCAL DEMO STEPS 2 160 | 161 | $ ./ssb/local/config.py -friends Alice Bob 162 | ** friend records updated 163 | 164 | $ ./ssb-drive.py -user Alice -new 165 | ** new drive created, uuid=9dfc8124-6a6b-5730-9c04-5eed67ac770e 166 | 167 | # start Alice's client in one terminal window: 168 | $ ./ssb-drive.py -user Alice -port 7007 169 | ... 170 | 171 | # in another terminal window, let Bob sync up: 172 | $ ./ssb-drive.py -user Bob -sync -peer localhost:7007:ID_OF_ALICE 173 | ... 
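# (the -sync run copies Alice's log entries into Bob's local log under ~/.ssb/user.Bob/ and then exits)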
174 | 175 | # and start his SSB Drive client: 176 | $ ./ssb-drive.py -user Bob -peer localhost:7007:ID_OF_ALICE 177 | ``` 178 | 179 | It is also possible to run the _SSB Drive_ app in line mode by 180 | selecting the `-tty` option. Note, however, that this mode does not 181 | yet support peer connections, i.e. you will work on the given 182 | user's log __as if offline__: 183 | 184 | ```txt 185 | $ ./ssb-drive.py -user Alice -tty 186 | Secure Scuttlebutt Drive client (v2018-08-21). Type ? for help. 187 | running in unencrypted mode 188 | 189 | drv=9dfc8124-6a6b-5730-9c04-5eed67ac770e (2018-08-22 21:44:21) 190 | cwd=/ 191 | ssb_drv> help 192 | 193 | Documented commands (type help <topic>): 194 | ======================================== 195 | cat cd exit get help ls mkdir put pwd rm rmdir stat sync tree 196 | 197 | ssb_drv> tree 198 | . 199 | '-- dir1/ 200 | |-- README.md 201 | '-- dir2/ 202 | ssb_drv> exit 203 | ``` 204 | 205 | --- 206 | 207 | ## Technical Details 208 | 209 | ### The "SSB Drive Protocol" (SDP) 210 | 211 | to be written 212 | 213 | ### Todo 214 | 215 | * "encrypted SSB Drive": adapt the tangles and let them run in the private log 216 | * implement a time machine (browse history, enable resurrecting files) 217 | * think about mounting other drives into a drive's name tree 218 | * run -tty mode with asyncio and serve the peer protocol in the background 219 | 220 | ---- 221 | -------------------------------------------------------------------------------- /ssb/shs/crypto.py: -------------------------------------------------------------------------------- 1 | # ssb/shs/crypto.py 2 | 3 | # Copyright (c) 2017 PySecretHandshake contributors (see AUTHORS for more details) 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy 6 | # of this software and associated documentation files (the "Software"), to deal 7 | # in the Software without restriction, including without limitation the rights 8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | # copies of the Software, and to permit persons to whom the Software is 10 | # furnished to do so, subject to the following conditions: 11 | # 12 | # The above copyright notice and this permission notice shall be included in all 13 | # copies or substantial portions of the Software. 14 | # 15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | # SOFTWARE.
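# This module implements both ends of SSB's secret handshake (SHS):
# SHSClientCrypto drives the client side of the four-message exchange
# (64-byte challenges, 112-byte client auth, 80-byte server accept) and
# SHSServerCrypto verifies it on the server side; get_box_keys() then
# derives the encrypt/decrypt keys and nonces handed to the box stream.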
22 | 23 | 24 | import hashlib 25 | import hmac 26 | from base64 import b64decode 27 | 28 | from nacl.bindings import (crypto_box_afternm, crypto_box_open_afternm, 29 | crypto_scalarmult) 30 | from nacl.exceptions import CryptoError 31 | from nacl.public import PrivateKey 32 | from nacl.signing import VerifyKey 33 | 34 | APPLICATION_KEY = b64decode('1KHLiKZvAvjbY1ziZEHMXawbCEIM6qwjCDm3VYRan/s=') 35 | 36 | 37 | class SHSError(Exception): 38 | """A SHS exception.""" 39 | pass 40 | 41 | 42 | class SHSCryptoBase(object): 43 | def __init__(self, local_key, ephemeral_key=None, application_key=None): 44 | self.local_key = local_key 45 | self.application_key = application_key or APPLICATION_KEY 46 | self._reset_keys(ephemeral_key or PrivateKey.generate()) 47 | 48 | def _reset_keys(self, ephemeral_key): 49 | self.local_ephemeral_key = ephemeral_key 50 | self.local_app_hmac = (hmac.new(self.application_key, bytes(ephemeral_key.public_key), digestmod='sha512') 51 | .digest()[:32]) 52 | 53 | def generate_challenge(self): 54 | """Generate and return a challenge to be sent to the server.""" 55 | return self.local_app_hmac + bytes(self.local_ephemeral_key.public_key) 56 | 57 | def verify_challenge(self, data): 58 | """Verify the correctness of challenge sent from the client.""" 59 | assert len(data) == 64 60 | sent_hmac, remote_ephemeral_key = data[:32], data[32:] 61 | 62 | h = hmac.new(self.application_key, remote_ephemeral_key, digestmod='sha512') 63 | self.remote_app_hmac = h.digest()[:32] 64 | ok = self.remote_app_hmac == sent_hmac 65 | 66 | if ok: 67 | # this is (a * b) 68 | self.shared_secret = crypto_scalarmult(bytes(self.local_ephemeral_key), remote_ephemeral_key) 69 | self.remote_ephemeral_key = remote_ephemeral_key 70 | # this is hash(a * b) 71 | self.shared_hash = hashlib.sha256(self.shared_secret).digest() 72 | return ok 73 | 74 | def clean(self, new_ephemeral_key=None): 75 | self._reset_keys(new_ephemeral_key or PrivateKey.generate()) 76 | self.shared_secret = None 77 | self.shared_hash = None 78 | self.remote_ephemeral_key = None 79 | 80 | def get_box_keys(self): 81 | shared_secret = hashlib.sha256(self.box_secret).digest() 82 | return { 83 | 'shared_secret': shared_secret, 84 | 'encrypt_key': hashlib.sha256(shared_secret + bytes(self.remote_pub_key)).digest(), 85 | 'decrypt_key': hashlib.sha256(shared_secret + bytes(self.local_key.verify_key)).digest(), 86 | 'encrypt_nonce': self.remote_app_hmac[:24], 87 | 'decrypt_nonce': self.local_app_hmac[:24] 88 | } 89 | 90 | 91 | class SHSServerCrypto(SHSCryptoBase): 92 | def verify_client_auth(self, data): 93 | assert len(data) == 112 94 | a_bob = crypto_scalarmult(bytes(self.local_key.to_curve25519_private_key()), self.remote_ephemeral_key) 95 | box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() 96 | self.hello = crypto_box_open_afternm(data, b'\x00' * 24, box_secret) 97 | signature, public_key = self.hello[:64], self.hello[64:] 98 | signed = self.application_key + bytes(self.local_key.verify_key) + self.shared_hash 99 | pkey = VerifyKey(public_key) 100 | 101 | # will raise an exception if verification fails 102 | pkey.verify(signed, signature) 103 | self.remote_pub_key = pkey 104 | b_alice = crypto_scalarmult(bytes(self.local_ephemeral_key), 105 | bytes(self.remote_pub_key.to_curve25519_public_key())) 106 | self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob + b_alice).digest()[:32] 107 | return True 108 | 109 | def generate_accept(self): 110 | okay = 
self.local_key.sign(self.application_key + self.hello + self.shared_hash).signature 111 | d = crypto_box_afternm(okay, b'\x00' * 24, self.box_secret) 112 | return d 113 | 114 | def clean(self, new_ephemeral_key=None): 115 | super(SHSServerCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) 116 | self.hello = None 117 | self.b_alice = None 118 | 119 | 120 | class SHSClientCrypto(SHSCryptoBase): 121 | """An object that encapsulates all the SHS client-side crypto. 122 | 123 | :param local_key: the keypair used by the client (:class:`nacl.public.PrivateKey` object) 124 | :param server_pub_key: the server's public key (``byte`` string) 125 | :param ephemeral_key: a fresh local :class:`nacl.public.PrivateKey` 126 | :param application_key: the unique application key (``byte`` string), defaults to SSB's 127 | """ 128 | 129 | def __init__(self, local_key, server_pub_key, ephemeral_key, application_key=None): 130 | super(SHSClientCrypto, self).__init__(local_key, ephemeral_key, application_key) 131 | self.remote_pub_key = VerifyKey(server_pub_key) 132 | 133 | def verify_server_challenge(self, data): 134 | """Verify the correctness of challenge sent from the server.""" 135 | assert super(SHSClientCrypto, self).verify_challenge(data) 136 | curve_pkey = self.remote_pub_key.to_curve25519_public_key() 137 | 138 | # a_bob is (a * B) 139 | a_bob = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(curve_pkey)) 140 | self.a_bob = a_bob 141 | # this shall be hash(K | a * b | a * B) 142 | self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest() 143 | 144 | # and message_to_box will correspond to H = sign(A)[K | Bp | hash(a * b)] | Ap 145 | signed_message = self.local_key.sign(self.application_key + bytes(self.remote_pub_key) + self.shared_hash) 146 | message_to_box = signed_message.signature + bytes(self.local_key.verify_key) 147 | self.hello = message_to_box 148 | return True 149 | 150 | def generate_client_auth(self): 151 | """Generate box[K|a*b|a*B](H)""" 152 | 153 | nonce = b"\x00" * 24 154 | # return box(K | a * b | a * B)[H] 155 | return crypto_box_afternm(self.hello, nonce, self.box_secret) 156 | 157 | def verify_server_accept(self, data): 158 | """Verify that the server's accept message is sane""" 159 | curve_lkey = self.local_key.to_curve25519_private_key() 160 | # b_alice is (A * b) 161 | b_alice = crypto_scalarmult(bytes(curve_lkey), self.remote_ephemeral_key) 162 | self.b_alice = b_alice 163 | # this is hash(K | a * b | a * B | A * b) 164 | self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + self.a_bob + 165 | b_alice).digest() 166 | 167 | nonce = b"\x00" * 24 168 | 169 | try: 170 | # let's use the box secret to unbox our encrypted message 171 | signature = crypto_box_open_afternm(data, nonce, self.box_secret) 172 | except CryptoError: 173 | raise SHSError('Error decrypting server acceptance message') 174 | 175 | # we should have received sign(B)[K | H | hash(a * b)] 176 | # let's see if that signature can verify the reconstructed data on our side 177 | self.remote_pub_key.verify(self.application_key + self.hello + self.shared_hash, signature) 178 | return True 179 | 180 | def clean(self, new_ephemeral_key=None): 181 | super(SHSClientCrypto, self).clean(new_ephemeral_key=new_ephemeral_key) 182 | self.a_bob = None 183 | self.b_alice = None 184 | -------------------------------------------------------------------------------- /ssb-drive.py: -------------------------------------------------------------------------------- 1 | 
#!/usr/bin/env python3 2 | 3 | # ssb-drive.py 4 | 5 | # 2018-08-31 (c) 6 | 7 | import array 8 | from asyncio import gather, ensure_future, Task, get_event_loop 9 | from datetime import datetime 10 | import sys 11 | 12 | from prompt_toolkit.application import Application 13 | from prompt_toolkit.buffer import Buffer 14 | from prompt_toolkit.document import Document 15 | from prompt_toolkit.eventloop import use_asyncio_event_loop 16 | from prompt_toolkit.key_binding import KeyBindings 17 | from prompt_toolkit.layout.containers import VSplit, HSplit 18 | from prompt_toolkit.layout.layout import Layout 19 | from prompt_toolkit.widgets import Label, TextArea, HorizontalLine 20 | 21 | import logging 22 | logger = logging.getLogger('packet_stream') 23 | logger.setLevel(logging.INFO) 24 | 25 | import ssb.adt.lfs 26 | import ssb.app.drive 27 | import ssb.peer.session 28 | import ssb.local.config 29 | import ssb.local.worm 30 | 31 | # --------------------------------------------------------------------------- 32 | # prompt_toolkit config: 33 | 34 | use_asyncio_event_loop() 35 | 36 | kb = KeyBindings() 37 | 38 | @kb.add('c-q') 39 | def _(event): 40 | event.app.exit() 41 | 42 | @kb.add('c-c') 43 | def _(event): 44 | event.app.cli.text = '' 45 | 46 | @kb.add('c-l') 47 | def _(event): 48 | event.app.renderer.clear() 49 | 50 | @kb.add('c-i') 51 | def _(event): 52 | event.app.layout.focus_next() 53 | 54 | # --------------------------------------------------------------------------- 55 | 56 | def make_app(fs): 57 | global append_to_log 58 | 59 | class PTK_STDOUT(): # our stdout 60 | 61 | def __init__(self, out): 62 | self.out = out 63 | 64 | def write(self, s): 65 | append(s, self.out) 66 | return len(s) 67 | 68 | def flush(self): 69 | pass 70 | 71 | class PTK_LOGGER(logging.StreamHandler): 72 | 73 | def __init__(self, level=logging.NOTSET, out=None): 74 | super().__init__(level) 75 | self.out = out 76 | 77 | def handle(self, record): 78 | append(record.getMessage(), self.out) 79 | 80 | def get_screen_size(): 81 | import fcntl 82 | import termios 83 | 84 | # Buffer for the C call 85 | buf = array.array(u'h', [0, 0, 0, 0]) 86 | fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ, buf) 87 | return (buf[0], buf[1]) 88 | 89 | def my_on_resize(old_rs_handler, fs): 90 | fill_top(fs) 91 | return old_rs_handler() 92 | 93 | # ---------------------------------------------------------------------- 94 | # prepare the layout 95 | 96 | rows,_ = get_screen_size() 97 | 98 | top = Label('', style='reverse') 99 | log = TextArea(height=int((rows-4)/2), scrollbar=True) 100 | out = TextArea(text='\n^c: clear input, ^l: redraw, ^q: quit\n', 101 | scrollbar=True) 102 | msg = Label('cwd is /', style='reverse') 103 | cli_args = [] # for cli_accept(), filled later 104 | 105 | def append(s, c=out): 106 | if not c: 107 | c.text += '\n---\n' 108 | if c == log: 109 | s = s.split('\n')[0][:get_screen_size()[1]-2] + '\n' 110 | t = c.text + s 111 | c.buffer.document = Document(text=t, cursor_position=len(t)-1) 112 | 113 | def cli_accept(buf): 114 | app, cli, fs = cli_args 115 | append('\n---\n> ' + cli.text + '\n') 116 | app.cmd.onecmd(cli.text) 117 | msg.text = 'cwd is ' + fs.getcwd() 118 | cli.buffer.history.append_string(cli.text) 119 | cli.text = '' 120 | 121 | def fill_top(fs): 122 | s1 = ' SSB Drive' # (v20180831)' 123 | s2 = '[uuid ' + fs.uuid() + '] ' 124 | w = get_screen_size()[1] 125 | top.text = s1 + ' '*(w-len(s1)-len(s2)) + s2 126 | 127 | cli = TextArea(multiline=False, accept_handler=cli_accept) 128 | bot = VSplit([ Label('> 
', dont_extend_width=True), cli ]) 129 | top_container = HSplit([ top, 130 | log, HorizontalLine(), 131 | out, msg, bot ]) 132 | 133 | app = Application(Layout(top_container), key_bindings=kb, full_screen=True) 134 | cli_args += [app, cli, fs] # for cli_accept() 135 | app.cli = cli # for retrieving it in the keyboard handler 136 | app.layout.focus(cli) 137 | fill_top(fs) 138 | 139 | old_rs_resize = app._on_resize 140 | app._on_resize = lambda : my_on_resize(old_rs_resize, fs) 141 | app.stdout = PTK_STDOUT(out) # used for cmd 142 | 143 | logging.getLogger('packet_stream').addHandler(PTK_LOGGER(out=log)) 144 | 145 | return app 146 | 147 | # --------------------------------------------------------------------------- 148 | 149 | if __name__ == '__main__': 150 | 151 | import argparse 152 | 153 | parser = argparse.ArgumentParser(description='SSB-Drive client') 154 | parser.add_argument('-del', dest='delete', action='store_true', 155 | help="del drive") 156 | parser.add_argument('-list', action='store_true', 157 | help='list all active drives') 158 | parser.add_argument('-new', action='store_true', 159 | help='create new drive ') 160 | parser.add_argument('-peer', metavar='IP:PORT:ID', 161 | help="remote's ip:port:id " + \ 162 | "(default is localhost:8008:default_id") 163 | parser.add_argument('-port', 164 | help="local port (i.e. become a server)") 165 | parser.add_argument('-sync', action='store_true', 166 | help="sync log and exit") 167 | parser.add_argument('-tty', action='store_true', 168 | help='run in line mode (instead of fullscreen terminal)') 169 | parser.add_argument('-user', type=str, metavar='USERNAME', dest='username', 170 | help='username (default is ~/.ssb user)') 171 | parser.add_argument('-udel', action='store_true', 172 | help="undelete drive") 173 | parser.add_argument('uuid', type=str, metavar='UUID', nargs='?', 174 | help="ssb-drive's uuid (default is youngest drive)") 175 | 176 | args = parser.parse_args() 177 | sess = ssb.peer.session.SSB_SESSION(args.username) 178 | 179 | if args.sync: 180 | if args.port: 181 | print("** cannot be server for syncing, aborting") 182 | else: 183 | logger.addHandler(logging.StreamHandler()) 184 | theLoop = get_event_loop() 185 | try: 186 | theLoop.run_until_complete(ssb.peer.session.main(args, sess)) 187 | finally: 188 | sess.worm.flush() 189 | for t in Task.all_tasks(): 190 | t.cancel() 191 | theLoop.close() 192 | sys.exit(0) 193 | 194 | if args.uuid: 195 | ref = ssb.adt.lfs.get_lfs_by_uuid(sess.worm, args.uuid) 196 | if not ref: 197 | print("** no such drive") 198 | sys.exit(0) 199 | fs = ssb.adt.lfs.SSB_LFS(sess.worm, ref) 200 | if args.udel: 201 | print("** not implemented") 202 | sys.exit(0) 203 | if args.delete: 204 | fs.close() 205 | sess.worm.flush() 206 | print("**", args.uuid, "was deleted") 207 | sys.exit(0) 208 | else: 209 | if args.delete or args.udel: 210 | print("** must specify a drive") 211 | sys.exit(0) 212 | if args.list: 213 | print("Available SSB drives:") 214 | for ref in ssb.adt.lfs.find_lfs_root_iter(sess.worm): 215 | m = sess.worm.readMsg(ref[1]) 216 | t = datetime.utcfromtimestamp(m['value']['timestamp']/1000) 217 | u = ssb.adt.lfs.uuid_from_key(sess.worm, ref[1]) 218 | print(" uuid=%s (%s)" % (u, str(t)[:19])) 219 | sys.exit(0) 220 | if args.new: 221 | fs = ssb.adt.lfs.SSB_LFS(sess.worm) 222 | sess.worm.flush() 223 | print("** new drive created, uuid=" + fs.uuid()) 224 | sys.exit(0) 225 | 226 | myroot = ssb.adt.lfs.find_lfs_mostRecent(sess.worm) 227 | if not myroot: 228 | print("** no drive found, aborting") 229 | 
sys.exit(0) 230 | 231 | fs = ssb.adt.lfs.SSB_LFS(sess.worm, myroot) 232 | sess.worm.flush() 233 | 234 | if args.tty: 235 | d = ssb.app.drive.DRIVE_CMD(fs) 236 | try: 237 | d.cmdloop() 238 | except KeyboardInterrupt: 239 | print('^C') 240 | sess.worm.flush() 241 | else: 242 | app = make_app(fs) 243 | app.cmd = ssb.app.drive.DRIVE_CMD(fs, stdout=app.stdout, 244 | prefetchBlob= lambda k: \ 245 | ensure_future(ssb.peer.session.fetch_blob(sess, k))) 246 | 247 | theLoop = get_event_loop() 248 | ensure_future(ssb.peer.session.main(args, sess)) 249 | try: 250 | theLoop.run_until_complete(app.run_async().to_asyncio_future()) 251 | finally: 252 | sess.worm.flush() 253 | for t in Task.all_tasks(): 254 | t.cancel() 255 | theLoop.close() 256 | 257 | # eof 258 | -------------------------------------------------------------------------------- /ssb/rpc/tests/test_packet_stream.py: -------------------------------------------------------------------------------- 1 | import json 2 | from asyncio import ensure_future, gather, Event 3 | 4 | import pytest 5 | from asynctest import patch 6 | from nacl.signing import SigningKey 7 | 8 | from secret_handshake.network import SHSDuplexStream 9 | from ssb.packet_stream import PacketStream, PSMessageType 10 | 11 | 12 | async def _collect_messages(generator): 13 | results = [] 14 | async for msg in generator: 15 | results.append(msg) 16 | return results 17 | 18 | MSG_BODY_1 = (b'{"previous":"%KTGP6W8vF80McRAZHYDWuKOD0KlNyKSq6Gb42iuV7Iw=.sha256","author":"@1+Iwm79DKvVBqYKFkhT6fWRbA' 19 | b'VvNNVH4F2BSxwhYmx8=.ed25519","sequence":116,"timestamp":1496696699331,"hash":"sha256","content":{"type"' 20 | b':"post","channel":"crypto","text":"Does anybody know any good resources (e.g. books) to learn cryptogra' 21 | b'phy? I\'m not speaking of basic concepts (e.g. what\'s a private key) but the actual mathematics behind' 22 | b' the whole thing.\\nI have a copy of the \\"Handbook of Applied Cryptography\\" on my bookshelf but I f' 23 | b'ound it too long/hard to follow. Are there any better alternatives?","mentions":[]},"signature":"hqKePb' 24 | b'bTXWxEi1njDnOWFsL0M0AoNoWyBFgNE6KXj//DThepaZSy9vRbygDHX5uNmCdyOrsQrwZsZhmUYKwtDQ==.sig.ed25519"}') 25 | 26 | MSG_BODY_2 = (b'{"previous":"%iQRhPyqmNLpGaO1Tpm1I22jqnUEwRwkCTDbwAGtM+lY=.sha256","author":"@1+Iwm79DKvVBqYKFkhT6fWRbA' 27 | b'VvNNVH4F2BSxwhYmx8=.ed25519","sequence":103,"timestamp":1496674211806,"hash":"sha256","content":{"type"' 28 | b':"post","channel":"git-ssb","text":"Is it only me or `git.scuttlebot.io` is timing out?\\n\\nE.g. 
try a' 29 | b'ccessing %vZCTqraoqKBKNZeATErXEtnoEr+wnT3p8tT+vL+29I4=.sha256","mentions":[{"link":"%vZCTqraoqKBKNZeATE' 30 | b'rXEtnoEr+wnT3p8tT+vL+29I4=.sha256"}]},"signature":"+i4U0HUGDDEyNoNr2NIROPnT3WQj3RuTaIhY5koWW8f0vwr4tZsY' 31 | b'mAkqqMwFWfP+eBIbc7DZ835er6r6h9CwAg==.sig.ed25519"}') 32 | 33 | 34 | class MockSHSSocket(SHSDuplexStream): 35 | def __init__(self, *args, **kwargs): 36 | super(MockSHSSocket, self).__init__() 37 | self.input = [] 38 | self.output = [] 39 | self.is_connected = False 40 | self._on_connect = [] 41 | 42 | def on_connect(self, cb): 43 | self._on_connect.append(cb) 44 | 45 | async def read(self): 46 | if not self.input: 47 | raise StopAsyncIteration 48 | return self.input.pop(0) 49 | 50 | def write(self, data): 51 | self.output.append(data) 52 | 53 | def feed(self, input): 54 | self.input += input 55 | 56 | def get_output(self): 57 | while True: 58 | if not self.output: 59 | break 60 | yield self.output.pop(0) 61 | 62 | def disconnect(self): 63 | self.is_connected = False 64 | 65 | 66 | class MockSHSClient(MockSHSSocket): 67 | async def connect(self): 68 | self.is_connected = True 69 | for cb in self._on_connect: 70 | await cb() 71 | 72 | 73 | class MockSHSServer(MockSHSSocket): 74 | def listen(self): 75 | self.is_connected = True 76 | for cb in self._on_connect: 77 | ensure_future(cb()) 78 | 79 | 80 | @pytest.fixture 81 | def ps_client(event_loop): 82 | return MockSHSClient() 83 | 84 | 85 | @pytest.fixture 86 | def ps_server(event_loop): 87 | return MockSHSServer() 88 | 89 | 90 | @pytest.mark.asyncio 91 | async def test_on_connect(ps_server): 92 | called = Event() 93 | 94 | async def _on_connect(): 95 | called.set() 96 | 97 | ps_server.on_connect(_on_connect) 98 | ps_server.listen() 99 | await called.wait() 100 | assert ps_server.is_connected 101 | 102 | 103 | @pytest.mark.asyncio 104 | async def test_message_decoding(ps_client): 105 | await ps_client.connect() 106 | 107 | ps = PacketStream(ps_client) 108 | 109 | assert ps.is_connected 110 | 111 | ps_client.feed([ 112 | b'\n\x00\x00\x00\x9a\x00\x00\x04\xfb', 113 | b'{"name":["createHistoryStream"],"args":[{"id":"@omgyp7Pnrw+Qm0I6T6Fh5VvnKmodMXwnxTIesW2DgMg=.ed25519",' 114 | b'"seq":10,"live":true,"keys":false}],"type":"source"}' 115 | ]) 116 | 117 | messages = (await _collect_messages(ps)) 118 | assert len(messages) == 1 119 | assert messages[0].type == PSMessageType.JSON 120 | assert messages[0].body == { 121 | 'name': ['createHistoryStream'], 122 | 'args': [ 123 | { 124 | 'id': '@omgyp7Pnrw+Qm0I6T6Fh5VvnKmodMXwnxTIesW2DgMg=.ed25519', 125 | 'seq': 10, 126 | 'live': True, 127 | 'keys': False 128 | } 129 | ], 130 | 'type': 'source' 131 | } 132 | 133 | 134 | @pytest.mark.asyncio 135 | async def test_message_encoding(ps_client): 136 | await ps_client.connect() 137 | 138 | ps = PacketStream(ps_client) 139 | 140 | assert ps.is_connected 141 | 142 | ps.send({ 143 | 'name': ['createHistoryStream'], 144 | 'args': [{ 145 | 'id': "@1+Iwm79DKvVBqYKFkhT6fWRbAVvNNVH4F2BSxwhYmx8=.ed25519", 146 | 'seq': 1, 147 | 'live': False, 148 | 'keys': False 149 | }], 150 | 'type': 'source' 151 | }, stream=True) 152 | 153 | header, body = list(ps_client.get_output()) 154 | 155 | assert header == b'\x0a\x00\x00\x00\xa6\x00\x00\x00\x01' 156 | assert json.loads(body.decode('utf-8')) == { 157 | "name": ["createHistoryStream"], 158 | "args": [ 159 | {"id": "@1+Iwm79DKvVBqYKFkhT6fWRbAVvNNVH4F2BSxwhYmx8=.ed25519", "seq": 1, "live": False, "keys": False} 160 | ], 161 | "type": "source" 162 | } 163 | 164 | 165 | @pytest.mark.asyncio 166 | async 
def test_message_stream(ps_client, mocker): 167 | await ps_client.connect() 168 | 169 | ps = PacketStream(ps_client) 170 | mocker.patch.object(ps, 'register_handler', wraps=ps.register_handler) 171 | 172 | assert ps.is_connected 173 | 174 | ps.send({ 175 | 'name': ['createHistoryStream'], 176 | 'args': [{ 177 | 'id': "@1+Iwm79DKvVBqYKFkhT6fWRbAVvNNVH4F2BSxwhYmx8=.ed25519", 178 | 'seq': 1, 179 | 'live': False, 180 | 'keys': False 181 | }], 182 | 'type': 'source' 183 | }, stream=True) 184 | 185 | assert ps.req_counter == 2 186 | assert ps.register_handler.call_count == 1 187 | handler = list(ps._event_map.values())[0][1] 188 | 189 | with patch.object(handler, 'process') as mock_process: 190 | ps_client.feed([b'\n\x00\x00\x02\xc5\xff\xff\xff\xff', MSG_BODY_1]) 191 | msg = await ps.read() 192 | assert mock_process.call_count == 1 193 | 194 | # responses have negative req 195 | assert msg.req == -1 196 | assert msg.body['previous'] == '%KTGP6W8vF80McRAZHYDWuKOD0KlNyKSq6Gb42iuV7Iw=.sha256' 197 | 198 | assert ps.req_counter == 2 199 | 200 | stream_handler = ps.send({ 201 | 'name': ['createHistoryStream'], 202 | 'args': [{ 203 | 'id': "@1+Iwm79DKvVBqYKFkhT6fWRbAVvNNVH4F2BSxwhYmx8=.ed25519", 204 | 'seq': 1, 205 | 'live': False, 206 | 'keys': False 207 | }], 208 | 'type': 'source' 209 | }, stream=True) 210 | 211 | assert ps.req_counter == 3 212 | assert ps.register_handler.call_count == 2 213 | handler = list(ps._event_map.values())[1][1] 214 | 215 | with patch.object(handler, 'process', wraps=handler.process) as mock_process: 216 | ps_client.feed([b'\n\x00\x00\x02\xc5\xff\xff\xff\xfe', MSG_BODY_1, 217 | b'\x0e\x00\x00\x023\xff\xff\xff\xfe', MSG_BODY_2]) 218 | 219 | # execute both message polling and response handling loops 220 | collected, handled = await gather(_collect_messages(ps), _collect_messages(stream_handler)) 221 | 222 | # No messages collected, since they're all responses 223 | assert collected == [] 224 | 225 | assert mock_process.call_count == 2 226 | 227 | for msg in handled: 228 | # responses have negative req 229 | assert msg.req == -2 230 | 231 | 232 | @pytest.mark.asyncio 233 | async def test_message_request(ps_server, mocker): 234 | ps_server.listen() 235 | 236 | ps = PacketStream(ps_server) 237 | 238 | mocker.patch.object(ps, 'register_handler', wraps=ps.register_handler) 239 | 240 | ps.send({ 241 | 'name': ['whoami'], 242 | 'args': [] 243 | }) 244 | 245 | header, body = list(ps_server.get_output()) 246 | assert header == b'\x02\x00\x00\x00 \x00\x00\x00\x01' 247 | assert json.loads(body.decode('utf-8')) == {"name": ["whoami"], "args": []} 248 | 249 | assert ps.req_counter == 2 250 | assert ps.register_handler.call_count == 1 251 | handler = list(ps._event_map.values())[0][1] 252 | 253 | with patch.object(handler, 'process') as mock_process: 254 | ps_server.feed([b'\x02\x00\x00\x00>\xff\xff\xff\xff', 255 | b'{"id":"@1+Iwm79DKvVBqYKFkhT6fWRbAVvNNVH4F2BSxwhYmx8=.ed25519"}']) 256 | msg = await ps.read() 257 | assert mock_process.call_count == 1 258 | 259 | # responses have negative req 260 | assert msg.req == -1 261 | assert msg.body['id'] == '@1+Iwm79DKvVBqYKFkhT6fWRbAVvNNVH4F2BSxwhYmx8=.ed25519' 262 | assert ps.req_counter == 2 263 | -------------------------------------------------------------------------------- /ssb/peer/session.py: -------------------------------------------------------------------------------- 1 | # #!/usr/bin/env python3 2 | 3 | # ssb/lib/session.py - the SSB protocol 4 | 5 | # 2018-08-31 (c) 6 | # June 2017 (c) Pedro Ferreira 7 | # 
https://github.com/pferreir/pyssb 8 | 9 | from asyncio import get_event_loop, gather, ensure_future 10 | import base64 11 | import hashlib 12 | import inspect 13 | import json 14 | import os 15 | import struct 16 | import sys 17 | import time 18 | 19 | from ssb.rpc.muxrpc import MuxRPCAPI, MuxRPCAPIException, MuxRPCRequest 20 | from ssb.rpc.packet_stream import PacketStream, PSMessage, PSMessageType 21 | from ssb.shs.network import SHSClient, SHSServer 22 | 23 | import ssb.local.config 24 | import ssb.local.worm 25 | 26 | import logging 27 | logger = logging.getLogger('packet_stream') 28 | 29 | # --------------------------------------------------------------------------- 30 | 31 | class SSB_SESSION(): 32 | 33 | def __init__(self, username): 34 | proc = ssb.local.worm.is_locked(username) 35 | if proc: 36 | raise Exception("log file is locked by process %d (%s)" % \ 37 | (proc.pid, proc.name())) 38 | self.secr = ssb.local.config.SSB_SECRET(username) 39 | self.id = self.secr.id 40 | self.peer_id = ssb.local.config.SSB_SECRET(None).id 41 | self.worm = ssb.local.worm.SSB_WORM(username, self.secr) 42 | 43 | # --------------------------------------------------------------------------- 44 | 45 | def my_notify(connection, req_msg, m): 46 | a = req_msg.body['args'][0] 47 | if 'key' in a and a['key']: 48 | connection.send(m, req = - req_msg.req) 49 | else: 50 | connection.send(m['value'], req = - req_msg.req) 51 | 52 | api = MuxRPCAPI() 53 | 54 | @api.define('createHistoryStream') 55 | def create_history_stream(connection, req_msg, sess=None): 56 | a = req_msg.body['args'][0] 57 | logger.info('RECV [%d] createHistoryStream id=%s', req_msg.req, a['id']) # str(req_msg), a['id']) 58 | i = a['seq'] 59 | while True: 60 | m = sess.worm.getMsgBySequence(a['id'], i) 61 | if not m: 62 | logger.debug("worm has no %s/%d", a['id'], i) 63 | break 64 | if 'key' in a and a['key']: 65 | connection.send(m, req = - req_msg.req) 66 | else: 67 | connection.send(m['value'], req = - req_msg.req) 68 | i += 1 69 | if a['id'] == sess.id and 'live' in a and a['live']: 70 | sess.worm.notify_on_extend(lambda e: my_notify(connection, req_msg, e)) 71 | else: 72 | connection.send(True, end_err = True, req = - req_msg.req) 73 | 74 | 75 | @api.define('blobs.createWants') 76 | def blobs_createWants(connection, req_msg, sess=None): 77 | logger.info('** createWants %s', str(req_msg)) 78 | connection.send(True, end_err = True, req = - req_msg.req) 79 | 80 | @api.define('blobs.get') 81 | def blobs_get(connection, req_msg, sess=None): 82 | a = req_msg.body['args'][0] 83 | logger.info('RECV [%d] blobs.get %s', req_msg.req, a) 84 | # while True: chunk the data etc 85 | if sess.worm.blobAvailable(a): 86 | data = sess.worm.readBlob(a) 87 | if data: 88 | connection.send(data, 89 | msg_type=ssb.rpc.packet_stream.PSMessageType.BUFFER, 90 | req= - req_msg.req) 91 | connection.send(True, end_err= True, req= - req_msg.req) 92 | return 93 | err = "local error" 94 | else: 95 | err = "no such blob" 96 | connection.send({ 'name': 'Error', 'message': err }, 97 | end_err = True, req= - req_msg.req) 98 | 99 | 100 | async def fetch_blob(sess, id): 101 | logger.info('me fetching blob %s', id) 102 | data = bytes(0) 103 | async for msg in api.call('blobs.get', [id], 'source'): 104 | chunk = msg.data 105 | logger.debug('RESP: %d (%d bytes)', msg.req, len(chunk)) 106 | if not msg.end_err: 107 | data += chunk 108 | nm = hashlib.sha256(data).digest() 109 | nm = '&' + base64.b64encode(nm).decode('ascii') 110 | if nm == id: 111 | sess.worm.writeBlob(data) 112 | 
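# a blob's id is '&' + base64(sha256(content)), so a hash mismatch means
# the transferred data is corrupted or incomplete and is not stored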
else: 113 | logger.info('fetchBlob: mismatch %s (%d bytes)', nm, len(data)) 114 | 115 | 116 | async def request_log_feed(sess, id, seq, end_after_sync=False): 117 | logger.info('me requesting feed %s / %d..', id, seq) 118 | async for msg in api.call('createHistoryStream', [{ 119 | 'id': id, 120 | 'seq': seq, 121 | # 'live': False, 122 | 'live': not end_after_sync, 123 | 'keys': False 124 | }], 'source'): 125 | logger.debug('RESPONSE: %d', msg.req) 126 | # print(type(msg.body)) 127 | # print(msg.body) 128 | d = json.loads(msg.data) 129 | if type(d) == dict: 130 | _, seq = sess.worm._getMaxSeq(d['author']) 131 | if seq+1 != d['sequence']: 132 | print('seq gap:', d['sequence'], 'instead of', seq+1) 133 | else: 134 | logger.debug('* seq %s / %d', d['author'], d['sequence']) 135 | jmsg = ssb.local.worm.formatMsg(d['previous'] if 'previous' in d else None, 136 | d['sequence'], d['author'], 137 | d['timestamp'], d['hash'], 138 | d['content'], d['signature']) 139 | # print(jmsg) 140 | key = sess.worm.appendToLog(jmsg) 141 | if key: 142 | # sess.last.set_last_seq(d['author'], seq+1, key) 143 | sess.worm._updateMaxSeq(d['author'], key, seq+1) 144 | else: 145 | print("appendToLog failed, invalid signature?") 146 | print(msg.data) 147 | print(jmsg) 148 | if 'text' in d['content']: 149 | print(type(d['content']['text'])) 150 | print(d['content']['text']) 151 | elif d == True: 152 | logger.info("end of worm updating") 153 | sess.worm.flush() 154 | else: 155 | logger.debug("%s", str(msg)) 156 | 157 | # --------------------------------------------------------------------------- 158 | 159 | # client behavior 160 | async def become_client(sess, end_after_sync=False): 161 | logger.info('me starting to talk to the new peer') 162 | fname = os.path.join(sess.worm._logDname, 'friends.json') 163 | ids = [] 164 | if os.path.isfile(fname): 165 | with open(fname, "r") as f: 166 | friends = json.load(f) 167 | ids += [ id for (id,flag) in friends['value'][sess.id].items() \ 168 | if flag ] 169 | if not sess.id in ids: # add our id in case we lost our log 170 | ids.append(sess.id) 171 | for id in ids: 172 | await request_log_feed(sess, id, sess.worm._getMaxSeq(id)[1] + 1, 173 | end_after_sync) 174 | logger.info('end of become_client code') 175 | 176 | # server behavior 177 | async def on_connect(conn, sess): 178 | packet_stream = PacketStream(conn) 179 | api.add_connection(packet_stream) 180 | 181 | logger.info('incoming new peer detected') 182 | ensure_future(become_client(sess)) 183 | 184 | try: 185 | async for req_msg in packet_stream: 186 | logger.debug("incoming peer request %d", req_msg.req) 187 | 188 | nm = '.'.join(req_msg.body['name']) 189 | handler = api.handlers.get(nm) 190 | if not handler: 191 | packet_stream.send({'name': 'Error', 192 | 'message': 'no such method ' + nm, 193 | 'stack': ''}, end_err = True, 194 | req = - req_msg.req) 195 | else: 196 | handler(packet_stream, req_msg, sess) 197 | except Exception as e: 198 | logger.info("lost connecton? 
%s", str(e)) 199 | 200 | # --------------------------------------------------------------------------- 201 | 202 | async def main(args, sess): 203 | 204 | if args.port: # become a server, discard -peer option 205 | logger.info("main(): behaving as a SSB server") 206 | server = SHSServer('127.0.0.1', int(args.port), sess.secr.keypair, 207 | sess=sess) 208 | server.on_connect(on_connect) 209 | await server.listen() 210 | logger.info("end of server init, my ID is " + sess.id) 211 | else: 212 | logger.info("main(): behaving as a SSB client") 213 | if args.peer: 214 | p = args.peer.split(':') 215 | host, port, peer_id = (p[0], int(p[1]), p[2]) 216 | else: 217 | host, port, peer_id = ('127.0.0.1', 8008, sess.peer_id) 218 | client = SHSClient(host, port, sess.secr.keypair, 219 | base64.b64decode(peer_id[1:-8])) 220 | packet_stream = PacketStream(client) 221 | await client.open() 222 | api.add_connection(packet_stream, sess) 223 | if args.sync: 224 | fu = ensure_future(api) 225 | await become_client(sess, end_after_sync=True) 226 | fu.cancel() 227 | else: 228 | await gather(ensure_future(api), become_client(sess)) 229 | 230 | logger.info("end of main()") 231 | 232 | # --------------------------------------------------------------------------- 233 | 234 | if __name__ == '__main__': 235 | import argparse 236 | 237 | parser = argparse.ArgumentParser(description='SSB peer -- sync logs') 238 | parser.add_argument('-port', 239 | help="local port (i.e. become a server)") 240 | parser.add_argument('peer', nargs='?', 241 | help="remote's ip:port:id (default is localhost:8008:default_id") 242 | parser.add_argument('-user', type=str, nargs='?', dest='username', 243 | help='username (default is ~/.ssb user)') 244 | args = parser.parse_args() 245 | 246 | logger.addHandler(logging.StreamHandler()) 247 | logger.setLevel(logging.DEBUG) # INFO) 248 | 249 | sess = SSB_SESSION(args.username) 250 | 251 | theLoop = get_event_loop() 252 | theLoop.run_until_complete(main(args, sess)) 253 | if args.port: 254 | theLoop.run_forever() 255 | theLoop.close() 256 | 257 | # eof 258 | -------------------------------------------------------------------------------- /ssb/adt/lfs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # ssb/adt/lfs.ps 4 | # logical file system for SSB 5 | 6 | import copy 7 | from datetime import datetime 8 | import os 9 | import sys 10 | import uuid 11 | 12 | import ssb.adt.tangle 13 | 14 | # --------------------------------------------------------------------------- 15 | 16 | # this is the UUID for the SSB filesystem namespace 17 | # (we picked a random UUID and let it start with 'SSB', hex-alike) 18 | NS_UUID = '55bf2f4d-9915-4d86-a76f-7b7d6888c107' 19 | 20 | def uuid_from_key(worm, key): 21 | # key is a string 22 | try: 23 | m = worm.readMsg(key) 24 | salt = '' # backwards compatibility with initial version 25 | if 'salt' in m['value']['content']: 26 | salt = m['value']['content']['salt'] 27 | except: 28 | return None 29 | ns = uuid.UUID(NS_UUID) 30 | return str(uuid.uuid5(ns, salt+key)) 31 | 32 | tag_lfs_root = 'ssb_lfs:v1:root' # drive node 33 | tag_lfs_dir = 'ssb_lfs:v1:dir' # directory node 34 | # tag_lfs_cmd = 'ssb_lfs:v1:cmd' # command 35 | 36 | # ---------------------------------------------------------------------- 37 | 38 | """ 39 | 40 | root tangle record for a drive: 41 | 42 | 'type': 'tangle' 43 | 'use': 'ssb_lfs:v1:root', 44 | 'salt': NNN 45 | 46 | directory tangle record for a drive: 47 | 48 | 'type': 'tangle' 49 | 
'use': 'ssb_lfs:v1:dir', 50 | 'drvref: [ '@..', '%..' ] 51 | 52 | content INSIDE a tangle record for directory entries (dent): 53 | 54 | * bind name to file: 55 | { 'type': 'bindF', 'name': '..', 'size': xx, 'blobkey': '%..' } 56 | 57 | * bind name to subdirectory: 58 | { 'type': 'bindD', 'name': '..', 'dirref': [ '@..', '%..' ] } 59 | 60 | * unbind: 61 | { 'unbind': 'unbind', 'key': '%..' } 62 | 'key' points to one of the above definition records 63 | 64 | * close (block) a drive: 65 | { 'type': 'blocked' } 66 | 67 | 68 | not implemented yet: 69 | 70 | * record for symlink (bindL / 'name' / path) 71 | * record for mount, umount 72 | * directory snapshots 73 | 74 | """ 75 | 76 | class SSB_LFS: 77 | 78 | def __init__(self, worm, rootRef=None, owners=None): 79 | self._worm = worm 80 | self._root = ssb.adt.tangle.SSB_TANGLE(worm, rootRef, 81 | # in case a new root is created: 82 | use=tag_lfs_root, 83 | salt=os.urandom(8).hex()) 84 | self._cwt = self._root # current working tangle 85 | self._pars = [self._cwt] # list of parent tangles 86 | self._path = [''] # list of strings 87 | 88 | def uuid(self): 89 | return uuid_from_key(self._worm, self._root.base[1]) 90 | 91 | def items(self): # iterate through cwd 92 | self._cwt.refresh() 93 | return LFS_ITER(self._worm, self._cwt, None) 94 | 95 | def ls(self, dirref): # iterate through this dir tagle 96 | dir = ssb.adt.tangle.SSB_TANGLE(self._worm, dirref) 97 | return LFS_ITER(self._worm, dir, None) 98 | 99 | def getcwd(self): # get current working directory 100 | return '/' + '/'.join(self._path[1:]) 101 | 102 | def close(self): 103 | self._root.append({ 'type': 'blocked' }) 104 | 105 | def cd(self, path): # change directory 106 | new_pars = copy.copy(self._pars) 107 | new_path = copy.copy(self._path) 108 | path = os.path.normpath(path) 109 | if path[0] == '/': 110 | path = path[1:] 111 | new_pars = new_pars[:1] 112 | new_path = new_path[:1] 113 | new_cwt = new_pars[-1] 114 | if len(path) > 0: 115 | for p in path.split('/'): 116 | if p == '.': 117 | continue 118 | if p == '..': 119 | if len(new_path) > 1: 120 | new_pars.pop() 121 | new_path.pop() 122 | new_cwt = new_pars[-1] 123 | else: 124 | for dent in LFS_ITER(self._worm, new_cwt, None): 125 | if dent['name'] == p and dent['type'] == 'bindD': 126 | break 127 | else: 128 | raise ValueError 129 | new_cwt = ssb.adt.tangle.SSB_TANGLE(self._worm, dent['dirref']) 130 | new_pars.append(new_cwt) 131 | new_path.append(p) 132 | self._cwt = new_cwt 133 | self._pars = new_pars 134 | self._path = new_path 135 | 136 | def mkdir(self, n): 137 | # FIXME: refuse if target name exists 138 | dirtan = ssb.adt.tangle.SSB_TANGLE(self._worm, 139 | use=tag_lfs_dir, 140 | drv=self._root.getBaseRef()) 141 | self._cwt.append({ 142 | 'type': 'bindD', 143 | 'name': n, 144 | 'dirref' : dirtan.getBaseRef(), 145 | }) 146 | 147 | def rmdir(self, bindkey): 148 | # FIXME: make sure that the entry is part of this file system 149 | for dent in iter(self.items()): 150 | if dent['this'][1] == bindkey: 151 | if dent['type'] != 'bindD': 152 | raise OSError 153 | dir = ssb.adt.tangle.SSB_TANGLE(self._worm, dent['dirref']) 154 | for e in LFS_ITER(self._worm, dir, None): # must be empty 155 | raise OSError("directory not empty") 156 | self._cwt.append({ 157 | 'type': 'unbind', 158 | 'key': bindkey 159 | }) 160 | return 161 | raise ValueError("no such directory entry") 162 | 163 | def linkBlob(self, n, size, key, overwrite=False): 164 | # FIXME: refuse if target name exists and is a dir 165 | self._cwt.append({ 166 | 'type': 'bindF', 167 | 
'name': n, 168 | 'size': size, 169 | 'blobkey' : key, 170 | }) 171 | # FIXME: if overwrite: remove all files with the given name 172 | 173 | def unlinkBlob(self, bindkey): 174 | # FIXME: make sure that the entry is part of this file system 175 | for dent in iter(self.items()): 176 | if dent['this'][1] == bindkey: 177 | if dent['type'] != 'bindF': 178 | raise OSError 179 | self._cwt.append({ 180 | 'type': 'unbind', 181 | 'key': bindkey 182 | }) 183 | return 184 | raise ValueError 185 | 186 | """ 187 | def rename(self, new, old, key=None): 188 | # FIXME: make sure that the orig entry is part of this file system 189 | # FIXME: refuse if target name exists and is a dir 190 | # FIXME: refuse if it's a dir and target would create a cycle 191 | # FIXME: create target 192 | # FIXME: if file and overwrite: remove all files with the given name 193 | # FIXME: unlink/rm original entry 194 | pass 195 | """ 196 | 197 | 198 | class LFS_ITER: 199 | 200 | def __init__(self, worm, tang, cwdRef): 201 | # print("fs_iter") 202 | self._worm = worm 203 | self._tang = tang 204 | # self._cwd = cwdRef 205 | self._tomb = [] 206 | for k in tang: 207 | dent = self._worm.readMsg(k) 208 | if dent['value']['content']['content']['type'] == 'unbind': 209 | # print('tombstone', k, dent['key']) 210 | self._tomb.append(dent['value']['content']['content']['key']) 211 | # print('tomb', [t[:10] for t in self._tomb]) 212 | self._iter = iter(tang) 213 | 214 | def __iter__(self): 215 | return self 216 | 217 | def __next__(self): 218 | while True: 219 | k = self._iter.__next__() 220 | # print(".. ", k) 221 | if k in self._tomb: 222 | continue 223 | bind = self._worm.readMsg(k) 224 | if bind['value']['content']['content']['type'] == 'unbind': 225 | continue; 226 | # print('ok to go:', k) 227 | r = copy.copy(bind['value']['content']['content']) 228 | r['this'] = [bind['value']['author'], bind['key']] 229 | r['timestamp'] = bind['value']['timestamp'] 230 | return r 231 | 232 | # --------------------------------------------------------------------------- 233 | 234 | class LFS_ROOT_ITER: 235 | 236 | def __init__(self, worm): 237 | # print("lfs_root_iter for", worm._logFname) 238 | self._worm = worm 239 | self._i = iter(worm) 240 | self._closed = [] 241 | self._found = [] 242 | 243 | def __iter__(self): 244 | return self 245 | 246 | def __next__(self): 247 | while True: 248 | k = self._i.__next__() 249 | if k in self._closed: 250 | continue 251 | m = self._worm.readMsg(k) 252 | if not m: 253 | continue 254 | c = m['value']['content'] 255 | if type(c) == dict and c['type'] == 'tangle': 256 | try: 257 | if c['content']['type'] == 'blocked' and \ 258 | m['value']['author'] == self._worm.id: 259 | k = c['base'][1] 260 | # print(k) 261 | if not k in self._closed: 262 | self._closed.append(k) 263 | continue 264 | except: 265 | pass 266 | # fetch root node by folllowing the base ref 267 | if 'base' in c: 268 | k = c['base'][1] 269 | m = self._worm.readMsg(k) 270 | if not m: 271 | continue 272 | c = m['value']['content'] 273 | if type(c) != dict or c['type'] != 'tangle': 274 | continue 275 | if not 'use' in c or c['use'] != tag_lfs_root or \ 276 | m['key'] in self._found: 277 | continue 278 | self._found.append(m['key']) 279 | return [m['value']['author'], m['key']] 280 | raise StopIteration 281 | 282 | def find_lfs_root_iter(worm): 283 | return LFS_ROOT_ITER(worm) 284 | 285 | def find_lfs_mostRecent(worm): 286 | # find our most recently defined FS in the log 287 | for ref in find_lfs_root_iter(worm): 288 | # if ref[0] == worm.id: # return with 
first match 289 | return ref 290 | return None 291 | 292 | def get_lfs_by_uuid(worm, uuid): 293 | for ref in find_lfs_root_iter(worm): 294 | if uuid == uuid_from_key(worm, ref[1]): 295 | return ref 296 | return None 297 | 298 | # --------------------------------------------------------------------------- 299 | if __name__ == '__main__' : 300 | 301 | import ssb.local.config 302 | import ssb.local.worm 303 | 304 | asecr = ssb.local.config.SSB_SECRET('Alice') 305 | aworm = ssb.local.worm.SSB_WORM('Alice', asecr) 306 | 307 | for dent in find_lfs_root_iter(aworm): 308 | print(dent) 309 | 310 | # eof 311 | 312 | -------------------------------------------------------------------------------- /doc/tangle-2.gliffy: -------------------------------------------------------------------------------- 1 | {"contentType":"application/gliffy+json","version":"1.1","metadata":{"title":"untitled","revision":0,"exportBorder":false},"embeddedResources":{"index":0,"resources":[]},"stage":{"objects":[{"x":761,"y":16,"rotation":0,"id":134,"uid":"com.gliffy.shape.basic.basic_v1.default.round_rectangle","width":50,"height":50,"lockAspectRatio":false,"lockShape":false,"order":60,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.round_rectangle.basic_v1","strokeWidth":1,"strokeColor":"#000000","fillColor":"#ffffff","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[],"linkMap":[]},{"x":768.6666666666666,"y":76.5,"rotation":0,"id":122,"uid":"com.gliffy.shape.basic.basic_v1.default.line","width":100,"height":100,"lockAspectRatio":false,"lockShape":false,"order":59,"graphic":{"type":"Line","Line":{"strokeWidth":1,"strokeColor":"#000000","fillColor":"none","dashStyle":"4.0,4.0","startArrow":0,"endArrow":1,"startArrowRotation":"auto","endArrowRotation":"auto","ortho":true,"interpolationType":"linear","cornerRadius":10,"controlPath":[[-7.666666666666629,-35.5],[-26.66666666666663,-35.5],[-26.66666666666663,24.144660940672622],[-67.66666666666663,24.144660940672622]],"lockSegments":{"1":true}}},"children":null,"constraints":{"constraints":[],"startConstraint":{"type":"StartPositionConstraint","StartPositionConstraint":{"nodeId":134,"px":0,"py":0.5}},"endConstraint":{"type":"EndPositionConstraint","EndPositionConstraint":{"nodeId":124,"px":1,"py":0.29289321881345237}}},"linkMap":[]},{"x":768.6666666666666,"y":3.5,"rotation":0,"id":123,"uid":"com.gliffy.shape.basic.basic_v1.default.line","width":100,"height":100,"lockAspectRatio":false,"lockShape":false,"order":58,"graphic":{"type":"Line","Line":{"strokeWidth":1,"strokeColor":"#000000","fillColor":"none","dashStyle":"4.0,4.0","startArrow":0,"endArrow":1,"startArrowRotation":"auto","endArrowRotation":"auto","ortho":true,"interpolationType":"linear","cornerRadius":10,"controlPath":[[-7.627255089997448,25],[-27.084836726665003,25],[-46.542418363332445,25],[-66,25]],"lockSegments":{}}},"children":null,"constraints":{"constraints":[],"startConstraint":{"type":"StartPositionConstraint","StartPositionConstraint":{"nodeId":134,"px":1.1102230246251563e-16,"py":0.2928932188134525}}},"linkMap":[]},{"x":651,"y":86,"rotation":0,"id":124,"uid":"com.gliffy.shape.basic.basic_v1.default.round_rectangle","width":50,"height":50,"lockAspectRatio":false,"lockShape":false,"order":57,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.round_rectangle.basic_v1","strokeWidth":1,"strokeColor":"#000000","fillColor":"#ffffff","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[],"linkMap":[]},{"x":
713,"y":101,"rotation":0,"id":125,"uid":"com.gliffy.shape.basic.basic_v1.default.line","width":100,"height":100,"lockAspectRatio":false,"lockShape":false,"order":56,"graphic":{"type":"Line","Line":{"strokeWidth":1,"strokeColor":"#000000","fillColor":"none","dashStyle":null,"startArrow":0,"endArrow":1,"startArrowRotation":"auto","endArrowRotation":"auto","ortho":true,"interpolationType":"linear","cornerRadius":10,"controlPath":[[-62,-0.3553390593273775],[-77,-0.3553390593273775],[-77,-60],[-92,-60]],"lockSegments":{}}},"children":null,"constraints":{"constraints":[],"startConstraint":{"type":"StartPositionConstraint","StartPositionConstraint":{"nodeId":124,"px":1.1102230246251563e-16,"py":0.2928932188134525}},"endConstraint":{"type":"EndPositionConstraint","EndPositionConstraint":{"nodeId":129,"px":1,"py":0.5}}},"linkMap":[]},{"x":650,"y":33,"rotation":0,"id":126,"uid":"com.gliffy.shape.basic.basic_v1.default.line","width":100,"height":100,"lockAspectRatio":false,"lockShape":false,"order":55,"graphic":{"type":"Line","Line":{"strokeWidth":1,"strokeColor":"#000000","fillColor":"none","dashStyle":null,"startArrow":0,"endArrow":1,"startArrowRotation":"auto","endArrowRotation":"auto","ortho":false,"interpolationType":"linear","cornerRadius":null,"controlPath":[[1,-2.3553390593273775],[-29,-2.3553390593273775]],"lockSegments":{}}},"children":null,"linkMap":[]},{"x":570,"y":33,"rotation":0,"id":127,"uid":"com.gliffy.shape.basic.basic_v1.default.line","width":100,"height":100,"lockAspectRatio":false,"lockShape":false,"order":54,"graphic":{"type":"Line","Line":{"strokeWidth":1,"strokeColor":"#000000","fillColor":"none","dashStyle":null,"startArrow":0,"endArrow":1,"startArrowRotation":"auto","endArrowRotation":"auto","ortho":false,"interpolationType":"linear","cornerRadius":null,"controlPath":[[1,-2.3553390593273775],[-29,-2.3553390593273775]],"lockSegments":{}}},"children":null,"linkMap":[]},{"x":651,"y":16,"rotation":0,"id":128,"uid":"com.gliffy.shape.basic.basic_v1.default.round_rectangle","width":50,"height":50,"lockAspectRatio":false,"lockShape":false,"order":53,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.round_rectangle.basic_v1","strokeWidth":1,"strokeColor":"#000000","fillColor":"#ffffff","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[],"linkMap":[]},{"x":571,"y":16,"rotation":0,"id":129,"uid":"com.gliffy.shape.basic.basic_v1.default.round_rectangle","width":50,"height":50,"lockAspectRatio":false,"lockShape":false,"order":52,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.round_rectangle.basic_v1","strokeWidth":1,"strokeColor":"#000000","fillColor":"#ffffff","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[],"linkMap":[]},{"x":490,"y":14,"rotation":0,"id":130,"uid":"com.gliffy.shape.basic.basic_v1.default.round_rectangle","width":50,"height":50,"lockAspectRatio":false,"lockShape":false,"order":51,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.round_rectangle.basic_v1","strokeWidth":1,"strokeColor":"#000000","fillColor":"#93c47d","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[],"linkMap":[]},{"x":641,"y":11,"rotation":0,"id":132,"uid":"com.gliffy.shape.basic.basic_v1.default.rectangle","width":71,"height":140,"lockAspectRatio":false,"lockShape":false,"order":49,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.rectangle.basic_v1","strokeWidth":1,"strokeColor":"#ea9999","fillColor":"#f4cccc","gradie
nt":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[],"linkMap":[]},{"x":280,"y":49.5,"rotation":0,"id":117,"uid":"com.gliffy.shape.basic.basic_v1.default.text","width":192,"height":36,"lockAspectRatio":false,"lockShape":false,"order":48,"graphic":{"type":"Text","Text":{"tid":null,"valign":"middle","overflow":"none","vposition":"none","hposition":"none","html":"

? ==>

","paddingLeft":2,"paddingRight":2,"paddingBottom":2,"paddingTop":2}},"children":null,"linkMap":[]},{"x":288.66666666666663,"y":76.5,"rotation":0,"id":113,"uid":"com.gliffy.shape.basic.basic_v1.default.line","width":100,"height":100,"lockAspectRatio":false,"lockShape":false,"order":47,"graphic":{"type":"Line","Line":{"strokeWidth":1,"strokeColor":"#000000","fillColor":"none","dashStyle":"4.0,4.0","startArrow":0,"endArrow":1,"startArrowRotation":"auto","endArrowRotation":"auto","ortho":false,"interpolationType":"linear","cornerRadius":null,"controlPath":[[14.333333333333371,24.5],[-66,25]],"lockSegments":{"1":true}}},"children":null,"linkMap":[]},{"x":288.66666666666663,"y":3.5,"rotation":0,"id":114,"uid":"com.gliffy.shape.basic.basic_v1.default.line","width":100,"height":100,"lockAspectRatio":false,"lockShape":false,"order":46,"graphic":{"type":"Line","Line":{"strokeWidth":1,"strokeColor":"#000000","fillColor":"none","dashStyle":"4.0,4.0","startArrow":0,"endArrow":1,"startArrowRotation":"auto","endArrowRotation":"auto","ortho":false,"interpolationType":"linear","cornerRadius":null,"controlPath":[[16.33333333333337,25.5],[-66,25]],"lockSegments":{"1":true}}},"children":null,"linkMap":[]},{"x":171,"y":86,"rotation":0,"id":110,"uid":"com.gliffy.shape.basic.basic_v1.default.round_rectangle","width":50,"height":50,"lockAspectRatio":false,"lockShape":false,"order":45,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.round_rectangle.basic_v1","strokeWidth":1,"strokeColor":"#000000","fillColor":"#ffffff","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[],"linkMap":[]},{"x":233,"y":101,"rotation":0,"id":109,"uid":"com.gliffy.shape.basic.basic_v1.default.line","width":100,"height":100,"lockAspectRatio":false,"lockShape":false,"order":44,"graphic":{"type":"Line","Line":{"strokeWidth":1,"strokeColor":"#000000","fillColor":"none","dashStyle":null,"startArrow":0,"endArrow":1,"startArrowRotation":"auto","endArrowRotation":"auto","ortho":true,"interpolationType":"linear","cornerRadius":10,"controlPath":[[-62,-0.3553390593273775],[-77,-0.3553390593273775],[-77,-60],[-92,-60]],"lockSegments":{}}},"children":null,"constraints":{"constraints":[],"startConstraint":{"type":"StartPositionConstraint","StartPositionConstraint":{"nodeId":110,"px":1.1102230246251563e-16,"py":0.2928932188134525}},"endConstraint":{"type":"EndPositionConstraint","EndPositionConstraint":{"nodeId":104,"px":1,"py":0.5}}},"linkMap":[]},{"x":170,"y":33,"rotation":0,"id":100,"uid":"com.gliffy.shape.basic.basic_v1.default.line","width":100,"height":100,"lockAspectRatio":false,"lockShape":false,"order":43,"graphic":{"type":"Line","Line":{"strokeWidth":1,"strokeColor":"#000000","fillColor":"none","dashStyle":null,"startArrow":0,"endArrow":1,"startArrowRotation":"auto","endArrowRotation":"auto","ortho":false,"interpolationType":"linear","cornerRadius":null,"controlPath":[[1,-2.3553390593273775],[-29,-2.3553390593273775]],"lockSegments":{}}},"children":null,"linkMap":[]},{"x":90,"y":33,"rotation":0,"id":102,"uid":"com.gliffy.shape.basic.basic_v1.default.line","width":100,"height":100,"lockAspectRatio":false,"lockShape":false,"order":42,"graphic":{"type":"Line","Line":{"strokeWidth":1,"strokeColor":"#000000","fillColor":"none","dashStyle":null,"startArrow":0,"endArrow":1,"startArrowRotation":"auto","endArrowRotation":"auto","ortho":false,"interpolationType":"linear","cornerRadius":null,"controlPath":[[1,-2.3553390593273775],[-29,-2.3553390593273775]],"lockSegments":{}}},"children":null,"link
Map":[]},{"x":171,"y":16,"rotation":0,"id":103,"uid":"com.gliffy.shape.basic.basic_v1.default.round_rectangle","width":50,"height":50,"lockAspectRatio":false,"lockShape":false,"order":41,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.round_rectangle.basic_v1","strokeWidth":1,"strokeColor":"#000000","fillColor":"#ffffff","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[],"linkMap":[]},{"x":91,"y":16,"rotation":0,"id":104,"uid":"com.gliffy.shape.basic.basic_v1.default.round_rectangle","width":50,"height":50,"lockAspectRatio":false,"lockShape":false,"order":40,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.round_rectangle.basic_v1","strokeWidth":1,"strokeColor":"#000000","fillColor":"#ffffff","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[],"linkMap":[]},{"x":10,"y":14,"rotation":0,"id":106,"uid":"com.gliffy.shape.basic.basic_v1.default.round_rectangle","width":50,"height":50,"lockAspectRatio":false,"lockShape":false,"order":39,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.round_rectangle.basic_v1","strokeWidth":1,"strokeColor":"#000000","fillColor":"#93c47d","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[],"linkMap":[]},{"x":16,"y":74,"rotation":0,"id":107,"uid":"com.gliffy.shape.basic.basic_v1.default.text","width":80,"height":24,"lockAspectRatio":false,"lockShape":false,"order":38,"graphic":{"type":"Text","Text":{"tid":null,"valign":"middle","overflow":"none","vposition":"none","hposition":"none","html":"

genesis node\n

(\"root\")

","paddingLeft":2,"paddingRight":2,"paddingBottom":2,"paddingTop":2}},"children":null,"linkMap":[]},{"x":751,"y":80,"rotation":0,"id":67,"uid":"com.gliffy.shape.basic.basic_v1.default.text","width":68,"height":24,"lockAspectRatio":false,"lockShape":false,"order":28,"graphic":{"type":"Text","Text":{"tid":null,"valign":"middle","overflow":"none","vposition":"none","hposition":"none","html":"

new set of\n

tip nodes

","paddingLeft":2,"paddingRight":2,"paddingBottom":2,"paddingTop":2}},"children":null,"linkMap":[]},{"x":221,"y":130,"rotation":0,"id":57,"uid":"com.gliffy.shape.basic.basic_v1.default.text","width":150,"height":12,"lockAspectRatio":false,"lockShape":false,"order":14,"graphic":{"type":"Text","Text":{"tid":null,"valign":"middle","overflow":"none","vposition":"none","hposition":"none","html":"

old set of tip nodes

","paddingLeft":2,"paddingRight":2,"paddingBottom":2,"paddingTop":2}},"children":null,"linkMap":[]},{"x":161,"y":11,"rotation":0,"id":116,"uid":"com.gliffy.shape.basic.basic_v1.default.rectangle","width":71,"height":140,"lockAspectRatio":false,"lockShape":false,"order":1,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.rectangle.basic_v1","strokeWidth":1,"strokeColor":"#ea9999","fillColor":"#ea9999","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[],"linkMap":[]},{"x":751,"y":11,"rotation":0,"id":138,"uid":"com.gliffy.shape.basic.basic_v1.default.rectangle","width":71,"height":63,"lockAspectRatio":false,"lockShape":false,"order":0,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.rectangle.basic_v1","strokeWidth":1,"strokeColor":"#ea9999","fillColor":"#ea9999","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[],"linkMap":[]}],"background":"#FFFFFF","width":899,"height":151,"maxWidth":5000,"maxHeight":5000,"nodeIndex":141,"autoFit":true,"exportBorder":false,"gridOn":true,"snapToGrid":true,"drawingGuidesOn":true,"pageBreaksOn":false,"printGridOn":false,"printPaper":"LETTER","printShrinkToFit":false,"printPortrait":true,"shapeStyles":{"com.gliffy.shape.basic.basic_v1.default":{"fill":"#f4cccc","stroke":"#38761d","strokeWidth":1}},"lineStyles":{"global":{"stroke":"#000000","strokeWidth":1,"endArrow":1,"orthoMode":1}},"textStyles":{},"themeData":null}} -------------------------------------------------------------------------------- /ssb/app/drive.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # ssb/app/drive.py - front end for a logical file system over SSB 4 | # 2018-08-31 (c) 5 | 6 | import cmd 7 | from datetime import datetime 8 | from fnmatch import fnmatch 9 | import json 10 | import os 11 | import sys 12 | import traceback 13 | 14 | import ssb.adt.lfs 15 | import ssb.local.config 16 | import ssb.local.worm 17 | 18 | # --------------------------------------------------------------------------- 19 | 20 | version='2018-08-27' 21 | 22 | class SSB_DRV_REPL: 23 | 24 | def __init__(self, fs, stdout=None, prefetchBlob=None): 25 | self.fs = fs 26 | self.stdout = stdout if stdout else sys.stdout 27 | self.prefetchBlob = prefetchBlob 28 | 29 | def close(self): 30 | self.fs.close() 31 | self.fs._worm.flush() 32 | self.print("drive deleted") 33 | 34 | def print(self, *args): 35 | self.stdout.write(' '.join([str(a) for a in args]) + '\n') 36 | 37 | def cat(self, remote): 38 | remote = os.path.split(remote)[1] # FIXME: we should follow the path 39 | for dent in sorted(iter(self.fs.items()), key=lambda e: e['name']): 40 | if dent['name'] == remote and dent['type'] == 'bindF': 41 | if self.fs._worm.blobAvailable(dent['blobkey']): 42 | data = self.fs._worm.readBlob(dent['blobkey']) 43 | self.print(data.decode('utf8')) 44 | return 45 | # self.print("** content not available (yet)") 46 | if self.prefetchBlob: 47 | self.prefetchBlob(dent['blobkey']) 48 | self.print("** no such file, or content not available (yet)") 49 | 50 | def cd(self, path=None): 51 | if not path: 52 | path = '/' 53 | try: 54 | self.fs.cd(path) 55 | except ValueError: 56 | self.print("** no such directory") 57 | return 58 | self.print(self.fs.getcwd()) 59 | 60 | def get(self, remote, local=None): 61 | if not local: 62 | local = os.path.split(remote)[1] 63 | remote = os.path.split(remote)[1] # FIXME: we should follow the path 64 | for dent in 
sorted(iter(self.fs.items()), key=lambda e: e['name']): 65 | if dent['name'] == remote and dent['type'] == 'bindF': 66 | if self.fs._worm.blobAvailable(dent['blobkey']): 67 | data = self.fs._worm.readBlob(dent['blobkey']) 68 | with open(local, "wb") as f: 69 | f.write(data) 70 | return 71 | # self.print("** content not available (yet)") 72 | if self.prefetchBlob: 73 | self.prefetchBlob(dent['blobkey']) 74 | self.print("** no such file, or content not available (yet)") 75 | 76 | def ls(self, opt=None, glob=None): 77 | if opt is not None and opt[0] != '-': 78 | glob = opt 79 | opt = None 80 | opt = '-' if not opt else opt 81 | dol = doh = doi = do1 = False 82 | for c in opt[1:]: 83 | if c == 'l': dol = True 84 | if c == 'h': doh = True 85 | if c == 'i': doi = True 86 | lines = [] 87 | for dent in sorted(iter(self.fs.items()), key=lambda e: e['name']): 88 | q = r = '' 89 | s = dent['name'] 90 | if glob and not fnmatch(s, glob): 91 | continue 92 | if dol: 93 | if 'size' in dent: 94 | if doh: 95 | i = int(dent['size'].bit_length()/10) 96 | if i == 0: 97 | r = str(dent['size']) 98 | else: 99 | f = 1 << (10*i) 100 | r = "%d%s" % ((dent['size']+f-1)/f, ' KMGTP'[i]) 101 | else: 102 | r = str(dent['size']) 103 | r += ' ' + \ 104 | str(datetime.utcfromtimestamp(dent['timestamp']/1000))[:19] 105 | 106 | if dent['type'] == 'bindF': 107 | # test if referenced blob is locally available 108 | if self.fs._worm.blobAvailable(dent['blobkey']): 109 | q = '- ' 110 | else: 111 | q = '-?' 112 | elif dent['type'] == 'bindD': 113 | # test if referenced dir is locally available 114 | if self.fs._worm.readMsg(dent['dirref'][1]): 115 | q = 'd ' 116 | else: 117 | q = 'd?' 118 | else: 119 | q = 'X ' 120 | if doi: 121 | q = dent['this'][1] + ' ' + q 122 | 123 | # trigger proactive fetch of blobs 124 | if self.prefetchBlob and dent['type'] == 'bindF' and \ 125 | not self.fs._worm.blobAvailable(dent['blobkey']): 126 | self.prefetchBlob(dent['blobkey']) 127 | 128 | lines.append((q,r,s)) 129 | w = 0 130 | for l in lines: 131 | if len(l[1]) > w: 132 | w = len(l[1]) 133 | fmt = "%s %{0}s %s".format(w) 134 | for l in lines: 135 | self.print(fmt % l) 136 | 137 | def mkdir(self, path): 138 | try: 139 | self.fs.mkdir(path) 140 | except ValueError: 141 | self.print("** no such path") 142 | 143 | def put(self, local, remote=None): 144 | with open(local, 'rb') as f: 145 | data = f.read() 146 | key = self.fs._worm.writeBlob(data) 147 | if not remote: 148 | remote = os.path.split(local)[1] 149 | else: 150 | remote = os.path.split(remote)[1] # FIXME: we should follow the path 151 | self.fs.linkBlob(remote, len(data), key) 152 | 153 | def pwd(self): 154 | self.print(self.fs.getcwd()) 155 | 156 | def rename(self, glob): 157 | self.print("rename() not implemented") 158 | 159 | def rm(self, glob, bindkey=None): 160 | cnt = 0 161 | for dent in sorted(iter(self.fs.items()), key=lambda e: e['name']): 162 | n = dent['name'] 163 | if glob and not fnmatch(n, glob): 164 | continue 165 | if dent['type'] != 'bindF': 166 | continue 167 | if bindkey and bindkey != dent['this'][1]: 168 | continue 169 | self.fs.unlinkBlob(dent['this'][1]) 170 | cnt += 1 171 | if cnt == 0: 172 | self.print("** no such file") 173 | 174 | def rmdir(self, glob, bindkey=None): 175 | cnt = 0 176 | for dent in sorted(iter(self.fs.items()), key=lambda e: e['name']): 177 | n = dent['name'] 178 | if glob and not fnmatch(n, glob): 179 | continue 180 | if dent['type'] != 'bindD': 181 | continue 182 | if bindkey and bindkey != dent['this'][1]: 183 | continue 184 | try: 185 | 
self.fs.rmdir(dent['this'][1]) 186 | except OSError as e: 187 | self.print("**", e) 188 | cnt += 1 189 | if cnt == 0: 190 | self.print("** no such directory") 191 | def stat(self, opt=None, glob=None): 192 | if opt is not None and opt[0] != '-': 193 | glob = opt 194 | opt = None 195 | for dent in sorted(iter(self.fs.items()), key=lambda e: e['name']): 196 | if glob and not fnmatch(dent['name'], glob): 197 | continue 198 | dent['creator'] = dent['this'][0] 199 | dent['dentkey'] = dent['this'][1] 200 | del dent['this'] 201 | self.print(dent if opt and opt == '-1' \ 202 | else json.dumps(dent, indent=2)) 203 | 204 | def sync(self, arg=None): 205 | self.print("not implemented") 206 | 207 | def tree(self): 208 | self.print('.') 209 | try: 210 | self._tree('', self.fs._cwt.getBaseRef()) 211 | except: 212 | traceback.self.print_exc() 213 | 214 | def _tree(self, lev, dirKey): 215 | if len(lev) > 75: # protect against cycles in the fs 216 | self.print(lev + '...') 217 | return 218 | lst = sorted(self.fs.ls(dirKey), key=lambda e:e['name']) 219 | cnt = len(lst) 220 | for dent in lst: 221 | x = dent['name'] 222 | if dent['type'] == 'bindD': 223 | x += '/' 224 | cnt -= 1 225 | if cnt > 0: 226 | x = '|-- ' + x 227 | else: 228 | x = "'-- " + x 229 | self.print(lev + x) 230 | if dent['type'] == 'bindD': 231 | if cnt > 0: 232 | self._tree(lev + '| ', dent['dirref']) 233 | else: 234 | self._tree(lev + ' ', dent['dirref']) 235 | 236 | 237 | # --------------------------------------------------------------------------- 238 | 239 | class DRIVE_CMD(cmd.Cmd): 240 | 241 | intro = "Secure Scuttlebutt Drive client (v%s). Type ? for help" % version 242 | intro += "\nrunning in unencrypted mode" 243 | prompt = "ssb_drv> " 244 | 245 | def __init__(self, fs, prefetchBlob=None, stdout=None): 246 | if stdout: 247 | super().__init__(stdout=stdout) 248 | else: 249 | super().__init__() 250 | self.repl = SSB_DRV_REPL(fs, stdout, prefetchBlob) 251 | key = self.repl.fs._root.getBaseRef()[1] 252 | self.intro += "\n\n" + \ 253 | "drv=" + ssb.adt.lfs.uuid_from_key(self.repl.fs._worm, key) 254 | m = self.repl.fs._worm.readMsg(key) 255 | t = datetime.utcfromtimestamp(m['value']['timestamp']/1000) 256 | self.intro += " (created %s)" % str(t)[:19] 257 | self.intro += "\ncwd='%s'" % self.repl.fs.getcwd() 258 | 259 | def doit(self, method, arg): 260 | arg = arg.split() 261 | try: 262 | method(*arg) 263 | except TypeError: 264 | c = sys._getframe(1).f_code.co_name[3:] 265 | traceback.print_exc() 266 | self.stdout.write("*** argument error\n") 267 | self.do_help(c) 268 | except: 269 | traceback.print_exc() 270 | 271 | def do_cat(self, arg): 272 | 'cat remote-path ; display content of remote file' 273 | self.doit(self.repl.cat, arg) 274 | 275 | def do_cd(self, arg): 276 | 'cd path ; change current directory' 277 | self.doit(self.repl.cd, arg) 278 | 279 | def do_get(self, arg): 280 | 'get remote-path [local-path]' 281 | self.doit(self.repl.get, arg) 282 | 283 | def do_ls(self, arg): 284 | 'ls [-lhi] [glob] ; list directory (for names matching glob)' 285 | self.doit(self.repl.ls, arg) 286 | 287 | def do_mkdir(self, arg): 288 | 'mkdir path ; make directory' 289 | self.doit(self.repl.mkdir, arg) 290 | 291 | def do_put(self, arg): 292 | 'put local-path [remote-path]' 293 | self.doit(self.repl.put, arg) 294 | 295 | def do_pwd(self, arg): 296 | 'pwd ; print working directory' 297 | self.doit(self.repl.pwd, arg) 298 | 299 | def do_exit(self, arg): 300 | 'exit ; end program' 301 | return True 302 | 303 | def do_rm(self, arg): 304 | 'rm glob [key] ; 
remove file(s) matching the glob pattern' 305 | self.doit(self.repl.rm, arg) 306 | 307 | def do_rmdir(self, arg): 308 | 'rmdir path [key] ; remove directory' 309 | self.doit(self.repl.rmdir, arg) 310 | 311 | def do_stat(self, arg): 312 | 'stat [-1] [glob] ; display file status (for names matching glob)' 313 | self.doit(self.repl.stat, arg) 314 | 315 | def do_sync(self, arg): 316 | 'sync ; download all referenced blobs' 317 | self.doit(self.repl.sync, arg) 318 | 319 | def do_tree(self, arg): 320 | 'tree ; list subtree starting from current directory' 321 | self.doit(self.repl.tree, arg) 322 | 323 | # def precmd(self, line): 324 | # return line 325 | 326 | def emptyline(self): 327 | pass 328 | 329 | def default(self, arg): 330 | if arg == 'EOF': 331 | self.stdout.write('exit\n') 332 | return True 333 | self.stdout.write("\nUnknown command '%s'\n" % arg) 334 | self.do_help('') 335 | 336 | # --------------------------------------------------------------------------- 337 | 338 | if __name__ == '__main__': 339 | 340 | import argparse 341 | 342 | parser = argparse.ArgumentParser(description='SSB-Drive client') 343 | parser.add_argument('uuid', type=str, nargs='?', 344 | help="ssb-drive's uuid (default is youngest drive") 345 | parser.add_argument('-user', metavar='USERNAME', type=str, dest='username', 346 | help='username (default is ~/.ssb user)') 347 | parser.add_argument('-list', action='store_true', 348 | help='list all available drives') 349 | parser.add_argument('-new', action='store_true', 350 | help='create new drive ') 351 | args = parser.parse_args() 352 | 353 | p = ssb.local.worm.is_locked(args.username) 354 | if p: 355 | raise Exception("log file is locked by process %d (%s)" % \ 356 | (p.pid, p.name())) 357 | 358 | secr = ssb.local.config.SSB_SECRET(args.username) 359 | wa = ssb.local.worm.SSB_WORM(args.username, secr) 360 | if args.uuid: 361 | ref = ssb.adt.lfs.get_lfs_by_uuid(wa, args.uuid) 362 | if not ref: 363 | print("** no such drive") 364 | sys.exit(0) 365 | fs = ssb.adt.lfs.SSB_LFS(wa, ref) 366 | else: 367 | if args.list: 368 | print("Available SSB drives:") 369 | for ref in ssb.adt.lfs.find_lfs_root_iter(wa): 370 | m = wa.readMsg(ref[1]) 371 | t = datetime.utcfromtimestamp(m['value']['timestamp']/1000) 372 | u = ssb.adt.lfs.uuid_from_key(wa, ref[1]) 373 | print(" uuid=%s (%s)" % (u, str(t)[:19])) 374 | sys.exit(0) 375 | if args.new: 376 | fs = ssb.adt.lfs.SSB_LFS(wa) 377 | print("new drive created, uuid=" + fs.uuid()) 378 | sys.exit(0) 379 | 380 | myroot = ssb.adt.lfs.find_lfs_mostRecent(wa) 381 | if not myroot: 382 | print("** no drive found, aborting") 383 | sys.exit(0) 384 | 385 | fs = ssb.adt.lfs.SSB_LFS(wa, myroot) 386 | wa.flush() 387 | 388 | d = DRIVE_CMD(fs) 389 | 390 | try: 391 | d.cmdloop() 392 | except KeyboardInterrupt: 393 | print('^C') 394 | wa.flush() 395 | 396 | # eof 397 | -------------------------------------------------------------------------------- /doc/tangle.md: -------------------------------------------------------------------------------- 1 | # The Tangle data structure and its use in _SSB Drive_ 2 | 3 | (incomplete draft 2018-09-04) 4 | 5 | This documents introduces the tangle concept, its implementation for 6 | Secure Scuttlebutt and how the _SSB Drive_ file system uses tangles to 7 | represent drives and directories. The Python code for SSB tangles as 8 | well as for the _SSB Drive_ application can be found 9 | [here]([https://github.com/cn-uofbasel/ssbdrv). 
10 | 11 | Note (especially if you are a reader with a background in 12 | [IOTA](https://www.iota.org/)'s tangle) that the tangle concept 13 | discussed in this paper is not about a cryptocurrency and how its 14 | algorithms can defend it against attackers. Instead, this paper is 15 | about "cooperative tangles" which are operated among friends who have 16 | the technical means to shut out attackers, as we discuss in the 17 | "conformance rules" section. 18 | 19 | 20 | --- 21 | ## 1. Tangles 22 | 23 | A tangle is an append-only directed acyclic graph (DAG) with 24 | special rules for naming old nodes and for appending new ones. 25 | 26 | A __node__ of a tangle is a data record which has a __name__; this 27 | name is computed by hashing the binary representation of the 28 | record. Each data record has a special __ancestors field__ 29 | consisting of a set of names referring to other nodes of the 30 | tangle. These names in the ancestors field are the graph's __edges__. 31 | 32 | ![tangle-1](tangle-1.png) 33 | 34 | The tangle has a dynamic set of nodes called __tips__ which contains 35 | the newest nodes that were added to the tangle. Tips can be seen as 36 | the "frontier of growth", i.e. the set of nodes to which future nodes 37 | will refer. Tip nodes do not automatically become non-tip 38 | nodes. Instead, their status depends on future nodes referring to them 39 | and "approving" that they become part of the main tangle. Several 40 | conformance rules must be observed for a node to be approved, as we 41 | will discuss later. 42 | 43 | ### 1.1 Tangles as "discussion threads" 44 | 45 | Tangles offer a simple way to share information among SSB peers with 46 | minimal coordination effort. Think about a discussion thread in social 47 | media: The idea is that all parties participating in a discussion 48 | should append their contributions to "the end of the discussion 49 | thread". Ideally, this forms a single chain of references from the 50 | latest entry down to the "genesis node" which marks the discussion 51 | start. This is easy to implement with a central server. But how should 52 | that single thread emerge from fully decentralized operations (and 53 | without installing a heavyweight blockchain)? 54 | 55 | ![tangle-2](tangle-2.png) 56 | 57 | If two parties extend a discussion thread concurrently, the question 58 | arises of which of the two extensions is the valid one, or which came 59 | first. With tangles, the answer is _to not decide_ but to record this 60 | event: the next tangle extension will reference both of these tips 61 | and, hopefully, bring the forking thread back to a single-tipped 62 | thread. There is a danger that several parties observe a fork event 63 | and, by attempting to heal it, produce another fork. But with careful 64 | (and randomized) timing it is possible to reduce the chances of 65 | repeated fork healing. 66 | 67 | In case a contributor to a discussion thread is offline for some time, 68 | that party will extend the discussion locally and graft its extension 69 | chain to the most recent node at the time of going offline. When coming 70 | back online, the absent party will now see that it created a "side 71 | chain", with the real thread having been extended by many new entries, 72 | and will heal that fork by creating a new tip that references the tip 73 | of the main thread and the one of its (accumulated) side thread.
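To make this weaving rule concrete, here is a minimal, self-contained Python sketch (illustrative only -- the class and method names are made up and do not correspond to the repository's `ssb.adt.tangle` module): a node's name is the hash of its serialized record, and every appended node references all currently known tips in its `ancestors` field, which is how a fork like the one just described gets healed.

```python
import hashlib
import json

class ToyTangle:
    """Illustrative in-memory tangle: nodes are named by their hash and
    woven together via an 'ancestors' field pointing at the current tips."""

    def __init__(self):
        self.nodes = {}    # name -> record
        self.tips = []     # names not yet referenced by a newer node

    def append(self, content, root=None):
        record = {
            'root': root,                      # reference to the genesis node (tangle ID)
            'ancestors': sorted(self.tips),    # reference all currently known tips
            'content': content,
        }
        blob = json.dumps(record, sort_keys=True).encode('utf8')
        name = hashlib.sha256(blob).hexdigest()   # node name = hash of the record
        self.nodes[name] = record
        self.tips = [name]                     # the new node is, for now, the only tip
        return name

    def add_remote_node(self, name, record):
        # a concurrently created node arrives via gossip: the tangle now has
        # two (or more) tips, and the next append() will reference all of
        # them, healing the fork
        self.nodes[name] = record
        self.tips.append(name)
```

A real implementation would persist the nodes in the append-only SSB logs and apply the conformance rules discussed below before accepting a remote node; the sketch only shows the naming and tip-referencing mechanics.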
74 | 75 | ![tangle-3](tangle-3.png) 76 | 77 | At this point in time, the other participants in the discussion thread 78 | will ask themselves questions, for example whether the absent 79 | participant really was absent or just pretends not to have known how 80 | the main thread evolved. This is where the conformance rules apply 81 | which regulate how a new node becomes an accepted tip (or is shunned by 82 | the others forever). 83 | 84 | ### 1.2 A tangle for keeping a (virtual) log 85 | 86 | One property and problem of the discussion thread example above is that 87 | there may be multiple ways to arrange the tangle's nodes in a linear 88 | sequence of contributions: desirably, all readers of a discussion 89 | thread should see the same order of messages. 90 | 91 | Assume for a moment that the thread has a single tip, but somewhere in 92 | the past there was a temporary fork event. In this case there are two 93 | possible traversals of the directed acyclic graph (from the tip to the 94 | starting node). If there were multiple forks, the number of possible 95 | traversal paths increases rapidly and seems difficult to oversee and 96 | expensive to compute. In general, a _topological sort_ of the DAG is 97 | able to find all valid traversals of such a graph: The question then 98 | boils down to agreeing on one of these traversal variants. 99 | 100 | By introducing a tie-breaking rule which all readers have to adopt, it 101 | is possible to select one of the many possible graph traversals and to 102 | linearize the tangle's entries, leading to a distributed "log of 103 | events" that looks the same for all readers. Whether such a __virtual 104 | log__, which gives preference to _one_ sequence of messages over others, 105 | corresponds with the absolute timing of events in reality, is 106 | impossible to say - but the sequence is guaranteed to respect all 107 | constraints imposed by causality (nodes always refer to older nodes, 108 | hence observe a "happened-after" relation). 109 | 110 | 111 | ### 1.3 Storing a tangle in SSB's individual logs, validity checks 112 | 113 | SSB's point of declaring the log of each participant as the source of 114 | truth (and rejecting any central storage component and authority) 115 | raises the question of how to store a tangle, which must be seen as a 116 | single higher-level data structure which has integrity requirements 117 | (we will come back to this immediately). The storing side is simple: 118 | because the names of tangle nodes are location-independent, we can 119 | just append the nodes to each creator's log: 120 | 121 | ![tangle-5](tangle-5.png) 122 | 123 | Regarding the integrity of the tangle, we start the discussion with a 124 | simplistic approach of just storing the `ancestors` fields in the 125 | personal logs. 126 | 127 | ```txt 128 | tangle_record { 129 | ancestors: [ ref1, ... refN ], 130 | content: any 131 | } 132 | ``` 133 | 134 | A problem is that either maliciously or by software error, one or more 135 | references to old nodes can point to something that never was part of the 136 | tangle. Detecting such stray edges creates a burden for the reader 137 | software which must prepare for such incidents. This effort can be 138 | mitigated if each data record also contains a self-declaration about 139 | which tangle it belongs to, permitting a quick check whether the 140 | reader software got "off-tangle". The declaration can be implemented 141 | by a `root` field which references the genesis block, for example.
The 142 | main point is that the `root` reference holds some ID of the tangle 143 | which is globally unique -- hashing is a way to produce such IDs (and we 144 | require that the genesis node contains a nonce). 145 | 146 | ```txt 147 | tangle_record { 148 | root: ref, 149 | ancestors: [ ref1, ... refN ], 150 | content: any 151 | } 152 | ``` 153 | 154 | ![tangle-4](tangle-4.png) 155 | 156 | It is still possible that contributors add a wrong declaration into a 157 | data record, which is why one has to check at add-time that this is not 158 | the case. The idea here is that a new node, pretending to be a valid 159 | tip node, must be vetted by others _before_ it becomes part of the 160 | tangle, which means that the next contributor must check, before 161 | referencing such a tip node in the fresh data record, that the 162 | information of the pretending tip node (PTN) is correct. These checks 163 | include: 164 | 165 | * Is the peer that produced and signed the PTN trustworthy? (a blocked peer's PTN will automatically not be visible) 166 | * Level 0: Does the PTN have the same tangle id? 167 | * Level 1: Do all nodes referenced in the PTN's `ancestors` field also have the same tangle id? 168 | * Recursive: Instead of answering the trustworthiness question, one could follow each of the PTN's ancestor links and verify that they all end in the same genesis node. 169 | 170 | As we will discuss later, there will be additional checks that depend 171 | on the tangle's use case. What we have seen so far is a base "tangle layer" 172 | which answers the question of where the tangle is stored (namely in 173 | the individual logs) and what procedures have to be put in place to 174 | extend and traverse a tangle. 175 | 176 | 177 | ### 1.4 Comparing tangle entries with SSB's POST entries 178 | 179 | _This section needs review and commenting from people with more 180 | insights into the design intentions and implementation of 181 | Scuttlebutt._ 182 | 183 | In SSB, a `post` entry carries text that contributes to a discussion 184 | thread. At its core it has the following format, taken from the old 185 | [Scuttlebot](https://scuttlebot.io/docs/message-types/post.html) site: 186 | 187 | ```txt 188 | post_msg = { 189 | previous: MsgLink, // chaining of personal log entries 190 | content: { 191 | type: "post", 192 | root: MsgLink?, 193 | branch: [ MsgLink* ], 194 | text: String 195 | } 196 | } 197 | ``` 198 | 199 | At first sight, the content of this log entry corresponds to the 200 | tangle's data structure described above, `branch` potentially 201 | corresponding to `ancestors`. However, the rules about how to fill in 202 | the `branch` field are vague, although we recognize some 203 | conformance rules: 204 | 205 | > `root` and `branch` are for replies. `root` should point to the topmost 206 | > message in the thread. `branch` should point to the message or set of 207 | > messages in the thread which is being replied to. 208 | > 209 | > In the first reply of a thread, `root` === `branch`, and both should 210 | > be included. `root` and `branch` should only point to `type: post` messages. 211 | 212 | These SSB rules conflate two purposes: (i) tangle weaving vs (ii) 213 | back-referencing old messages.
My recommendation is to separate the 214 | reply information from the tangle-forming aspect: instead of `branch` 215 | there should be the `ancestors` field which strictly points to some 216 | recent tip nodes, and a separate discussion-specific `reply` field 217 | that points to one or more tangle entries which can be quite old at 218 | the time of posting. The following figure shows such a configuration: 219 | 220 | ![tangle-6](tangle-6.png) 221 | 222 | In other words, "discussion thread" is a specific use case of a tangle 223 | where the `reply` field matters and is used. But other use cases will 224 | have other fields which matter to them. Regardless of the use cases 225 | there would be only one underlying way of weaving the tangle, namely with 226 | the `root` and `ancestors` fields. 227 | 228 | It is noteworthy that other applications in SSB do 229 | __not__ use any chaining via tangles or otherwise, except for the 230 | inclusion in the creator's personal log. For example, `about` records 231 | are used for implementing event-participation where peers declare that 232 | they will be attendees. However, based on the available information in 233 | the logs, it is not possible to deduce strong happened-before 234 | relations among `about` events, i.e. who committed first. Only the log 235 | entries' timestamps can be used as a hint for a possible ordering, but 236 | this information is prone to misrepresentation. (At the risk of adding 237 | to the confusion, we should note that `post` messages now also have a 238 | `reply` field containing MsgLink-ID pairs. Some of these entries, but 239 | not all of them, repeat what is already referenced in the `branch` 240 | field.) 241 | 242 | In our _SSB Drive_ design we have therefore clearly separated the 243 | tangle weaving from the fields that are relevant for the use case 244 | alone. In our design, a tangle record is an SSB message of type 245 | `tangle`; in its `content` field it has a second, private `type` and 246 | `content` field, which are reserved for tangle-using applications. For 247 | example, clarifying the above viewpoints with the use case of a 248 | discussion thread, a tangle-based `POST` entry in the personal log 249 | would look like this (contrast this with the `post_msg` layout above): 250 | 251 | ```txt 252 | post_over_tangle_msg = { 253 | previous: MsgLink, // chaining of personal log entries 254 | type: "tangle", 255 | content: { 256 | root: MsgLink?, // generic tangle weaving 257 | ancestors: [ MsgLink* ], // generic tangle weaving 258 | type: "chat:post", // this selects the tangle use case and subtype 259 | content: { // use case-specific content 260 | reply: [ MsgLink* ], 261 | channel: ..., 262 | mentions: ..., 263 | recps: ..., 264 | } 265 | } 266 | } 267 | ``` 268 | 269 | --- 270 | ## 2. From Tangles to Key-Value Stores 271 | 272 | _some text here_ 273 | 274 | 275 | ### 2.1 Recording Key-Value bindings 276 | 277 | ### 2.2 Observed-Remove Sets (OR-Sets) 278 | 279 | * Original paper: [Bieniusa et al. in arXiv](https://arxiv.org/pdf/1210.3368.pdf) 280 | * Fig 2 from above paper: ![OR-Set algorithm](OR-Set-algo.png) 281 | * Implementing OR-Sets in tangles: using node names as 'unique tags' 282 | 283 | ### 2.3 Optimizations: Snapshots 284 | 285 | 286 | --- 287 | ## 3.
A logical file system based on OR-Sets 288 | 289 | * drives, directories, files and symlinks 290 | * OR-sets for directories 291 | * list of all file system actions (and how they map to tangle entries) 292 | * atomicity, conformance with hierarchical file system names 293 | * outlook: mount and umount 294 | * outlook: time machine 295 | * outlook: encrypted tangles for encrypted drives 296 | 297 | --- 298 | ## 4. Conformance rules for extending a tangle 299 | 300 | * content of tangle node vs content of hosted application, see Sect 1.4 301 | * well-formed tangle node, conformance 302 | * well-formed app content, conformance 303 | * coping with malicious tangle extensions 304 | * the danger of "skinny tangles" and the need for broad node validation. 305 | 306 | --- 307 | ## 5. Collecting and disseminating the set of tips 308 | 309 | * "Set of tip nodes" is not sharp, includes heuristics. Can the set of tips be extracted by all peers from local log copies, effectively and in a meaningful way, leading to tangle convergence? 310 | * how to protect against "bushy tangles" (with an arbitrarily large set of tips)? 311 | 312 | --- 313 | ## 6. File system quirks and anomalies in SSB Drive due to gossip lag and blocked peers 314 | 315 | _some text here_ 316 | 317 | --- 318 | 319 | ## 7. Other use cases for tangles (than _SSB Drive_) 320 | 321 | Tangles serve as the basis for a "virtual log" abstract data type 322 | (ADT), as described in this paper. Virtual logs can be used as a 323 | building block for applications, relieving them from having to think 324 | about how to implement that base data structure; the same holds for 325 | key-value stores (maps): 326 | 327 | ```txt 328 | .--------------------------------. 329 | | SSB apps | 330 | +--------------------------+ | 331 | | virt_log | key_val_store | | 332 | +--------------------------+--+ | 333 | | tangles | | 334 | +-----------------------------+--+ 335 | | real logs | 336 | `--------------------------------' 337 | ``` 338 | 339 | This is the API of the virtual log ADT: 340 | 341 | ```txt 342 | class Virtual_Log(): 343 | def __init__(root_ref=None) // create new log if ref is None 344 | def get_log_ID() --> root ref 345 | def append(data) --> node ref 346 | def items_after(ref=None) --> iterator 347 | def items_before(ref=None) --> iterator 348 | ``` 349 | 350 | which is quite similar to the API of the _real_ log kept by individual 351 | peers: 352 | 353 | ```txt 354 | class SSB_Log(): 355 | def __init__(filename) 356 | def get_key_pair() // public key is log's ID 357 | def append(msg) -> ref 358 | def items_forward(ref=None, id=None) --> iterator // increasing seq numbers 359 | def items_backwards(ref=None, id=None) --> iterator // follow the hash chain 360 | ``` 361 | 362 | Note that real logs are a mix of messages from all friends and from 363 | the peer itself. The above API permits filtering the messages by 364 | author (the extra `id` parameter in the `items_*` methods), but this 365 | does not help for cross-friend data structures. Moreover, SSB 366 | applications must validate the messages for integrity at the 367 | data-structure level. The virtual log and map abstractions relieve apps from 368 | having to implement these tasks over and over again.
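As an illustration only (the names are hypothetical and do not match the repository's actual modules), the virtual-log ADT above could be layered over an in-memory tangle roughly as follows, with `items_after()` producing the deterministic linearization from Section 1.2 by visiting ancestors before descendants and breaking ties by node name:

```python
import hashlib
import json

class VirtualLog:
    """Sketch of the virtual-log ADT over an in-memory tangle (illustrative only)."""

    def __init__(self, root_ref=None):
        self.nodes = {}              # node ref -> record (a real version would read the SSB log)
        self.tips = []
        self.root = root_ref
        if root_ref is None:         # create a new virtual log: append a genesis node
            self.root = self.append({'use': 'virtual_log'})

    def get_log_ID(self):
        return self.root

    def append(self, data):
        rec = {'root': self.root, 'ancestors': sorted(self.tips), 'content': data}
        ref = hashlib.sha256(json.dumps(rec, sort_keys=True).encode('utf8')).hexdigest()
        self.nodes[ref] = rec
        self.tips = [ref]
        return ref

    def items_after(self, ref=None):
        # deterministic linearization: ancestors are visited before descendants,
        # ties are broken by sorting node names (the tie-breaking rule of Sect. 1.2)
        order, seen = [], set()

        def visit(r):
            if r in seen or r not in self.nodes:
                return
            seen.add(r)
            for a in sorted(self.nodes[r]['ancestors']):
                visit(a)
            order.append(r)

        for tip in sorted(self.tips):
            visit(tip)
        start = order.index(ref) + 1 if ref in order else 0
        return iter(order[start:])
```

A production version would append and read the tangle nodes through the real `SSB_Log` instead of a dict, and would apply the conformance checks of Section 4 before admitting a node to the traversal.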
369 | 370 | As a side note: It is worth thinking about the "conformance rules" for 371 | real logs in the same way as we discussed it for tangles in this 372 | paper: Real logs only accept new messages if their signature is valid, 373 | and in case of entries appended by the owner of the real log, the 374 | sequence numbers must be monotonically increasing. 375 | 376 | ## Virtual Logs for SSB applications 377 | 378 | Here is a tentative list of SSB applications which could benefit from 379 | virtual logs and key-value maps, both based on tangles: 380 | 381 | * SSB Drive -- the content of this paper, uses a key-value map for each directory 382 | 383 | * Encrypted chat rooms: requires two tangles and one encrypted DEK (data encryption key) 384 | * one tangle for a key-value store, serving as a directory of members. This maps the user ID to a wrapped KEK (key encryption key - this KEK is used to decrypt the room's DEK which is used to decrypt the individual chat messages) 385 | * the second tangle serves as a log of all encrypted text contributions 386 | * above design is simplistic: 387 | * no forward secrecy, key rotation needs more effort, see SIGNAL 388 | * not privacy-preserving (member directory is world-readable) 389 | * needs more care if roles are needed (adding or removing members, closing the room etc) 390 | 391 | * votes 392 | 393 | * collaborative event scheduling (doodle) 394 | 395 | * collaborative text editor 396 | 397 | * key sharding? (DarkCrystal) 398 | 399 | --- 400 | -------------------------------------------------------------------------------- /ssb/local/worm.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # ssb/local/worm.py 4 | 5 | import base64 6 | import copy 7 | import json 8 | import hashlib 9 | import os 10 | import sys 11 | import time 12 | 13 | from ssb.local.config import verify_signature, SSB_SECRET 14 | from ssb.local.util import username2dir, is_locked, id2bytes 15 | 16 | # --------------------------------------------------------------------------- 17 | 18 | def formatMsg(prev, seq, auth, ts, hash, cont, sign): 19 | # returns SSB-compliant JSON string, cont still is a Python val 20 | if type(cont) == str: 21 | cont = json.dumps(cont, ensure_ascii=False) 22 | else: 23 | cont = json.dumps(cont, indent=2, ensure_ascii=False) 24 | cont = '\n '.join(cont.split('\n')) 25 | # print(cont) 26 | if not prev: 27 | jmsg = '{\n "previous": null,' 28 | else: 29 | jmsg = '{\n "previous": "%s",' % prev 30 | jmsg += """ 31 | "author": "%s", 32 | "sequence": %d, 33 | "timestamp": %d, 34 | "hash": "%s", 35 | "content": %s""" % (auth, seq, ts, hash, cont) 36 | if sign: 37 | jmsg = jmsg + ',\n "signature": "%s"\n}' % sign 38 | else: 39 | jmsg = jmsg + '\n}' 40 | return jmsg 41 | 42 | 43 | def _UInt32BE(buf): 44 | return int.from_bytes(buf, byteorder='big', signed=False) 45 | 46 | def _readUInt32BE(f): 47 | return _UInt32BE(f.read(4)) 48 | 49 | def _writeUInt32BE(f, val): 50 | return f.write(val.to_bytes(4, byteorder='big')) 51 | 52 | def _hthash(key): 53 | key = key[1:7] + '==' 54 | return _UInt32BE(base64.b64decode(key)) 55 | 56 | def _seq2key(key, seq): 57 | data = hashlib.sha1( (str(seq)+key).encode('utf8') ).digest() 58 | return '_' + base64.b64encode(data[:8]).decode('ascii') 59 | 60 | # --------------------------------------------------------------------------- 61 | 62 | class SSB_WORM_INDEX: 63 | 64 | def __init__(self, fname, readonly=False): 65 | # print('worm index is', fname) 66 | self._fname = 
fname 67 | if not os.path.isfile(self._fname): 68 | if readonly: 69 | raise Exception("no file", fname) 70 | with open(self._fname, 'wb') as ndx: 71 | _writeUInt32BE(ndx, 2) # vers 72 | _writeUInt32BE(ndx, 0) # seq? 73 | slots = 64*1024 74 | _writeUInt32BE(ndx, slots) 75 | _writeUInt32BE(ndx, 0) # cnt 76 | ndx.write(bytes(4*slots)) 77 | self._ndxTables = [] 78 | self._ndxDirty = False 79 | self._count = 0 80 | 81 | def load_from_disk(self): 82 | # read index table into memory 83 | self._ndxTables = [] 84 | self._count = 0 85 | with open(self._fname, 'rb') as ndx: 86 | self._ndxHdr = ndx.read(8) 87 | while True: 88 | slots = _readUInt32BE(ndx) 89 | if slots == 0: 90 | break 91 | cnt = _readUInt32BE(ndx) 92 | # print(slots, cnt) 93 | tbl = ndx.read(slots * 4) 94 | self._ndxTables.append( (bytearray(tbl),slots,cnt) ) 95 | self._count += cnt 96 | self._ndxDirty = False 97 | 98 | def save_to_disk(self): 99 | # write back changed hash table (keys.ht) 100 | with open(self._fname, 'wb') as ndx: 101 | ndx.write(self._ndxHdr) 102 | for (tbl, slots, cnt) in self._ndxTables: 103 | _writeUInt32BE(ndx, slots) 104 | _writeUInt32BE(ndx, cnt) 105 | ndx.write(tbl) 106 | self._ndxDirty = False 107 | 108 | def add(self, key, offs): 109 | # add 'key at offs' to the hash table (key is a string), flag as dirty 110 | (tbl,slots,cnt) = self._ndxTables[-1] 111 | # append new hashtable if current table is full 112 | if cnt >= 0.5*slots: 113 | slots *= 2 114 | cnt = 0 115 | tbl = bytearray(4*slots) 116 | self._ndxTables.append( (tbl, slots, cnt) ) 117 | # find free ht entry 118 | pos = _hthash(key) % slots 119 | while True: 120 | pos1 = pos+1 121 | val = _UInt32BE(tbl[pos*4:pos1*4]) 122 | if val == 0: 123 | tbl[pos*4:pos1*4] = (offs+1).to_bytes(4, byteorder='big') 124 | self._ndxTables[-1] = (tbl,slots,cnt+1) 125 | self._ndxDirty = True 126 | self._count += 1 127 | return key 128 | pos = pos1 % slots 129 | raise Exception('internal error in hash table') 130 | 131 | def offsets(self, key): 132 | return SSB_WORM_INDEX_ITER(self._ndxTables, key) 133 | 134 | def flush(self): 135 | if not self._ndxDirty: 136 | return 137 | self.save_to_disk() 138 | 139 | class SSB_WORM_INDEX_ITER(): 140 | 141 | def __init__(self, ndxTables, key): 142 | self.h = _hthash(key) 143 | self.tlst = copy.copy(ndxTables) 144 | self.tbl, self.slots,_ = self.tlst.pop() 145 | self.pos = self.h % self.slots 146 | 147 | def __iter__(self): 148 | return self 149 | 150 | def __next__(self): 151 | while True: 152 | pos1 = self.pos + 1 153 | offs = _UInt32BE(self.tbl[self.pos*4:pos1*4]) 154 | self.pos = pos1 % self.slots 155 | if offs != 0: 156 | return offs-1 157 | if len(self.tlst) == 0: 158 | break 159 | self.tbl, self.slots,_ = self.tlst.pop() 160 | self.pos = self.h % self.slots 161 | raise StopIteration 162 | 163 | # --------------------------------------------------------------------------- 164 | 165 | class SSB_WORM: 166 | 167 | def __init__(self, username, secret, readonly = False): 168 | self._secr = secret 169 | self.id = self._secr.id 170 | self._on_extend = None 171 | dir = username2dir(username) 172 | self._blobDname = os.path.join(dir, 'blobs', 'sha256') 173 | if not os.path.isdir(self._blobDname): 174 | if readonly: 175 | raise Exception("no blob directory") 176 | os.makedirs(self._blobDname) 177 | self._logDname = os.path.join(dir, 'flume') 178 | if not os.path.isdir(self._logDname): 179 | if readonly: 180 | raise Exception("no flume directory") 181 | os.makedirs(self._logDname) 182 | self._logFname = os.path.join(self._logDname, 
'log.offset') 183 | # print('worm log file is', self._logFname) 184 | if not os.path.isfile(self._logFname): 185 | if readonly: 186 | raise Exception("no log.offset file") 187 | with open(self._logFname, "wb") as f: 188 | f.write(bytes(0)) 189 | self._readonly = readonly; 190 | self._log = open(self._logFname, 'rb' if readonly else 'r+b') 191 | 192 | self._keysHT = SSB_WORM_INDEX(os.path.join(self._logDname, 'keys.ht'), 193 | readonly) 194 | self._keysHT.load_from_disk() # loadKeysHT() 195 | if self._keysHT._count == 0: 196 | self._reindexKeysHT() 197 | 198 | self._seqsHT = SSB_WORM_INDEX(os.path.join(self._logDname, 'seqs.ht'), 199 | readonly) 200 | self._seqsHT.load_from_disk() # loadSeqsHT() 201 | if self._seqsHT._count == 0: 202 | self._reindexSeqsHT() 203 | 204 | self._lastFname = os.path.join(self._logDname, 'last.json') 205 | if not os.path.isfile(self._lastFname): 206 | self._reindexLast() 207 | with open(self._lastFname, "w") as f: 208 | json.dump(self._last, f) 209 | else: 210 | with open(self._lastFname, "rb") as f: 211 | self._last = json.load(f) 212 | 213 | # read latest (msgId,seqNo) from the log 214 | # self._maxSeq = self._getMaxSeq(self.id) 215 | 216 | def _reindexKeysHT(self): 217 | # print("reindexing") 218 | self._log.seek(0, os.SEEK_END) 219 | offs = self._log.tell() - 4 220 | while offs > 3: 221 | self._log.seek(offs - 4, os.SEEK_SET) 222 | sz = _readUInt32BE(self._log) 223 | self._log.seek(-4 - sz, os.SEEK_CUR) 224 | m = self._log.read(sz) 225 | offs -= sz + 12 226 | m = json.loads(m) 227 | self._keysHT.add(m['key'], offs+4) 228 | 229 | def _reindexSeqsHT(self): 230 | # print("reindexing") 231 | self._log.seek(0, os.SEEK_END) 232 | offs = self._log.tell() - 4 233 | while offs > 3: 234 | self._log.seek(offs - 4, os.SEEK_SET) 235 | sz = _readUInt32BE(self._log) 236 | self._log.seek(-4 - sz, os.SEEK_CUR) 237 | m = self._log.read(sz) 238 | offs -= sz + 12 239 | v = json.loads(m)['value'] 240 | self._seqsHT.add(_seq2key(v['author'], v['sequence']), offs+4) 241 | 242 | def _reindexLast(self): 243 | # print("reindexing") 244 | ts = 0 245 | self._last = { 246 | 'version': 1, 247 | 'value': {}, 248 | 'seq': 0 249 | } 250 | self._log.seek(0, os.SEEK_END) 251 | while self._log.tell() > 8: 252 | self._log.seek(-8, os.SEEK_CUR) 253 | sz = _readUInt32BE(self._log) 254 | self._log.seek(-4 - sz, os.SEEK_CUR) 255 | msg = self._log.read(sz) 256 | if msg is None: 257 | break 258 | msg = json.loads(msg.decode('utf8')) 259 | a = msg['value']['author'] 260 | if a in self._last['value']: 261 | r = self._last['value'][a] 262 | else: 263 | r = { 'sequence': 0 } 264 | self._last['value'][a] = r 265 | if r['sequence'] < msg['value']['sequence']: 266 | r['sequence'] = msg['value']['sequence'] 267 | r['id'] = msg['key'] 268 | r['ts'] = ts 269 | self._log.seek(-4 - sz, os.SEEK_CUR) 270 | 271 | def __iter__(self): 272 | return SSB_WORM_ITER(self) 273 | 274 | def _getMaxSeq(self, id=None): 275 | if not id: 276 | id = self.id 277 | if not id in self._last['value']: 278 | return (None, 0) 279 | r = self._last['value'][id] 280 | return (r['id'], r['sequence']) 281 | 282 | # search the log backwards for this author's newest message 283 | # id = self._key.id 284 | self._log.seek(0, os.SEEK_END) 285 | if self._log.tell() != 0: 286 | self._log.seek(-4, os.SEEK_END) 287 | while True: 288 | self._log.seek(-4, os.SEEK_CUR) 289 | sz = _readUInt32BE(self._log) 290 | self._log.seek(-4 - sz, os.SEEK_CUR) 291 | msg = self._log.read(sz) 292 | if msg is None: 293 | break 294 | msg = json.loads(msg.decode('utf8')) 
295 | if msg['value']['author'] == id: 296 | return (msg['key'], msg['value']['sequence']) 297 | self._log.seek(-8 - sz, os.SEEK_CUR) 298 | return (None, 0) 299 | 300 | def _updateMaxSeq(self, id, key, seq): 301 | ts = 0 302 | self._last['value'][id] = { 303 | 'sequence': seq, 304 | 'id': key, 305 | 'ts': ts 306 | } 307 | 308 | def _fetchMsgAt(self, pos): # absolute byte position into the log 309 | # returns the log entry as a Python dict, or None 310 | self._log.seek(pos, os.SEEK_SET) 311 | sz = _readUInt32BE(self._log) 312 | msg = self._log.read(sz) 313 | if not msg: 314 | return None 315 | return json.loads(msg) 316 | 317 | def notify_on_extend(self, fct): 318 | # call this fct if the owner of this worm's log appends a msg 319 | # signature: fct(msgdict) 320 | self._on_extend = fct 321 | 322 | # ------------------------------------------------------------ 323 | 324 | def blobAvailable(self, key): 325 | key = id2bytes(key).hex() 326 | return os.path.isfile(os.path.join(self._blobDname, key[:2], key[2:])) 327 | 328 | def readBlob(self, key): 329 | key = id2bytes(key).hex() 330 | with open(os.path.join(self._blobDname, key[:2], key[2:]), "rb") as f: 331 | data = f.read() 332 | return data 333 | 334 | def writeBlob(self, data): 335 | h = hashlib.sha256(data).digest() 336 | hx = h.hex() 337 | os.makedirs(os.path.join(self._blobDname, hx[:2]), exist_ok=True) 338 | fn = os.path.join(self._blobDname, hx[:2], hx[2:]) 339 | if not os.path.isfile(fn): 340 | with open(fn, "wb") as f: 341 | f.write(data) 342 | return '&' + base64.b64encode(h).decode('ascii') 343 | 344 | # ------------------------------------------------------------ 345 | 346 | def readMsg(self, key): # 256bit key in SSB representation 347 | for offs in self._keysHT.offsets(key): 348 | msg = self._fetchMsgAt(offs) 349 | if not msg or msg['key'] == key: 350 | return msg 351 | return None 352 | 353 | def getMsgBySequence(self, auth, seq): 354 | for offs in self._seqsHT.offsets(_seq2key(auth, seq)): 355 | msg = self._fetchMsgAt(offs) 356 | if not msg: 357 | return msg 358 | val = msg['value'] 359 | if val['author'] == auth and val['sequence'] == seq: 360 | return msg 361 | return None 362 | 363 | # ------------------------------------------------------------ 364 | 365 | def appendToLog(self, msgStr): # signed msg as a formatted str 366 | # returns id 367 | 368 | # validate the msg before storing: 369 | jmsg = json.loads(msgStr) 370 | if not 'author' in jmsg or not 'signature' in jmsg: 371 | raise ValueError 372 | s = base64.b64decode( jmsg['signature'] ) 373 | i = msgStr.find(',\n "signature":') 374 | m = (msgStr[:i] + '\n}').encode('utf8') 375 | # m = (msgStr[:i] + '\n}').encode('ascii') 376 | if not verify_signature(jmsg['author'], m, s): 377 | print(" invalid signature") 378 | return None 379 | # print("it verified!") 380 | 381 | # compute id 382 | h = hashlib.sha256(msgStr.encode('utf8')).digest() 383 | id = '%' + base64.b64encode(h).decode('ascii') + '.sha256' 384 | 385 | # check that this id is not stored yet 386 | if self.readMsg(id) != None: 387 | print("msg %s (%d) already exists" % (id, jmsg['sequence'])) 388 | return id 389 | 390 | # format for storing the entry in the 'log.offset' file 391 | logStr = '\n '.join(msgStr.split('\n')) 392 | logStr = '{\n "key": "%s",\n "value": ' % id + logStr + \ 393 | ',\n "timestamp": %d\n}' % int(time.time()*1000) 394 | logStr = logStr.encode('utf8') 395 | 396 | if self._readonly: 397 | return id 398 | 399 | # append to the log 400 | self._log.close() 401 | self._log = open(self._logFname, 
'r+b') 402 | 403 | self._log.seek(0, os.SEEK_END) 404 | offs = self._log.tell() 405 | sz = len(logStr).to_bytes(4, byteorder='big') 406 | self._log.write(sz) 407 | self._log.write(logStr) 408 | self._log.write(sz) 409 | pos = self._log.tell() + 4 410 | _writeUInt32BE(self._log, pos) 411 | self._log.flush() 412 | 413 | self._keysHT.add(id, offs) 414 | self._seqsHT.add(_seq2key(jmsg['author'], jmsg['sequence']), offs) 415 | 416 | if self._on_extend and jmsg['author'] == self.id: 417 | self._on_extend(json.loads(logStr)) 418 | 419 | return id 420 | 421 | def writeMsg(self, msg): # msg is a Python dict or string 422 | # returns the new msg id 423 | # a) format msg as a string 424 | # content = '\n '.join(json.dumps(msg, indent=2).split('\n')) 425 | maxs = self._getMaxSeq() 426 | jmsg = formatMsg(maxs[0] if maxs[0] else None, 427 | maxs[1]+1, self.id, 428 | int(time.time()*1000), 'sha256', msg, None) 429 | # b) sign and add signature field 430 | sig = self._secr.sign(jmsg.encode('utf8')) 431 | sig = base64.b64encode(sig).decode('ascii') + '.sig.ed25519' 432 | jmsg = jmsg[:-2] + ',\n "signature": "%s"\n}' % sig 433 | # c) call append() and bump maxSeq 434 | id = self.appendToLog(jmsg) 435 | self._updateMaxSeq(self.id, id, maxs[1]+1) 436 | 437 | return id 438 | 439 | def writePrivateData(self, data, rcps): # data is a byte array 440 | content = self._secr.boxPrivateData(data, rcps) 441 | return self.writeMsg(base64.b64encode(content).decode('ascii')) 442 | 443 | def writePrivateMsg(self, msg, rcps): # msg is (typically) a Python dict 444 | msg = json.dumps(msg, ensure_ascii=False) 445 | return self.writePrivateData(msg.encode('utf8'), rcps) 446 | 447 | # ------------------------------------------------------------ 448 | 449 | def flush(self): 450 | if self._readonly: 451 | return 452 | self._keysHT.flush() 453 | self._seqsHT.flush() 454 | with open(self._lastFname, "w") as f: 455 | json.dump(self._last, f) 456 | 457 | def refresh(self): 458 | if self._keysHT._ndxDirty or self._seqsHT._ndxDirty: 459 | print("warning, disregarding changed ndx information") 460 | self._log.close() 461 | self._log = open(self._logFname, 'rb' if self._readonly else 'r+b') 462 | 463 | self._keysHT.load_from_disk() 464 | self._seqsHT.load_from_disk() 465 | with open(self._lastFname, "rb") as f: 466 | self._last = json.load(f) 467 | 468 | 469 | class SSB_WORM_ITER(): 470 | 471 | def __init__(self, worm): 472 | # return log content BACKWARDS (youngest entry first) 473 | self._worm = worm 474 | self._log = worm._log 475 | self._log.seek(0, os.SEEK_END) 476 | self._pos = self._log.tell() 477 | if self._pos > 0: 478 | self._log.seek(-4, os.SEEK_END) 479 | self._pos = self._log.tell() # at end of a chunk (and its size) 480 | 481 | def __iter__(self): 482 | return self 483 | 484 | def __next__(self): 485 | # print("worm iter next", self._pos) 486 | if self._pos < 4: 487 | raise StopIteration 488 | self._log.seek(self._pos - 4, os.SEEK_SET) 489 | sz = _readUInt32BE(self._log) 490 | self._log.seek(-4 - sz, os.SEEK_CUR) 491 | m = self._log.read(sz) 492 | self._pos -= sz + 12 493 | m = json.loads(m) 494 | return m['key'] 495 | 496 | # ---------------------------------------------------------------------- 497 | 498 | if __name__ == '__main__': 499 | 500 | asecr = SSB_SECRET('Alice') 501 | aworm = SSB_WORM('Alice', asecr) 502 | 503 | for i in range(100): 504 | m = aworm.getMsgBySequence(asecr.id, i) 505 | if m: 506 | print(i, m['key']) 507 | 508 | sys.exit(0) 509 | 510 | # eof 511 | 
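The append path above frames every record in flume/log.offset as a 4-byte big-endian size, the JSON record, the same size again, and a 4-byte end offset, i.e. size + 12 bytes per record; the index files store offsets that point at a record's leading size word. A minimal forward-reading sketch (not part of the repo; the helper name iter_log_forward is illustrative) that assumes only this framing:

```python
import json

def iter_log_forward(path):
    """Walk a 'log.offset' file front to back, yielding
    (record_start_offset, record_dict) pairs.  Assumes the framing written
    by SSB_WORM.appendToLog(): [size][json record][size][end offset]."""
    with open(path, 'rb') as f:
        while True:
            hdr = f.read(4)
            if len(hdr) < 4:
                return                              # end of file
            start = f.tell() - 4                    # offset of the leading size word
            sz = int.from_bytes(hdr, byteorder='big')
            data = f.read(sz)
            f.read(8)                               # skip trailing size + end offset
            yield start, json.loads(data.decode('utf8'))
```

Reading forwards keeps the sketch simple; the _reindex* helpers above walk the same layout backwards, starting from the file's final end-offset word.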
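Relating back to the "conformance rules" for real logs noted at the end of doc/tangle.md above: a replica accepts a new entry only if its signature verifies and, for entries appended by the log owner, the sequence number extends the current tip by exactly one. A minimal sketch of that rule (the function name, dict shapes and the check_sig callback are assumptions made here; the repo's own signature check lives in SSB_WORM.appendToLog()):

```python
def log_accepts(tip, candidate, check_sig):
    """tip: newest accepted entry as stored in log.offset
    ({'key': ..., 'value': {...}}), or None for an empty log;
    candidate: incoming message value dict ('previous', 'author',
    'sequence', 'signature', ...); check_sig: callable(candidate) -> bool."""
    if not check_sig(candidate):                 # rule 1: the signature must verify
        return False
    if tip is None:                              # very first entry of this log
        return candidate['previous'] is None and candidate['sequence'] == 1
    prev = tip['value']
    return (candidate['author'] == prev['author'] and
            candidate['sequence'] == prev['sequence'] + 1 and   # rule 2: monotonically increasing
            candidate['previous'] == tip['key'])                # and chained to the current tip
```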
-------------------------------------------------------------------------------- /doc/tangle-3.gliffy: -------------------------------------------------------------------------------- 1 | [Gliffy diagram source (single-line JSON) for the tangle-3 figure; the HTML inside its text labels was garbled during extraction. Recoverable labels: "set of tip nodes when regaining online access", "was a tip node when going offline", "'side chain' created while offline", and a "time" axis.] --------------------------------------------------------------------------------