├── .gitignore ├── CONTRIBUTORS ├── CREDITS ├── LICENSE ├── Makefile ├── README ├── RUNNING ├── TODO-dht.txt ├── TODO-mnet.txt ├── TODO-python_segfaults.txt ├── VERSION ├── asymkey.py ├── base58.py ├── bittrie.py ├── brute.py ├── build.sh ├── chord.py ├── chord_packet.py ├── chord_tasks.py ├── chordexception.py ├── client.py ├── client_engine.py ├── consts.py ├── contrib └── morphisd │ ├── COPYING │ ├── README.md │ ├── etc │ └── default │ │ └── morphisd │ ├── opt │ └── morphis │ │ ├── morphisd │ │ └── setargv │ └── usr │ └── lib │ └── systemd │ └── system │ └── morphisd.service ├── db.py ├── dev_cluster.sh ├── dev_node.sh ├── dhgroup14.py ├── dmail.py ├── dsskey.py ├── enc.py ├── exceptions └── 20150716-01.txt ├── favicon.ico ├── hashbench.py ├── kex.py ├── kexdhgroup14sha1.py ├── llog.py ├── logging-debug.ini ├── logging-info.ini ├── logging-ms.ini ├── logging-prod.ini ├── logging-warn.ini ├── logging.ini ├── lots-bulk.sh ├── lots.sh ├── maalstroom ├── __init__.py ├── dispatcher.py ├── dmail.py ├── resources │ ├── images │ │ └── dmail │ │ │ ├── add_address.png │ │ │ ├── addressbook.png │ │ │ ├── addressbook_blue.png │ │ │ ├── addressbook_grey.png │ │ │ ├── bullet.png │ │ │ ├── bullet_white.png │ │ │ ├── compose_blue.png │ │ │ ├── compose_white.png │ │ │ ├── divider.gif │ │ │ ├── draft.png │ │ │ ├── draft_white.png │ │ │ ├── gradient.jpg │ │ │ ├── inbox.png │ │ │ ├── inbox_new.png │ │ │ ├── inbox_white.png │ │ │ ├── list_addresses.png │ │ │ ├── logo.png │ │ │ ├── mail_icon.png │ │ │ ├── mail_icon2.png │ │ │ ├── read.png │ │ │ ├── refresh.png │ │ │ ├── reply_blue.png │ │ │ ├── reply_dk.png │ │ │ ├── reply_icon.png │ │ │ ├── reply_icon_new.png │ │ │ ├── reply_purp.png │ │ │ ├── reply_white.png │ │ │ ├── save.png │ │ │ ├── save_blue.png │ │ │ ├── save_dk.png │ │ │ ├── save_purp.png │ │ │ ├── save_white.png │ │ │ ├── send.png │ │ │ ├── send_blue.png │ │ │ ├── send_dk.png │ │ │ ├── send_purp.png │ │ │ ├── send_white.png │ │ │ ├── sent.png │ │ │ ├── sent_white.png │ │ │ ├── settings.png │ │ │ ├── stripe.png │ │ │ ├── tags.png │ │ │ ├── tags_white.png │ │ │ ├── trash.png │ │ │ ├── trash_blue.png │ │ │ ├── trash_dk.png │ │ │ ├── trash_icon.png │ │ │ ├── trash_icon_new.png │ │ │ ├── trash_white.png │ │ │ ├── trash_white_in.png │ │ │ └── unread.png │ └── style.css ├── templates.py └── templates │ ├── dmail │ ├── address_config.html │ ├── address_list.html │ ├── address_list_row.html │ ├── aside.html │ ├── compose.html │ ├── create_address.html │ ├── logo.html │ ├── msg_list.html │ ├── msg_list_list_end.html │ ├── msg_list_list_row.html │ ├── msg_list_list_start.html │ ├── nav.html │ ├── new_mail.html │ ├── page_wrapper.html │ └── read.html │ └── main │ └── combined_upload.html ├── mbase32.py ├── mcc.py ├── mn1.py ├── multipart.py ├── mutil.py ├── node.py ├── old ├── n1.py ├── node.py └── server_test.py ├── packet.py ├── peer.py ├── putil.py ├── rsakey.py ├── run.sh ├── setup.py ├── setup_all.py ├── shell.py ├── sshexception.py ├── sshtype.py ├── test_100n.sh └── upload_page.html /.gitignore: -------------------------------------------------------------------------------- 1 | test/ 2 | 3 | *.log 4 | *.swp 5 | *.so 6 | *.c 7 | build/ 8 | __pycache__/ 9 | 10 | data/ 11 | 12 | server_key-rsa.mnk 13 | client_key-rsa.mnk 14 | node_key-rsa*.mnk 15 | 16 | *.sqlite 17 | *.sqlite-journal 18 | -------------------------------------------------------------------------------- /CONTRIBUTORS: -------------------------------------------------------------------------------- 1 | Herein is mentioned some of the many 
who have directly contributed to MORPHiS in ways that aren't otherwise reflected in the Git history. 2 | 3 | Melissa Facciolo: 4 | - The Dmail V2 GUI Design, including the HTML and CSS work. Also enduring the horrible task of IE testing. 5 | 6 | Not Jesus: 7 | - Coming up with the name DDS, Distributed Discussion System. 8 | 9 | shunt31: 10 | - For writing the FAQ! 11 | 12 | Florian Paul Schmidt: 13 | - For being morphis user 001 :) 14 | 15 | Some I may have forgotten at the moment: 16 | 17 | And especially all supporters thus far! 18 | -------------------------------------------------------------------------------- /CREDITS: -------------------------------------------------------------------------------- 1 | The Creator 2 | ~~~~~~~~~~~ 3 | - For your revelations. 4 | - For this existence, where truth and good are inherently superior to evil. 5 | - For my talents which have enabled all that I have accomplished. 6 | 7 | Edward Snowden 8 | ~~~~~~~~~~~~~~ 9 | - For changing the world by exposing the crimes against humanity being committed by the governments of the world. For risking your mortal life to do so. For knowing your life would not be the same, with loss of family, homeland, Etc. Before you, I would tell people what was going on, and they would call me crazy. Now, they claim they never thought I was crazy. That is an example of the contrast between this post-Snowden world and the impossible struggle it previously was to free people. 10 | 11 | Someone 12 | ~~~~~~~ 13 | - Who requested not to be named, but without whom I would certainly not have been able to do this in time, not to mention the fact that I would be dead at the moment. 14 | 15 | Satoshi Nakamoto 16 | ~~~~~~~~~~~~~~~~ 17 | - For showing me and everyone that technology can deprecate power structures that have existed since eons past. Who would have thought that the most powerful entities in the world since the Rothschilds started lending out other people's gold, even fictitious gold -- who would ever have thought that they could ever be so completely DEPRECATED! You proved it can be done, and thus indicated that nothing stands against us doing the same with GOVERNMENT! Governance without government! 18 | - I would list you first, Satoshi, because MORPHiS would absolutely not exist, perhaps even as a concept, if it were not for you; however, Ed perhaps deserves to be above you on this list due to the non-anonymous risk he took with his very life. 19 | 20 | Linus Torvalds 21 | ~~~~~~~~~~~~~~ 22 | - For enabling me by having developed the ultimate operating system, and thus the ultimate development platform, learning tool, and more. 23 | - For showing us that open source, community-developed software can be vastly superior in fundamental ways (Ex: freedom) yet still be superior in ways like quality to anything that any for-profit corporation can ever create. 24 | 25 | RMS 26 | ~~~ 27 | - For explaining to people why we must control our own software, and why that means we must have access to all the source code to be safe. 28 | 29 | Mrs. Brunati 30 | ~~~~~~~~~~~~~ 31 | - My physics teacher throughout high school. Brunati, did you hear about pilot wave theory now proving quantum physics wrong? Feynman was proven wrong, absolutely wrong :) I told you the double slit experiment was flawed. Thanks to our deep talks beyond the subject matter of the curriculum, I learned more from you than any teacher. Also, if it weren't for you, I likely wouldn't have received my high school diploma, which enabled me to now go where I'm going!
(I was too busy programming back then :), sorry, but you understood!) My finite math teacher said: "I'm going to fail you if I can." A week or so later she told me: "I don't know what Brunati sees in you." (Brunati was the head of the science/math department, so it is obvious what happened there :) Regardless, you were the best teacher. You are one of the people who were now critical for enabling me to be where I am today. Thanks. 32 | 33 | Alykhan Jetha 34 | ~~~~~~~~~~~~~ 35 | - For mentoring me at the very start of my career, teaching me RDBMS and enterprise level/quality development, and even enabling my career that then positioned me with the very necessary and quality experience that has enabled me to be able to create MORPHiS! 36 | 37 | Ian Clarke (and ALL freenetproject.org contributors ever!) 38 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 39 | - For inventing many of the concepts that MORPHiS is based upon. 40 | - For providing a platform for ideas to be invented. 41 | - For those who came up with the Web of Trust stuff, and thus freenet as a testing ground for such ideas. 42 | 43 | Steve Dougherty 44 | ~~~~~~~~~~~~~~~ 45 | - By talking with me when I was asking around for help with the design of the anti-entrapment feature. You politely analyzed my ideas, pointed out flaws or the good, constructively, which quickly lead to my invention of MORPHiS's fundamentally important anti-entrapment feature. 46 | 47 | real@freedomlayer.org 48 | ~~~~~~~~~~~~~~~~~~~~~ 49 | - For your awesome articles on Mesh Networking and DHTs up on freedomlayer.org. These showed me how it's suppose to be done; how you are suppose to combine math with computer science! If it weren't for these articles, I would absolutely still be in the planning stage -- instead of actually coding! 50 | 51 | steve2015@morph.is 52 | ~~~~~~~~~~~~~~~~~~ 53 | - For making the Firefox Maalstroom plugin. 54 | - For making the Windows package, and on such short notice! 55 | 56 | MANY OTHERS 57 | ~~~~~~~~~~~ 58 | - Friends and family who've helped me through difficult times. 59 | - Super smart co-workers I learned much from early in my career and who made it a joy; people like: Grande Chevalier de la Candy Depot, Andrew Krasnov, and Scott Pont -- to name pretty much the smartest and main contributors in that regard. 60 | - Everyone who has contributed to any of the open software I use. 61 | - Everyone who has contributed to any open source software in any way. Together we are replacing the old paradigm. Together we are freeing information so humanity can progress unhindered by greed! 62 | - Everyone working on open hardware to deprecate Satan's final piece of control over humanity. 63 | - Everyone working on the distributed mesh internet infrastructure technology. Your work will enable a kill switch proof internet for the World Brain to run on. 64 | - Everyone who has written or contributed to any of the literature that I consumed which helped make MORPHiS, as well as my realization of the truth of the world, at all possible. 65 | - All those acquaintances who have shown me love, and that illustrated the fact that humanity is worth this long and truly difficult struggle to save! 66 | - Anyone and everyone who believed in me. 67 | - Every user and future contributor to MORPHiS! 68 | 69 | For-profit Corporations 70 | ~~~~~~~~~~~~~~~~~~~~~~~ 71 | - For proving RMS right. 
72 | 73 | Governments of the World 74 | ~~~~~~~~~~~~~~~~~~~~~~~~ 75 | - For being so inherently evil that working to deprecate you and your systems of 'necessary' evil is the best possible anti-depressant in this existence. 76 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GPL v2 for everything except for: asymkey.py, base58.py, dhgroup14.py, dsskey.py, kex.py, kexdhgroup14sha1.py, mn1.py, packet.py, putil.py, rsakey.py, sshtype.py, sshexception.py, which are licensed under the LGPL, and mbase32.py and hashbench.py which are released as public domain. 2 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | all: 2 | ./build.sh 3 | 4 | test: 5 | ./build.sh _all 6 | 7 | clean: 8 | rm -f *.c *.so 9 | rm -rf __pycache__ 10 | -------------------------------------------------------------------------------- /README: -------------------------------------------------------------------------------- 1 | -=-= MORPHiS (c) 2014-2015 Sam Maloney (sam2015@morph.is) =-=- 2 | 3 | Version: 0.8.16 - Developer Preview Release 4 | 5 | License: GPL v2 for everything except for some files LGPL, and some public domain. See the LICENSE file for complete details. 6 | 7 | I have deliberately released the asyncio SSH protocol code as LGPL so that it may be used by the broader Python community. I am willing to donate it fully to the Python Software Foundation itself so they may incorporate it into the standard library if wanted. See client.py for a simple example of how dead simple my asyncio SSH library is to use. 8 | 9 | ------------------------------------------------------------------------------- 10 | 11 | Maalstroom: morphis://sp1nara3xhndtgswh7fznt414we4mi3y 12 | Web: https://morph.is 13 | 14 | ------------------------------------------------------------------------------- 15 | 16 | NOTE 17 | 18 | This is a developer preview release. As such, it may not work well in your exact setup. It is fully functional however. Any problems that you have, suggestions, Etc., I am happy to hear about. 19 | 20 | ------------------------------------------------------------------------------- 21 | 22 | INSTRUCTIONS 23 | 24 | Dependencies: 25 | Python >= 3.4.1, PyCrypto (2.6 tested), sqlalchemy (0.9.8 tested). 26 | 27 | NOTE: SqlAlchemy 0.9.8 does have a bug and thus you should use the latest, 1.0.8, if you can. 28 | 29 | NOTE: If your system does not include packages for the above dependencies, then use pip3.4 to install them. Do not use pip, because that will only install the Python 2 versions. Example: "pip3.4 install sqlalchemy". 30 | 31 | To run, execute the following: 32 | 33 | > ./run.sh 34 | 35 | Or: 36 | 37 | > python3 node.py 38 | 39 | To get some help, try: 40 | 41 | > python3 node.py --help 42 | 43 | WARNING: If you are running on a shared host, you really should run with the shell disabled: 44 | 45 | > python3 node.py --disableshell 46 | 47 | NOTE: That will prevent the mcc.py commandline tool from working as well. I will fix that later by having it use ssh key based authentication for shell connections. 48 | 49 | WEB UI: 50 | ------- 51 | 52 | Then simply point your web browser to http://localhost:4251. 53 | 54 | I recommend using the MORPHiS firefox plugin that will be listed on the front page when you hit localhost:4251.
(ANY DEVS THAT CAN HELP WITH THIS DEAD SIMPLE PLUGIN WILL BE SUPER APPRECIATED!) 55 | 56 | You can also use curl or wget: 57 | 58 | Download: curl http://localhost:4251/ 59 | Example: curl http://localhost:4251/sp1nara3xhndtgswh7fznt414we4mi3y 60 | 61 | Upload: curl --data-binary @somefile.ext http://localhost:4251/.upload/upload 62 | 63 | SSH CONSOLE UI: 64 | --------------- 65 | 66 | Simply type: 67 | 68 | ssh 127.0.0.1 -p 4250 69 | 70 | And you will get the interactive console. 71 | 72 | Or you can pass the command through the ssh command: 73 | 74 | ssh 127.0.0.1 -p 4250 ? 75 | 76 | NOTE: You must type 127.0.0.1. Usually localhost resolves to ::1, which I will add later, but right now MORPHiS ONLY allows shell connections from 127.0.0.1. 77 | 78 | NOTE: You will need to have an RSA key set up for your ssh. It doesn't matter what the key is for now (later it will support key pinning). To generate a key, type: ssh-keygen -b 4096 79 | 80 | MCC COMMAND LINE UI: 81 | -------------------- 82 | 83 | Just type: 84 | 85 | python3 mcc.py --stat 86 | 87 | NOTE: This client is not up to speed. The WEB UI is the most mature by far, as I did it last and most people will be using it, so it made sense to have it be the one ready for this first release. I will bring the other two interfaces up to speed, and from then on all three will be kept fully supported. 88 | 89 | Tor 90 | --- 91 | 92 | MORPHiS was designed from the beginning to not leak information and to be used over Tor! 93 | 94 | I still have to add SOCKS support, but I recently found a good library for that so it will come very soon. 95 | 96 | However, until then, you can use proxychains to run MORPHiS over Tor! 97 | 98 | proxychains python3 node.py 99 | 100 | It has been tested and it works great. It also works over torsocks if you pass --dm (which disables the web UI). There is a bug in torsocks that causes torsocks to segfault on startup if you do not disable Maalstroom (--dm). I debugged it and will be able to submit a patch to the Tor project so they can fix it. 101 | 102 | ------------------------------------------------------------------------------- 103 | 104 | Contact (d-mail): samzu1ctt7kscitkrt5jft91gtw5c1i6 :) 105 | Contact (e-mail): sam2015@morph.is :( 106 | -------------------------------------------------------------------------------- /RUNNING: -------------------------------------------------------------------------------- 1 | Running MORPHiS is dead simple, and will be even simpler when I launch the official release. 2 | 3 | There are only two dependencies other than Python 3. 4 | 5 | pycrypto >= 2.6 (tested with 2.6) 6 | SQLAlchemy >= 0.9.8 (tested with 0.9.8) 7 | 8 | and of course: 9 | 10 | Python >= 3.4.1 (tested with 3.4.1 :) 11 | 12 | NOTE: SqlAlchemy 0.9.8 does have a bug and thus you should use the latest, 1.0.8, if you can. 13 | 14 | No installation of MORPHiS is necessary. 15 | 16 | Although there is a Makefile which will compile some of the python files with Cython to give a big speed boost, it is not at all necessary, and MORPHiS is already very fast without compilation. So just ignore the Makefile/Cython unless you are already familiar with Cython. You can try typing make; if it works, it works, and if not, don't worry about it. 17 | 18 | To run, simply type this: 19 | 20 | python3 node.py 21 | 22 | The node will listen for user HTTP requests on port 4251. Instruct your browser to visit: http://localhost:4251/ and you will be presented with the root interface of MORPHiS. 23 | 24 | The node will listen for other nodes on port 4250 by default.
You may choose a different port than 4250 for your own node to listen on by specifying the bind parameter: 25 | 26 | python3 node.py --bind <ip>:<port> 27 | 28 | You do not need to open a firewall port. MORPHiS is designed to not care about firewalls much. If you can, do open the node (4250) port on your firewall, as it may slightly increase your performance, as well as help the network more. 29 | 30 | If you are going to do an upload, it is best to wait about 10 minutes (this only matters for the very first run of a node). This is because otherwise your node won't necessarily stick the data into the optimal nodes, thus making requests for that data harder. You can just upload the data again later to reinforce it. MORPHiS keys are deterministic based upon the data, so a file will always have the same key no matter who uploaded it or when. 31 | 32 | That is it. Enjoy! 33 | -------------------------------------------------------------------------------- /TODO-dht.txt: -------------------------------------------------------------------------------- 1 | * _store_data(..) code needs to be fixed to use an additional table, something like DataBlockJournal, which tracks pending deletes or creations, thus ensuring the filesystem is kept in sync, even if it crashes, Etc. 2 | 3 | + Add code to opportunistically store data passing through if it is wanted. This will make data spread by popularity and not need constant uploading to prevent it from dropping off the network. 4 | 5 | - Needs an insert time prefix/suffix to the key so as to efficiently reduce the chance of collisions. 6 | 7 | - rewrite the connection code (meaning the code that decides who to try to connect to). It was my first attempt at doing complex concurrent stuff with asyncio, and is a bit wonky and very slow. It, for instance, tries to connect to the same Peer multiple times in a row. It should try to minimize using so many transactions. Etc. 8 | 9 | - have the shell channel open code mark this dbpeer as a shell client and from then on only let the peer with that id (the client pubkey authenticated with) open a session channel. 10 | 11 | - consider a morphis/ssh protocol disconnect message as authentic. consider a tcp disconnect as ddos and simply reconnect. (issue with one host firewalled, thus only the connecting host can reconnect). have some sanity check to prevent a loop. 12 | 13 | - have tunnels relay data if they've already seen it so that we don't have to send it the whole way from us for each destination. 14 | -------------------------------------------------------------------------------- /TODO-mnet.txt: -------------------------------------------------------------------------------- 1 | * Need to implement ssh re-keying. 2 | 3 | * Have local_cid be an object that is unique so that closed channels are safe forever to call stuff on (and get exceptions and not misbehavior). 4 | 5 | - add feature to morphis-ssh to be able to resume an ssh session with 0 protocol overhead. I.e.: if tcp disconnects, simply tcp connect again and continue as if nothing happened (ssh session never died, keys reused, etc). 6 | -------------------------------------------------------------------------------- /TODO-python_segfaults.txt: -------------------------------------------------------------------------------- 1 | Note, these only ever occurred when running a massive amount of multiple nodes in the same process. They have not happened since, as in testing I now start only 10 nodes in a single process and start many processes instead.
Still, the bug may exist even without multiple nodes in one process, so this should be investigated. 2 | 3 | [COMMIT:0e042bb6b3d3be2ecde619d6ed42f8b794eda864] 4 | future: Task() 5 | Traceback (most recent call last): 6 | File "/usr/lib64/python3.4/asyncio/tasks.py", line 317, in _step 7 | result = coro.throw(exc) 8 | File "/home/mdev/src/l/morphis/chord.py", line 319, in process_connection_count 9 | yield from self._process_connection_count(needed) 10 | File "/home/mdev/src/l/morphis/chord.py", line 379, in _process_connection_count 11 | rs = yield from self.loop.run_in_executor(None, dbcall) 12 | File "/usr/lib64/python3.4/asyncio/futures.py", line 348, in __iter__ 13 | yield self # This tells Task to wait for completion. 14 | File "/usr/lib64/python3.4/asyncio/tasks.py", line 370, in _wakeup 15 | value = future.result() 16 | File "/usr/lib64/python3.4/asyncio/futures.py", line 243, in result 17 | raise self._exception 18 | File "/usr/lib64/python3.4/concurrent/futures/thread.py", line 54, in run 19 | result = self.fn(*self.args, **self.kwargs) 20 | File "/home/mdev/src/l/morphis/chord.py", line 372, in dbcall 21 | r = q.all() 22 | File "/usr/lib64/python3.4/site-packages/sqlalchemy/orm/query.py", line 2323, in all 23 | return list(self) 24 | File "/usr/lib64/python3.4/site-packages/sqlalchemy/orm/query.py", line 2441, in __iter__ 25 | return self._execute_and_instances(context) 26 | File "/usr/lib64/python3.4/site-packages/sqlalchemy/orm/query.py", line 2456, in _execute_and_instances 27 | result = conn.execute(querycontext.statement, self._params) 28 | File "/usr/lib64/python3.4/site-packages/sqlalchemy/engine/base.py", line 841, in execute 29 | return meth(self, multiparams, params) 30 | File "/usr/lib64/python3.4/site-packages/sqlalchemy/sql/elements.py", line 322, in _execute_on_connection 31 | return connection._execute_clauseelement(self, multiparams, params) 32 | File "/usr/lib64/python3.4/site-packages/sqlalchemy/engine/base.py", line 931, in _execute_clauseelement 33 | inline=len(distilled_params) > 1) 34 | File "", line 1, in 35 | File "/usr/lib64/python3.4/site-packages/sqlalchemy/sql/elements.py", line 493, in compile 36 | return self._compiler(dialect, bind=bind, **kw) 37 | File "/usr/lib64/python3.4/site-packages/sqlalchemy/sql/elements.py", line 499, in _compiler 38 | return dialect.statement_compiler(dialect, self, **kw) 39 | File "/usr/lib64/python3.4/site-packages/sqlalchemy/sql/compiler.py", line 395, in __init__ 40 | Compiled.__init__(self, dialect, statement, **kwargs) 41 | File "/usr/lib64/python3.4/site-packages/sqlalchemy/sql/compiler.py", line 199, in __init__ 42 | self.string = self.process(self.statement, **compile_kwargs) 43 | File "/usr/lib64/python3.4/site-packages/sqlalchemy/sql/compiler.py", line 222, in process 44 | return obj._compiler_dispatch(self, **kwargs) 45 | File "/usr/lib64/python3.4/site-packages/sqlalchemy/sql/visitors.py", line 80, in _compiler_dispatch 46 | return meth(self, **kw) 47 | File "/usr/lib64/python3.4/site-packages/sqlalchemy/sql/compiler.py", line 1559, in visit_select 48 | if select._order_by_clause.clauses: 49 | AttributeError: 'Future' object has no attribute 'clauses' 50 | 51 | [COMMIT:9a8a2b7e584b9012c061decf356111dc6ef379f2] 52 | (no segfault yet, but froze upload and is similar to above exception and is strange!) 
53 | future: Task() 54 | Traceback (most recent call last): 55 | File "/usr/lib64/python3.4/asyncio/tasks.py", line 319, in _step 56 | result = coro.send(value) 57 | File "/home/mdev/src/l/morphis/chord.py", line 296, in do_stabilize 58 | yield from self.tasks.perform_stabilize() 59 | File "/home/mdev/src/l/morphis/chord_tasks.py", line 121, in perform_stabilize 60 | furthest_nodes, new_nodes = yield from self._perform_stabilize(node_id) 61 | File "/home/mdev/src/l/morphis/chord_tasks.py", line 181, in _perform_stabilize 62 | self.send_find_node(node_id, input_trie=input_trie) 63 | File "/home/mdev/src/l/morphis/chord_tasks.py", line 758, in send_find_node 64 | rnodes = [vpeer.peer for vpeer in result_trie if vpeer and vpeer.path] 65 | File "/home/mdev/src/l/morphis/chord_tasks.py", line 758, in 66 | rnodes = [vpeer.peer for vpeer in result_trie if vpeer and vpeer.path] 67 | AttributeError: 'Future' object has no attribute 'path' 68 | 69 | -------------------------------------------------------------------------------- /VERSION: -------------------------------------------------------------------------------- 1 | 0.8.18.6 2 | -------------------------------------------------------------------------------- /asymkey.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: LGPL. 3 | 4 | class AsymKey(): 5 | def _write_private_key_file(self, tag, filename, data, password): 6 | with open(filename, 'wb') as f: 7 | return self._write_private_key(tag, f, data, password) 8 | 9 | def _write_private_key(self, tag, fileobj, data, password): 10 | fileobj.write(data) 11 | 12 | def _read_private_key_file(self, tag, filename, password): 13 | with open(filename, 'rb') as f: 14 | return self._read_private_key(tag, f, password) 15 | 16 | def _read_private_key(self, tag, fileobj, password): 17 | return fileobj.read() 18 | -------------------------------------------------------------------------------- /base58.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2011 Sam Rushing. 2 | # Copyright (C) 2013-2014 The python-bitcoinlib developers. 3 | # License: LGPL. 4 | # 5 | # This file is based upon parts from python-bitcoinlib (e3cdf68eaff2fc7cbcd10a488e0485e0b694f806). 6 | 7 | """Base58 encoding and decoding""" 8 | 9 | from __future__ import absolute_import, division, print_function, unicode_literals 10 | 11 | import sys 12 | _bchr = chr 13 | _bord = ord 14 | if sys.version > '3': 15 | long = int 16 | _bchr = lambda x: bytes([x]) 17 | _bord = lambda x: x 18 | 19 | import binascii 20 | 21 | #import bitcoin.core 22 | 23 | B58_DIGITS = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' 24 | 25 | class Base58Error(Exception): 26 | pass 27 | 28 | class InvalidBase58Error(Base58Error): 29 | """Raised on generic invalid base58 data, such as bad characters. 30 | Checksum failures raise Base58ChecksumError specifically. 31 | """ 32 | pass 33 | 34 | def encode(b): 35 | """Encode bytes to a base58-encoded string""" 36 | 37 | # Convert big-endian bytes to integer 38 | n = int('0x0' + binascii.hexlify(b).decode('utf8'), 16) 39 | 40 | # Divide that integer into bas58 41 | res = [] 42 | while n > 0: 43 | n, r = divmod(n, 58) 44 | res.append(B58_DIGITS[r]) 45 | res = ''.join(res[::-1]) 46 | 47 | # Encode leading zeros as base58 zeros 48 | czero = b'\x00' 49 | if sys.version > '3': 50 | # In Python3 indexing a bytes returns numbers, not characters. 
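# Each leading zero byte of the input must survive the round trip, so it is counted below and emitted as a leading '1' (the base58 zero digit), since the integer conversion above drops leading zeros.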
51 | czero = 0 52 | pad = 0 53 | for c in b: 54 | if c == czero: 55 | pad += 1 56 | else: 57 | break 58 | return B58_DIGITS[0] * pad + res 59 | 60 | def decode(s): 61 | """Decode a base58-encoding string, returning bytes""" 62 | if not s: 63 | return b'' 64 | 65 | # Convert the string to an integer 66 | n = 0 67 | for c in s: 68 | n *= 58 69 | if c not in B58_DIGITS: 70 | raise InvalidBase58Error(\ 71 | "Character %r is not a valid base58 character" % c) 72 | digit = B58_DIGITS.index(c) 73 | n += digit 74 | 75 | # Convert the integer to bytes 76 | h = '%x' % n 77 | if len(h) % 2: 78 | h = '0' + h 79 | res = binascii.unhexlify(h.encode('utf8')) 80 | 81 | # Add padding back. 82 | pad = 0 83 | for c in s[:-1]: 84 | if c == B58_DIGITS[0]: pad += 1 85 | else: break 86 | return b'\x00' * pad + res 87 | -------------------------------------------------------------------------------- /bittrie.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: GPL v2. 3 | 4 | import llog 5 | 6 | import logging 7 | 8 | log = logging.getLogger(__name__) 9 | 10 | none_found = object() 11 | 12 | class BitTrie(object): 13 | def __init__(self): 14 | self.trie = [None] * 0x10 15 | 16 | def get(self, key, default=None): 17 | r = self._get(key) 18 | if r: 19 | return r 20 | return default 21 | 22 | def __setitem__(self, key, value): 23 | self.put(key, value) 24 | 25 | def __getitem__(self, key): 26 | r = self.get(key) 27 | if not r: 28 | raise KeyError() 29 | return r 30 | 31 | def __str__(self): 32 | buf = "[" 33 | first = True 34 | 35 | for x in self.find(ZeroKey()): 36 | if not x: 37 | continue 38 | if not first: 39 | buf += ", " 40 | else: 41 | first = False 42 | buf += str(x) 43 | 44 | buf += "]" 45 | 46 | return buf 47 | 48 | def __iter__(self): 49 | for item in self.find(ZeroKey()): 50 | if item is none_found: 51 | continue 52 | yield item 53 | 54 | def __delitem__(self, key): 55 | self._del(key) 56 | 57 | default_default = object() 58 | 59 | def pop(self, key, default=default_default): 60 | r = self._del(key) 61 | 62 | if r: 63 | return r 64 | 65 | if default is not self.default_default: 66 | return default 67 | 68 | raise KeyError() 69 | 70 | def setdefault(self, key, default): 71 | r = self.put(key, default, False) 72 | if not r: 73 | return default 74 | return r 75 | 76 | def put(self, key, value, replace=True): 77 | node = self.trie 78 | 79 | keylen = len(key) 80 | for i in range(keylen): 81 | char = key[i] 82 | for j in range(4, -1, -4): 83 | bit = (char >> j) & 0x0F 84 | next_node = node[bit] 85 | 86 | if not next_node: 87 | node[bit] = TrieLeaf(key, value) 88 | return None 89 | 90 | if type(next_node) is TrieLeaf: 91 | other = next_node 92 | 93 | o_key = other.key 94 | if j == 0: 95 | ii = i + 1 96 | if ii == keylen: 97 | if replace: 98 | node[bit] = TrieLeaf(key, value) 99 | return other.value 100 | 101 | next_o_bit = (o_key[ii] >> 4) & 0x0F 102 | else: 103 | next_o_bit = (o_key[i] >> (j-4)) & 0x0F 104 | 105 | next_node = [None] * 0x10 106 | next_node[next_o_bit] = other 107 | 108 | node[bit] = next_node 109 | node = next_node 110 | continue 111 | 112 | assert type(next_node) is list, type(next_node) 113 | node = next_node 114 | 115 | def _del(self, key): 116 | #FIXME: Make this code prune empty trees more than one deep. 
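# Walk the key one nibble at a time. When the matching leaf is found, its slot is cleared; if that leaves its parent table completely empty, the parent's own slot is cleared too (only one level of pruning, hence the FIXME above).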
117 | prev_node = None 118 | prev_node_bit = None 119 | node = self.trie 120 | 121 | for i in range(len(key)): 122 | char = key[i] 123 | for j in range(4, -1, -4): 124 | bit = (char >> j) & 0x0F 125 | next_node = node[bit] 126 | 127 | if not next_node: 128 | return None 129 | 130 | if type(next_node) is TrieLeaf: 131 | node[bit] = None 132 | 133 | empty = True 134 | for n in node: 135 | if n: 136 | empty = False 137 | break; 138 | 139 | if empty and prev_node: 140 | prev_node[prev_node_bit] = None 141 | 142 | return next_node.value 143 | 144 | assert type(next_node) is list, type(next_node) 145 | 146 | prev_node = node 147 | prev_node_bit = bit 148 | node = next_node 149 | 150 | def _get(self, key): 151 | node = self.trie 152 | 153 | for i in range(len(key)): 154 | char = key[i] 155 | for j in range(4, -1, -4): 156 | bit = (char >> j) & 0x0F 157 | next_node = node[bit] 158 | 159 | if not next_node: 160 | return None 161 | 162 | if type(next_node) is TrieLeaf: 163 | if next_node.key == key: 164 | return next_node.value 165 | else: 166 | return None 167 | 168 | assert type(next_node) is list, type(next_node) 169 | 170 | node = next_node 171 | 172 | def find(self, key, forward=True): 173 | "Generator. First element can be None sometimes when no exact match." 174 | branches = [] 175 | node = self.trie 176 | 177 | key_len = len(key) 178 | i = 0 179 | while i < key_len: 180 | char = key[i] 181 | j = 4 182 | while j >= 0: 183 | bit = (char >> j) & 0x0F 184 | 185 | rng = range(0x0F, bit, -1) if forward else range(0, bit) 186 | 187 | for obit in rng: 188 | other = node[obit] 189 | if other: 190 | branches.append(other) 191 | 192 | next_node = node[bit] 193 | 194 | if not next_node: 195 | yield none_found 196 | 197 | func = self._iterate_next(branches) if forward\ 198 | else self._iterate_prev(branches) 199 | 200 | for r in func: 201 | yield r 202 | return None 203 | 204 | if type(next_node) is TrieLeaf: 205 | nnk = next_node.key 206 | kl = min(len(nnk), key_len) 207 | while True: 208 | if nnk[i] != key[i]: 209 | greater = nnk[i] > key[i] 210 | 211 | yield none_found 212 | 213 | if greater ^ (not forward): 214 | yield next_node.value 215 | 216 | break 217 | 218 | i = i + 1 219 | if i == kl: 220 | yield next_node.value 221 | break 222 | 223 | func = self._iterate_next(branches) if forward\ 224 | else self._iterate_prev(branches) 225 | 226 | for r in func: 227 | yield r 228 | 229 | return None 230 | 231 | assert type(next_node) is list, type(next_node) 232 | 233 | node = next_node 234 | j = j - 4 235 | 236 | i = i + 1 237 | 238 | def _iterate_next(self, branches): 239 | while True: 240 | if not branches: 241 | return None 242 | 243 | node = branches.pop() 244 | 245 | if type(node) is TrieLeaf: 246 | yield node.value 247 | continue 248 | 249 | assert type(node) is list, type(node) 250 | 251 | branches.extend(reversed([x for x in node if x])) 252 | 253 | def _iterate_prev(self, branches): 254 | while True: 255 | if not branches: 256 | return None 257 | 258 | node = branches.pop() 259 | 260 | if type(node) is TrieLeaf: 261 | yield node.value 262 | continue 263 | 264 | assert type(node) is list, type(node) 265 | 266 | branches.extend([x for x in node if x]) 267 | 268 | class TrieLeaf(object): 269 | def __init__(self, key, value): 270 | self.key = key 271 | self.value = value 272 | 273 | class XorKey(object): 274 | def __init__(self, key1, key2): 275 | self.key1 = key1 276 | self.key2 = key2 277 | 278 | def __getitem__(self, idx): 279 | return self.key1[idx] ^ self.key2[idx] 280 | 281 | def __len__(self): 
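# Both keys are assumed to be the same length, so the length of the first one is reported.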
282 | return len(self.key1) 283 | 284 | max_len_value = 0xFFFFFFFF 285 | 286 | class __TestLenObj(object): 287 | def __init__(self, length): 288 | self.length = max_len_value 289 | 290 | def __len__(self): 291 | return self.length 292 | 293 | try: 294 | __test_len_obj = __TestLenObj(max_len_value) 295 | __test_len_r = len(__test_len_obj) 296 | except OverflowError: 297 | log.info("Setting max_len_value to 0xFFFF.") 298 | max_len_value = 0xFFFF 299 | 300 | class ZeroKey(object): 301 | def __init__(self, length=max_len_value): 302 | self.length = length 303 | 304 | def __getitem__(self, idx): 305 | return 0x00 306 | 307 | def __len__(self): 308 | return self.length 309 | 310 | def __eq__(self, other): 311 | return other.length == self.length 312 | 313 | import random 314 | import os 315 | from datetime import datetime 316 | 317 | def _del_test(): 318 | print("del..") 319 | 320 | bt = BitTrie() 321 | 322 | dels = [] 323 | 324 | for i in range(10): 325 | ri = random.randint(0, 100) 326 | k = ri.to_bytes(1, "big") 327 | 328 | r = bt[k] = ri 329 | 330 | if i % 3: 331 | print("DEL: {}".format(ri)) 332 | dels.append(k) 333 | 334 | print(bt) 335 | 336 | for del_ in dels: 337 | del bt[del_] 338 | 339 | print(bt) 340 | 341 | def _speed_test(): 342 | bt = BitTrie() 343 | # bt = {} 344 | 345 | rval = os.urandom(512>>3) 346 | 347 | for i in range(500000): 348 | val = os.urandom(512>>3) 349 | 350 | xval = [rvalc ^ valc for rvalc, valc in zip(rval, val)] 351 | xiv = int.from_bytes(xval, "big") 352 | 353 | k = XorKey(rval, val) 354 | 355 | now = datetime.today() 356 | #r = bt.put(k, xiv) 357 | r = bt[k] = xiv 358 | if not i % 5000: 359 | print("put took: {}".format(datetime.today() - now)) 360 | 361 | n = XorKey(os.urandom(512>>3), os.urandom(512>>3)) 362 | bt[n] = int.from_bytes(n, "big") 363 | 364 | now = datetime.today() 365 | print("get: {}".format(bt.get(int(0).to_bytes(512>>3, "big")))) 366 | print("took: {}".format(datetime.today() - now)) 367 | now = datetime.today() 368 | print("get: {}".format(bt.get(int(42).to_bytes(512>>3, "big")))) 369 | print("took: {}".format(datetime.today() - now)) 370 | now = datetime.today() 371 | print("get: {}".format(bt.get(int(88).to_bytes(512>>3, "big")))) 372 | print("took: {}".format(datetime.today() - now)) 373 | 374 | print("find speed test:") 375 | 376 | cnt = 42 377 | now = datetime.today() 378 | 379 | for i in bt.find(int(100).to_bytes(512>>3, "big")): 380 | print("find: {}".format(i)) 381 | print("took: {}".format(datetime.today() - now)) 382 | cnt -= 1 383 | if not cnt: 384 | break 385 | now = datetime.today() 386 | 387 | def _validity_test(): 388 | bt = BitTrie() 389 | #bt = {} 390 | 391 | for i in range(10): 392 | ri = random.randint(0, 100) 393 | k = ri.to_bytes(1, "big") 394 | 395 | now = datetime.today() 396 | r = bt[k] = ri 397 | print("put took: {}".format(datetime.today() - now)) 398 | 399 | now = datetime.today() 400 | print("get: {}".format(bt.get(int(0).to_bytes(1, "big")))) 401 | print("took: {}".format(datetime.today() - now)) 402 | now = datetime.today() 403 | print("get: {}".format(bt.get(int(42).to_bytes(1, "big")))) 404 | print("took: {}".format(datetime.today() - now)) 405 | now = datetime.today() 406 | print("get: {}".format(bt.get(int(88).to_bytes(1, "big")))) 407 | print("took: {}".format(datetime.today() - now)) 408 | 409 | cnt = 42 410 | now = datetime.today() 411 | for i in bt.find(int(42).to_bytes(1, "big")): 412 | print("find: {}".format(i)) 413 | # print("took: {}".format(datetime.today() - now)) 414 | cnt -= 1 415 | if not cnt: 
416 | break 417 | now = datetime.today() 418 | 419 | print("<>") 420 | 421 | for i in bt.find(int(42).to_bytes(1, "big"), False): 422 | print("find: {}".format(i)) 423 | # print("took: {}".format(datetime.today() - now)) 424 | cnt -= 1 425 | if not cnt: 426 | break 427 | now = datetime.today() 428 | 429 | def main(): 430 | _del_test() 431 | _validity_test() 432 | _speed_test() 433 | 434 | if __name__ == "__main__": 435 | main() 436 | -------------------------------------------------------------------------------- /brute.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: GPL v2. 3 | 4 | import llog 5 | 6 | import logging 7 | import multiprocessing as mp 8 | import os 9 | import time 10 | 11 | import enc 12 | import mbase32 13 | import multipart 14 | import mutil 15 | import rsakey 16 | 17 | log = logging.getLogger(__name__) 18 | 19 | WORKERS = os.cpu_count() 20 | HASH_BITS = enc.ID_BITS 21 | HASH_BYTES = HASH_BITS >> 3 22 | 23 | def generate_targeted_block(prefix, nbits, data, nonce_offset, nonce_size): 24 | "Brute force finds a nonce for the passed data which allows the data to" 25 | " hash to the desired prefix with nbits matching. This is the first hash" 26 | " of the block being targetd, thus the key, the id is not what is bruted." 27 | 28 | if type(data) is bytes: 29 | data = bytearray(data) 30 | else: 31 | assert type(data) is bytearray 32 | 33 | block = None 34 | 35 | pool = mp.Pool(WORKERS) 36 | 37 | pipes = [] 38 | refs = [] 39 | 40 | try: 41 | for i in range(WORKERS): 42 | log.debug("Starting worker.") 43 | 44 | lp, rp = mp.Pipe() 45 | 46 | pool.apply_async(\ 47 | _find_nonce,\ 48 | args=(rp,)) 49 | 50 | pipes.append(lp) 51 | refs.append(rp) 52 | 53 | lp.send((i, prefix, nbits, data, nonce_offset, nonce_size)) 54 | 55 | ready = mp.connection.wait(pipes) 56 | block = ready[0].recv() 57 | except Exception: 58 | log.exception("Exception generating targeted block.") 59 | 60 | log.info("Found TargetedBlock nonce; terminating workers.") 61 | 62 | pool.terminate() 63 | 64 | return block 65 | 66 | def generate_key(prefix): 67 | assert type(prefix) is str 68 | 69 | key = None 70 | 71 | pool = mp.Pool(WORKERS) 72 | 73 | pipes = [] 74 | refs = [] 75 | 76 | try: 77 | for i in range(WORKERS): 78 | log.debug("Starting worker.") 79 | 80 | lp, rp = mp.Pipe() 81 | 82 | pool.apply_async(\ 83 | _find_key,\ 84 | args=(rp,)) 85 | 86 | pipes.append(lp) 87 | refs.append(rp) 88 | 89 | lp.send((i, prefix)) 90 | 91 | ready = mp.connection.wait(pipes) 92 | privdata = ready[0].recv() 93 | key = rsakey.RsaKey(privdata=privdata) 94 | except Exception: 95 | log.exception("Exception generating key.") 96 | 97 | pool.terminate() 98 | 99 | return key 100 | 101 | def _find_nonce(rp): 102 | try: 103 | __find_nonce(rp) 104 | except Exception: 105 | log.exception("__find_nonce(..)") 106 | 107 | def __find_nonce(rp): 108 | # log.debug("Worker running.") 109 | 110 | wid, prefix, nbits, data, nonce_offset, nonce_size = rp.recv() 111 | 112 | max_dist = HASH_BITS - nbits 113 | nbytes = int(nbits / 8) 114 | nbytes += 4 # Extra bytes to increase probability of enough possibilities. 
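# Cap the nonce width at the size of the nonce field; only the trailing nbytes of that field are varied during the search below.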
115 | nbytes = min(nbytes, nonce_size) 116 | ne = nonce_offset + nonce_size 117 | nonce_offset = ne - nbytes 118 | 119 | nonce = wid 120 | 121 | while True: 122 | nonce_bytes = nonce.to_bytes(nbytes, "big") 123 | data[nonce_offset:ne] = nonce_bytes 124 | 125 | h = enc.generate_ID(data) 126 | 127 | try: 128 | dist, direction = mutil.calc_log_distance(h, prefix) 129 | match = dist <= max_dist and direction == -1 130 | except IndexError: 131 | # log.debug("Exactly matched prefix.") 132 | match = True 133 | 134 | if match: 135 | # if log.isEnabledFor(logging.INFO): 136 | # log.info("nonce_bytes=[{}]."\ 137 | # .format(mutil.hex_string(nonce_bytes))) 138 | # if log.isEnabledFor(logging.DEBUG): 139 | # log.debug("resulting block=[\n{}]."\ 140 | # .format(mutil.hex_dump(data))) 141 | 142 | rp.send(nonce_bytes) 143 | return 144 | 145 | nonce += WORKERS 146 | 147 | def _find_key(rp): 148 | try: 149 | __find_key(rp) 150 | except Exception: 151 | log.exception("__find_key(..)") 152 | 153 | def __find_key(rp): 154 | # log.debug("Worker running.") 155 | 156 | wid, prefix = rp.recv() 157 | 158 | while True: 159 | key = rsakey.RsaKey.generate(bits=4096) 160 | pubkey_bytes = key.asbytes() 161 | 162 | pubkey_hash = enc.generate_ID(pubkey_bytes) 163 | 164 | pubkey_hash_enc = mbase32.encode(pubkey_hash) 165 | 166 | if pubkey_hash_enc.startswith(prefix): 167 | # if log.isEnabledFor(logging.INFO): 168 | # log.info("Worker #{} found key.".format(wid)) 169 | 170 | rp.send(key._encode_key()) 171 | return 172 | 173 | def main(): 174 | log.info("Testing...") 175 | 176 | r = generate_targeted_block(\ 177 | mbase32.decode("yyyyyyyy"), 20, b"test data message", 0, 4) 178 | 179 | log.info("Done, r=[{}].".format(r)) 180 | 181 | if __name__ == "__main__": 182 | main() 183 | 184 | -------------------------------------------------------------------------------- /build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | python3 setup$1.py build_ext --inplace 3 | 4 | -------------------------------------------------------------------------------- /chord_packet.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: GPL v2. 3 | 4 | import llog 5 | 6 | from enum import Enum 7 | import logging 8 | import struct 9 | 10 | from chordexception import ChordException 11 | from db import Peer 12 | import peer as mnpeer 13 | import sshtype 14 | 15 | # Chord Message Types. 16 | CHORD_MSG_RELAY = 100 17 | CHORD_MSG_NODE_INFO = 110 18 | CHORD_MSG_GET_PEERS = 115 19 | CHORD_MSG_PEER_LIST = 120 20 | CHORD_MSG_FIND_NODE = 150 21 | CHORD_MSG_GET_DATA = 160 22 | CHORD_MSG_DATA_RESPONSE = 162 23 | CHORD_MSG_DATA_PRESENCE = 165 24 | CHORD_MSG_STORE_DATA = 170 25 | CHORD_MSG_STORE_KEY = 171 26 | CHORD_MSG_DATA_STORED = 172 27 | CHORD_MSG_STORAGE_INTEREST = 175 28 | 29 | class DataMode(Enum): 30 | none = 0 31 | get = 10 32 | store = 20 33 | 34 | log = logging.getLogger(__name__) 35 | 36 | class ChordMessage(object): 37 | @staticmethod 38 | def parse_type(buf): 39 | return struct.unpack("B", buf[0:1])[0] 40 | 41 | def __init__(self, packet_type=None, buf=None): 42 | self.buf = buf 43 | self.packet_type = packet_type 44 | 45 | if not buf: 46 | return 47 | 48 | self._packet_type = packet_type # Expected packet_type. 
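# A buffer was supplied, so parse it immediately; parse() raises a ChordException if the type byte does not match the expected packet type.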
49 | 50 | self.parse() 51 | 52 | def parse(self): 53 | self.packet_type = struct.unpack("B", self.buf[0:1])[0] 54 | 55 | if self._packet_type and self.packet_type != self._packet_type: 56 | raise ChordException("Expecting packet type [{}] but got [{}]."\ 57 | .format(self._packet_type, self.packet_type)) 58 | 59 | def encode(self): 60 | nbuf = bytearray() 61 | nbuf += struct.pack("B", self.packet_type & 0xff) 62 | 63 | self.buf = nbuf 64 | 65 | return nbuf 66 | 67 | class ChordRelay(ChordMessage): 68 | def __init__(self, buf = None): 69 | self.index = None 70 | self.packets = None 71 | 72 | super().__init__(CHORD_MSG_RELAY, buf) 73 | 74 | @property 75 | def for_data(self): 76 | raise Exception("No more such property.") 77 | 78 | def encode(self): 79 | nbuf = super().encode() 80 | nbuf += struct.pack(">L", self.index) 81 | 82 | nbuf += struct.pack(">L", len(self.packets)) 83 | for packet in self.packets: 84 | nbuf += sshtype.encodeBinary(packet) 85 | 86 | return nbuf 87 | 88 | def parse(self): 89 | super().parse() 90 | i = 1 91 | self.index = struct.unpack(">L", self.buf[i:i+4])[0] 92 | i += 4 93 | 94 | cnt = struct.unpack(">L", self.buf[i:i+4])[0] 95 | i += 4 96 | self.packets = [] 97 | for n in range(cnt): 98 | l, packet = sshtype.parseBinary(self.buf[i:]) 99 | i += l 100 | self.packets.append(packet) 101 | 102 | class ChordNodeInfo(ChordMessage): 103 | def __init__(self, buf = None): 104 | self.sender_address = "" 105 | self.version = None 106 | 107 | super().__init__(CHORD_MSG_NODE_INFO, buf) 108 | 109 | def encode(self): 110 | nbuf = super().encode() 111 | 112 | nbuf += sshtype.encodeString(self.sender_address) 113 | nbuf += sshtype.encodeString(self.version) 114 | 115 | return nbuf 116 | 117 | def parse(self): 118 | super().parse() 119 | 120 | i = 1 121 | i, self.sender_address = sshtype.parse_string_from(self.buf, i) 122 | if i == len(self.buf): 123 | return 124 | if len(self.buf) - i > 64: 125 | raise ChordException("Version string in packet is too long.") 126 | i, self.version = sshtype.parse_string_from(self.buf, i) 127 | 128 | class ChordGetPeers(ChordMessage): 129 | def __init__(self, buf = None): 130 | self.sender_port = None 131 | 132 | super().__init__(CHORD_MSG_GET_PEERS, buf) 133 | 134 | def encode(self): 135 | nbuf = super().encode() 136 | 137 | nbuf += struct.pack(">L", self.sender_port) 138 | 139 | return nbuf 140 | 141 | def parse(self): 142 | super().parse() 143 | 144 | i = 1 145 | self.sender_port = struct.unpack(">L", self.buf[i:i+4])[0] 146 | 147 | class ChordPeerList(ChordMessage): 148 | def __init__(self, buf=None, peers=None): 149 | self.peers = peers # [peer.Peer or db.Peer] 150 | 151 | super().__init__(CHORD_MSG_PEER_LIST, buf) 152 | 153 | def encode(self): 154 | nbuf = super().encode() 155 | nbuf += struct.pack(">L", len(self.peers)) 156 | for peer in self.peers: 157 | nbuf += sshtype.encodeString(peer.address) 158 | nbuf += sshtype.encodeBinary(peer.node_id) 159 | if type(peer) is mnpeer.Peer: 160 | nbuf += sshtype.encodeBinary(peer.node_key.asbytes()) 161 | else: 162 | assert type(peer) is Peer 163 | nbuf += sshtype.encodeBinary(peer.pubkey) 164 | 165 | return nbuf 166 | 167 | def parse(self): 168 | super().parse() 169 | i = 1 170 | pcnt = struct.unpack(">L", self.buf[i:i+4])[0] 171 | i += 4 172 | self.peers = [] 173 | for n in range(pcnt): 174 | if log.isEnabledFor(logging.DEBUG): 175 | log.debug("Reading record {}.".format(n)) 176 | peer = Peer() # db.Peer. 
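# Each record mirrors encode(): a string address, then the binary node_id, then the binary public key.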
177 | l, peer.address = sshtype.parseString(self.buf[i:]) 178 | i += l 179 | l, peer.node_id = sshtype.parseBinary(self.buf[i:]) 180 | i += l 181 | l, peer.pubkey = sshtype.parseBinary(self.buf[i:]) 182 | i += l 183 | 184 | self.peers.append(peer) 185 | 186 | class ChordFindNode(ChordMessage): 187 | def __init__(self, buf = None): 188 | self.node_id = None 189 | self.data_mode = DataMode.none 190 | self.version = None 191 | self.significant_bits = None 192 | self.target_key = None 193 | 194 | super().__init__(CHORD_MSG_FIND_NODE, buf) 195 | 196 | def encode(self): 197 | nbuf = super().encode() 198 | nbuf += sshtype.encodeBinary(self.node_id) 199 | nbuf += struct.pack("B", self.data_mode.value) 200 | 201 | nbuf += struct.pack("?", self.version is not None) 202 | if self.version is not None: 203 | nbuf += sshtype.encodeMpint(self.version) 204 | 205 | if self.significant_bits: 206 | nbuf += struct.pack(">H", self.significant_bits) 207 | if self.target_key: 208 | nbuf += sshtype.encodeBinary(self.target_key) 209 | 210 | return nbuf 211 | 212 | def parse(self): 213 | super().parse() 214 | i = 1 215 | l, self.node_id = sshtype.parseBinary(self.buf[i:]) 216 | i += l 217 | self.data_mode = DataMode(struct.unpack("B", self.buf[i:i+1])[0]) 218 | i += 1 219 | 220 | has_version = struct.unpack_from("?", self.buf, i)[0] 221 | i += 1 222 | if has_version: 223 | i, self.version = sshtype.parse_mpint_from(self.buf, i) 224 | 225 | if i == len(self.buf): 226 | return 227 | 228 | self.significant_bits = struct.unpack(">H", self.buf[i:i+2])[0] 229 | i += 2 230 | 231 | if i == len(self.buf): 232 | return 233 | 234 | i, self.target_key = sshtype.parse_binary_from(self.buf, i) 235 | 236 | class ChordGetData(ChordMessage): 237 | def __init__(self, buf = None): 238 | super().__init__(CHORD_MSG_GET_DATA, buf) 239 | 240 | def encode(self): 241 | nbuf = super().encode() 242 | 243 | return nbuf 244 | 245 | def parse(self): 246 | super().parse() 247 | 248 | class ChordDataResponse(ChordMessage): 249 | def __init__(self, buf = None): 250 | self.data = None 251 | self.original_size = 0 # Original (unencrypted) length. 
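# version and signature are only appended for updateable keys; epubkey and pubkeylen form a further optional trailer (see encode()/parse() below).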
252 | self.version = None 253 | self.signature = None 254 | self.epubkey = None 255 | self.pubkeylen = None 256 | 257 | super().__init__(CHORD_MSG_DATA_RESPONSE, buf) 258 | 259 | def encode(self): 260 | nbuf = super().encode() 261 | nbuf += sshtype.encodeBinary(self.data) 262 | nbuf += struct.pack(">L", self.original_size) 263 | 264 | if self.version is not None: 265 | nbuf += sshtype.encodeMpint(self.version) 266 | nbuf += sshtype.encodeBinary(self.signature) 267 | if self.epubkey: 268 | nbuf += sshtype.encodeBinary(self.epubkey) 269 | nbuf += struct.pack(">L", self.pubkeylen) 270 | 271 | return nbuf 272 | 273 | def parse(self): 274 | super().parse() 275 | i = 1 276 | l, self.data = sshtype.parseBinary(self.buf[i:]) 277 | i += l 278 | self.original_size = struct.unpack(">L", self.buf[i:i+4])[0] 279 | i += 4 280 | 281 | if i == len(self.buf): 282 | return 283 | 284 | l, self.version = sshtype.parseMpint(self.buf[i:]) 285 | i += l 286 | l, self.signature = sshtype.parseBinary(self.buf[i:]) 287 | i += l 288 | 289 | if i == len(self.buf): 290 | return 291 | 292 | l, self.epubkey = sshtype.parseBinary(self.buf[i:]) 293 | i += l 294 | self.pubkeylen = struct.unpack(">L", self.buf[i:i+4])[0] 295 | 296 | class ChordDataPresence(ChordMessage): 297 | def __init__(self, buf = None): 298 | self.data_present = False 299 | self.first_id = None 300 | 301 | super().__init__(CHORD_MSG_DATA_PRESENCE, buf) 302 | 303 | def encode(self): 304 | nbuf = super().encode() 305 | if self.first_id is None: 306 | nbuf += struct.pack("?", self.data_present) 307 | else: 308 | nbuf += sshtype.encodeBinary(self.first_id) 309 | 310 | return nbuf 311 | 312 | def parse(self): 313 | super().parse() 314 | i = 1 315 | 316 | if i + 1 == len(self.buf): 317 | self.data_present = struct.unpack("?", self.buf[i:i+1])[0] 318 | else: 319 | l, self.first_id = sshtype.parseBinary(self.buf[i:]) 320 | 321 | class ChordStoreData(ChordMessage): 322 | def __init__(self, buf = None): 323 | self.data = None 324 | self.targeted = False 325 | 326 | self.pubkey = None 327 | self.path_hash = None 328 | self.version = None 329 | self.signature = None 330 | 331 | super().__init__(CHORD_MSG_STORE_DATA, buf) 332 | 333 | @property 334 | def data_id(self): 335 | raise Exception("No more such property.") 336 | 337 | def encode(self): 338 | nbuf = super().encode() 339 | nbuf += sshtype.encodeBinary(self.data) 340 | nbuf += struct.pack("?", self.targeted) 341 | 342 | if self.pubkey: 343 | # Updateable keys. 
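# For an updateable key the message also carries the signing public key, the path hash, the version number and the signature over the data.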
344 | nbuf += sshtype.encodeBinary(self.pubkey) 345 | nbuf += sshtype.encodeBinary(self.path_hash) 346 | nbuf += sshtype.encodeMpint(self.version) 347 | nbuf += sshtype.encodeBinary(self.signature) 348 | 349 | return nbuf 350 | 351 | def parse(self): 352 | super().parse() 353 | i = 1 354 | l, self.data = sshtype.parseBinary(self.buf[i:]) 355 | i += l 356 | self.targeted = struct.unpack_from("?", self.buf, i)[0] 357 | i += 1 358 | 359 | if i == len(self.buf): 360 | return 361 | 362 | l, self.pubkey = sshtype.parseBinary(self.buf[i:]) 363 | i += l 364 | l, self.path_hash = sshtype.parseBinary(self.buf[i:]) 365 | i += l 366 | l, self.version = sshtype.parseMpint(self.buf[i:]) 367 | i += l 368 | l, self.signature = sshtype.parseBinary(self.buf[i:]) 369 | 370 | class ChordStoreKey(ChordMessage): 371 | def __init__(self, buf = None): 372 | self.data = None 373 | self.targeted = False 374 | 375 | super().__init__(CHORD_MSG_STORE_KEY, buf) 376 | 377 | def encode(self): 378 | nbuf = super().encode() 379 | nbuf += sshtype.encodeBinary(self.data) 380 | nbuf += struct.pack("?", self.targeted) 381 | 382 | return nbuf 383 | 384 | def parse(self): 385 | super().parse() 386 | i = 1 387 | l, self.data = sshtype.parseBinary(self.buf[i:]) 388 | i += l 389 | self.targeted = struct.unpack_from("?", self.buf, i)[0] 390 | 391 | class ChordDataStored(ChordMessage): 392 | def __init__(self, buf = None): 393 | self.stored = False 394 | 395 | super().__init__(CHORD_MSG_DATA_STORED, buf) 396 | 397 | def encode(self): 398 | nbuf = super().encode() 399 | nbuf += struct.pack("?", self.stored) 400 | 401 | return nbuf 402 | 403 | def parse(self): 404 | super().parse() 405 | i = 1 406 | self.stored = struct.unpack("?", self.buf[i:i+1])[0] 407 | 408 | class ChordStorageInterest(ChordMessage): 409 | def __init__(self, buf = None): 410 | self.will_store = False 411 | 412 | super().__init__(CHORD_MSG_STORAGE_INTEREST, buf) 413 | 414 | def encode(self): 415 | nbuf = super().encode() 416 | nbuf += struct.pack("?", self.will_store) 417 | 418 | return nbuf 419 | 420 | def parse(self): 421 | super().parse() 422 | i = 1 423 | self.will_store = struct.unpack("?", self.buf[i:i+1])[0] 424 | -------------------------------------------------------------------------------- /chordexception.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: GPL v2. 3 | 4 | class ChordException(Exception): 5 | pass 6 | -------------------------------------------------------------------------------- /client.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: GPL v2. 
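# Example usage -- a minimal sketch only, assuming a local node is already
# running with its shell enabled on the default 127.0.0.1:4250:
#
#   import asyncio
#   import client
#
#   loop = asyncio.get_event_loop()
#   c = client.Client(loop)
#   if loop.run_until_complete(c.connect()):
#       # "?" asks the node's shell for its help text (see the README).
#       print(loop.run_until_complete(c.send_command("?")))
#       loop.run_until_complete(c.disconnect())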
3 | 4 | import llog 5 | 6 | import asyncio 7 | import logging 8 | import os 9 | 10 | import base58 11 | import chord_tasks 12 | import mbase32 13 | import mn1 14 | import packet as mnp 15 | import rsakey 16 | from shell import BinaryMessage 17 | 18 | log = logging.getLogger(__name__) 19 | 20 | class Client(object): 21 | def __init__(self, loop, client_key=None, address="127.0.0.1:4250"): 22 | self.loop = loop 23 | self.address = address 24 | 25 | if client_key is None: 26 | client_key = rsakey.RsaKey.generate(bits=4096) 27 | self.client_key = client_key 28 | self.server_key = None 29 | 30 | self._ready = asyncio.Event(loop=loop) 31 | 32 | @asyncio.coroutine 33 | def connect(self): 34 | if log.isEnabledFor(logging.INFO): 35 | log.info("Connecting to addr=[{}].".format(self.address)) 36 | 37 | host, port = self.address.split(':') 38 | 39 | client = self.loop.create_connection( 40 | self._create_client_protocol, host, port) 41 | 42 | try: 43 | transport, protocol = yield from client 44 | except Exception as ex: 45 | if log.isEnabledFor(logging.INFO): 46 | log.info("Connection failed: {}: {}".format(type(ex), ex)) 47 | 48 | return False 49 | 50 | if log.isEnabledFor(logging.INFO): 51 | log.info("Connected!") 52 | 53 | yield from self._ready.wait() 54 | 55 | return True 56 | 57 | def _create_client_protocol(self): 58 | ph = mn1.SshClientProtocol(self.loop) 59 | ph.client_key = self.client_key 60 | if self.server_key: 61 | ph.server_key = self.server_key 62 | 63 | ph.connection_handler = ConnectionHandler(self) 64 | ph.channel_handler = ChannelHandler(self) 65 | 66 | self.protocol = ph 67 | 68 | return ph 69 | 70 | @asyncio.coroutine 71 | def disconnect(self): 72 | self.protocol.close() 73 | 74 | @asyncio.coroutine 75 | def send_command(self, command, args=None): 76 | if log.isEnabledFor(logging.INFO): 77 | log.info("Sending command [{}] with args [{}]."\ 78 | .format(command, args)) 79 | 80 | cid, queue = yield from\ 81 | self.protocol.open_channel("session", True) 82 | 83 | if not queue: 84 | return False 85 | 86 | msg = BinaryMessage() 87 | msg.value = command.encode() + b"\r\n" 88 | 89 | self.protocol.send_channel_request(\ 90 | cid, "exec", False, msg.encode()) 91 | 92 | data = yield from queue.get() 93 | 94 | if not data: 95 | return False 96 | 97 | msg = BinaryMessage(data) 98 | 99 | return msg.value 100 | 101 | @asyncio.coroutine 102 | def send_store_data(\ 103 | self, data, store_key=False, key_callback=None): 104 | data_enc = base58.encode(data) 105 | 106 | r = yield from\ 107 | self.send_command(\ 108 | "storeblockenc {} {}".format(data_enc, store_key)) 109 | 110 | p0 = r.find(b']') 111 | data_key = mbase32.decode(r[10:p0].decode("UTF-8")) 112 | 113 | key_callback(data_key) 114 | 115 | p0 = r.find(b"storing_nodes=[", p0) + 15 116 | p1 = r.find(b']', p0) 117 | 118 | return int(r[p0:p1]) 119 | 120 | @asyncio.coroutine 121 | def send_store_updateable_key(\ 122 | self, data, privkey, path=None, version=None, store_key=True,\ 123 | key_callback=None): 124 | privkey_enc = base58.encode(privkey._encode_key()) 125 | data_enc = base58.encode(data) 126 | 127 | cmd = "storeukeyenc {} {} {} {}"\ 128 | .format(privkey_enc, data_enc, version, store_key) 129 | 130 | r = yield from self.send_command(cmd) 131 | 132 | if not r: 133 | return 0 134 | 135 | if key_callback: 136 | p1 = r.find(b']', 10) 137 | r = r[10:p1].decode() 138 | key_enc = r 139 | key_callback(mbase32.decode(key_enc)) 140 | 141 | return 1 #FIXME: The shell API doesn't return this value as of yet. 
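# Stores a targeted block via the shell's "storetargetedblockenc" command; the data key is reported through key_callback and the number of storing nodes parsed from the reply is returned.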
142 | 143 | @asyncio.coroutine 144 | def send_store_targeted_data(\ 145 | self, data, store_key=False, key_callback=None): 146 | data_enc = base58.encode(data) 147 | 148 | r = yield from\ 149 | self.send_command(\ 150 | "storetargetedblockenc {} {}".format(data_enc, store_key)) 151 | 152 | p0 = r.find(b']') 153 | data_key = mbase32.decode(r[10:p0].decode("UTF-8")) 154 | 155 | key_callback(data_key) 156 | 157 | p0 = r.find(b"storing_nodes=[", p0) + 15 158 | p1 = r.find(b']', p0) 159 | 160 | return int(r[p0:p1]) 161 | 162 | @asyncio.coroutine 163 | def send_find_key(self, prefix, target_key=None, significant_bits=None,\ 164 | retry_factor=None): 165 | cmd = "findkey " + mbase32.encode(prefix) 166 | if target_key: 167 | cmd += " " + mbase32.encode(target_key) 168 | if significant_bits: 169 | cmd += " " + str(significant_bits) 170 | 171 | r = yield from self.send_command(cmd) 172 | 173 | p0 = r.find(b"data_key=[") + 10 174 | p1 = r.find(b']', p0) 175 | 176 | data_key = r[p0:p1].decode() 177 | 178 | if data_key == "None": 179 | data_key = None 180 | else: 181 | data_key = mbase32.decode(data_key) 182 | 183 | data_rw = chord_tasks.DataResponseWrapper(data_key) 184 | 185 | return data_rw 186 | 187 | @asyncio.coroutine 188 | def send_get_data(self, data_key, path=None, retry_factor=None): 189 | data_key_enc = mbase32.encode(data_key) 190 | 191 | if path: 192 | cmd = "getdata {} {}".format(data_key_enc, path) 193 | else: 194 | cmd = "getdata {}".format(data_key_enc) 195 | 196 | r = yield from self.send_command(cmd) 197 | 198 | data_rw = chord_tasks.DataResponseWrapper(data_key) 199 | 200 | p0 = r.find(b"version=[") + 9 201 | p1 = r.find(b']', p0) 202 | ver_str = r[p0:p1] 203 | data_rw.version = int(ver_str) if ver_str != b"None" else None 204 | p0 = p1 + 1 205 | 206 | p0 = r.find(b"data:\r\n", p0) + 7 207 | data = r[p0:-2] # -2 for the "\r\n". 208 | 209 | #FIXME: This is ambiguous with data that == "Not found." :) 210 | data_rw.data = data if data != b"Not found." else None 211 | 212 | return data_rw 213 | 214 | @asyncio.coroutine 215 | def send_get_targeted_data(self, data_key): 216 | data_key_enc = mbase32.encode(data_key) 217 | 218 | cmd = "gettargeteddata {}".format(data_key_enc) 219 | 220 | r = yield from self.send_command(cmd) 221 | 222 | data_rw = chord_tasks.DataResponseWrapper(data_key) 223 | 224 | p0 = r.find(b"data:\r\n") + 7 225 | data_rw.data = r[p0:-2] # -2 for the "\r\n". 
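        # Unlike send_get_data above, no version field is parsed here and the
        # "Not found." case is not distinguished from literal data.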
226 | 227 | return data_rw 228 | 229 | class ConnectionHandler(mn1.ConnectionHandler): 230 | def __init__(self, client): 231 | self.client = client 232 | 233 | @asyncio.coroutine 234 | def connection_ready(self, protocol): 235 | self.client._ready.set() 236 | 237 | @asyncio.coroutine 238 | def peer_authenticated(self, protocol): 239 | return True 240 | 241 | class ChannelHandler(mn1.ChannelHandler): 242 | def __init__(self, client): 243 | self.client = client 244 | -------------------------------------------------------------------------------- /client_engine.py: -------------------------------------------------------------------------------- 1 | import llog 2 | 3 | import asyncio 4 | import logging 5 | import os 6 | import time 7 | 8 | from sqlalchemy.orm import joinedload 9 | 10 | import base58 11 | import chord 12 | from db import DmailAddress 13 | import dhgroup14 14 | import dmail 15 | import mbase32 16 | import multipart 17 | import rsakey 18 | import sshtype 19 | 20 | log = logging.getLogger(__name__) 21 | 22 | class ClientEngine(object): 23 | def __init__(self, engine, db): 24 | assert type(engine) is chord.ChordEngine 25 | 26 | self.engine = engine 27 | self.db = db 28 | self.loop = engine.loop 29 | 30 | self.latest_version_number = None 31 | self.latest_version_data = None 32 | 33 | self.auto_publish_enabled = True 34 | self.auto_scan_enabled = True 35 | 36 | self.csrf_token = base58.encode(os.urandom(64)) 37 | 38 | self._dmail_engine = None 39 | 40 | self._running = False 41 | 42 | self._data_key =\ 43 | mbase32.decode("sp1nara3xhndtgswh7fznt414we4mi3y6kdwbkz4jmt8ocb6x"\ 44 | "4w1faqjotjkcrefta11swe3h53dt6oru3r13t667pr7cpe3ocxeuma") 45 | self._path = b"latest_version" 46 | 47 | self._dmail_autoscan_processes = {} 48 | 49 | @property 50 | def update_test(self): 51 | raise Exception() 52 | 53 | @update_test.setter 54 | def update_test(self, value): 55 | if value: 56 | self._path = b"test_version" 57 | 58 | @asyncio.coroutine 59 | def start(self): 60 | if self._running: 61 | return 62 | 63 | self._running = True 64 | 65 | if not self._dmail_engine: 66 | self._dmail_engine = dmail.DmailEngine(self.engine.tasks, self.db) 67 | 68 | asyncio.async(self._start_version_poller(), loop=self.loop) 69 | if self.auto_scan_enabled: 70 | asyncio.async(self._start_dmail_autoscan(), loop=self.loop) 71 | if self.auto_publish_enabled: 72 | asyncio.async(self._start_dmail_auto_publish(), loop=self.loop) 73 | 74 | @asyncio.coroutine 75 | def stop(self): 76 | if self._running: 77 | self._running = False 78 | 79 | for processor in self._dmail_autoscan_processes.values(): 80 | processor.stop() 81 | 82 | @asyncio.coroutine 83 | def _start_version_poller(self): 84 | yield from self.engine.protocol_ready.wait() 85 | 86 | while self._running: 87 | data_rw = multipart.BufferingDataCallback() 88 | 89 | r =\ 90 | yield from\ 91 | multipart.get_data(self.engine, self._data_key,\ 92 | data_callback=data_rw, path=self._path) 93 | 94 | if data_rw.data: 95 | if data_rw.version: 96 | data = data_rw.data.decode() 97 | 98 | p0 = data.find('') 99 | p0 += 26 100 | p1 = data.find("", p0) 101 | self.latest_version_number = data[p0:p1] 102 | self.latest_version_data = data 103 | 104 | if log.isEnabledFor(logging.INFO): 105 | log.info("Found latest_version_number=[{}]"\ 106 | " (data_rw.version=[{}])."\ 107 | .format(\ 108 | self.latest_version_number,\ 109 | data_rw.version)) 110 | else: 111 | if log.isEnabledFor(logging.INFO): 112 | log.info("Found invalid latest_version record:"\ 113 | " data_rw.version=[{}], 
len(data)=[{}]."\ 114 | .format(data_rw.version, len(data_rw.data))) 115 | delay = 5*60 116 | else: 117 | log.info("Couldn't find latest_version in network.") 118 | delay = 60 119 | 120 | yield from asyncio.sleep(delay, loop=self.loop) 121 | 122 | @asyncio.coroutine 123 | def _start_dmail_auto_publish(self): 124 | yield from self.engine.protocol_ready.wait() 125 | 126 | def dbcall(): 127 | with self.db.open_session(True) as sess: 128 | q = sess.query(DmailAddress)\ 129 | .options(joinedload("keys")) 130 | 131 | r = q.all() 132 | 133 | sess.expunge_all() 134 | 135 | return r 136 | 137 | while self._running: 138 | addrs = yield from self.loop.run_in_executor(None, dbcall) 139 | 140 | for addr in addrs: 141 | yield from self._dmail_auto_publish(addr) 142 | 143 | log.info("Finished auto-publish scan, sleeping for now.") 144 | 145 | yield from asyncio.sleep(60 * 60 * 24, loop=self.loop) 146 | 147 | @asyncio.coroutine 148 | def _dmail_auto_publish(self, dmail_address): 149 | data_rw = yield from self.engine.tasks.send_get_data(\ 150 | dmail_address.site_key, retry_factor=100) 151 | 152 | if data_rw.data: 153 | if log.isEnabledFor(logging.DEBUG): 154 | log.debug("Succeeded in fetching dmail site [{}]; won't"\ 155 | " auto-publish."\ 156 | .format(mbase32.encode(dmail_address.site_key))) 157 | return 158 | 159 | if log.isEnabledFor(logging.INFO): 160 | log.info("Failed to fetch dmail site [{}]; republishing."\ 161 | .format(mbase32.encode(dmail_address.site_key))) 162 | 163 | private_key = rsakey.RsaKey(privdata=dmail_address.site_privatekey) 164 | 165 | dh = dhgroup14.DhGroup14() 166 | dh.x = sshtype.parseMpint(dmail_address.keys[0].x)[1] 167 | dh.generate_e() 168 | 169 | dms = dmail.DmailSite() 170 | root = dms.root 171 | root["ssm"] = "mdh-v1" 172 | root["sse"] = base58.encode(sshtype.encodeMpint(dh.e)) 173 | root["target"] =\ 174 | mbase32.encode(dmail_address.keys[0].target_key) 175 | root["difficulty"] = int(dmail_address.keys[0].difficulty) 176 | 177 | storing_nodes =\ 178 | yield from self._dmail_engine.publish_dmail_site(private_key, dms) 179 | 180 | if log.isEnabledFor(logging.INFO): 181 | log.info("Republished Dmail site with [{}] storing nodes."\ 182 | .format(storing_nodes)) 183 | 184 | @asyncio.coroutine 185 | def _start_dmail_autoscan(self): 186 | yield from self.engine.protocol_ready.wait() 187 | 188 | def dbcall(): 189 | with self.db.open_session() as sess: 190 | q = sess.query(DmailAddress)\ 191 | .options(joinedload("keys"))\ 192 | .filter(DmailAddress.scan_interval > 0) 193 | 194 | r = q.all() 195 | 196 | sess.expunge_all() 197 | 198 | return r 199 | 200 | addrs = yield from self.loop.run_in_executor(None, dbcall) 201 | 202 | for addr in addrs: 203 | self.update_dmail_autoscan(addr) 204 | 205 | def update_dmail_autoscan(self, addr): 206 | if not self.auto_scan_enabled: 207 | return 208 | 209 | if log.isEnabledFor(logging.INFO): 210 | log.info(\ 211 | "Starting/Updating autoscan (scan_interval=[{}]) process for"\ 212 | " DmailAddress (id=[{}])."\ 213 | .format(addr.scan_interval, addr.id)) 214 | 215 | process = self._dmail_autoscan_processes.get(addr.id) 216 | 217 | if not addr.scan_interval: 218 | if process: 219 | process.stop() 220 | del self._dmail_autoscan_processes[addr.id] 221 | else: 222 | return 223 | 224 | if process: 225 | process.update_scan_interval(addr.scan_interval) 226 | else: 227 | process = DmailAutoscanProcess(self, addr, addr.scan_interval) 228 | asyncio.async(process.run(), loop=self.loop) 229 | self._dmail_autoscan_processes[addr.id] = process 230 | 231 | 
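    # Force an immediate scan for one address: wake the existing
    # DmailAutoscanProcess if there is one, otherwise spawn a one-shot process
    # (interval 0) that scans once and stops.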
def trigger_dmail_scan(self, addr): 232 | if log.isEnabledFor(logging.INFO): 233 | log.info("Ensuring scan of DmailAddress (id=[{}]) now."\ 234 | .format(addr.id)) 235 | 236 | process = self._dmail_autoscan_processes.get(addr.id) 237 | 238 | if process: 239 | process.scan_now() 240 | else: 241 | process = DmailAutoscanProcess(self, addr, 0) 242 | asyncio.async(process.run(), loop=self.loop) 243 | self._dmail_autoscan_processes[addr.id] = process 244 | 245 | class DmailAutoscanProcess(object): 246 | def __init__(self, client_engine, addr, interval): 247 | self.client_engine = client_engine 248 | self.loop = client_engine.loop 249 | self.dmail_address = addr 250 | self.scan_interval = interval 251 | 252 | self._running = False 253 | self._task = None 254 | self._scan_now = False 255 | 256 | def scan_now(self): 257 | if self._task: 258 | self._scan_now = True 259 | self._task.cancel() 260 | else: 261 | if self._running: 262 | log.info("Already scanning.") 263 | return 264 | asyncio.async(self.run(), loop=self.loop) 265 | 266 | def update_scan_interval(self, interval): 267 | if not interval: 268 | self._running = False 269 | if self._task: 270 | self._task.cancel() 271 | return 272 | 273 | self.scan_interval = interval 274 | 275 | if self._running: 276 | if log.isEnabledFor(logging.INFO): 277 | log.info("Notifying DmailAutoscanProcess (addr=[{}]) of"\ 278 | " interval change."\ 279 | .format(mbase32.encode(self.dmail_address.site_key))) 280 | if self._task: 281 | self._task.cancel() 282 | else: 283 | if log.isEnabledFor(logging.INFO): 284 | log.info("Starting DmailAutoscanProcess (addr=[{}])."\ 285 | .format(mbase32.encode(self.dmail_address.site_key))) 286 | asyncio.async(self.run(), loop=self.loop) 287 | 288 | @asyncio.coroutine 289 | def run(self): 290 | self._running = True 291 | 292 | if log.isEnabledFor(logging.INFO): 293 | addr_enc = mbase32.encode(self.dmail_address.site_key) 294 | log.info("DmailAutoscanProcess (addr=[{}]) running."\ 295 | .format(addr_enc)) 296 | 297 | while self._running: 298 | new_cnt, old_cnt, err_cnt = yield from\ 299 | self.client_engine._dmail_engine.scan_and_save_new_dmails(\ 300 | self.dmail_address) 301 | 302 | if log.isEnabledFor(logging.INFO): 303 | log.info("Finished scanning Dmails for address [{}];"\ 304 | " new_cnt=[{}], old_cnt=[{}], err_cnt=[{}]."\ 305 | .format(addr_enc, new_cnt, old_cnt, err_cnt)) 306 | 307 | if not self.scan_interval: 308 | self._running = False 309 | 310 | if not self._running: 311 | break 312 | 313 | time_left = self.scan_interval 314 | start = time.time() 315 | 316 | while time_left > 0: 317 | if log.isEnabledFor(logging.INFO): 318 | log.info("Sleeping for [{}] seconds.".format(time_left)) 319 | 320 | self._task =\ 321 | asyncio.async(\ 322 | asyncio.sleep(time_left, loop=self.loop),\ 323 | loop=self.loop) 324 | 325 | try: 326 | yield from self._task 327 | self._task = None 328 | break 329 | except asyncio.CancelledError: 330 | self._task = None 331 | if log.isEnabledFor(logging.INFO): 332 | log.info("Woken from sleep for address [{}]."\ 333 | .format(\ 334 | mbase32.encode(self.dmail_address.site_key))) 335 | if self._scan_now: 336 | self._scan_now = False 337 | break 338 | time_left = self.scan_interval - (time.time() - start) 339 | 340 | def stop(self): 341 | if self._running: 342 | if log.isEnabledFor(logging.INFO): 343 | log.info("Stopping DmailAutoscanProcess (addr=[{}])."\ 344 | .format(mbase32.encode(self.dmail_address.site_key))) 345 | self._running = False 346 | if self._task: 347 | self._task.cancel() 348 | 
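# --- Illustrative sketch (editor's note, not part of the original module) ---
# Assuming an already-constructed chord.ChordEngine ("engine") and db handle,
# a ClientEngine might be driven roughly like this:
#
#     client_engine = ClientEngine(engine, db)
#     asyncio.async(client_engine.start(), loop=engine.loop)
#     ...
#     yield from client_engine.stop()
#
# start() kicks off the version poller plus the Dmail auto-publish and
# auto-scan loops; stop() halts the per-address DmailAutoscanProcess objects.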
-------------------------------------------------------------------------------- /consts.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: GPL v2. 3 | 4 | import enc 5 | 6 | NODE_ID_BITS = enc.ID_BITS 7 | NODE_ID_BYTES = NODE_ID_BITS >> 3 8 | MAX_DATA_BLOCK_SIZE = 32768 9 | 10 | NSK_DEFAULT_ADDRESS = "default_address" 11 | NSK_SCHEMA_VERSION = "schema_version" 12 | -------------------------------------------------------------------------------- /contrib/morphisd/README.md: -------------------------------------------------------------------------------- 1 | # morphisd 2 | 3 | Have MORPHiS run as a service under systemd. 4 | 5 | ## Contents 6 | . 7 | ├── etc 8 | │   └── default 9 | │   └── morphisd 10 | ├── opt 11 | │   └── morphis 12 | │   ├── morphisd 13 | │   └── setargv 14 | └── usr 15 | └── lib 16 | └── systemd 17 | └── system 18 | └── morphisd.service 19 | 20 | ## Install 21 | 22 | This setup assumes that MORPHiS is installed in `/opt/morphis` and 23 | will run as the user `morphis` (you can change this in the file 24 | `morphisd.service`). 25 | 26 | Copy the files above to the locations shown, then do 27 | 28 | # systemctl daemon-reload 29 | # systemctl start morphisd.service 30 | 31 | If you want MORPHiS to start at reboot, do 32 | 33 | # systemctl enable morphisd.service 34 | 35 | ## License 36 | 37 | GPL v3+ 38 | 39 | ## Bugs 40 | 41 | If you find any bugs, or have suggestions, please send me a Dmail: 42 | 43 | ksn3r6aw8ou6s6nq41xk51g9rktrcanz 44 | 45 | // Klaus Alexander Seistrup 46 | -------------------------------------------------------------------------------- /contrib/morphisd/etc/default/morphisd: -------------------------------------------------------------------------------- 1 | # /etc/default/morphisd 2 | # -*- mode: sh; coding: utf-8 -*- 3 | 4 | # If this variable is set to 'random', a random value is used 5 | # to seed the hashes of str, bytes and datetime objects. It 6 | # can also be set to an integer in the range [0,4294967295] to 7 | # get hash values with a predictable seed. 8 | PYTHONHASHSEED=random 9 | 10 | # Encoding[:errors] used for stdin/stdout/stderr. 
11 | PYTHONIOENCODING=UTF-8 12 | 13 | # Unbuffered binary stdout and stderr (stdin always buffered) 14 | # for better systemd-journald logging 15 | PYTHONUNBUFFERED=x 16 | 17 | # Optimize generated bytecode slightly 18 | PYTHONOPTIMIZE=x 19 | 20 | # eof 21 | -------------------------------------------------------------------------------- /contrib/morphisd/opt/morphis/morphisd: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | if [ -r /etc/default/morphisd ]; then 4 | source /etc/default/morphisd 5 | fi 6 | 7 | if [ -x /opt/morphis/setargv ]; then 8 | exec /opt/morphis/setargv python morphisd node.py "${@}" 9 | fi 10 | exec python node.py "${@}" 11 | 12 | # eof 13 | -------------------------------------------------------------------------------- /contrib/morphisd/opt/morphis/setargv: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- mode: python; coding: utf-8 -*- 3 | """ 4 | usage: setargv [arg…] 5 | """ 6 | ############################################################################## 7 | # This program is free software; you can redistribute it and/or modify it # 8 | # under the terms of the GNU General Public License as published by the Free # 9 | # Software Foundation; either version 3 of the License, or (at your option) # 10 | # any later version. # 11 | # # 12 | # This program is distributed in the hope that it will be useful, but with- # 13 | # out any warranty; without even the implied warranty of merchantability or # 14 | # fitness for a particular purpose. See the GNU General Public License for # 15 | # more details. # 16 | ############################################################################## 17 | 18 | import os 19 | import sys 20 | 21 | __version__ = '0.5.0' 22 | __revision__ = '2013-05-03' 23 | __author__ = 'Klaus Alexander Seistrup ' 24 | 25 | argc = len(sys.argv) 26 | stderr = sys.stderr 27 | 28 | if argc == 2: 29 | arg = sys.argv[1] 30 | if arg in ('-h', '--help') or arg.endswith('help'): 31 | print(__doc__.strip()) 32 | sys.exit(0) 33 | if arg in ('-v', '--version') or arg.endswith('version'): 34 | print('setargv/%s (%s)' % (__version__, __revision__)) 35 | sys.exit(0) 36 | 37 | if argc < 3: 38 | print(__doc__.strip(), file=stderr) 39 | sys.exit(1) 40 | 41 | (real, fake, args) = (sys.argv[1], sys.argv[2], sys.argv[2:]) 42 | 43 | try: 44 | os.execvp(real, args) 45 | except OSError as err: 46 | print('unable to run %s: %s' % (real, str(err).split(']', 1)[-1].strip().lower()), file=stderr) 47 | 48 | sys.exit(-1) 49 | # eof 50 | -------------------------------------------------------------------------------- /contrib/morphisd/usr/lib/systemd/system/morphisd.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=MORPHiS distributed data store 3 | After=syslog.target 4 | After=network.target 5 | 6 | [Service] 7 | Type=simple 8 | User=morphis 9 | Group=morphis 10 | WorkingDirectory=/opt/morphis 11 | ExecStart=/opt/morphis/morphisd 12 | Restart=always 13 | Environment=USER=morphis HOME=/opt/morphis 14 | 15 | [Install] 16 | WantedBy=multi-user.target 17 | -------------------------------------------------------------------------------- /dev_cluster.sh: -------------------------------------------------------------------------------- 1 | python3 node.py -l logging-warn.ini --bind 127.0.0.1:6000 --addpeer 127.0.0.1:7000 --nn 1000 --dbpoolsize 4 --parallellaunch --dumptasksonexit --dburl 
postgresql://m1:2d8VhzVIovxZy@pg1/m1 --nodecount 5 --dm --reinitds --dssize 32 & 2 | python3 node.py -l logging-warn.ini --bind 127.0.0.1:6000 --addpeer 127.0.0.1:7000 --nn 1000 --dbpoolsize 4 --parallellaunch --dumptasksonexit --dburl postgresql://m1:2d8VhzVIovxZy@pg1/m1 --nodecount 5 --instanceoffset 5 --dm --reinitds --dssize 32 & 3 | python3 node.py -l logging-warn.ini --bind 127.0.0.1:6000 --addpeer 127.0.0.1:7000 --nn 1000 --dbpoolsize 4 --parallellaunch --dumptasksonexit --dburl postgresql://m1:2d8VhzVIovxZy@pg1/m1 --nodecount 5 --instanceoffset 10 --dm --reinitds --dssize 32 & 4 | python3 node.py -l logging-warn.ini --bind 127.0.0.1:6000 --addpeer 127.0.0.1:7000 --nn 1000 --dbpoolsize 4 --parallellaunch --dumptasksonexit --dburl postgresql://m1:2d8VhzVIovxZy@pg1/m1 --nodecount 5 --instanceoffset 15 --dm --reinitds --dssize 32 & 5 | -------------------------------------------------------------------------------- /dev_node.sh: -------------------------------------------------------------------------------- 1 | python3 node.py -l logging-info.ini --bind 127.0.0.1:7000 --nn 7000 --dumptasksonexit --dburl postgresql://m1:2d8VhzVIovxZy@pg1/m1 --dssize 32 --reinitds --webdevel --enableeval 2 | -------------------------------------------------------------------------------- /dhgroup14.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: LGPL 3 | 4 | import llog 5 | 6 | import os 7 | import logging 8 | from hashlib import sha1 9 | 10 | b0000000000000000 = bytes((0x00,)) * 8 11 | b7fffffffffffffff = bytes((0x7f,)) + bytes((0xff,)) * 7 12 | 13 | class DhGroup14(object): 14 | # http://tools.ietf.org/html/rfc3526#section-3 15 | P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF 16 | G = 2 17 | 18 | def __init__(self): 19 | self.x = None 20 | self.e = None 21 | self.f = None 22 | self.k = None 23 | 24 | def generate_x(self): 25 | "generate an 'x' (1 < x < q), where q is (p-1)/2." 26 | "p is a 256-byte (2048-bit) number, where the first ?? bits are 1. " 27 | "therefore ?? q can be approximated as a 2^2047. we drop the subset" 28 | "of potential x where the first 63 bits are 1, because some of those" 29 | "will be larger than q (but this is a tiny tiny subset of potential" 30 | "x)." 31 | 32 | while True: 33 | xb = bytearray(os.urandom(256)) 34 | xb[0] = xb[0] & 0x7f 35 | 36 | start = xb[:8] 37 | if start != b7fffffffffffffff and start != b0000000000000000: 38 | break 39 | 40 | self.x = int.from_bytes(xb, "big") 41 | 42 | def generate_e(self): 43 | self.e = pow(self.G, self.x, self.P) 44 | 45 | def calculate_k(self): 46 | k = self.k 47 | if not k: 48 | k = self.k = pow(self.f, self.x, self.P) 49 | return k 50 | -------------------------------------------------------------------------------- /dsskey.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2003-2007 Robey Pointer . 2 | # Copyright (C) 2014-2015 Sam Maloney. 3 | # License: LGPL. 
4 | # 5 | # This file is based upon parts from paramiko (r85d5e95f9280aa236602b77e9f5bd0aa4d3c8fcd). 6 | 7 | """ 8 | DSS keys. 9 | """ 10 | 11 | import os 12 | from hashlib import sha1 13 | 14 | from Crypto.PublicKey import DSA 15 | 16 | import putil as util 17 | from putil import * 18 | 19 | import sshtype 20 | 21 | #class DssKey (PKey): 22 | class DssKey(): 23 | """ 24 | Representation of a DSS key which can be used to sign an verify SSH2 25 | data. 26 | """ 27 | 28 | def __init__(self, msg=None, data=None, filename=None, password=None, vals=None, file_obj=None): 29 | self.p = None 30 | self.q = None 31 | self.g = None 32 | self.y = None 33 | self.x = None 34 | if file_obj is not None: 35 | self._from_private_key(file_obj, password) 36 | return 37 | if filename is not None: 38 | self._from_private_key_file(filename, password) 39 | return 40 | if (msg is None) and (data is not None): 41 | msg = Message(data) 42 | if vals is not None: 43 | self.p, self.q, self.g, self.y = vals 44 | else: 45 | if msg is None: 46 | raise SSHException('Key object may not be empty') 47 | if msg.get_text() != 'ssh-dss': 48 | raise SSHException('Invalid key') 49 | self.p = msg.get_mpint() 50 | self.q = msg.get_mpint() 51 | self.g = msg.get_mpint() 52 | self.y = msg.get_mpint() 53 | self.size = util.bit_length(self.p) 54 | 55 | def asbytes(self): 56 | m = bytearray() 57 | m += sshtype.encodeString('ssh-dss') 58 | m += sshtype.encodeMpint(self.p) 59 | m += sshtype.encodeMpint(self.q) 60 | m += sshtype.encodeMpint(self.g) 61 | m += sshtype.encodeMpint(self.y) 62 | return m 63 | 64 | def __str__(self): 65 | return self.asbytes() 66 | 67 | def __hash__(self): 68 | h = hash(self.get_name()) 69 | h = h * 37 + hash(self.p) 70 | h = h * 37 + hash(self.q) 71 | h = h * 37 + hash(self.g) 72 | h = h * 37 + hash(self.y) 73 | # h might be a long by now... 74 | return hash(h) 75 | 76 | def get_name(self): 77 | return 'ssh-dss' 78 | 79 | def get_bits(self): 80 | return self.size 81 | 82 | def can_sign(self): 83 | return self.x is not None 84 | 85 | def sign_ssh_data(self, data): 86 | digest = sha1(data).digest() 87 | dss = DSA.construct((int(self.y), int(self.g), int(self.p), int(self.q), int(self.x))) 88 | # generate a suitable k 89 | qsize = len(util.deflate_long(self.q, 0)) 90 | while True: 91 | k = util.inflate_long(os.urandom(qsize), 1) 92 | if (k > 2) and (k < self.q): 93 | break 94 | r, s = dss.sign(util.inflate_long(digest, 1), k) 95 | m = bytearray() 96 | m += sshtype.encodeString("ssh-dss") 97 | # apparently, in rare cases, r or s may be shorter than 20 bytes! 
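        # (An ssh-dss signature blob is a fixed 40 bytes: r and s are each
        # written as 20-byte (160-bit) big-endian values, so shorter values
        # are left-padded with zero bytes below.)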
98 | rstr = util.deflate_long(r, 0) 99 | sstr = util.deflate_long(s, 0) 100 | if len(rstr) < 20: 101 | rstr = zero_byte * (20 - len(rstr)) + rstr 102 | if len(sstr) < 20: 103 | sstr = zero_byte * (20 - len(sstr)) + sstr 104 | m += sshtype.encodeBinary(rstr + sstr) 105 | return m 106 | 107 | def verify_ssh_sig(self, data, msg): 108 | if len(msg.asbytes()) == 40: 109 | # spies.com bug: signature has no header 110 | sig = msg.asbytes() 111 | else: 112 | kind = msg.get_text() 113 | if kind != 'ssh-dss': 114 | return 0 115 | sig = msg.get_binary() 116 | 117 | # pull out (r, s) which are NOT encoded as mpints 118 | sigR = util.inflate_long(sig[:20], 1) 119 | sigS = util.inflate_long(sig[20:], 1) 120 | sigM = util.inflate_long(sha1(data).digest(), 1) 121 | 122 | dss = DSA.construct((int(self.y), int(self.g), int(self.p), int(self.q))) 123 | return dss.verify(sigM, (sigR, sigS)) 124 | 125 | def _encode_key(self): 126 | if self.x is None: 127 | raise SSHException('Not enough key information') 128 | keylist = [0, self.p, self.q, self.g, self.y, self.x] 129 | try: 130 | b = BER() 131 | b.encode(keylist) 132 | except BERException: 133 | raise SSHException('Unable to create ber encoding of key') 134 | return b.asbytes() 135 | 136 | def write_private_key_file(self, filename, password=None): 137 | self._write_private_key_file('DSA', filename, self._encode_key(), password) 138 | 139 | def write_private_key(self, file_obj, password=None): 140 | self._write_private_key('DSA', file_obj, self._encode_key(), password) 141 | 142 | @staticmethod 143 | def generate(bits=1024, progress_func=None): 144 | """ 145 | Generate a new private DSS key. This factory function can be used to 146 | generate a new host key or authentication key. 147 | 148 | :param int bits: number of bits the generated key should be. 149 | :param function progress_func: 150 | an optional function to call at key points in key generation (used 151 | by ``pyCrypto.PublicKey``). 152 | :return: new `.DssKey` private key 153 | """ 154 | dsa = DSA.generate(bits, os.urandom, progress_func) 155 | key = DssKey(vals=(dsa.p, dsa.q, dsa.g, dsa.y)) 156 | key.x = dsa.x 157 | return key 158 | 159 | ### internals... 160 | 161 | def _from_private_key_file(self, filename, password): 162 | data = self._read_private_key_file('DSA', filename, password) 163 | self._decode_key(data) 164 | 165 | def _from_private_key(self, file_obj, password): 166 | data = self._read_private_key('DSA', file_obj, password) 167 | self._decode_key(data) 168 | 169 | def _decode_key(self, data): 170 | # private key file contains: 171 | # DSAPrivateKey = { version = 0, p, q, g, y, x } 172 | try: 173 | keylist = BER(data).decode() 174 | except BERException as e: 175 | raise SSHException('Unable to parse key file: ' + str(e)) 176 | if (type(keylist) is not list) or (len(keylist) < 6) or (keylist[0] != 0): 177 | raise SSHException('not a valid DSA private key file (bad ber encoding)') 178 | self.p = keylist[1] 179 | self.q = keylist[2] 180 | self.g = keylist[3] 181 | self.y = keylist[4] 182 | self.x = keylist[5] 183 | self.size = util.bit_length(self.p) 184 | -------------------------------------------------------------------------------- /enc.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: GPL v2. 3 | 4 | from hashlib import sha512 5 | import os 6 | 7 | from Crypto.Cipher import AES 8 | from Crypto.Hash import SHA512 9 | from Crypto.PublicKey import RSA 10 | 11 | # generate_ID outputs 512 bits. 
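# (generate_ID is a plain sha512 digest, so IDs are 64 bytes; consts.py derives
# NODE_ID_BITS / NODE_ID_BYTES from this value.)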
12 | ID_BITS = 512 13 | 14 | def generate_RSA(bits=4096): 15 | ''' 16 | Generate an RSA keypair with an exponent of 65537 in PEM format 17 | param: bits The key length in bits 18 | Return private key and public key 19 | ''' 20 | 21 | new_key = RSA.generate(bits, e=65537) 22 | #public_key = new_key.publickey().exportKey("PEM") 23 | #private_key = new_key.exportKey("PEM") 24 | 25 | #return private_key, public_key 26 | return new_key; 27 | 28 | def generate_ID(data): 29 | # return SHA512.new(data).digest() 30 | return sha512(data).digest() 31 | 32 | def _generate_ID(data): 33 | return SHA512.new(data) 34 | 35 | def _setup_data_cipher(data_key): 36 | assert len(data_key) == 64, len(data_key) 37 | 38 | key = data_key[:32] 39 | iv = data_key[32:48] 40 | 41 | return AES.new(key, AES.MODE_CBC, iv) 42 | 43 | def encrypt_data_block(data, data_key): 44 | cipher = _setup_data_cipher(data_key) 45 | 46 | data_len = len(data) 47 | remainder_len = data_len % 16 48 | main_len = data_len - remainder_len 49 | 50 | # Blasted pycrypto, does bytes(bytearray(...)) copy data? We Need a python 51 | # expert to go through the code and minimize or even hack fix all copying 52 | # of data. 53 | main_chunk = cipher.encrypt(bytes(data[:main_len])) 54 | if remainder_len: 55 | last_chunk = data[main_len:] + data_key[48:64 - remainder_len] 56 | remainder = cipher.encrypt(bytes(last_chunk)) 57 | else: 58 | remainder = None 59 | 60 | return main_chunk, remainder 61 | 62 | def decrypt_data_block(enc_data, data_key): 63 | cipher = _setup_data_cipher(data_key) 64 | 65 | data_len = len(enc_data) 66 | remainer_len = data_len % 16 67 | 68 | assert not remainer_len 69 | 70 | if type(enc_data) is bytearray: 71 | enc_data = bytes(enc_data) # Silly pycrypto. 72 | 73 | return cipher.decrypt(enc_data) 74 | -------------------------------------------------------------------------------- /exceptions/20150716-01.txt: -------------------------------------------------------------------------------- 1 | dev@dev-py:~/src/l/morphis> py mcc.py --address 127.0.0.1:7000 --create-dmail 2 | 2015-07-16 15:09:32,566 INFO [mcc:__main__:48] mcc running. 3 | 2015-07-16 15:09:32,567 INFO [mcc:__main__:77] mcc private key file found, loading. 4 | 2015-07-16 15:09:32,567 INFO [client:client:32] Connecting to addr=[127.0.0.1:7000]. 5 | 2015-07-16 15:09:32,573 INFO [mn1:mn1:382] P: Connection made with [('127.0.0.1', 7000)]. 6 | 2015-07-16 15:09:32,573 INFO [mn1:mn1:1124] X: Sending banner. 7 | 2015-07-16 15:09:32,573 INFO [mn1:mn1:802] P: Waiting for packet. 8 | 2015-07-16 15:09:32,573 INFO [client:client:48] Connected! 9 | 2015-07-16 15:09:32,573 INFO [mn1:mn1:812] P: Notified of packet. 10 | 2015-07-16 15:09:32,574 INFO [mn1:mn1:1131] X: Received banner [bytearray(b'SSH-2.0-mNet_0.0.1+cleartext')]. 11 | 2015-07-16 15:09:32,574 INFO [mn1:mn1:839] Writing packet_type=[20] (142 bytes) to address=[('127.0.0.1', 7000)]. 12 | 2015-07-16 15:09:32,574 INFO [mn1:mn1:882] Writing 142 bytes of data to connection (address=[('127.0.0.1', 7000)]). 13 | 2015-07-16 15:09:32,574 INFO [mn1:mn1:802] P: Waiting for packet. 14 | 2015-07-16 15:09:32,614 INFO [mn1:mn1:812] P: Notified of packet. 15 | 2015-07-16 15:09:32,614 INFO [mn1:mn1:1215] packet_type=[20]. 16 | 2015-07-16 15:09:32,614 INFO [mn1:mn1:1228] keyExchangeAlgorithms=[diffie-hellman-group14-sha1]. 17 | 2015-07-16 15:09:32,614 INFO [mn1:mn1:1236] Calling kex->run()... 18 | 2015-07-16 15:09:32,664 INFO [mn1:mn1:839] Writing packet_type=[30] (261 bytes) to address=[('127.0.0.1', 7000)]. 
19 | 2015-07-16 15:09:32,665 INFO [mn1:mn1:882] Writing 261 bytes of data to connection (address=[('127.0.0.1', 7000)]). 20 | 2015-07-16 15:09:32,665 INFO [mn1:mn1:802] P: Waiting for packet. 21 | 2015-07-16 15:09:33,003 INFO [mn1:mn1:812] P: Notified of packet. 22 | 2015-07-16 15:09:33,042 INFO [kexdhgroup14sha1:kexdhgroup14sha1:108] Verifying signature... 23 | 2015-07-16 15:09:33,045 INFO [mn1:mn1:296] Signature validated correctly! 24 | 2015-07-16 15:09:33,045 INFO [mn1:mn1:839] Writing packet_type=[21] (1 bytes) to address=[('127.0.0.1', 7000)]. 25 | 2015-07-16 15:09:33,045 INFO [mn1:mn1:882] Writing 1 bytes of data to connection (address=[('127.0.0.1', 7000)]). 26 | 2015-07-16 15:09:33,046 INFO [mn1:mn1:320] Initializing outbound encryption. 27 | 2015-07-16 15:09:33,046 INFO [mn1:mn1:339] Initializing inbound encryption. 28 | 2015-07-16 15:09:33,046 INFO [mn1:mn1:802] P: Waiting for packet. 29 | 2015-07-16 15:09:33,047 WARNING [mn1:mn1:1046] Illegal packet_length [2097037467] received. 30 | 2015-07-16 15:09:33,047 ERROR [mn1:mn1:926] _process_buffer() threw: 31 | Traceback (most recent call last): 32 | File "/home/dev/src/l/morphis/mn1.py", line 924, in process_buffer 33 | self._process_buffer() 34 | File "/home/dev/src/l/morphis/mn1.py", line 936, in _process_buffer 35 | self._process_encrypted_buffer() 36 | File "/home/dev/src/l/morphis/mn1.py", line 1047, in _process_encrypted_buffer 37 | raise SshException(errmsg) 38 | sshexception.SshException: Illegal packet_length [2097037467] received. 39 | 2015-07-16 15:09:33,048 INFO [mn1:mn1:700] X: Connection lost to [('127.0.0.1', 7000)]. 40 | 41 | -------------------------------------------------------------------------------- /favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/favicon.ico -------------------------------------------------------------------------------- /hashbench.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Written by Yu-Jie Lin 3 | # Public Domain 4 | 5 | from __future__ import print_function 6 | import hashlib 7 | import os 8 | import sys 9 | import timeit 10 | 11 | # https://pypi.python.org/pypi/pysha3/ 12 | # sha3_* are introduced in Python 3.4+ 13 | if sys.version_info < (3, 4): 14 | import sha3 15 | 16 | DATASIZE = 2**20 17 | REPEAT = 3 18 | NUMBER = 10 19 | HASHES = ( 20 | 'md5', 21 | 'sha1', 'sha224', 'sha256', 'sha384', 'sha512', 22 | 'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512', 23 | ) 24 | 25 | width = max(len(f) for f in HASHES) 26 | 27 | print(sys.version.replace('\n', '')) 28 | print() 29 | 30 | print('Generating random data %d...' % DATASIZE) 31 | data = os.urandom(DATASIZE) 32 | 33 | print('timeit for %d repeats, %d runs' % (REPEAT, NUMBER)) 34 | print() 35 | 36 | for f in HASHES: 37 | t = timeit.Timer( 38 | '%s(data).hexdigest()' % f, 39 | 'from __main__ import data; from hashlib import %s' % f 40 | ) 41 | result = t.repeat(repeat=REPEAT, number=NUMBER) 42 | average = sum(result) / len(result) 43 | print('{:{width}s}: {:9.6f} seconds @ {:9.6f} MiB/s'.format( 44 | f, 45 | average, 46 | DATASIZE / average / (2**20), 47 | width=width 48 | )) 49 | -------------------------------------------------------------------------------- /kex.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2003-2007 Robey Pointer . 
2 | # Copyright (C) 2014-2015 Sam Maloney. 3 | # License: LGPL. 4 | # 5 | # This file is based upon parts from paramiko (r85d5e95f9280aa236602b77e9f5bd0aa4d3c8fcd). 6 | 7 | import llog 8 | 9 | import os 10 | import asyncio 11 | import logging 12 | from hashlib import sha1 13 | 14 | from putil import * 15 | import putil 16 | 17 | import sshtype 18 | import packet as mnetpacket 19 | 20 | """ 21 | Standard SSH key exchange ("kex" if you wanna sound cool). Diffie-Hellman of 22 | 2048 bit key halves, using a known "p" prime and "g" generator. 23 | """ 24 | 25 | log = logging.getLogger(__name__) 26 | 27 | _MSG_KEXDH_INIT, _MSG_KEXDH_REPLY = range(30, 32) 28 | c_MSG_KEXDH_INIT, c_MSG_KEXDH_REPLY = [byte_chr(c) for c in range(30, 32)] 29 | 30 | b7fffffffffffffff = byte_chr(0x7f) + max_byte * 7 31 | b0000000000000000 = zero_byte * 8 32 | 33 | class KexGroup14(): 34 | 35 | # http://tools.ietf.org/html/rfc3526#section-3 36 | P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF 37 | G = 2 38 | 39 | name = 'diffie-hellman-group14-sha1' 40 | 41 | def __init__(self, protocol): 42 | self.protocol = protocol 43 | self.x = int(0) 44 | self.e = int(0) 45 | self.f = int(0) 46 | 47 | @asyncio.coroutine 48 | def do_kex(self): 49 | # This method can return False for client mode if the client, 50 | # after successfull authentication, is rejected by its id. 51 | 52 | self._generate_x() 53 | 54 | if log.isEnabledFor(logging.DEBUG): 55 | log.debug("x=[{}]".format(self.x)) 56 | 57 | if self.protocol.server_mode: 58 | # compute f = g^x mod p, but don't send it yet 59 | self.f = pow(self.G, self.x, self.P) 60 | # self.transport._expect_packet(_MSG_KEXDH_INIT) 61 | 62 | pkt = yield from self.protocol.read_packet() 63 | if not pkt: 64 | return False 65 | m = mnetpacket.SshKexdhInitMessage(pkt) 66 | if log.isEnabledFor(logging.DEBUG): 67 | log.debug("Client sent e=[{}].".format(m.e)) 68 | self._parse_kexdh_init(m) 69 | 70 | # pkt = yield from self.protocol.read_packet() 71 | # m = mnetpacket.SshNewKeysMessage(pkt) 72 | # log.debug("Received SSH_MSG_NEWKEYS.") 73 | 74 | m = mnetpacket.SshNewKeysMessage() 75 | m.encode() 76 | self.protocol.write_packet(m) 77 | return True 78 | 79 | # compute e = g^x mod p (where g=2), and send it 80 | self.e = pow(self.G, self.x, self.P) 81 | if log.isEnabledFor(logging.DEBUG): 82 | log.debug("Sending e=[{}].".format(self.e)) 83 | m = mnetpacket.SshKexdhInitMessage() 84 | m.e = self.e 85 | m.encode() 86 | self.protocol.write_packet(m) 87 | 88 | # self.transport._expect_packet(_MSG_KEXDH_REPLY) 89 | pkt = yield from self.protocol.read_packet() 90 | if not pkt: 91 | return False 92 | m = mnetpacket.SshKexdhReplyMessage(pkt) 93 | 94 | r = yield from self._parse_kexdh_reply(m) 95 | 96 | if not r: 97 | # Client is rejected for some reason by higher level. 
98 | return False 99 | 100 | m = mnetpacket.SshNewKeysMessage() 101 | m.encode() 102 | self.protocol.write_packet(m) 103 | 104 | # pkt = yield from self.protocol.read_packet() 105 | # m = mnetpacket.SshNewKeysMessage(pkt) 106 | # log.debug("Received SSH_MSG_NEWKEYS.") 107 | 108 | return True 109 | 110 | ### internals... 111 | 112 | def _generate_x(self): 113 | # generate an "x" (1 < x < q), where q is (p-1)/2. 114 | # p is a 256-byte (2048-bit) number, where the first ?? bits are 1. 115 | # therefore ?? q can be approximated as a 2^2047. we drop the subset of 116 | # potential x where the first 63 bits are 1, because some of those will be 117 | # larger than q (but this is a tiny tiny subset of potential x). 118 | while 1: 119 | x_bytes = os.urandom(256) 120 | x_bytes = byte_mask(x_bytes[0], 0x7f) + x_bytes[1:] 121 | if (x_bytes[:8] != b7fffffffffffffff and 122 | x_bytes[:8] != b0000000000000000): 123 | break 124 | self.x = putil.inflate_long(x_bytes) 125 | 126 | @asyncio.coroutine 127 | def _parse_kexdh_reply(self, m): 128 | # client mode 129 | host_key = m.host_key 130 | self.f = m.f 131 | if (self.f < 1) or (self.f > self.P - 1): 132 | raise SshException('Server kex "f" is out of range') 133 | sig = m.signature 134 | K = pow(self.f, self.x, self.P) 135 | if log.isEnabledFor(logging.DEBUG): 136 | log.debug("K=[{}].".format(K)) 137 | # okay, build up the hash H of (V_C || V_S || I_C || I_S || K_S || e || f || K) 138 | hm = bytearray() 139 | hm += sshtype.encodeString(self.protocol.local_banner) 140 | hm += sshtype.encodeString(self.protocol.remote_banner) 141 | hm += sshtype.encodeBinary(self.protocol.local_kex_init_message) 142 | hm += sshtype.encodeBinary(self.protocol.remote_kex_init_message) 143 | hm += sshtype.encodeBinary(host_key) 144 | hm += sshtype.encodeMpint(self.e) 145 | hm += sshtype.encodeMpint(self.f) 146 | hm += sshtype.encodeMpint(K) 147 | 148 | H = sha1(hm).digest() 149 | 150 | self.protocol.set_K_H(K, H) 151 | 152 | log.info("Verifying signature...") 153 | r = yield from self.protocol.verify_server_key(host_key, sig) 154 | return r 155 | # self.transport._activate_outbound() 156 | 157 | def _parse_kexdh_init(self, m): 158 | # server mode 159 | self.e = m.e 160 | if (self.e < 1) or (self.e > self.P - 1): 161 | raise SshException('Client kex "e" is out of range') 162 | K = pow(self.e, self.x, self.P) 163 | if log.isEnabledFor(logging.DEBUG): 164 | log.debug("K=[{}].".format(K)) 165 | key = self.protocol.server_key.asbytes() 166 | # okay, build up the hash H of (V_C || V_S || I_C || I_S || K_S || e || f || K) 167 | hm = bytearray() 168 | hm += sshtype.encodeString(self.protocol.remote_banner) 169 | hm += sshtype.encodeString(self.protocol.local_banner) 170 | hm += sshtype.encodeBinary(self.protocol.remote_kex_init_message) 171 | hm += sshtype.encodeBinary(self.protocol.local_kex_init_message) 172 | hm += sshtype.encodeBinary(key) 173 | hm += sshtype.encodeMpint(self.e) 174 | hm += sshtype.encodeMpint(self.f) 175 | hm += sshtype.encodeMpint(K) 176 | 177 | H = sha1(hm).digest() 178 | 179 | self.protocol.set_K_H(K, H) 180 | 181 | # sign it 182 | sig = self.protocol.server_key.sign_ssh_data(H) 183 | # send reply 184 | m = mnetpacket.SshKexdhReplyMessage() 185 | m.host_key = key 186 | m.f = self.f 187 | m.signature = sig 188 | m.encode() 189 | 190 | self.protocol.write_packet(m) 191 | # self.transport._activate_outbound() 192 | -------------------------------------------------------------------------------- /kexdhgroup14sha1.py: 
-------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: LGPL 3 | 4 | import llog 5 | 6 | import asyncio 7 | import logging 8 | from hashlib import sha1 9 | 10 | import dhgroup14 11 | from sshexception import SshException 12 | import sshtype 13 | import packet as mnp 14 | 15 | log = logging.getLogger(__name__) 16 | 17 | class KexDhGroup14Sha1(object): 18 | name = "diffie-hellman-group14-sha1" 19 | 20 | def __init__(self, protocol): 21 | self.dh = dhgroup14.DhGroup14() 22 | 23 | self.protocol = protocol 24 | 25 | @asyncio.coroutine 26 | def run(self): 27 | dh = self.dh 28 | p = self.protocol 29 | server_mode = p.server_mode 30 | 31 | dh.generate_x() 32 | if log.isEnabledFor(logging.DEBUG): 33 | log.debug("x=[{}]".format(dh.x)) 34 | 35 | dh.generate_e() 36 | if log.isEnabledFor(logging.DEBUG): 37 | log.debug("e=[{}]".format(dh.e)) 38 | 39 | if server_mode: 40 | pkt = yield from p.read_packet() 41 | if not pkt: 42 | return False 43 | 44 | m = mnp.SshKexdhInitMessage(pkt) 45 | if log.isEnabledFor(logging.DEBUG): 46 | log.debug("Client sent e=[{}].".format(m.e)) 47 | 48 | self._parse_kexdh_init(m) 49 | 50 | m = mnp.SshNewKeysMessage() 51 | m.encode() 52 | p.write_packet(m) 53 | 54 | return True 55 | 56 | # Client mode: 57 | m = mnp.SshKexdhInitMessage() 58 | m.e = dh.e 59 | m.encode() 60 | p.write_packet(m) 61 | 62 | pkt = yield from p.read_packet() 63 | if not pkt: 64 | return False 65 | 66 | m = mnp.SshKexdhReplyMessage(pkt) 67 | 68 | r = yield from self._parse_kexdh_reply(m) 69 | 70 | if not r: 71 | # Client signature failed OR the client sig was valid but the id 72 | # now verified is not wanted/allowed for connection. 73 | return False 74 | 75 | m = mnp.SshNewKeysMessage() 76 | m.encode() 77 | p.write_packet(m) 78 | 79 | # Signal successful authentication. 80 | return True 81 | 82 | @asyncio.coroutine 83 | def _parse_kexdh_reply(self, m): 84 | # The client runs this function. 85 | host_key = m.host_key 86 | 87 | server_f = self.dh.f = m.f 88 | 89 | if (server_f < 1) or (server_f > self.dh.P - 1): 90 | raise SshException('Server kex "f" is out of range') 91 | 92 | K = self.dh.calculate_k() 93 | 94 | if log.isEnabledFor(logging.DEBUG): 95 | log.debug("K=[{}].".format(K)) 96 | 97 | # H = (V_C || V_S || I_C || I_S || K_S || e || f || K). 98 | hm = bytearray() 99 | hm += sshtype.encodeString(self.protocol.local_banner) 100 | hm += sshtype.encodeString(self.protocol.remote_banner) 101 | hm += sshtype.encodeBinary(self.protocol.local_kex_init_message) 102 | hm += sshtype.encodeBinary(self.protocol.remote_kex_init_message) 103 | hm += sshtype.encodeBinary(host_key) 104 | hm += sshtype.encodeMpint(self.dh.e) 105 | hm += sshtype.encodeMpint(server_f) 106 | hm += sshtype.encodeMpint(K) 107 | 108 | H = sha1(hm).digest() 109 | 110 | self.protocol.set_K_H(K, H) 111 | 112 | log.info("Verifying signature...") 113 | r = yield from self.protocol.verify_server_key(host_key, m.signature) 114 | return r 115 | 116 | def _parse_kexdh_init(self, m): 117 | # The server runs this function. 118 | client_e = self.dh.f = m.e 119 | 120 | if (client_e < 1) or (client_e > self.dh.P - 1): 121 | raise SshException("Client kex 'e' is out of range") 122 | 123 | K = self.dh.calculate_k() 124 | 125 | if log.isEnabledFor(logging.DEBUG): 126 | log.debug("K=[{}].".format(K)) 127 | 128 | key = self.protocol.server_key.asbytes() 129 | 130 | # H = (V_C || V_S || I_C || I_S || K_S || e || f || K). 
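        # (Naming note: on the server side self.dh.e holds the server's own
        # public value g^x mod p and is sent back as the reply's "f" field
        # below, while the client's value m.e was stored into self.dh.f above
        # so that calculate_k() computes K = e^x mod p.)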
131 | hm = bytearray() 132 | hm += sshtype.encodeString(self.protocol.remote_banner) 133 | hm += sshtype.encodeString(self.protocol.local_banner) 134 | hm += sshtype.encodeBinary(self.protocol.remote_kex_init_message) 135 | hm += sshtype.encodeBinary(self.protocol.local_kex_init_message) 136 | hm += sshtype.encodeBinary(key) 137 | hm += sshtype.encodeMpint(client_e) 138 | hm += sshtype.encodeMpint(self.dh.e) 139 | hm += sshtype.encodeMpint(K) 140 | 141 | H = sha1(hm).digest() 142 | 143 | self.protocol.set_K_H(K, H) 144 | 145 | # Sign it. 146 | sig = self.protocol.server_key.sign_ssh_data(H) 147 | 148 | # Send reply. 149 | m = mnp.SshKexdhReplyMessage() 150 | m.host_key = key 151 | m.f = self.dh.e 152 | m.signature = sig 153 | m.encode() 154 | 155 | self.protocol.write_packet(m) 156 | -------------------------------------------------------------------------------- /llog.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: GPL v2. 3 | 4 | import logging 5 | import logging.config 6 | import traceback 7 | import sys 8 | 9 | logging_initialized = False; 10 | 11 | def init(): 12 | global logging_initialized 13 | # logging.basicConfig(level=logging.DEBUG, format="%(asctime)s %(levelname)s [%(module)s:%(lineno)d] %(message)s") 14 | 15 | if (logging_initialized): 16 | return 17 | 18 | # print("Reading in logging.ini.") 19 | config_file = "logging.ini" 20 | if len(sys.argv) >= 3: 21 | if sys.argv[1] == "-l": 22 | config_file = sys.argv[2] 23 | 24 | logging.config.fileConfig(config_file) 25 | logger = logging.getLogger(__name__) 26 | # logger.info("Logger initialized.") 27 | logging_initialized = True 28 | 29 | def handle_exception(log, info): 30 | log.fatal("{} threw [{}]: {}".format(info, sys.exc_info()[0], str(sys.exc_info()[0]))) 31 | traceback.print_tb(sys.exc_info()[2]) 32 | 33 | if not logging_initialized: 34 | init() 35 | -------------------------------------------------------------------------------- /logging-debug.ini: -------------------------------------------------------------------------------- 1 | [loggers] 2 | keys=root,asyncio,brute,chord,dhgroup14,dmail,kex,kexdhgroup14sha1,mcc,mn1,mutil,peer,rsakey,shell,sqlalchemy.engine,sshtype 3 | 4 | [handlers] 5 | keys=consoleHandler 6 | 7 | [formatters] 8 | keys=simpleFormatter 9 | 10 | [logger_root] 11 | level=DEBUG 12 | handlers=consoleHandler 13 | 14 | [logger_asyncio] 15 | level=INFO 16 | handlers= 17 | propagate=1 18 | qualname=asyncio 19 | 20 | [logger_brute] 21 | level=DEBUG 22 | handlers= 23 | propagate=1 24 | qualname=brute 25 | 26 | [logger_chord] 27 | level=INFO 28 | handlers= 29 | propagate=1 30 | qualname=chord 31 | 32 | [logger_dhgroup14] 33 | level=INFO 34 | handlers= 35 | propagate=1 36 | qualname=dhgroup14 37 | 38 | [logger_dmail] 39 | level=DEBUG 40 | handlers= 41 | propagate=1 42 | qualname=dmail 43 | 44 | [logger_kex] 45 | level=INFO 46 | handlers= 47 | propagate=1 48 | qualname=kex 49 | 50 | [logger_kexdhgroup14sha1] 51 | level=INFO 52 | handlers= 53 | propagate=1 54 | qualname=kexdhgroup14sha1 55 | 56 | [logger_mcc] 57 | level=DEBUG 58 | handlers= 59 | propagate=1 60 | qualname=mcc 61 | 62 | [logger_mn1] 63 | level=INFO 64 | handlers= 65 | propagate=1 66 | qualname=mn1 67 | 68 | [logger_mutil] 69 | level=INFO 70 | handlers= 71 | propagate=1 72 | qualname=mutil 73 | 74 | [logger_peer] 75 | level=INFO 76 | handlers= 77 | propagate=1 78 | qualname=peer 79 | 80 | [logger_rsakey] 81 | level=INFO 82 | handlers= 83 | propagate=1 84 | 
qualname=rsakey 85 | 86 | [logger_shell] 87 | level=INFO 88 | handlers= 89 | propagate=1 90 | qualname=shell 91 | 92 | [logger_sqlalchemy.engine] 93 | level=INFO 94 | handlers= 95 | propagate=1 96 | qualname=sqlalchemy.engine 97 | 98 | [logger_sshtype] 99 | level=INFO 100 | handlers= 101 | propagate=1 102 | qualname=sshtype 103 | 104 | [handler_consoleHandler] 105 | class=StreamHandler 106 | level=DEBUG 107 | formatter=simpleFormatter 108 | args=(sys.stdout,) 109 | 110 | [formatter_simpleFormatter] 111 | format=%(asctime)s %(levelname)s [%(module)s:%(name)s:%(lineno)d] %(message)s 112 | datefmt= 113 | -------------------------------------------------------------------------------- /logging-info.ini: -------------------------------------------------------------------------------- 1 | [loggers] 2 | keys=root,asyncio,brute,chord,dhgroup14,kex,kexdhgroup14sha1,mn1,mutil,peer,rsakey,shell,sqlalchemy.engine,sshtype 3 | 4 | [handlers] 5 | keys=consoleHandler 6 | 7 | [formatters] 8 | keys=simpleFormatter 9 | 10 | [logger_root] 11 | level=INFO 12 | handlers=consoleHandler 13 | 14 | [logger_asyncio] 15 | level=WARNING 16 | handlers= 17 | propagate=1 18 | qualname=asyncio 19 | 20 | [logger_brute] 21 | level=INFO 22 | handlers= 23 | propagate=1 24 | qualname=brute 25 | 26 | [logger_chord] 27 | level=INFO 28 | handlers= 29 | propagate=1 30 | qualname=chord 31 | 32 | [logger_dhgroup14] 33 | level=WARNING 34 | handlers= 35 | propagate=1 36 | qualname=dhgroup14 37 | 38 | [logger_kex] 39 | level=WARNING 40 | handlers= 41 | propagate=1 42 | qualname=kex 43 | 44 | [logger_kexdhgroup14sha1] 45 | level=WARNING 46 | handlers= 47 | propagate=1 48 | qualname=kexdhgroup14sha1 49 | 50 | [logger_mn1] 51 | level=INFO 52 | handlers= 53 | propagate=1 54 | qualname=mn1 55 | 56 | [logger_mutil] 57 | level=INFO 58 | handlers= 59 | propagate=1 60 | qualname=mutil 61 | 62 | [logger_peer] 63 | level=INFO 64 | handlers= 65 | propagate=1 66 | qualname=peer 67 | 68 | [logger_rsakey] 69 | level=WARNING 70 | handlers= 71 | propagate=1 72 | qualname=rsakey 73 | 74 | [logger_shell] 75 | level=INFO 76 | handlers= 77 | propagate=1 78 | qualname=shell 79 | 80 | [logger_sqlalchemy.engine] 81 | level=WARNING 82 | handlers= 83 | propagate=1 84 | qualname=sqlalchemy.engine 85 | 86 | [logger_sshtype] 87 | level=WARNING 88 | handlers= 89 | propagate=1 90 | qualname=sshtype 91 | 92 | [handler_consoleHandler] 93 | class=StreamHandler 94 | level=INFO 95 | formatter=simpleFormatter 96 | args=(sys.stdout,) 97 | 98 | [formatter_simpleFormatter] 99 | format=%(asctime)s %(levelname)s [%(module)s:%(name)s:%(lineno)d] %(message)s 100 | datefmt= 101 | -------------------------------------------------------------------------------- /logging-ms.ini: -------------------------------------------------------------------------------- 1 | [loggers] 2 | keys=root,maalstroom,maalstroom.dispatcher,maalstroom.dmail,dmail,client_engine 3 | 4 | [handlers] 5 | keys=consoleHandler 6 | 7 | [formatters] 8 | keys=simpleFormatter 9 | 10 | [logger_root] 11 | level=WARNING 12 | handlers=consoleHandler 13 | 14 | [logger_maalstroom] 15 | level=DEBUG 16 | handlers= 17 | propagate=1 18 | qualname=maalstroom 19 | 20 | [logger_maalstroom.dispatcher] 21 | level=DEBUG 22 | handlers= 23 | propagate=1 24 | qualname=maalstroom.dispatcher 25 | 26 | [logger_maalstroom.dmail] 27 | level=DEBUG 28 | handlers= 29 | propagate=1 30 | qualname=maalstroom.dmail 31 | 32 | [logger_dmail] 33 | level=DEBUG 34 | handlers= 35 | propagate=1 36 | qualname=dmail 37 | 38 | [logger_client_engine] 39 | 
level=DEBUG 40 | handlers= 41 | propagate=1 42 | qualname=client_engine 43 | 44 | [handler_consoleHandler] 45 | class=StreamHandler 46 | level=DEBUG 47 | formatter=simpleFormatter 48 | args=(sys.stdout,) 49 | 50 | [formatter_simpleFormatter] 51 | format=%(asctime)s %(levelname)s [%(module)s:%(name)s:%(lineno)d] %(message)s 52 | datefmt= 53 | -------------------------------------------------------------------------------- /logging-prod.ini: -------------------------------------------------------------------------------- 1 | [loggers] 2 | keys=root,shell 3 | 4 | [handlers] 5 | keys=fileHandler 6 | 7 | [formatters] 8 | keys=simpleFormatter 9 | 10 | [logger_root] 11 | level=WARNING 12 | handlers=fileHandler 13 | 14 | [logger_shell] 15 | level=INFO 16 | handlers= 17 | propagate=1 18 | qualname=shell 19 | 20 | [handler_fileHandler] 21 | class=FileHandler 22 | level=INFO 23 | formatter=simpleFormatter 24 | args=('morphis.log', 'a', 'utf8', 0) 25 | 26 | [formatter_simpleFormatter] 27 | format=%(asctime)s %(levelname)s [%(module)s:%(name)s:%(lineno)d] %(message)s 28 | datefmt= 29 | -------------------------------------------------------------------------------- /logging-warn.ini: -------------------------------------------------------------------------------- 1 | [loggers] 2 | keys=root 3 | 4 | [handlers] 5 | keys=consoleHandler 6 | 7 | [formatters] 8 | keys=simpleFormatter 9 | 10 | [logger_root] 11 | level=WARNING 12 | handlers=consoleHandler 13 | 14 | [handler_consoleHandler] 15 | class=StreamHandler 16 | level=WARNING 17 | formatter=simpleFormatter 18 | args=(sys.stdout,) 19 | 20 | [formatter_simpleFormatter] 21 | format=%(asctime)s %(levelname)s [%(module)s:%(name)s:%(lineno)d] %(message)s 22 | datefmt= 23 | -------------------------------------------------------------------------------- /logging.ini: -------------------------------------------------------------------------------- 1 | [loggers] 2 | keys=root 3 | 4 | [handlers] 5 | keys=consoleHandler 6 | 7 | [formatters] 8 | keys=simpleFormatter 9 | 10 | [logger_root] 11 | level=WARNING 12 | handlers=consoleHandler 13 | 14 | [handler_consoleHandler] 15 | class=StreamHandler 16 | level=WARNING 17 | formatter=simpleFormatter 18 | args=(sys.stdout,) 19 | 20 | [formatter_simpleFormatter] 21 | format=%(asctime)s %(levelname)s [%(module)s:%(name)s:%(lineno)d] %(message)s 22 | datefmt= 23 | -------------------------------------------------------------------------------- /lots-bulk.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | IDX=0; 3 | while :; do 4 | rm -f morphis.log; 5 | python3 node.py -l logging-prod.ini --bind 127.0.0.2:6${IDX}00 --addpeer 127.0.0.2:7002 --nn 2${IDX}00 --nodecount 100 --dbpoolsize 2 --cleartexttransport --dumptasksonexit --parallellaunch --dburl postgresql://m1:2d8VhzVIovxZy@pg1/m1 --dm & 6 | # sleep 1 7 | read 8 | mv morphis.log morphis-${IDX}.log 9 | 10 | IDX=$((IDX+1)) 11 | if [ $IDX == 8 ]; then 12 | break 13 | fi 14 | done 15 | -------------------------------------------------------------------------------- /lots.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | rm -f morphis-*.log 4 | 5 | IDX=0; 6 | while :; do 7 | rm -f morphis.log; 8 | python3 node.py -l logging-prod.ini --bind 127.0.0.1:6000 --addpeer 127.0.0.1:7002 --nn 2000 --nodecount 10 --dbpoolsize 10 --dumptasksonexit --parallellaunch --dburl postgresql://m1:2d8VhzVIovxZy@pg1/m1 --dm --reinitds --instanceoffset ${IDX} & 9 | sleep 1 10 | mv morphis.log 
morphis-${IDX}.log 11 | 12 | IDX=$((IDX+10)) 13 | if [ $IDX == 200 ]; then 14 | break 15 | fi 16 | done 17 | -------------------------------------------------------------------------------- /maalstroom/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: GPL v2. 3 | 4 | import llog 5 | 6 | import asyncio 7 | import functools 8 | from http.server import BaseHTTPRequestHandler, HTTPServer 9 | import importlib 10 | import logging 11 | import queue 12 | from socketserver import ThreadingMixIn 13 | import threading 14 | 15 | import client_engine as cengine 16 | import enc 17 | import mbase32 18 | import maalstroom.templates as templates 19 | import maalstroom.dispatcher as dispatcher 20 | import maalstroom.dmail 21 | 22 | log = logging.getLogger(__name__) 23 | 24 | host = "" 25 | port = 4251 26 | 27 | node = None 28 | server = None 29 | client_engine = None 30 | 31 | dmail_enabled = True 32 | upload_enabled = True 33 | 34 | proxy_url = None 35 | 36 | _request_lock = threading.Lock() 37 | _concurrent_request_count = 0 38 | 39 | req_dict = [] 40 | 41 | class MaalstroomHandler(BaseHTTPRequestHandler): 42 | def __init__(self, a, b, c): 43 | global node 44 | self.loop = node.loop 45 | self.protocol_version = "HTTP/1.1" 46 | self.node = node 47 | 48 | self.maalstroom_plugin_used = False 49 | self.maalstroom_url_prefix = None 50 | self.maalstroom_url_prefix_str = None 51 | 52 | self.proxy_used = False 53 | 54 | # self._inq = queue.Queue() 55 | self._inq = asyncio.Queue(loop=self.loop) 56 | self._outq = queue.Queue() 57 | 58 | self._abort_event = threading.Event() 59 | 60 | self._dispatcher = self._create_dispatcher() 61 | 62 | self._maalstroom_http_url_prefix = "http://{}/" 63 | self._maalstroom_morphis_url_prefix = "morphis://" 64 | 65 | super().__init__(a, b, c) 66 | 67 | def do_GET(self): 68 | self._prepare_for_request() 69 | 70 | if log.isEnabledFor(logging.DEBUG): 71 | log.debug("Handler do_GET(): path=[{}].".format(self.path)) 72 | req_dict.append(self) 73 | 74 | self.loop.call_soon_threadsafe(\ 75 | asyncio.async,\ 76 | self._dispatcher.do_GET(self._get_rpath())) 77 | 78 | self._write_response() 79 | 80 | if self.node.web_devel and self.headers["Cache-Control"] == "no-cache": 81 | global _concurrent_request_count 82 | with _request_lock: 83 | _concurrent_request_count -= 1 84 | 85 | if log.isEnabledFor(logging.DEBUG): 86 | req_dict.remove(self) 87 | log.debug("Done do_GET(): path=[{}], reqs=[{}]."\ 88 | .format(self.path, len(req_dict))) 89 | 90 | def do_POST(self): 91 | self._prepare_for_request() 92 | 93 | self.loop.call_soon_threadsafe(\ 94 | asyncio.async,\ 95 | self._dispatcher.do_POST(self._get_rpath())) 96 | 97 | log.debug("Reading request.") 98 | self._read_request() 99 | 100 | log.debug("Writing response.") 101 | self._write_response() 102 | 103 | if self.node.web_devel and self.headers["Cache-Control"] == "no-cache": 104 | global _concurrent_request_count 105 | with _request_lock: 106 | _concurrent_request_count -= 1 107 | 108 | if log.isEnabledFor(logging.DEBUG): 109 | log.debug("Done do_POST(): path=[{}].".format(self.path)) 110 | 111 | def log_message(self, mformat, *args): 112 | if log.isEnabledFor(logging.INFO): 113 | log.info("{}: {}".format(self.address_string(), args)) 114 | 115 | def _create_dispatcher(self): 116 | return dispatcher.MaalstroomDispatcher(\ 117 | self, self._inq, self._outq, self._abort_event) 118 | 119 | def _prepare_for_request(self): 120 | 
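        # Per-request setup: reset the abort flag, detect proxy/plugin use via
        # the X-Forwarded-For / X-Maalstroom-Plugin headers, pick the URL
        # prefix used to rewrite links, and (in web_devel mode with a no-cache
        # request) hot-reload the maalstroom modules before building a fresh
        # dispatcher.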
self._abort_event.clear() 121 | 122 | global proxy_url 123 | 124 | if self.headers["X-Forwarded-For"]: 125 | self.proxy_used = True 126 | 127 | if self.headers["X-Maalstroom-Plugin"]: 128 | self.maalstroom_plugin_used = True 129 | self.maalstroom_url_prefix_str =\ 130 | self._maalstroom_morphis_url_prefix 131 | self.maalstroom_url_prefix =\ 132 | self.maalstroom_url_prefix_str.encode() 133 | else: 134 | global port 135 | if self.proxy_used and proxy_url: 136 | host = proxy_url 137 | else: 138 | host = self.headers["Host"] 139 | 140 | if log.isEnabledFor(logging.DEBUG): 141 | log.debug(\ 142 | "No plugin used for request, rewriting URLs using"\ 143 | " host=[{}]."\ 144 | .format(host)) 145 | 146 | # Host header includes port. 147 | self.maalstroom_url_prefix_str =\ 148 | self._maalstroom_http_url_prefix.format(host) 149 | self.maalstroom_url_prefix =\ 150 | self.maalstroom_url_prefix_str.encode() 151 | 152 | if self.node.web_devel and self.headers["Cache-Control"] == "no-cache": 153 | global _concurrent_request_count 154 | with _request_lock: 155 | _concurrent_request_count += 1 156 | if _concurrent_request_count == 1: 157 | log.warning(\ 158 | "Reloading maalstroom packages due to web_dev mode.") 159 | try: 160 | importlib.reload(maalstroom.templates) 161 | importlib.reload(maalstroom.dispatcher) 162 | importlib.reload(maalstroom.dmail) 163 | except KeyboardInterrupt: 164 | raise 165 | except Exception as e: 166 | log.exception(e) 167 | 168 | self._dispatcher = self._create_dispatcher() 169 | 170 | def _get_rpath(self): 171 | rpath = self.path[1:] 172 | 173 | if self.maalstroom_plugin_used and len(rpath) == 1 and rpath[0] == '/': 174 | rpath = "" 175 | 176 | return rpath 177 | 178 | def _read_request(self): 179 | inq = self._inq 180 | loop = self.loop 181 | 182 | rlen = int(self.headers["Content-Length"]) 183 | 184 | while rlen: 185 | data = self.rfile.read(min(rlen, 65536)) 186 | self.loop.call_soon_threadsafe(\ 187 | functools.partial(\ 188 | asyncio.async,\ 189 | inq.put(data),\ 190 | loop=self.loop)) 191 | rlen -= len(data) 192 | 193 | self.loop.call_soon_threadsafe(\ 194 | functools.partial(\ 195 | asyncio.async,\ 196 | inq.put(None),\ 197 | loop=self.loop)) 198 | 199 | def _write_response(self): 200 | outq = self._outq 201 | 202 | while True: 203 | resp = outq.get() 204 | 205 | if not resp: 206 | # Python bug as far as I can tell. Randomly outq has a None 207 | # in it in front of what we really added. What we really added 208 | # is still there, just has a spurious None in front; so we 209 | # ignore it. 210 | log.debug("Got spurious None from queue; ignoring.") 211 | continue 212 | elif resp is Done: 213 | log.debug("Got Done from queue; finished.") 214 | break 215 | elif resp is Flush: 216 | self.wfile.flush() 217 | continue 218 | elif resp is Close: 219 | log.debug("Got Close from queue; closing connection.") 220 | self.close_connection = True 221 | break 222 | 223 | try: 224 | self.wfile.write(resp) 225 | except ConnectionError as e: 226 | log.warning("Browser broke connection: {}".format(e)) 227 | self._abort_event.set() 228 | # Replace _outq, as dispatcher may still write to it. 
229 | self._outq = queue.Queue() 230 | break 231 | 232 | class ThreadedHTTPServer(ThreadingMixIn, HTTPServer): 233 | daemon_threads = True 234 | 235 | class Flush(object): 236 | pass 237 | 238 | class Close(object): 239 | pass 240 | 241 | class Done(object): 242 | pass 243 | 244 | @asyncio.coroutine 245 | def start_maalstroom_server(the_node): 246 | global node, server 247 | 248 | if node: 249 | #TODO: Handle this better, but for now this is how we only start one 250 | # maalstroom process even when running in multi-instance test mode. 251 | return 252 | 253 | node = the_node 254 | 255 | log.info("Starting Maalstroom server instance.") 256 | 257 | server = ThreadedHTTPServer((host, port), MaalstroomHandler) 258 | 259 | def threadcall(): 260 | try: 261 | server.serve_forever() 262 | except KeyboardInterrupt: 263 | pass 264 | 265 | server.server_close() 266 | 267 | node.loop.run_in_executor(None, threadcall) 268 | 269 | def set_client_engine(ce): 270 | global _client_engine 271 | 272 | _client_engine = ce 273 | 274 | @asyncio.coroutine 275 | def get_client_engine(): 276 | global node, client_engine, _client_engine 277 | 278 | if client_engine: 279 | return client_engine 280 | 281 | yield from node.ready.wait() 282 | 283 | yield from _client_engine.start() 284 | 285 | client_engine = _client_engine 286 | 287 | return client_engine 288 | 289 | def shutdown(): 290 | if not server: 291 | return 292 | 293 | log.info("Shutting down Maalstroom server instance.") 294 | server.server_close() 295 | log.info("Maalstroom server instance stopped.") 296 | -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/add_address.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/add_address.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/addressbook.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/addressbook.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/addressbook_blue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/addressbook_blue.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/addressbook_grey.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/addressbook_grey.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/bullet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/bullet.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/bullet_white.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/bullet_white.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/compose_blue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/compose_blue.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/compose_white.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/compose_white.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/divider.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/divider.gif -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/draft.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/draft.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/draft_white.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/draft_white.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/gradient.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/gradient.jpg -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/inbox.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/inbox.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/inbox_new.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/inbox_new.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/inbox_white.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/inbox_white.png -------------------------------------------------------------------------------- 
/maalstroom/resources/images/dmail/list_addresses.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/list_addresses.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/logo.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/mail_icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/mail_icon.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/mail_icon2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/mail_icon2.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/read.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/read.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/refresh.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/refresh.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/reply_blue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/reply_blue.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/reply_dk.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/reply_dk.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/reply_icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/reply_icon.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/reply_icon_new.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/reply_icon_new.png 
-------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/reply_purp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/reply_purp.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/reply_white.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/reply_white.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/save.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/save.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/save_blue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/save_blue.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/save_dk.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/save_dk.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/save_purp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/save_purp.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/save_white.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/save_white.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/send.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/send.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/send_blue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/send_blue.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/send_dk.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/send_dk.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/send_purp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/send_purp.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/send_white.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/send_white.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/sent.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/sent.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/sent_white.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/sent_white.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/settings.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/settings.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/stripe.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/stripe.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/tags.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/tags.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/tags_white.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/tags_white.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/trash.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/trash.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/trash_blue.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/trash_blue.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/trash_dk.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/trash_dk.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/trash_icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/trash_icon.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/trash_icon_new.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/trash_icon_new.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/trash_white.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/trash_white.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/trash_white_in.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/trash_white_in.png -------------------------------------------------------------------------------- /maalstroom/resources/images/dmail/unread.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitcoinembassy/morphis/abaf01af43ad30938d1542b956ce3c9ca1ee9dcd/maalstroom/resources/images/dmail/unread.png -------------------------------------------------------------------------------- /maalstroom/templates/dmail/address_config.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 10 |

Settings for Dmail Address: 

${DMAIL_ADDRESS}
11 |

NOTE: Difficulty is the anti-spam setting that determines how much work it takes to send you a Dmail. Its effect is exponential (work=2^difficulty). Do not set it too low -- I would recommend no lower than 20. If 2^difficulty is lower than the number of nodes in the network, then the network will likely have trouble finding your Dmails.

12 |

NOTE: At difficulty=20, sending you a Dmail will take ~30 seconds. A setting of 21 will take 1 minute. At 22, it is 2 minutes, 23 = 4 minutes, 24 = 8 minutes, 25 = 16 minutes, 26 = 32 minutes, 27 = 64 minutes, 28 = 128 minutes, 29 = 256 minutes, 30 = 512 minutes, etc. Every 10 you increase it by is 1024x more CPU work for the sender. The maximum value of difficulty=512 would take longer than the lifespan of the universe. If you want to receive Dmails anytime soon, don't increase this value much. Also, there is no point in increasing it unless you are going to publish your address and thus expect to be a target of spam. These times are a rough guide and depend on the processing power of the sending computer.
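(Illustrative aside, not part of the template: the doubling described above can be sketched in a few lines of Python. The function name and the ~30-second baseline at difficulty 20 are assumptions taken straight from the note, not anything defined by MORPHiS itself; actual times depend on the sender's CPU.)

    def estimated_send_seconds(difficulty, base_difficulty=20, base_seconds=30.0):
        # Each +1 of difficulty doubles the expected work (work = 2**difficulty),
        # so time scales by 2**(difficulty - base_difficulty) from the baseline.
        return base_seconds * (2 ** (difficulty - base_difficulty))

    for d in (20, 21, 25, 30):
        print("difficulty={}: ~{:.0f} seconds".format(d, estimated_send_seconds(d)))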

13 |

NOTE: If you raise this setting, you will no longer see any new Dmails that were sent to you while you had the lower setting, because they won't include enough work to be found. Dmails already stored locally (Inbox) won't be affected. A future version of Maalstroom will solve this; for now it doesn't, because that would take considerably more UI to let you manage it properly, and I want to release ASAP :).

14 |
15 | 16 | 17 |

18 | 19 | 20 |

21 | 22 |
23 |

NOTE: Do not give out these values! The site private key controls your Dmail address. The DH secret is the key to decrypting your Dmail.

24 |
25 |

26 | 27 | 28 |

29 |

30 | 31 | 32 |

33 |

34 | 35 | 36 |

37 |
38 | 39 | -------------------------------------------------------------------------------- /maalstroom/templates/dmail/address_list.html: -------------------------------------------------------------------------------- 1 | 2 | 3 |

Your Dmail Addresses:

4 | {address_list} 5 | 6 | -------------------------------------------------------------------------------- /maalstroom/templates/dmail/address_list_row.html: -------------------------------------------------------------------------------- 1 |
2 | 3 | 4 | [set default] 5 | 6 | [{autoscan_link_text}] 7 | 8 | 9 | -------------------------------------------------------------------------------- /maalstroom/templates/dmail/aside.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 |

MAILBOXES FOR {addr}

8 | 9 | 16 | 17 |
18 |
    19 | 20 | 21 |
  • 22 |
    23 | 24 | {tag_rows} 25 |
26 | 27 | 28 | -------------------------------------------------------------------------------- /maalstroom/templates/dmail/compose.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | New Message 8 |
9 | 14 |


15 |
16 |
17 |
    18 | 19 | 20 | 21 | 24 |
  • 25 | 26 |    27 |
  • 28 |
  • 29 | 30 | 31 |
  • 32 |
  • 33 | 34 |
  • 35 |
  • 36 | 37 | 38 |
  • 39 |
40 |


41 |
42 | 43 | -------------------------------------------------------------------------------- /maalstroom/templates/dmail/create_address.html: -------------------------------------------------------------------------------- 1 | 2 | 3 |
4 | 5 |

Dmail Address Generation

6 |

To create a new Dmail address, simply click the Create button below. There is no need to change any of the values from their defaults.

7 |

8 | 9 | 10 |

11 |

NOTE: Difficulty is the anti-spam setting that determines how much work it takes to send you a Dmail. Its effect is exponential (work=2^difficulty). Do not set it too low -- I would recommend no lower than 20. If 2^difficulty is lower than the number of nodes in the network, then the network will likely have trouble finding your Dmails.

12 |

13 | 14 |  (Optional) 15 |

16 |

NOTE: Each letter in the prefix makes the address take 32x longer to generate. A three-letter prefix takes almost an hour on my test machine.
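(Illustrative aside, not part of the template: with the 32-character mbase32 alphabet, an n-letter prefix is expected to require roughly 32**n candidate keys, which is where the 32x-per-letter figure comes from. The helper below is a hypothetical sketch, not code from the repository.)

    def expected_prefix_attempts(prefix_len):
        # mbase32 uses a 32-character alphabet, so matching each additional
        # prefix character multiplies the expected number of tries by 32.
        return 32 ** prefix_len

    for n in (1, 2, 3):
        print("{}-letter prefix: ~{} keys tried on average".format(
            n, expected_prefix_attempts(n)))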

17 | 18 |
19 | 20 | -------------------------------------------------------------------------------- /maalstroom/templates/dmail/logo.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 |
8 | 9 |
[{version}]
{connections}
10 |
11 | 12 | 13 | -------------------------------------------------------------------------------- /maalstroom/templates/dmail/msg_list.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | {tag}  for 8 | 9 | 10 |

{addr}


11 | 12 | 13 | -------------------------------------------------------------------------------- /maalstroom/templates/dmail/msg_list_list_end.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /maalstroom/templates/dmail/msg_list_list_row.html: -------------------------------------------------------------------------------- 1 | 2 |
3 | 4 |
{sender}
5 |
{timestamp}
6 |
7 |
8 | 9 | -------------------------------------------------------------------------------- /maalstroom/templates/dmail/msg_list_list_start.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | {unread_check} 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /maalstroom/templates/dmail/nav.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /maalstroom/templates/dmail/new_mail.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
{unread_count}
9 | 10 | 11 | -------------------------------------------------------------------------------- /maalstroom/templates/dmail/page_wrapper.html: -------------------------------------------------------------------------------- 1 | 2 | MORPHiS Maalstroom Dmail Client 3 | 4 | 5 | 6 |
7 |
8 |
9 |
10 | 11 |
12 |
13 |
14 | 19 | 20 | 21 | -------------------------------------------------------------------------------- /maalstroom/templates/dmail/read.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | View Message 8 |
9 | 13 |


14 |
15 |
16 |
SUBJECT
{addr_heading}
DATE
Subject:
17 |
18 |
From: {sender}
19 |
20 |
21 |
22 |
To: {dest_addr}
23 |
24 |
Date: {date}
25 | 26 | 27 | 43 | 44 | 45 | -------------------------------------------------------------------------------- /maalstroom/templates/main/combined_upload.html: -------------------------------------------------------------------------------- 1 | MORPHiS Maalstroom Upload 2 | 13 | 14 | 15 |

NOTE: Bookmark this page to save your private key in the bookmark!

16 |

Select the file to upload below:

17 |
18 | 19 | 20 | 21 |

22 | 23 |
24 | 25 |
26 | 27 | 28 | 29 |
30 |
31 |
32 |

33 | switch to updateable key mode 34 |

35 |

36 | switch to static key mode 37 |

38 |
<- MORPHiS UI
39 | 40 | -------------------------------------------------------------------------------- /mbase32.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: Public Domain. 3 | 4 | charset = "13456789abcdefghijkmnopqrstuwxyz" 5 | 6 | def encode(val): 7 | result = "" 8 | 9 | if not val: 10 | return result 11 | 12 | assert type(val) in (bytes, bytearray), type(val) 13 | 14 | r = 0 15 | rbits = 0 16 | 17 | for char in val: 18 | r = (r << 8) | char 19 | rbits += 8 20 | 21 | while rbits >= 5: 22 | rbits -= 5 23 | idx = r >> rbits 24 | r &= (1 << rbits) - 1 25 | 26 | result += charset[idx] 27 | 28 | if rbits: 29 | result += charset[r << (5 - rbits)] 30 | 31 | return result 32 | 33 | def decode(val, padded=True): 34 | result = bytearray() 35 | 36 | if not val: 37 | return result 38 | 39 | a = 0 40 | abits = 0 41 | 42 | for char in val: 43 | a = (a << 5) | charset.index(char) 44 | abits += 5 45 | 46 | if abits >= 8: 47 | abits -= 8 48 | result.append(a >> abits) 49 | a &= (1 << abits) -1 50 | 51 | if not padded and abits: 52 | result.append(a << (8 - abits)) 53 | 54 | return result 55 | -------------------------------------------------------------------------------- /mcc.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: GPL v2. 3 | 4 | import llog 5 | 6 | import argparse 7 | import asyncio 8 | import logging 9 | import os 10 | from sys import stdin 11 | 12 | import base58 13 | import brute 14 | import client 15 | import db 16 | import dmail 17 | import enc 18 | import mbase32 19 | import mutil 20 | import rsakey 21 | import sshtype 22 | 23 | log = logging.getLogger(__name__) 24 | 25 | def main(): 26 | global loop 27 | 28 | loop = asyncio.get_event_loop() 29 | 30 | asyncio.async(_main(), loop=loop) 31 | 32 | try: 33 | loop.run_forever() 34 | except KeyboardInterrupt: 35 | log.info("Got KeyboardInterrupt; shutting down.") 36 | except Exception: 37 | log.exception("loop.run_forever()") 38 | 39 | log.info("Shutdown.") 40 | 41 | @asyncio.coroutine 42 | def _main(): 43 | global loop 44 | 45 | try: 46 | yield from __main() 47 | except BaseException as e: 48 | if type(e) is not SystemExit: 49 | log.exception("__main()") 50 | loop.stop() 51 | 52 | @asyncio.coroutine 53 | def __main(): 54 | global loop 55 | 56 | log.info("mcc running.") 57 | 58 | parser = argparse.ArgumentParser() 59 | parser.add_argument(\ 60 | "--address",\ 61 | help="The address of the Morphis node to connect to.",\ 62 | default="127.0.0.1:4250") 63 | parser.add_argument(\ 64 | "--create-dmail",\ 65 | help="Generate and upload a new dmail site.",\ 66 | action="store_true") 67 | parser.add_argument(\ 68 | "--dburl",\ 69 | help="Specify the database url to use.") 70 | parser.add_argument(\ 71 | "--fetch-dmail", 72 | help="Fetch dmail for specified key_id.") 73 | parser.add_argument(\ 74 | "-i",\ 75 | help="Read file as stdin.") 76 | parser.add_argument("--nn", type=int,\ 77 | help="Node instance number.") 78 | parser.add_argument(\ 79 | "--prefix",\ 80 | help="Specify the prefix for various things (currently --create-dmail"\ 81 | ").") 82 | parser.add_argument(\ 83 | "--scan-dmail",\ 84 | help="Scan the network for available dmails.") 85 | parser.add_argument(\ 86 | "--send-dmail",\ 87 | help="Send stdin as a dmail with the specified subject. 
The"\ 88 | " sender and recipients may be specified at the beginning of the"\ 89 | " data as with email headers: 'from: ' and 'to: '.") 90 | parser.add_argument(\ 91 | "--stat",\ 92 | help="Report node status.",\ 93 | action="store_true") 94 | parser.add_argument("-l", dest="logconf",\ 95 | help="Specify alternate logging.ini [IF SPECIFIED, THIS MUST BE THE"\ 96 | " FIRST PARAMETER!].") 97 | parser.add_argument(\ 98 | "--dmail-target",\ 99 | help="Specify the dmail target to validate dmail against.") 100 | parser.add_argument(\ 101 | "-x",\ 102 | help="Specify the x (Diffie-Hellman private secret) to use.") 103 | 104 | args = parser.parse_args() 105 | 106 | # Load or generate client mcc key. 107 | key_filename = "data/mcc_key-rsa.mnk" 108 | if os.path.exists(key_filename): 109 | log.info("mcc private key file found, loading.") 110 | client_key = rsakey.RsaKey(filename=key_filename) 111 | else: 112 | log.info("mcc private key file missing, generating.") 113 | client_key = rsakey.RsaKey.generate(bits=4096) 114 | client_key.write_private_key_file(key_filename) 115 | 116 | # Connect a Morphis Client (lightweight Node) instance. 117 | mc = client.Client(loop, client_key=client_key, address=args.address) 118 | r = yield from mc.connect() 119 | 120 | if not r: 121 | log.warning("Connection failed; exiting.") 122 | loop.stop() 123 | return 124 | 125 | dbase = init_db(args) 126 | de = dmail.DmailEngine(mc, dbase) 127 | 128 | log.info("Processing command requests...") 129 | 130 | if args.stat: 131 | r = yield from mc.send_command("stat") 132 | print(r.decode("UTF-8"), end='') 133 | 134 | if args.create_dmail: 135 | log.info("Creating and uploading dmail site.") 136 | 137 | privkey, data_key, dms, storing_nodes =\ 138 | yield from de.generate_dmail_address(args.prefix) 139 | 140 | print("privkey: {}".format(base58.encode(privkey._encode_key()))) 141 | print("x: {}".format(base58.encode(sshtype.encodeMpint(dms.dh.x)))) 142 | print("dmail address: {}".format(mbase32.encode(data_key))) 143 | print("storing_nodes=[{}]."\ 144 | .format(base58.encode(privkey._encode_key()))) 145 | 146 | if args.send_dmail: 147 | log.info("Sending dmail.") 148 | 149 | if args.i: 150 | with open(args.i, "rb") as fh: 151 | dmail_data = fh.read().decode() 152 | else: 153 | dmail_data = stdin.read() 154 | 155 | if log.isEnabledFor(logging.DEBUG): 156 | log.debug("dmail_data=[{}].".format(dmail_data)) 157 | 158 | yield from de.send_dmail_text(args.send_dmail, dmail_data) 159 | 160 | if args.scan_dmail: 161 | log.info("Scanning dmail address.") 162 | 163 | addr, sig_bits = mutil.decode_key(args.scan_dmail) 164 | 165 | def key_callback(key): 166 | print("dmail key: [{}].".format(mbase32.encode(key))) 167 | 168 | yield from de.scan_dmail_address(\ 169 | addr, sig_bits, key_callback=key_callback) 170 | 171 | if args.fetch_dmail: 172 | log.info("Fetching dmail for key=[{}].".format(args.fetch_dmail)) 173 | 174 | key = mbase32.decode(args.fetch_dmail) 175 | 176 | if args.x: 177 | l, x_int = sshtype.parseMpint(base58.decode(args.x)) 178 | else: 179 | x_int = None 180 | 181 | dmail_target = args.dmail_target 182 | 183 | dm, valid_sig =\ 184 | yield from de.fetch_dmail(key, x_int, None, dmail_target) 185 | 186 | if not dm: 187 | raise Exception("No dmail found.") 188 | 189 | if not x_int: 190 | print("Encrypted dmail data=[\n{}].".format(mutil.hex_dump(dm))) 191 | else: 192 | print("Subject: {}\n".format(dm.subject)) 193 | 194 | if dm.sender_pubkey: 195 | print("From: {}"\ 196 | .format(mbase32.encode(enc.generate_ID(dm.sender_pubkey)))) 
197 | 198 | i = 0 199 | for part in dm.parts: 200 | print("DmailPart[{}]:\n mime-type=[{}]\n data=[{}]\n"\ 201 | .format(i, part.mime_type, part.data)) 202 | i += 1 203 | 204 | log.info("Disconnecting.") 205 | 206 | yield from mc.disconnect() 207 | 208 | loop.stop() 209 | 210 | def init_db(args): 211 | if args.dburl: 212 | if args.nn: 213 | dbase = db.Db(loop, args.dburl, 'n' + str(args.nn)) 214 | else: 215 | dbase = db.Db(loop, args.dburl) 216 | else: 217 | if args.nn: 218 | dbase = db.Db(loop, "sqlite:///data/morphis-{}.sqlite"\ 219 | .format(args.nn)) 220 | else: 221 | dbase = db.Db(loop, "sqlite:///data/morphis.sqlite") 222 | 223 | dbase.init_engine() 224 | 225 | return dbase 226 | 227 | if __name__ == "__main__": 228 | main() 229 | -------------------------------------------------------------------------------- /mutil.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: GPL v2. 3 | 4 | import llog 5 | 6 | from bisect import bisect_left 7 | from datetime import datetime, tzinfo, timedelta 8 | import logging 9 | import time 10 | 11 | import consts 12 | import mbase32 13 | 14 | log = logging.getLogger(__name__) 15 | 16 | accept_chars = b" !\"#$%&`()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_'abcdefghijklmnopqrstuvwxyz{|}~" 17 | accept_chars = sorted(accept_chars) 18 | 19 | width = 16 20 | 21 | def hex_dump(data, offset = 0, length = None): 22 | assert type(data) in (bytes, bytearray), type(data) 23 | 24 | output = bytearray() 25 | col1 = bytearray() 26 | col2 = bytearray() 27 | 28 | if length == None: 29 | length = len(data) 30 | 31 | line = 0 32 | i = offset 33 | while i < length: 34 | j = 0 35 | while j < width and i < length: 36 | val = data[i] 37 | col1 += format(val, "02x").encode() 38 | 39 | si = bisect_left(accept_chars, data[i]) 40 | if si != len(accept_chars) and accept_chars[si] == data[i]: 41 | col2.append(data[i]) 42 | else: 43 | col2 += b'.' 44 | 45 | if j % 2 == 1: 46 | col1 += b' ' 47 | 48 | j += 1 49 | i += 1 50 | 51 | output += format(line * width, "#06x").encode() 52 | output += b" " 53 | line += 1 54 | while len(col1) < (width*5/2): 55 | col1 += b' ' 56 | 57 | output += col1 58 | output += b' ' 59 | output += col2 60 | output += b'\n' 61 | col1.clear() 62 | col2.clear() 63 | 64 | return output.decode() 65 | 66 | bc_masks = [0x2, 0xC, 0xF0] 67 | bc_shifts = [1, 2, 4] 68 | 69 | def log_base2_8bit(val): 70 | r = 0 71 | 72 | for i in range(2, -1, -1): 73 | if val & bc_masks[i]: 74 | val >>= bc_shifts[i] 75 | r |= bc_shifts[i] 76 | 77 | return r 78 | 79 | def hex_string(val): 80 | if not val: 81 | return None 82 | 83 | buf = "" 84 | 85 | for b in val: 86 | if b <= 0x0F: 87 | buf += '0' 88 | buf += hex(b)[2:] 89 | 90 | return buf 91 | 92 | #TODO: Maybe move this to db.py and make it use cursor if in PostgreSQL mode. 93 | def page_query(query, page_size=10): 94 | "Batch fetch an SQLAlchemy query." 
95 | 96 | offset = 0 97 | 98 | while True: 99 | page = query.limit(page_size).offset(offset).all() 100 | 101 | for row in page: 102 | yield row 103 | 104 | if len(page) < page_size: 105 | break 106 | 107 | offset += page_size 108 | 109 | def decode_key(encoded): 110 | assert consts.NODE_ID_BITS == 512 111 | assert type(encoded) is str, type(encoded) 112 | 113 | significant_bits = None 114 | 115 | kl = len(encoded) 116 | 117 | if kl == 128: 118 | data_key = bytes.fromhex(encoded) 119 | elif kl in (102, 103): 120 | data_key = bytes(mbase32.decode(encoded)) 121 | if len(data_key) < consts.NODE_ID_BYTES: 122 | significant_bits = 5 * kl 123 | else: 124 | data_key = mbase32.decode(encoded, False) 125 | significant_bits = 5 * kl 126 | 127 | return data_key, significant_bits 128 | 129 | def calc_raw_distance(data1, data2): 130 | "Calculates the XOR distance, return is absolute value." 131 | 132 | assert type(data1) in (bytes, bytearray)\ 133 | and type(data2) in (bytes, bytearray) 134 | 135 | buf = bytearray() 136 | 137 | for i in range(len(data1)): 138 | buf.append(data1[i] ^ data2[i]) 139 | 140 | return buf 141 | 142 | def calc_log_distance(nid, pid): 143 | "Returns: distance, direction." 144 | " distance is in log base2." 145 | 146 | id_size = len(nid) 147 | assert id_size >= len(pid) 148 | 149 | if log.isEnabledFor(logging.DEBUG): 150 | log.debug("pid=\n[{}], nid=\n[{}].".format(hex_dump(pid),\ 151 | hex_dump(nid))) 152 | 153 | dist = 0 154 | direction = 0 155 | 156 | for i in range(id_size): 157 | if pid[i] != nid[i]: 158 | direction = 1 if pid[i] > nid[i] else -1 159 | 160 | xv = pid[i] ^ nid[i] 161 | xv = log_base2_8bit(xv) + 1 162 | 163 | # (byte * 8) + bit. 164 | dist = ((id_size - 1 - i) << 3) + xv 165 | 166 | break 167 | 168 | return dist, direction 169 | 170 | ZERO_TIMEDELTA = timedelta(0) 171 | class UtcTzInfo(tzinfo): 172 | def utcoffset(self, dt): 173 | return ZERO_TIMEDELTA 174 | 175 | def tzname(self, dt): 176 | return "UTC" 177 | 178 | def dst(self, dt): 179 | return ZERO_TIMEDELTA 180 | 181 | UTC_TZINFO = UtcTzInfo() 182 | 183 | def utc_datetime(): 184 | return datetime.now(UTC_TZINFO) 185 | 186 | ISO_FMT_UTC = "%Y-%m-%dT%H:%M:%S.%fZ" 187 | ISO_FMT = "%Y-%m-%dT%H:%M:%S.%f" 188 | 189 | def parse_iso_datetime(date_str): 190 | if date_str.endswith('Z'): 191 | return datetime.strptime(date_str, ISO_FMT_UTC)\ 192 | .replace(tzinfo=UTC_TZINFO) 193 | else: 194 | return datetime.strptime(date_str, ISO_FMT) 195 | 196 | def format_iso_datetime(adatetime): 197 | if adatetime.tzinfo is UTC_TZINFO: 198 | return adatetime.strftime(ISO_FMT_UTC) 199 | else: 200 | return adatetime.strftime(ISO_FMT) 201 | 202 | iso_fmt_human_no_ms = "%Y-%m-%d %H:%M:%S" 203 | 204 | def get_utc_offset_seconds(): 205 | return time.altzone if time.daylight else time.timezone 206 | 207 | def format_human_no_ms_datetime(datetime, convert_local=True, assume_gmt=False): 208 | if convert_local and (assume_gmt or datetime.tzinfo is UTC_TZINFO): 209 | datetime = datetime - timedelta(seconds=get_utc_offset_seconds()) 210 | return datetime.strftime(iso_fmt_human_no_ms) 211 | -------------------------------------------------------------------------------- /old/n1.py: -------------------------------------------------------------------------------- 1 | import enc 2 | 3 | def debug(message): 4 | print ("debug: %s" % message) 5 | 6 | def main(): 7 | debug("Entered main().") 8 | 9 | private_key = enc.generate_RSA(4096) 10 | public_key = private_key.publickey(); 11 | 12 | debug("Private Key=[%s], Public Key=[%s]." 
% (str(private_key.exportKey("PEM")), str(public_key.exportKey("PEM")))) 13 | 14 | id = enc.generate_ID(public_key.exportKey("DER")) 15 | 16 | debug("id=[%s]." % id.hexdigest()) 17 | 18 | main() 19 | -------------------------------------------------------------------------------- /old/node.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import docopt 3 | import sys 4 | import threading 5 | import time 6 | import traceback 7 | import zmq 8 | 9 | from zhelpers import zpipe 10 | 11 | import enc 12 | 13 | private_key = None; 14 | public_key = None; 15 | 16 | in_pipe = None; 17 | out_pipe = None; 18 | 19 | def handleException(info, e): 20 | debug("FATAL: {} threw [{}]: {}".format(info, sys.exc_info()[0], str(e))) 21 | traceback.print_tb(sys.exc_info()[2]) 22 | 23 | def debug(message): 24 | print(message) 25 | 26 | def engageNet(loop, context, in_pipe, out_pipe, config): 27 | try: 28 | _engageNet(loop, context, in_pipe, out_pipe, config) 29 | except BaseException as e: 30 | handleException("_engageNet", e) 31 | debug("Exiting due to FATAL error.") 32 | sys.exit(1) 33 | 34 | def handleServerRequest(): 35 | global in_pipe, out_pipe 36 | 37 | meta = in_pipe[0].recv_pyobj() 38 | message = in_pipe[0].recv_multipart() 39 | # in_pipe[0].send(b"ok") 40 | 41 | print("S(a): Received request [{}] from [{}].".format(message, meta["address"])) 42 | 43 | cmd = message.pop(0) 44 | 45 | if cmd == b"pub_key_req": 46 | remote_pkey = message.pop(0) 47 | 48 | out_pipe[1].send_multipart([b"sresp", meta["address"], b"pub_key_response", public_key.exportKey("PEM")]) 49 | 50 | def handleClientResponse(): 51 | global in_pipe 52 | 53 | meta = in_pipe[0].recv_pyobj() 54 | message = in_pipe[0].recv_multipart() 55 | 56 | print("C(a): Received response [{}] from [{}].".format(message, meta["ssockid"])) 57 | 58 | def _engageNet(loop, context, in_pipe, out_pipe, config): 59 | global public_key 60 | 61 | listen_address = "tcp://*:{}".format(config["port"]) 62 | 63 | print("S: listen_address=[{}].".format(listen_address)) 64 | 65 | ssocket = context.socket(zmq.ROUTER) 66 | ssocket.identity = b"asdf"; 67 | ssocket.bind(listen_address) 68 | 69 | print("S: Listening on {}.".format(listen_address)) 70 | 71 | poller = zmq.Poller() 72 | poller.register(in_pipe, zmq.POLLIN) 73 | poller.register(ssocket, zmq.POLLIN) 74 | 75 | i = 0 76 | clientid = 0 77 | 78 | # fd, id 79 | csockids = {} 80 | # id, socket 81 | csockets = {} 82 | 83 | while True: 84 | try: 85 | ready_socks = poller.poll() 86 | except BaseException as e: 87 | handleException("poller.poll()", e) 88 | debug("Exiting due to FATAL error.") 89 | sys.exit(1) 90 | 91 | print("WOKEN ready_socks=[{}].".format(ready_socks)) 92 | 93 | for sockt in ready_socks: 94 | sock = sockt[0] 95 | csockid = csockids.get(sock.fd) 96 | 97 | if csockid != None: 98 | message = sock.recv_multipart() 99 | 100 | print("C: Received response [{}] from [{}].".format(message, csockid)) 101 | 102 | meta = {"type": "clientResponse", 103 | "csockid": csockid} 104 | 105 | in_pipe.send_pyobj(meta, zmq.SNDMORE) 106 | in_pipe.send_multipart(message) 107 | 108 | loop.call_soon_threadsafe(handleClientResponse) 109 | 110 | # sock.send(b"Hello") 111 | # print("C: Sent request to [{}]!".format(csockid)) 112 | elif sock == ssocket: 113 | # Wait for next request from client. 
114 | address = ssocket.recv(); 115 | empty = ssocket.recv(); 116 | 117 | message = ssocket.recv_multipart() 118 | 119 | print("S: Received request [{}] from [{}].".format(message, address)) 120 | 121 | meta = {"type": "serverRequest", 122 | "address": address} 123 | 124 | out_pipe.send_pyobj(meta, zmq.SNDMORE) 125 | out_pipe.send_multipart(message) 126 | 127 | loop.call_soon_threadsafe(handleServerRequest) 128 | 129 | # ssocket.send_multipart([address, b'', b"World"]) 130 | # print("S: Sent response #{} to [{}]!".format(i, address)) 131 | # i = i + 1 132 | elif sock == in_pipe: 133 | message = in_pipe.recv_multipart() 134 | print("XS: Received [{}] message.".format(message[0])) 135 | 136 | # in_pipe.send(b"ok") 137 | 138 | cmd = message.pop(0) 139 | if cmd == b"conn": 140 | addr = message.pop(0) 141 | #addr = message[1]; 142 | print("C: Connecting to [{}].".format(addr)) 143 | 144 | csocket = context.socket(zmq.REQ) 145 | csocket.connect(addr) 146 | 147 | csockids[csocket.fd] = clientid 148 | csockets[clientid] = csocket 149 | clientid += 1 150 | poller.register(csocket, zmq.POLLIN) 151 | 152 | csocket.send(b"pub_key_req", zmq.SNDMORE) 153 | csocket.send(public_key.exportKey("PEM")) 154 | print("C: Sent request!") 155 | elif cmd == b"sresp": 156 | addr = message.pop(0) 157 | 158 | print("S: Sending response [{}] to [{}].".format(message, addr)) 159 | 160 | ssocket.send(addr, zmq.SNDMORE) 161 | ssocket.send(b"", zmq.SNDMORE) 162 | ssocket.send_multipart(message) 163 | elif cmd == b"shutdown": 164 | return 165 | 166 | def main(): 167 | global in_pipe, out_pipe, public_key, private_key 168 | 169 | try: 170 | docopt_config = "Usage: my_program.py [--port=PORT] [--connect=PORT]" 171 | arguments = docopt.docopt(docopt_config) 172 | port = arguments["--port"] 173 | if port == None: 174 | port = 5555 175 | 176 | connect_dest = arguments["--connect"] 177 | except docopt.DocoptExit as e: 178 | print(e.message) 179 | return 180 | 181 | context = zmq.Context() 182 | 183 | in_pipe = zpipe(context) 184 | out_pipe = zpipe(context) 185 | 186 | loop = asyncio.get_event_loop() 187 | 188 | net_config = {"port": port} 189 | 190 | # Generate Node Keys & Id. 191 | private_key = enc.generate_RSA(4096) 192 | public_key = private_key.publickey(); 193 | 194 | # debug("Private Key=[%s], Public Key=[%s]." % (str(private_key.exportKey("PEM")), str(public_key.exportKey("PEM")))) 195 | 196 | node_id = enc.generate_ID(public_key.exportKey("DER")) 197 | 198 | debug("node_id=[%s]." % node_id.hexdigest()) 199 | 200 | # Start Net Engine. 201 | zmq_future = loop.run_in_executor(None, engageNet, loop, context, out_pipe[0], in_pipe[1], net_config) 202 | # thread = threading.Thread(target=engageNet, args=(loop, context, out_pipe[0], in_pipe[1], net_config)) 203 | # thread.daemon = True 204 | # thread.start() 205 | 206 | # Connect for testing. 
207 | if connect_dest != None: 208 | out_pipe[1].send_multipart([b"conn", "tcp://{}".format(connect_dest).encode()]) 209 | # out_pipe[0].send_multipart([b"conn", "tcp://localhost:{}".format(port).encode()]) 210 | 211 | try: 212 | loop.run_until_complete(zmq_future) 213 | except BaseException as e: 214 | handleException("loop.run_until_complete()", e) 215 | out_pipe[1].send_multipart([b"shutdown"]) 216 | loop.stop() 217 | loop.close() 218 | zmq_future.cancel() 219 | sys.exit(1) 220 | 221 | main() 222 | -------------------------------------------------------------------------------- /old/server_test.py: -------------------------------------------------------------------------------- 1 | import time 2 | import zmq 3 | 4 | listen_address = "tcp://*:5555" 5 | 6 | context = zmq.Context() 7 | ssocket = context.socket(zmq.ROUTER) 8 | ssocket.bind(listen_address) 9 | 10 | print("Listening on %s." % listen_address) 11 | 12 | while True: 13 | # Wait for next request from client. 14 | address, empty, message = ssocket.recv_multipart() 15 | 16 | # message = ssocket.recv() 17 | 18 | print("Received request: %s" % message) 19 | 20 | # simulate work 21 | # time.sleep(1); 22 | 23 | ssocket.send_multipart([address, b'', b"World"]) 24 | print("Sent response!") 25 | 26 | -------------------------------------------------------------------------------- /peer.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: GPL v2. 3 | 4 | import llog 5 | 6 | import asyncio 7 | import logging 8 | 9 | import packet as mnpacket 10 | import rsakey 11 | import mn1 12 | import mutil 13 | import enc 14 | 15 | log = logging.getLogger(__name__) 16 | 17 | class Peer(): 18 | def __init__(self, engine, dbpeer=None): 19 | self.engine = engine 20 | 21 | self.version = None 22 | self.full_node = False 23 | 24 | self.dbid = None 25 | self.distance = None 26 | self.direction = None 27 | 28 | self.address = None 29 | 30 | self.node_key = None 31 | self.node_id = None 32 | self.channel_handler = ChannelHandler(self) 33 | self.connection_handler = ConnectionHandler(self) 34 | 35 | self.connection_coop_lock = asyncio.Lock() 36 | 37 | if dbpeer: 38 | self.dbid = dbpeer.id 39 | if dbpeer.pubkey: 40 | self.node_key = rsakey.RsaKey(dbpeer.pubkey) 41 | self.node_id = dbpeer.node_id 42 | self.distance = dbpeer.distance 43 | self.direction = dbpeer.direction 44 | 45 | self._protocol = None 46 | 47 | @property 48 | def protocol(self): 49 | return self._protocol 50 | 51 | @protocol.setter 52 | def protocol(self, value): 53 | self._protocol = value 54 | 55 | self._protocol.channel_handler = self.channel_handler 56 | self._protocol.connection_handler = self.connection_handler 57 | 58 | def ready(self): 59 | return self._protocol.status is mn1.Status.ready 60 | 61 | def update_distance(self): 62 | self.distance, self.direction =\ 63 | mutil.calc_log_distance(self.engine.node_id, self.node_id) 64 | 65 | def _peer_authenticated(self, key): 66 | self.node_key = key 67 | 68 | if not self.node_id: 69 | self.node_id = enc.generate_ID(self.node_key.asbytes()) 70 | 71 | if not self.distance: 72 | self.update_distance() 73 | 74 | class ConnectionHandler(): 75 | def __init__(self, peer): 76 | self.peer = peer 77 | 78 | def connection_made(self, protocol): 79 | if log.isEnabledFor(logging.INFO): 80 | log.info("connection_made(): Peer (dbid=[{}], address=[{}],"\ 81 | " protocol.address=[{}])."\ 82 | .format(self.peer.dbid, self.peer.address, protocol.address)) 83 | 84 | if not 
self.peer.engine.node.tormode: 85 | self.peer.address = "{}:{}".format(\ 86 | self.peer.protocol.address[0],\ 87 | self.peer.protocol.address[1]) 88 | 89 | self.peer.engine.connection_made(self.peer) 90 | 91 | def error_recieved(self, protocol, exc): 92 | pass 93 | 94 | def connection_lost(self, protocol, exc): 95 | if log.isEnabledFor(logging.INFO): 96 | log.info("connection_lost(): Peer (dbid=[{}], address=[{}],"\ 97 | " protocol.address=[{}])."\ 98 | .format(self.peer.dbid, self.peer.address, protocol.address)) 99 | 100 | self.peer.engine.connection_lost(self.peer, exc) 101 | 102 | @asyncio.coroutine 103 | def peer_disconnected(self, protocol, msg): 104 | self.peer.engine.peer_disconnected(self.peer, msg) 105 | 106 | @asyncio.coroutine 107 | def peer_authenticated(self, protocol): 108 | if protocol.server_mode: 109 | self.peer._peer_authenticated(self.peer.protocol.client_key) 110 | else: 111 | self.peer._peer_authenticated(self.peer.protocol.server_key) 112 | 113 | r = yield from self.peer.engine.peer_authenticated(self.peer) 114 | 115 | return r 116 | 117 | @asyncio.coroutine 118 | def connection_ready(self, protocol): 119 | log.info("Connection to Peer (dbid=[{}], address=[{}],"\ 120 | " protocol.address=[{}], server_mode=[{}]) is now ready."\ 121 | .format(self.peer.dbid, self.peer.address, protocol.address,\ 122 | protocol.server_mode)) 123 | 124 | yield from self.peer.engine.connection_ready(self.peer) 125 | 126 | class ChannelHandler(): 127 | def __init__(self, peer): 128 | self.peer = peer 129 | 130 | @asyncio.coroutine 131 | def request_open_channel(self, protocol, message): 132 | r = yield from\ 133 | self.peer.engine.request_open_channel(self.peer, message) 134 | return r 135 | 136 | @asyncio.coroutine 137 | def channel_open_failed(self, protocol, msg): 138 | r = yield from\ 139 | self.peer.engine.channel_open_failed(self.peer, msg) 140 | return r 141 | 142 | @asyncio.coroutine 143 | def channel_opened(self, protocol, channel_type, local_cid, queue): 144 | yield from\ 145 | self.peer.engine.channel_opened(\ 146 | self.peer, channel_type, local_cid, queue) 147 | 148 | @asyncio.coroutine 149 | def channel_closed(self, protocol, local_cid): 150 | yield from self.peer.engine.channel_closed(self.peer, local_cid) 151 | 152 | @asyncio.coroutine 153 | def channel_request(self, protocol, msg): 154 | yield from self.peer.engine.channel_request(self.peer, msg) 155 | 156 | @asyncio.coroutine 157 | def channel_data(self, protocol, local_cid, data): 158 | if log.isEnabledFor(logging.DEBUG): 159 | log.debug("Received data: local_cid=[{}], value=[\n{}]."\ 160 | .format(local_cid, mutil.hex_dump(data))) 161 | 162 | # Return value controls if the data gets added to the channel queue. 163 | r = yield from self.peer.engine.channel_data(\ 164 | self.peer, local_cid, data) 165 | return r 166 | -------------------------------------------------------------------------------- /putil.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2003-2007 Robey Pointer . 2 | # Copyright (C) 2014-2015 Sam Maloney. 3 | # License: LGPL. 4 | # 5 | # This file is based upon parts from paramiko (r85d5e95f9280aa236602b77e9f5bd0aa4d3c8fcd). 6 | 7 | import struct 8 | 9 | xffffffff = int(0xffffffff) 10 | 11 | def byte_chr(c): 12 | assert isinstance(c, int) 13 | return struct.pack('B', c) 14 | 15 | zero_byte = byte_chr(0) 16 | one_byte = byte_chr(1) 17 | max_byte = byte_chr(0xff) 18 | 19 | def byte_ord(c): 20 | # In case we're handed a string instead of an int. 
21 | if not isinstance(c, int): 22 | c = ord(c) 23 | return c 24 | 25 | def byte_mask(c, mask): 26 | assert isinstance(c, int) 27 | return struct.pack('B', c & mask) 28 | 29 | def inflate_long(s, always_positive=False): 30 | """turns a normalized byte string into a long-int (adapted from Crypto.Util.number)""" 31 | out = int(0) 32 | negative = 0 33 | if not always_positive and (len(s) > 0) and (byte_ord(s[0]) >= 0x80): 34 | negative = 1 35 | if len(s) % 4: 36 | filler = zero_byte 37 | if negative: 38 | filler = max_byte 39 | # never convert this to ``s +=`` because this is a string, not a number 40 | # noinspection PyAugmentAssignment 41 | s = filler * (4 - len(s) % 4) + s 42 | for i in range(0, len(s), 4): 43 | out = (out << 32) + struct.unpack('>I', s[i:i+4])[0] 44 | if negative: 45 | out -= (int(1) << (8 * len(s))) 46 | return out 47 | 48 | deflate_zero = 0 49 | deflate_ff = 0xff 50 | 51 | def deflate_long(n, add_sign_padding=True): 52 | """turns a long-int into a normalized byte string (adapted from Crypto.Util.number)""" 53 | # after much testing, this algorithm was deemed to be the fastest 54 | s = bytes() 55 | n = int(n) 56 | while (n != 0) and (n != -1): 57 | s = struct.pack('>I', n & xffffffff) + s 58 | n >>= 32 59 | # strip off leading zeros, FFs 60 | for i in enumerate(s): 61 | if (n == 0) and (i[1] != deflate_zero): 62 | break 63 | if (n == -1) and (i[1] != deflate_ff): 64 | break 65 | else: 66 | # degenerate case, n was either 0 or -1 67 | i = (0,) 68 | if n == 0: 69 | s = zero_byte 70 | else: 71 | s = max_byte 72 | s = s[i[0]:] 73 | if add_sign_padding: 74 | if (n == 0) and (byte_ord(s[0]) >= 0x80): 75 | s = zero_byte + s 76 | if (n == -1) and (byte_ord(s[0]) < 0x80): 77 | s = max_byte + s 78 | return s 79 | 80 | def bit_length(n): 81 | try: 82 | return n.bitlength() 83 | except AttributeError: 84 | norm = deflate_long(n, False) 85 | hbyte = byte_ord(norm[0]) 86 | if hbyte == 0: 87 | return 1 88 | bitlen = len(norm) * 8 89 | while not (hbyte & 0x80): 90 | hbyte <<= 1 91 | bitlen -= 1 92 | return bitlen 93 | -------------------------------------------------------------------------------- /rsakey.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2003-2007 Robey Pointer . 2 | # Copyright (C) 2014-2015 Sam Maloney. 3 | # License: LGPL. 4 | # 5 | # This file is based upon parts from paramiko (r85d5e95f9280aa236602b77e9f5bd0aa4d3c8fcd). 6 | 7 | """ 8 | RSA keys. 9 | """ 10 | import llog 11 | 12 | import os 13 | from hashlib import sha1 14 | import logging 15 | 16 | from Crypto.PublicKey import RSA 17 | from Crypto.Signature import PKCS1_PSS 18 | 19 | import putil as util 20 | from putil import * 21 | 22 | import sshtype 23 | import asymkey 24 | import enc 25 | from sshexception import * 26 | 27 | log = logging.getLogger(__name__) 28 | 29 | SHA1_DIGESTINFO =\ 30 | b'\x30\x21\x30\x09\x06\x05\x2b\x0e\x03\x02\x1a\x05\x00\x04\x14' 31 | 32 | #class RsaKey (PKey): 33 | class RsaKey(asymkey.AsymKey): 34 | """ 35 | Representation of an RSA key which can be used to sign and verify SSH2 36 | data. 
37 | """ 38 | 39 | def __init__(self, data=None, privdata=None, filename=None, password=None, vals=None, file_obj=None): 40 | self.n = None 41 | self.e = None 42 | self.d = None 43 | self.p = None 44 | self.q = None 45 | 46 | self.__public_key = None 47 | self.__public_key_bytes = None 48 | self.__private_key = None 49 | self.__rsassa_pss_signer = None 50 | self.__rsassa_pss_verifier = None 51 | 52 | if file_obj is not None: 53 | self._from_private_key(file_obj, password) 54 | return 55 | if filename is not None: 56 | self._from_private_key_file(filename, password) 57 | return 58 | if vals is not None: 59 | self.e, self.n = vals 60 | else: 61 | if data is None: 62 | if privdata is None: 63 | raise SshException('Key object may not be empty') 64 | else: 65 | self._decode_key(privdata) 66 | else: 67 | i, v = sshtype.parseString(data) 68 | if v != 'ssh-rsa': 69 | raise SshException('Invalid key') 70 | l, self.e = sshtype.parseMpint(data[i:]) 71 | i += l 72 | l, self.n = sshtype.parseMpint(data[i:]) 73 | self.size = util.bit_length(self.n) 74 | 75 | def asbytes(self): 76 | m = self.__public_key_bytes 77 | 78 | if m: 79 | return m 80 | 81 | m = bytearray() 82 | m += sshtype.encodeString('ssh-rsa') 83 | m += sshtype.encodeMpint(self.e) 84 | m += sshtype.encodeMpint(self.n) 85 | 86 | self.__public_key_bytes = m 87 | 88 | return m 89 | 90 | def __str__(self): 91 | return self.asbytes() 92 | 93 | def __hash__(self): 94 | h = hash(self.get_name()) 95 | h = h * 37 + hash(self.e) 96 | h = h * 37 + hash(self.n) 97 | return hash(h) 98 | 99 | def get_name(self): 100 | return 'ssh-rsa' 101 | 102 | def get_bits(self): 103 | return self.size 104 | 105 | def can_sign(self): 106 | return self.d is not None 107 | 108 | def calc_rsassa_pss_sig(self, data): 109 | h = enc._generate_ID(data) 110 | 111 | privkey = self._private_key() 112 | 113 | signer = self._rsassa_pss_signer() 114 | 115 | return signer.sign(h) 116 | 117 | def sign_ssh_data(self, data): 118 | digest = sha1(data).digest() 119 | rsa = self._private_key() 120 | sig = util.deflate_long(\ 121 | rsa.sign(self._pkcs1imify(digest), bytes())[0], 0) 122 | 123 | m = bytearray() 124 | m += sshtype.encodeString('ssh-rsa') 125 | m += sshtype.encodeBinary(sig) 126 | return m 127 | 128 | def verify_rsassa_pss_sig(self, data, signature): 129 | h = enc._generate_ID(data) 130 | 131 | signer = self._rsassa_pss_verifier() 132 | 133 | return signer.verify(h, signature) 134 | 135 | def verify_ssh_sig(self, key_data, sig_msg): 136 | i, v = sshtype.parseString(sig_msg) 137 | if v != 'ssh-rsa': 138 | log.warning("Not an ssh-rsa signature!") 139 | return False 140 | if log.isEnabledFor(logging.DEBUG): 141 | log.debug("l[{}][{}]".format(i, len(sig_msg))) 142 | sig = util.inflate_long(sshtype.parseBinary(sig_msg[i:])[1], True) 143 | # verify the signature by SHA'ing the key_data and encrypting it using the 144 | # public key. some wackiness ensues where we "pkcs1imify" the 20-byte 145 | # hash into a string as long as the RSA key. 
146 | if log.isEnabledFor(logging.DEBUG): 147 | log.debug("sig=[{}].".format(sig)) 148 | hash_obj = util.inflate_long(self._pkcs1imify(sha1(key_data).digest()), True) 149 | rsa = self._public_key() 150 | return rsa.verify(hash_obj, (sig, )) 151 | 152 | def _public_key(self): 153 | key = self.__public_key 154 | 155 | if not key: 156 | self.__public_key = key = RSA.construct((int(self.n), int(self.e))) 157 | 158 | return key 159 | 160 | def _private_key(self): 161 | key = self.__private_key 162 | 163 | if not key: 164 | self.__private_key = key =\ 165 | RSA.construct((int(self.n), int(self.e), int(self.d))) 166 | 167 | return key 168 | 169 | def _rsassa_pss_signer(self): 170 | signer = self.__rsassa_pss_signer 171 | 172 | if not signer: 173 | signer = self.__rsassa_pss_signer =\ 174 | PKCS1_PSS.new(self._private_key()) 175 | 176 | return signer 177 | 178 | def _rsassa_pss_verifier(self): 179 | verifier = self.__rsassa_pss_verifier 180 | 181 | if not verifier: 182 | verifier = self.__rsassa_pss_verifier =\ 183 | PKCS1_PSS.new(self._public_key()) 184 | 185 | return verifier 186 | 187 | def _encode_key(self): 188 | "Encode the private components into an mnk structure." 189 | 190 | if (self.p is None) or (self.q is None): 191 | raise SshException('Not enough key info to write private key file') 192 | """ 193 | keylist = [0, self.n, self.e, self.d, self.p, self.q, 194 | self.d % (self.p - 1), self.d % (self.q - 1), 195 | util.mod_inverse(self.q, self.p)] 196 | try: 197 | b = BER() 198 | b.encode(keylist) 199 | except BERException: 200 | raise SshException('Unable to create ber encoding of key') 201 | return b.asbytes() 202 | """ 203 | b = bytearray() 204 | 205 | b += struct.pack("B", 1) # mnk version. 206 | b += sshtype.encodeMpint(self.e) 207 | b += sshtype.encodeMpint(self.n) 208 | b += sshtype.encodeMpint(self.d) 209 | b += sshtype.encodeMpint(self.p) 210 | b += sshtype.encodeMpint(self.q) 211 | 212 | return b 213 | 214 | def write_private_key_file(self, filename, password=None): 215 | self._write_private_key_file('RSA', filename, self._encode_key(), password) 216 | 217 | def write_private_key(self, file_obj, password=None): 218 | self._write_private_key('RSA', file_obj, self._encode_key(), password) 219 | 220 | @staticmethod 221 | def generate(bits, progress_func=None): 222 | """ 223 | Generate a new private RSA key. This factory function can be used to 224 | generate a new host key or authentication key. 225 | 226 | :param int bits: number of bits the generated key should be. 227 | :param function progress_func: 228 | an optional function to call at key points in key generation (used 229 | by ``pyCrypto.PublicKey``). 230 | :return: new `.RsaKey` private key 231 | """ 232 | rsa = RSA.generate(bits, os.urandom, progress_func) 233 | key = RsaKey(vals=(rsa.e, rsa.n)) 234 | key.d = rsa.d 235 | key.p = rsa.p 236 | key.q = rsa.q 237 | return key 238 | 239 | ### internals... 240 | 241 | def _pkcs1imify(self, data): 242 | """ 243 | turn a 20-byte SHA1 hash into a blob of data as large as the key's N, 244 | using PKCS1's \"emsa-pkcs1-v1_5\" encoding. totally bizarre. 
245 | """ 246 | size = len(util.deflate_long(self.n, 0)) 247 | filler = max_byte * (size - len(SHA1_DIGESTINFO) - len(data) - 3) 248 | return zero_byte + one_byte + filler + zero_byte + SHA1_DIGESTINFO + data 249 | 250 | def _from_private_key_file(self, filename, password): 251 | data = self._read_private_key_file('RSA', filename, password) 252 | self._decode_key(data) 253 | 254 | def _from_private_key(self, file_obj, password): 255 | data = self._read_private_key('RSA', file_obj, password) 256 | self._decode_key(data) 257 | 258 | def _decode_key(self, data): 259 | """ 260 | # private key file contains: 261 | # RSAPrivateKey = { version = 0, n, e, d, p, q, d mod p-1, d mod q-1, q**-1 mod p } 262 | try: 263 | keylist = BER(data).decode() 264 | except BERException: 265 | raise SshException('Unable to parse key file') 266 | if (type(keylist) is not list) or (len(keylist) < 4) or (keylist[0] != 0): 267 | raise SshException('Not a valid RSA private key file (bad ber encoding)') 268 | self.n = keylist[1] 269 | self.e = keylist[2] 270 | self.d = keylist[3] 271 | # not really needed 272 | self.p = keylist[4] 273 | self.q = keylist[5] 274 | self.size = util.bit_length(self.n) 275 | """ 276 | 277 | ver = struct.unpack("B", data[:1])[0] 278 | if ver != 1: 279 | raise SshException("Unsupported mnk version [{}].".format(ver)) 280 | i = 1 281 | l, self.e = sshtype.parseMpint(data[i:]) 282 | i += l 283 | l, self.n = sshtype.parseMpint(data[i:]) 284 | i += l 285 | l, self.d = sshtype.parseMpint(data[i:]) 286 | i += l 287 | l, self.p = sshtype.parseMpint(data[i:]) 288 | i += l 289 | l, self.q = sshtype.parseMpint(data[i:]) 290 | -------------------------------------------------------------------------------- /run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | python3 node.py $* 3 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: GPL v2. 3 | 4 | from distutils.core import setup 5 | from Cython.Build import cythonize 6 | 7 | modules = [\ 8 | "asymkey", 9 | "bittrie", 10 | "brute", 11 | "chord_packet", 12 | "dhgroup14", 13 | "enc", 14 | "llog", 15 | "mbase32", 16 | "mutil", 17 | "putil", 18 | "packet", 19 | "rsakey", 20 | "sshtype"\ 21 | ] 22 | 23 | setup( 24 | name = 'n1', 25 | ext_modules = cythonize(\ 26 | [x + ".py" for x in modules]) 27 | ) 28 | -------------------------------------------------------------------------------- /setup_all.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: GPL v2. 3 | 4 | from distutils.core import setup 5 | from Cython.Build import cythonize 6 | 7 | setup( 8 | name = 'n1', 9 | ext_modules = cythonize("*.py") 10 | ) 11 | -------------------------------------------------------------------------------- /sshexception.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 2 | # License: LGPL 3 | 4 | class SshException(Exception): 5 | pass 6 | -------------------------------------------------------------------------------- /sshtype.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014-2015 Sam Maloney. 
2 | # License: LGPL 3 | 4 | import struct 5 | import logging 6 | 7 | import llog 8 | import putil 9 | 10 | log = logging.getLogger(__name__) 11 | 12 | def parseNameList(buf): 13 | return parseString(buf) 14 | 15 | def parse_string_from(buf, i): 16 | l, v = parse_binary_from(buf, i) 17 | return l, v.decode() 18 | 19 | def parseString(buf): 20 | l, v = parseBinary(buf) 21 | return l, v.decode() 22 | 23 | def parse_binary_from(buf, i): 24 | length = struct.unpack_from(">L", buf, i)[0] 25 | 26 | start = i + 4 27 | end = start + length 28 | 29 | value = buf[start:end] 30 | 31 | return end, value 32 | 33 | def parseBinary(buf): 34 | length = struct.unpack(">L", buf[0:4])[0] 35 | if log.isEnabledFor(logging.DEBUG): 36 | log.debug("length={}".format(length)) 37 | value = buf[4:4 + length] 38 | 39 | return length + 4, value 40 | 41 | def parse_mpint_from(buf, i): 42 | length = struct.unpack_from(">L", buf, i)[0] 43 | 44 | if log.isEnabledFor(logging.DEBUG): 45 | log.debug("length={}".format(length)) 46 | 47 | start = i + 4 48 | end = start + length 49 | 50 | value = putil.inflate_long(buf[start:end]) 51 | 52 | return end, value 53 | 54 | def parseMpint(buf): 55 | length = struct.unpack(">L", buf[0:4])[0] 56 | if log.isEnabledFor(logging.DEBUG): 57 | log.debug("length={}".format(length)) 58 | return length + 4, putil.inflate_long(buf[4:4+length]) 59 | 60 | def encodeMpint(val): 61 | buf = putil.deflate_long(val) 62 | length = struct.pack(">L", len(buf)) 63 | return length + buf 64 | 65 | def encodeNameList(val): 66 | return encodeString(val) 67 | 68 | def encodeString(val): 69 | # if log.isEnabledFor(logging.DEBUG): 70 | # log.debug("type=[{}].".format(type(val))) 71 | # if isinstance(val, bytes) or isinstance(val, bytearray): 72 | # buf = val 73 | # else: 74 | buf = val.encode(encoding="UTF-8") 75 | 76 | length = struct.pack(">L", len(buf)) 77 | return length + buf 78 | 79 | def encodeBinary(buf): 80 | length = struct.pack(">L", len(buf)) 81 | return length + buf 82 | -------------------------------------------------------------------------------- /test_100n.sh: -------------------------------------------------------------------------------- 1 | rm -f morphis.log ; python3 node.py -l logging-prod.ini --bind 127.0.0.1:6000 --addpeer 127.0.0.1:7000 --nn 1000 --nodecount 100 --dbpoolsize 2 --cleartexttransport --dumptasksonexit --parallellaunch --dburl postgresql://m1:2d8VhzVIovxZy@pg1/m1 2 | -------------------------------------------------------------------------------- /upload_page.html: -------------------------------------------------------------------------------- 1 | Morphis Maalstroom Upload
Select the file to upload below:
2 | --------------------------------------------------------------------------------
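A note on the wire helpers above: sshtype.py wraps every field in a 4-byte big-endian length prefix, and mpints pass through putil.deflate_long()/inflate_long(), which strip or re-add a sign byte so values with the top bit set stay positive. The following is a minimal round-trip sketch, assuming the repository modules shown above are importable as-is; the specific values are illustrative only.

    import sshtype

    # Strings: 4-byte big-endian length followed by the UTF-8 payload.
    buf = sshtype.encodeString("ssh-rsa")
    consumed, name = sshtype.parseString(buf)
    assert name == "ssh-rsa" and consumed == len(buf)

    # Mpints: deflate_long() strips leading zero/0xff bytes and prepends a
    # sign byte when the high bit would otherwise flip the sign.
    n = 0xDEADBEEF
    buf = sshtype.encodeMpint(n)
    consumed, value = sshtype.parseMpint(buf)
    assert value == n and consumed == len(buf)

    # The *_from variants parse at an offset into a larger buffer and return
    # the index just past the field, so several fields can be walked in turn.
    blob = sshtype.encodeMpint(65537) + sshtype.encodeMpint(n)
    i, e = sshtype.parse_mpint_from(blob, 0)
    i, m = sshtype.parse_mpint_from(blob, i)
    assert (e, m) == (65537, n)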
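Similarly, rsakey.py exposes RSASSA-PSS signing over hashes produced by enc._generate_ID(). The sketch below shows the intended round trip under two assumptions not verifiable from this section alone: that PyCrypto is installed, and that enc._generate_ID() returns a hash object PKCS1_PSS accepts (as its use in calc_rsassa_pss_sig() implies). The key size and message are illustrative only.

    import rsakey

    key = rsakey.RsaKey.generate(2048)           # new private key (e, n, d, p, q)
    data = b"example payload"

    sig = key.calc_rsassa_pss_sig(data)          # sign with the private half
    assert key.verify_rsassa_pss_sig(data, sig)  # verify with the same key

    # The public half serializes to the ssh-rsa wire format via asbytes()
    # and can be reloaded as a verify-only key.
    pub = rsakey.RsaKey(key.asbytes())
    assert pub.verify_rsassa_pss_sig(data, sig)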