├── .gitattributes ├── .github └── ISSUE_TEMPLATE.md ├── .gitignore ├── .vscode ├── launch.json ├── settings.json └── vscode_tests.py ├── LICENSE ├── MANIFEST.in ├── README.rst ├── bin └── hashit ├── build.cmd ├── build.sh ├── changelog ├── conf.py ├── debian ├── changelog ├── compat ├── control ├── copyrigth ├── rules └── source │ ├── format │ └── options ├── docs ├── Gemfile ├── _config.yml ├── _layouts │ └── default.html ├── extra.md ├── favicon.ico ├── hashes.md ├── index.md ├── plugins.md ├── pydoc.md ├── pydocs │ ├── hashit.__main__.html │ ├── hashit.detection.html │ ├── hashit.extra.html │ ├── hashit.html │ └── hashit.version.html └── usage.md ├── hashit.spec ├── hashit ├── __init__.py ├── __main__.py ├── detection.py ├── extra.py └── version.py ├── img ├── demo.gif └── icon.png ├── release ├── hashit.tar.gz └── hashit.zip ├── setup.py ├── snapcraft.yaml └── tests ├── __init__.py ├── benchmarks ├── res │ ├── benchmarks.json │ ├── benchmarks2.json │ ├── crc_hashcollisions.txt │ ├── file.json │ └── pycrypto_vs_hashlib.json ├── speed.py └── speed2.py ├── config.py ├── spec ├── arg.py ├── gui.py ├── oldmain.py └── test.py ├── test_load.py └── unit.py /.gitattributes: -------------------------------------------------------------------------------- 1 | *=auto -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | [//]: # (Answer the following) 2 | 3 | - [ ] I am not using any plugins/configuration other than the default 4 | - [ ] I can recreate it on another machine/instance 5 | - I downloaded it from [here]() 6 | - I am using version [version] 7 | 8 | [//]: # (pastebin/gist/text in triple-backwardquotes) 9 | - --trace from it: 10 | 11 | ## Problem 12 | [//]: # (Decribe your problem) 13 | 14 | ## Tried 15 | [//]: # (Optional, what have you done so far) -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # python files/temps 2 | *.egg-info 3 | *.pyc 4 | __pycache__/ 5 | dist/ 6 | build/ 7 | 8 | # snap files and dirs 9 | parts/ 10 | prime/ 11 | snap/ 12 | stage/ 13 | 14 | # extra snap stuff 15 | *.snap* 16 | *.bz2 17 | 18 | # other folders 19 | deb_dist/ 20 | .vs*/ 21 | 22 | # other files 23 | *.chk 24 | dist.zip 25 | 26 | # jekyll 27 | _site/ 28 | .sass-cache/ 29 | 30 | # sphinx 31 | _build/ 32 | _static/ 33 | _templates/ -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "configurations": [ 3 | { 4 | "name": "External Terminal/Console", 5 | "type": "python", 6 | "request": "launch", 7 | "stopOnEntry": true, 8 | "pythonPath": "${config:python.pythonPath}", 9 | "program": "${file}", 10 | "cwd": "", 11 | "console": "externalTerminal", 12 | "env": {}, 13 | "envFile": "${workspaceFolder}/.env", 14 | "debugOptions": [] 15 | }, 16 | { 17 | "name": "Python", 18 | "type": "python", 19 | "request": "launch", 20 | "stopOnEntry": true, 21 | "pythonPath": "${config:python.pythonPath}", 22 | "program": "${file}", 23 | "cwd": "${workspaceFolder}", 24 | "env": {}, 25 | "envFile": "${workspaceFolder}/.env", 26 | "debugOptions": [ 27 | "RedirectOutput" 28 | ] 29 | } 30 | ] 31 | } -------------------------------------------------------------------------------- /.vscode/settings.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "python.testing.unittestArgs": [ 3 | "-v", 4 | "-s", 5 | "./.vscode", 6 | "-p", 7 | "*_tests.py" 8 | ], 9 | "python.testing.unittestEnabled": true, 10 | "python.linting.enabled": true, 11 | "restructuredtext.confPath": "/home/javad/Dropbox/Projekter/hashit/hashit", 12 | "python.pythonPath": "/usr/local/bin/python3" 13 | } -------------------------------------------------------------------------------- /.vscode/vscode_tests.py: -------------------------------------------------------------------------------- 1 | # Unittests for vscodes python.unittest 2 | 3 | import sys 4 | sys.path.insert(0, "..") 5 | from tests.unit import * -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\ 2 | ©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©© 3 | ©-------------------------------------------------------------------------------© 4 | ©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©© 5 | MIT License 6 | 7 | Copyright (c) 2018 Javad Shafique 8 | 9 | Permission is hereby granted, free of charge, to any person obtaining a copy 10 | of this software and associated documentation files (the "Software"), to deal 11 | in the Software without restriction, including without limitation the rights 12 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 13 | copies of the Software, and to permit persons to whom the Software is 14 | furnished to do so, subject to the following conditions: 15 | 16 | The above copyright notice and this permission notice shall be included in all 17 | copies or substantial portions of the Software. 18 | 19 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 20 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 21 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 22 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 23 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 24 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 25 | SOFTWARE. 26 | 27 | NO ONE CAN CLAIM OWNERSHIP OF THIS "SOFTWARE" AND ASSOCIATED DOCUMENTATION FILES. 28 | 29 | Icon from freepik.com all rights reserved 30 | ©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©© 31 | ©-------------------------------------------------------------------------------© 32 | ©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©© 33 | ///////////////////////////////////////////////////////////////////////////////// 34 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst 2 | include LICENSE 3 | include img/icon.png 4 | include snapcraft.yaml 5 | include changelog 6 | include docs/*.md 7 | include tests/*.py 8 | include hashit.spec -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. 
image:: https://build.snapcraft.io/badge/cjavad/hashit.svg 2 | :target: https://build.snapcraft.io/user/cjavad/hashit 3 | 4 | Project is hosted on `pypi `__ and `launchpad `__ 5 | 6 | .. image:: img/icon.png 7 | :target: https://github.com/cjavad/hashit 8 | :align: right 9 | 10 | Hashit, a hashing application 11 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 12 | 13 | .. image:: img/demo.gif 14 | :target: https://asciinema.org/a/TZQCel3DNy2sCWOFBtQcqVMMM 15 | :alt: asciinema demo usage. 16 | 17 | Description 18 | ~~~~~~~~~~~ 19 | Hashit is a hashing application used as a verification tool, intended to replace the "standard" Linux hashing utilities such as 20 | md5sum, sha1sum and so on. One of the main reasons this program was developed was to create an easy-to-use command line tool for 21 | newcomers and professionals alike to hash/verify files and other data. For more see our homepage at `cjavad.github.io/hashit `__ 22 | 23 | What is this magic (hashing) 24 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 25 | .. 26 | 27 | Hashing - The Greatest Idea In Programming 28 | 29 | A quote from `here `__, which I think sums up the whole thing pretty well. 30 | Even though that statement is purely opinion based, I still think there is some weight in it, because you can do so many things with "hashing". 31 | The core concept of hashing is that a mathematical algorithm can generate a string that is unique to a piece of data, but 32 | that string cannot be turned back into the data; this works by generating a string whose size is constant (or at least does not change from one piece of data to the next). 33 | This is extremely useful, because it lets you produce a string that is smaller than the original while still being practically unique; 34 | it can be used in databases for big data, where you can build a lookup table without needing the whole amount of data, and it can also be used to verify data 35 | such as passwords and file checksums, which is exactly what this program does. I use the standard Python hashing library hashlib, which comes with most versions of 36 | Python (some functions, like crc32, come from other libraries), to hash files and store the results in a checksum file, which can be read back to check 37 | whether the files have changed. This matters for packaging and other critical files, where one must be absolutely sure that a file has not 38 | been changed, because a change could mean corruption or infection by some kind of malware; verifying the checksum shows that the package is the same as the original. 39 | Some file systems use these hashes to make sure that files haven't been altered externally. 40 | 41 | For more see `docs/hashes `__ and the `wikipedia page `__ 42 | 43 | 44 | Background 45 | ~~~~~~~~~~ 46 | 47 | Hashit is a hashing program which can be used to hash and verify 48 | multiple files on a system. I got the idea from an Ubuntu ISO image which 49 | ships with such a hash table, so I decided to make a program like that using 50 | Python. 51 | 52 | I also found that the Linux 'standard' hashing commands were named like this: 53 | - md5sum 54 | - sha1sum 55 | - sha256sum 56 | - cksum 57 | - sum 58 | 59 | hashname + sum, which I thought was a pretty lame naming convention. 60 | 61 | Notice: 62 | ~~~~~~~ 63 | 64 | Some hashes such as blake2b and blake2s are not supported in python2.
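To see which of these algorithms your own interpreter's hashlib exposes, a quick standard-library check is enough (Python 3, or 2.7.9+); this is only an illustration and not part of hashit itself:

.. code:: python

    import hashlib

    # names every supported build of Python guarantees
    print(sorted(hashlib.algorithms_guaranteed))
    # everything this particular build (including its OpenSSL) can provide
    print(sorted(hashlib.algorithms_available))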
65 | 66 | I would recommend python3 for this program, as its version of hashlib 67 | supports sha3 (Keccak) 68 | 69 | For compatibility reasons, detect does not work for sha3 yet; there are simply too many possible confusions between sha2 and sha3 70 | 71 | BSD can be useful with the -A --append option because then multiple different hashtypes can be stored 72 | in the same file, good for multi-sized file validation. (remember -m) 73 | 74 | 75 | Usage 76 | -------------- 77 | 78 | See `docs/usage `__ 79 | 80 | Changelog 81 | -------------- 82 | See `debian/changelog `__ 83 | 84 | 85 | Works with python2 and python3. (python3 is recommended) 86 | 87 | 88 | .. image:: https://badges.gitter.im/cjavad/hashit.svg 89 | :alt: Join the chat at https://gitter.im/cjavad/hashit 90 | :target: https://gitter.im/cjavad/hashit?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge -------------------------------------------------------------------------------- /bin/hashit: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | MIT License 4 | 5 | Copyright (c) 2020 Javad Shafique 6 | 7 | Permission is hereby granted, free of charge, to any person obtaining a copy 8 | of this software and associated documentation files (the "Software"), to deal 9 | in the Software without restriction, including without limitation the rights 10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | copies of the Software, and to permit persons to whom the Software is 12 | furnished to do so, subject to the following conditions: 13 | 14 | The above copyright notice and this permission notice shall be included in all 15 | copies or substantial portions of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | SOFTWARE. 24 | 25 | NO ONE CAN CLAIM OWNERSHIP OF THIS SOFTWARE.
26 | """ 27 | 28 | import sys 29 | from hashit.__main__ import main 30 | 31 | # start hashit 32 | try: 33 | main(sys.argv[1:]) 34 | except Exception as e: 35 | print(e) 36 | finally: 37 | sys.exit() -------------------------------------------------------------------------------- /build.cmd: -------------------------------------------------------------------------------- 1 | @echo off 2 | REM Requires some kind of bashshell (WSL or cygwin) 3 | set shell="bash" 4 | set python="py" 5 | 6 | REM windows build 7 | IF "%1"=="build" ( 8 | %python% setup.py bdist_wininst 9 | ) 10 | 11 | %shell% build.sh %1 -------------------------------------------------------------------------------- /build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # in here all the tools for this project is automatazied 4 | 5 | # this script manages this project by building pydocs, uploading to pypi 6 | # and compiling the source running tests pushing to launchpad and more 7 | 8 | # set python version 9 | PY=python3 10 | # get version of package 11 | # combine with name to get filenames 12 | V="$($PY setup.py -V)" # get version 13 | TO="release" # get release dir 14 | NAME="hashit-${V}" # set combined name 15 | ZIP="dist/${NAME}.zip" # set zip output 16 | TAR="dist/${NAME}.tar.gz" # se tarball output 17 | COM=("build" "clean" "docs" "push" "test" "install" "upload") # array of commands 18 | 19 | # for if statment 20 | containsElement () { 21 | local e match="$1" 22 | shift 23 | for e; do [[ "$e" == "$match" ]] && return 0; done 24 | return 1 25 | } 26 | 27 | if [ "$1" == "docs" ] 28 | then 29 | # build docs using pydoc 30 | $PY -m pydoc -w hashit 31 | $PY -m pydoc -w hashit.__main__ 32 | $PY -m pydoc -w hashit.detection 33 | $PY -m pydoc -w hashit.extra 34 | $PY -m pydoc -w hashit.version 35 | mv -f *.html ./docs/pydocs 36 | #cd ./docs/pydocs 37 | #find . -name "*.ht*" | while read i; do pandoc -f html -t markdown "$i" -o "${i%.*}.md"; done 38 | #cd ../.. 39 | # clean/create file 40 | echo -e "---\nlayout: default\n---\n" > ./docs/pydoc.md 41 | $PY -c "from pydocmd.__main__ import main; import sys; sys.argv = ['', 'simple', 'hashit+', 'hashit.__main__+', 'hashit.detection+', 'hashit.extra+']; main()" >> ./docs/pydoc.md 42 | printf "\n\n[back](index.md)" >> ./docs/pydoc.md # add back button 43 | 44 | # push new documentation to wiki 45 | sudo cp ./docs/*.md ../hashit.wiki 46 | sudo mv ../hashit.wiki/index.md ../hashit.wiki/Home.md 47 | cd ../hashit.wiki 48 | # remove layout 49 | sudo chmod 777 *.md 50 | sudo sed -i '/layout: default/d' *.md 51 | sudo sed -i '/---/d' *.md 52 | sudo sed -e 's/\.md//g' -i *.md 53 | # push to git 54 | sudo git add . 
55 | sudo git commit -m "Updated wiki" 56 | exit 57 | fi 58 | 59 | if [ "$1" == "push" ] 60 | then 61 | echo "push hashit to origin (github) and launchpad (ppa)" 62 | git push origin master 63 | git push launchpad master 64 | cd ../hashit.wiki 65 | echo "PUSH wiki" 66 | sudo git push 67 | exit 68 | fi 69 | 70 | if [ "$1" == "install" ] 71 | then 72 | $PY setup.py install 73 | rm -rf ./dist 74 | exit 75 | fi 76 | 77 | 78 | if [ "$1" == "test" ] 79 | then 80 | python3 setup.py test 81 | python setup.py test 82 | rm */*.pyc # remove .pyc files from python2 83 | # and exit 84 | exit 85 | fi 86 | 87 | if [ "$1" == "clean" ] 88 | then 89 | if [ -d "hashit.egg-info" ] 90 | then 91 | rm -rf "hashit.egg-info" 92 | echo "Removed egg-info" 93 | fi 94 | 95 | if [ -d "dist" ] 96 | then 97 | rm -rf "dist" 98 | echo "Removed dist" 99 | fi 100 | 101 | if [ -d "build" ] 102 | then 103 | rm -rf "build" 104 | echo "Removed build" 105 | fi 106 | 107 | if [ -d "release/deb_dist" ] 108 | then 109 | rm -rf "release/deb_dist" 110 | echo "Removed deb_dist" 111 | fi 112 | 113 | if [ -d "_build" ] 114 | then 115 | rm -rf "_build" 116 | echo "Removed _build" 117 | fi 118 | 119 | # clean debian dir 120 | cd debian 121 | if [ -d "hashit" ]; then rm -rf "hashit"; echo "Removed debian/hashit/*"; fi 122 | if [ -f "debhelper-build-stamp" ]; then rm -rf "debhelper-build-stamp"; echo "Removed debian/debhelper-build-stamp"; fi 123 | if [ -f "files" ]; then rm -rf "files"; echo "Removed debian/files"; fi 124 | if [ -f "hashit.debhelper.log" ]; then rm -rf "hashit.debhelper.log"; echo "Removed debian/hashit.debhelper.log"; fi 125 | if [ -f "hashit.postinst.debhelper" ]; then rm -rf "hashit.postinst.debhelper"; echo "Removed debian/hashit.postinst.debhelper"; fi 126 | if [ -f "hashit.prerm.debhelper" ]; then rm -rf "hashit.prerm.debhelper"; echo "Removed debian/hashit.prerm.debhelper"; fi 127 | if [ -f "hashit.substvars" ]; then rm -rf "hashit.substvars"; echo "Removed debian/hashit.substvars"; fi 128 | 129 | cd .. 130 | # delete pycache 131 | find . -name "__pycache__" -exec rm -rf {} + 132 | rm -rf */*.pyc 133 | exit 134 | fi 135 | 136 | 137 | if [ "$1" == "upload" ] 138 | then 139 | $PY setup.py sdist upload 140 | rm -rf ./dist 141 | exit 142 | fi 143 | 144 | if [ "$1" == "build" ] 145 | then 146 | # Move and print messages 147 | SILENT="$($PY setup.py sdist --quiet --formats zip,gztar)" 148 | echo "Version, $V at $ZIP and $TAR" 149 | mv $ZIP "${TO}/hashit.zip" 150 | mv $TAR "${TO}/hashit.tar.gz" 151 | echo "Moved to ${TO}/hashit.zip and ${TO}/hashit.tar.gz" 152 | rm -r dist/ 153 | 154 | 155 | # copy changelog to debian 156 | cp "./changelog" "debian/" 157 | read -p "Push to launchpad (y/n)? " choice 158 | case "$choice" in 159 | # for launchpad dailybuilds 160 | # add remote and force push deb package 161 | y|Y ) git remote add launchpad git+ssh://javadsm@git.launchpad.net/python3-hashit; git push --force --set-upstream launchpad master;; 162 | n|N ) :;; 163 | * ) :;; 164 | esac 165 | 166 | read -p "Build locally (y/n)? " choice 167 | case "$choice" in 168 | # build locally on your machine 169 | y|Y ) rm ./release/*.deb; debuild -b -uc -us; mv ../*.deb ./release/; rm ../hashit_*;; 170 | n|N ) :;; 171 | * ) :;; 172 | esac 173 | # exit 174 | exit 175 | fi 176 | 177 | containsElement "$1" ${COM[@]} 178 | # check for element 179 | if [ $? 
] 180 | then 181 | echo -e "No arguments selected, use one of the following:\n" 182 | echo " build - builds python, to either deb or source dist (can upload to launchpad)" 183 | echo " clean - removed extra folders and files from either build or python (*.pyc and __pycache__)" 184 | echo " docs - builds docs (markdown and html) and updates wiki (commits)" 185 | echo " push - pushes to both github and launchpad (such does build)" 186 | echo " test - runs unittests" 187 | echo " help (anything really) - prints this messages and exits" 188 | echo " install - installs used pip/setuptools" 189 | echo -e " upload - upload to pypi\n" 190 | exit 191 | fi -------------------------------------------------------------------------------- /changelog: -------------------------------------------------------------------------------- 1 | hashit (3.5.2) stable; urgency=low 2 | * 3.5.2 - Fixed issue with #5 3 | 4 | * 3.5.1 - Standerdized input/output 5 | 6 | * 3.4.5 - Added support for mdc2 hash 7 | 8 | * 3.4.4 - Fixed a bug with splitpath and added a removal for * appended by md5sum. 9 | 10 | * 3.4.3 - Fixed some more bugs, and improved some functions including Strip path to addapt a more md5sum style 11 | 12 | * 3.4.2 - Fixed some bugs, improved performance and set -s to the default command (posix). updated docs 13 | 14 | * 3.4.1 - Added the --list and --check-list option to verify lists of text 15 | 16 | * 3.4.0 - Added the --dry-run option and fixed the shake hash to you can now to shake_128_32. And bugfixes 17 | 18 | * 3.3.9 - Windows bugfixes. 19 | 20 | * 3.3.8 - Added more documentation and added sha3 (Keccak) support for detect, also added -e --exclude that can exclude dirs from list 21 | 22 | * 3.3.7 - Refractored the hashit.check code so you can now use it from python! (see `docs/extra.md#gui `__ for an example) 23 | 24 | * 3.3.6 - Minor bugfixes and removed -a, added -p --page for a help-page in the terminal for the python-api 25 | 26 | * 3.3.5 - hashit now supports a list of files such as the wildcard in linux, and can detect if that element is a directory 27 | 28 | * 3.3.4 - Bugfixes and more, fixed parsers added some benchmarks and fixing some more of the snap-related issues 29 | 30 | * 3.3.3 - Full release 31 | 32 | * 3.3.3a3 - Extended Configs working on homepage and docs 33 | 34 | * 3.3.3a2 - Added --trace and fixed some of the issues with detect and argparse 35 | 36 | * 3.3.0a1 - removed argc depend, using argparse 37 | 38 | * 3.3.0a0 - Fixing snap releated issues 39 | 40 | * 3.3.1 - Fixed bug in windows where \ would not be replaced by / in hashit.fixpath 41 | 42 | * 3.3.0 - Added BSD Style output and check format detection. Also an -s option that can hash a piece of text see `docs `__ for more 43 | 44 | * 3.2.1 - ReRelease for snap 45 | 46 | * 3.2.0 - Full support for snap 47 | 48 | * 3.1.5 - Skipped 3.1.4 cause i have been renaming varibles, cleaning code and improving performance. 49 | 50 | * 3.1.2-3.1.3 - fixed this document 51 | 52 | * 3.1.0 - A bunch of bugfixes in comparing of hashes, shake and more 53 | 54 | * 3.0.2 - Fixed size positioning 55 | 56 | * 3.0.1 - Added --size option that allows the program to check file sizes to 57 | 58 | * 3.0.0 - New release, added tests full color-support and CRC32 hashing! (Also added an -a option) 59 | 60 | * 2.3.0 - Fixed a bunch of code, made it faster better more powerful. Full support for sfv and more! 
61 | 62 | * 2.1.3 - Fixed detect bugs added unit tests and some fixes 63 | 64 | * 2.1.2 - Done with detect.py working hash detection 65 | 66 | * 2.1.1 - Some more bugfixed, started working on detect.py 67 | 68 | * 2.1.0 - Updated to support newest version of argc 69 | 70 | * 2.0.1 - Bugfixes 71 | 72 | * 1.2.0 - Full Release 73 | 74 | * 1.1.0 - Added support for python2 and 75 | 76 | * 1.0.2 - Double exits' fixed 77 | 78 | * 1.0.1 - Fixed printing bug 79 | 80 | * 1.0.0 - Major version 81 | 82 | * 0.0.3 - Added documentation and license 83 | 84 | * 0.0.2 - Fixing script bugs 85 | 86 | * 0.0.1 - Initial release, ready for use 87 | 88 | -- Javad Shafique Mon, 05 Feb 2020 15:59:57 +0100 89 | -------------------------------------------------------------------------------- /conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # hashit documentation build configuration file, created by 5 | # sphinx-quickstart on Mon Feb 5 16:07:13 2020. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | # If extensions (or modules to document with autodoc) are in another directory, 17 | # add these directories to sys.path here. If the directory is relative to the 18 | # documentation root, use os.path.abspath to make it absolute, like shown here. 19 | # 20 | # import os 21 | # import sys 22 | # sys.path.insert(0, os.path.abspath('.')) 23 | from hashit.version import __version__ 24 | 25 | 26 | # -- General configuration ------------------------------------------------ 27 | 28 | # If your documentation needs a minimal Sphinx version, state it here. 29 | # 30 | # needs_sphinx = '1.0' 31 | 32 | # Add any Sphinx extension module names here, as strings. They can be 33 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 34 | # ones. 35 | extensions = [] 36 | 37 | # Add any paths that contain templates here, relative to this directory. 38 | # templates_path = ['_templates'] 39 | 40 | # static_path = ['_static'] 41 | 42 | # The suffix(es) of source filenames. 43 | # You can specify multiple suffix as a list of string: 44 | # 45 | # source_suffix = ['.rst', '.md'] 46 | source_suffix = '.rst' 47 | 48 | # The master toctree document. 49 | master_doc = 'README' 50 | 51 | # General information about the project. 52 | project = 'hashit' 53 | copyright = '2020, Javad Shafique' 54 | author = 'Javad Shafique' 55 | 56 | # The version info for the project you're documenting, acts as replacement for 57 | # |version| and |release|, also used in various other places throughout the 58 | # built documents. 59 | # 60 | # The short X.Y version. 61 | version = __version__ 62 | # The full version, including alpha/beta/rc tags. 63 | release = '' 64 | 65 | # The language for content autogenerated by Sphinx. Refer to documentation 66 | # for a list of supported languages. 67 | # 68 | # This is also used if you do content translation via gettext catalogs. 69 | # Usually you set "language" from the command line for these cases. 70 | language = None 71 | 72 | # List of patterns, relative to source directory, that match files and 73 | # directories to ignore when looking for source files. 
74 | # This patterns also effect to html_static_path and html_extra_path 75 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 76 | 77 | # The name of the Pygments (syntax highlighting) style to use. 78 | pygments_style = 'sphinx' 79 | 80 | # If true, `todo` and `todoList` produce output, else they produce nothing. 81 | todo_include_todos = False 82 | 83 | 84 | # -- Options for HTML output ---------------------------------------------- 85 | 86 | # The theme to use for HTML and HTML Help pages. See the documentation for 87 | # a list of builtin themes. 88 | # 89 | html_theme = 'alabaster' 90 | 91 | # Theme options are theme-specific and customize the look and feel of a theme 92 | # further. For a list of options available for each theme, see the 93 | # documentation. 94 | # 95 | # html_theme_options = {} 96 | 97 | # Add any paths that contain custom static files (such as style sheets) here, 98 | # relative to this directory. They are copied after the builtin static files, 99 | # so a file named "default.css" will overwrite the builtin "default.css". 100 | html_static_path = ['_static'] 101 | 102 | # Custom sidebar templates, must be a dictionary that maps document names 103 | # to template names. 104 | # 105 | # This is required for the alabaster theme 106 | # refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars 107 | #html_sidebars = { 108 | # '**': [ 109 | # 'relations.html', # needs 'show_related': True theme option to display 110 | # 'searchbox.html', 111 | # ] 112 | #} 113 | 114 | 115 | # -- Options for HTMLHelp output ------------------------------------------ 116 | 117 | # Output file base name for HTML help builder. 118 | htmlhelp_basename = 'hashitdoc' 119 | 120 | 121 | # -- Options for LaTeX output --------------------------------------------- 122 | 123 | latex_elements = { 124 | # The paper size ('letterpaper' or 'a4paper'). 125 | # 126 | # 'papersize': 'letterpaper', 127 | 128 | # The font size ('10pt', '11pt' or '12pt'). 129 | # 130 | # 'pointsize': '10pt', 131 | 132 | # Additional stuff for the LaTeX preamble. 133 | # 134 | # 'preamble': '', 135 | 136 | # Latex figure (float) alignment 137 | # 138 | # 'figure_align': 'htbp', 139 | } 140 | 141 | # Grouping the document tree into LaTeX files. List of tuples 142 | # (source start file, target name, title, 143 | # author, documentclass [howto, manual, or own class]). 144 | latex_documents = [ 145 | (master_doc, 'hashit.tex', 'hashit Documentation', 146 | 'Javad Shafique', 'manual'), 147 | ] 148 | 149 | 150 | # -- Options for manual page output --------------------------------------- 151 | 152 | # One entry per manual page. List of tuples 153 | # (source start file, name, description, authors, manual section). 154 | man_pages = [ 155 | (master_doc, 'hashit', 'hashit Documentation', 156 | [author], 1) 157 | ] 158 | 159 | 160 | # -- Options for Texinfo output ------------------------------------------- 161 | 162 | # Grouping the document tree into Texinfo files. 
List of tuples 163 | # (source start file, target name, title, author, 164 | # dir menu entry, description, category) 165 | texinfo_documents = [ 166 | (master_doc, 'hashit', 'hashit Documentation', 167 | author, 'hashit', 'One line description of project.', 168 | 'Miscellaneous'), 169 | ] 170 | 171 | 172 | 173 | -------------------------------------------------------------------------------- /debian/changelog: -------------------------------------------------------------------------------- 1 | hashit (3.5.2) stable; urgency=low 2 | * 3.5.2 - Fixed issue with #5 3 | 4 | * 3.5.1 - Standerdized input/output 5 | 6 | * 3.4.5 - Added support for mdc2 hash 7 | 8 | * 3.4.4 - Fixed a bug with splitpath and added a removal for * appended by md5sum. 9 | 10 | * 3.4.3 - Fixed some more bugs, and improved some functions including Strip path to addapt a more md5sum style 11 | 12 | * 3.4.2 - Fixed some bugs, improved performance and set -s to the default command (posix). updated docs 13 | 14 | * 3.4.1 - Added the --list and --check-list option to verify lists of text 15 | 16 | * 3.4.0 - Added the --dry-run option and fixed the shake hash to you can now to shake_128_32. And bugfixes 17 | 18 | * 3.3.9 - Windows bugfixes. 19 | 20 | * 3.3.8 - Added more documentation and added sha3 (Keccak) support for detect, also added -e --exclude that can exclude dirs from list 21 | 22 | * 3.3.7 - Refractored the hashit.check code so you can now use it from python! (see `docs/extra.md#gui `__ for an example) 23 | 24 | * 3.3.6 - Minor bugfixes and removed -a, added -p --page for a help-page in the terminal for the python-api 25 | 26 | * 3.3.5 - hashit now supports a list of files such as the wildcard in linux, and can detect if that element is a directory 27 | 28 | * 3.3.4 - Bugfixes and more, fixed parsers added some benchmarks and fixing some more of the snap-related issues 29 | 30 | * 3.3.3 - Full release 31 | 32 | * 3.3.3a3 - Extended Configs working on homepage and docs 33 | 34 | * 3.3.3a2 - Added --trace and fixed some of the issues with detect and argparse 35 | 36 | * 3.3.0a1 - removed argc depend, using argparse 37 | 38 | * 3.3.0a0 - Fixing snap releated issues 39 | 40 | * 3.3.1 - Fixed bug in windows where \ would not be replaced by / in hashit.fixpath 41 | 42 | * 3.3.0 - Added BSD Style output and check format detection. Also an -s option that can hash a piece of text see `docs `__ for more 43 | 44 | * 3.2.1 - ReRelease for snap 45 | 46 | * 3.2.0 - Full support for snap 47 | 48 | * 3.1.5 - Skipped 3.1.4 cause i have been renaming varibles, cleaning code and improving performance. 49 | 50 | * 3.1.2-3.1.3 - fixed this document 51 | 52 | * 3.1.0 - A bunch of bugfixes in comparing of hashes, shake and more 53 | 54 | * 3.0.2 - Fixed size positioning 55 | 56 | * 3.0.1 - Added --size option that allows the program to check file sizes to 57 | 58 | * 3.0.0 - New release, added tests full color-support and CRC32 hashing! (Also added an -a option) 59 | 60 | * 2.3.0 - Fixed a bunch of code, made it faster better more powerful. Full support for sfv and more! 
61 | 62 | * 2.1.3 - Fixed detect bugs added unit tests and some fixes 63 | 64 | * 2.1.2 - Done with detect.py working hash detection 65 | 66 | * 2.1.1 - Some more bugfixed, started working on detect.py 67 | 68 | * 2.1.0 - Updated to support newest version of argc 69 | 70 | * 2.0.1 - Bugfixes 71 | 72 | * 1.2.0 - Full Release 73 | 74 | * 1.1.0 - Added support for python2 and 75 | 76 | * 1.0.2 - Double exits' fixed 77 | 78 | * 1.0.1 - Fixed printing bug 79 | 80 | * 1.0.0 - Major version 81 | 82 | * 0.0.3 - Added documentation and license 83 | 84 | * 0.0.2 - Fixing script bugs 85 | 86 | * 0.0.1 - Initial release, ready for use 87 | 88 | -- Javad Shafique Mon, 05 Feb 2020 15:59:57 +0100 89 | -------------------------------------------------------------------------------- /debian/compat: -------------------------------------------------------------------------------- 1 | 7 2 | -------------------------------------------------------------------------------- /debian/control: -------------------------------------------------------------------------------- 1 | Source: hashit 2 | Maintainer: Javad Shafique 3 | Section: utils 4 | Priority: optional 5 | Build-Depends: python3-setuptools, python3-all, debhelper (>= 7.4.3) 6 | Standards-Version: 3.9.1 7 | 8 | 9 | Package: hashit 10 | Architecture: all 11 | Depends: ${misc:Depends}, ${python3:Depends} 12 | Description: Hashing Application with muliple modes, settings and more! 13 | Hashit, is an hashing application used as an verification tool, intendet to replace the "standard" linux hashing utilities such as 14 | md5sum, sha1sum and so on. One of the main reasons why this program was develop was to create an easy-to-use command line tool for 15 | newcomers and professionals alike to hash/verify files and other data. For more see our homepage at https://cjavad.github.io/hashit 16 | -------------------------------------------------------------------------------- /debian/copyrigth: -------------------------------------------------------------------------------- 1 | Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ 2 | Upstream-Name: hashit 3 | Source: https://github.com/cjavad/hashit 4 | 5 | Files: * 6 | Copyright: Copyright (C) 2020 Javad Shafique 7 | License: MIT 8 | From: https://opensource.org/licenses/MIT 9 | 10 | Files: img/icon.png 11 | Copyright: Copyright (C) freepik.com 12 | License: Proprietary 13 | Icon from freepik.com all rights reserved 14 | -------------------------------------------------------------------------------- /debian/rules: -------------------------------------------------------------------------------- 1 | #!/usr/bin/make -f 2 | 3 | # This file was automatically generated by stdeb 0.8.5 at 4 | # Sat, 03 Feb 2020 15:48:57 +0100 5 | 6 | %: 7 | dh $@ --with python3 --buildsystem=python_distutils 8 | 9 | 10 | override_dh_auto_clean: 11 | python3 setup.py clean -a 12 | find . 
-name \*.pyc -exec rm {} \; 13 | 14 | 15 | 16 | override_dh_auto_build: 17 | python3 setup.py build --force 18 | 19 | 20 | 21 | override_dh_auto_install: 22 | python3 setup.py install --force --root=debian/hashit --no-compile -O0 --install-layout=deb 23 | 24 | 25 | 26 | override_dh_python2: 27 | dh_python2 --no-guessing-versions 28 | 29 | 30 | 31 | 32 | -------------------------------------------------------------------------------- /debian/source/format: -------------------------------------------------------------------------------- 1 | 3.0 (quilt) 2 | -------------------------------------------------------------------------------- /debian/source/options: -------------------------------------------------------------------------------- 1 | extend-diff-ignore="\.egg-info$" -------------------------------------------------------------------------------- /docs/Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org' 2 | 3 | # Updated versions. 4 | gem "jekyll", ">= 3.7.4" 5 | gem "ffi", ">= 1.9.24"https://github.com/cjavad/hashit/compare/a0173adebe558e7e0aa992e64339daa776943ce4...4d81ee79fd0e22368db350534935a91e3ec23c0d 6 | 7 | 8 | gem "jekyll-theme-dinky" 9 | # Plugins 10 | gem 'jekyll-relative-links' 11 | -------------------------------------------------------------------------------- /docs/_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-dinky 2 | description: Hashit an hashing application 3 | show_downloads: True 4 | 5 | github: 6 | owner_url: https://github.com/cjavad/hashit/ 7 | tar_url: https://github.com/cjavad/hashit/raw/master/release/hashit.tar.gz 8 | zip_url: https://github.com/cjavad/hashit/raw/master/release/hashit.zip 9 | 10 | author: 11 | name: Javad Shafique 12 | url: cjavad.wordpress.com 13 | 14 | layout: default 15 | markdown: kramdown 16 | extra: "Hosted with love on github" 17 | 18 | plugins: 19 | - jekyll-relative-links 20 | -------------------------------------------------------------------------------- /docs/_layouts/default.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | {% seo %} 8 | 9 | 10 | 11 | 12 | 13 | 16 | 17 | 18 |
19 | ... 49 | [HTML markup stripped when this dump was rendered; what survives of the layout body are its template fragments: a header showing {{ site.title | default: site.github.repository_name }} and {{ site.description | default: site.github.project_tagline }}, a "This project is maintained by {{ site.github.owner_name }}" note on project pages, the main {{ content }} block, and a footer showing {{ site.extra }}]
50 | 51 | {% if site.google_analytics %} 52 | 60 | {% endif %} 61 | 62 | 63 | -------------------------------------------------------------------------------- /docs/extra.md: -------------------------------------------------------------------------------- 1 | --- 2 | layout: default 3 | --- 4 | 5 | ## Extra 6 | 7 | One of the ways i have setup hashit, is that it's very flexible which means 8 | that you the user can add change the way the program behaves quite easily 9 | 10 | This is achived by making the program needing and global config which in this case 11 | is the dict ```GLOBAL``` and with it you can translate, change and command this program. 12 | 13 | Some notes before i go into details: 14 | - The OSError Error message for linux need to include the ```{}``` for the .format to insert a list of linux distos 15 | 16 | Ok the load() function from ```hasht.__init__``` which also is in ```hashit.__main__``` is what you'll need 17 | for all your plugin loading needs. 18 | 19 | To use this and the GLOBAL config all you need to do is to create your own little python file 20 | lets call it hit.py 21 | 22 | ```py 23 | 24 | from hashit.__main__ import main, load, new, GLOBAL 25 | 26 | # set some config 27 | GLOBAL["DEFAULTS"]["DETECT"] = True # always detect 28 | GLOBAL["DEFAULTS"]["RECURS"] = False # dont use -r by default 29 | GLOBAL["MESSAGES"]["FILE_NOT"] = "I guess that file was like... Oh No" # insert sarcatic error-messages 30 | 31 | 32 | class my_hash_api: 33 | name = "hash_3-2-1" 34 | def __init__(self, data=b''): 35 | self.data = data 36 | 37 | def update(self, data): 38 | self.data += data 39 | 40 | def digest(self): 41 | # calculate hash 42 | return 2312319230193912123 43 | 44 | def hexdigest(self): 45 | return hex(self.digest()) 46 | 47 | load(my_hash_api) # loads' hash_3-2-1 48 | # test it if you want to 49 | # assert new("hash_3-2-1", b'My Custom Hash').hexdigest() == "0x20170216b303493b" 50 | 51 | # and then add a the executable code 52 | if __name__ == "__main__": 53 | main() 54 | ``` 55 | and then you can call the program as such 56 | ```python3 hit.py -H hash_3-2-1``` 57 | 58 | see [plugins](plugins.md) for more about the loading of plugins 59 | 60 | ## GUI 61 | [](#gui) 62 | 63 | One of my goals with this project is to create an easy-to-use gui, which i have yet to acomplish 64 | for now see [this](https://github.com/cjavad/hashit/blob/master/tests/spec/gui.py) file for an example of how to create an gui for hashit 65 | 66 | [back](index.md) 67 | -------------------------------------------------------------------------------- /docs/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cjavad/hashit/b4f0fd3f84267f3a330a6aa34abd14adb31bc8d0/docs/favicon.ico -------------------------------------------------------------------------------- /docs/hashes.md: -------------------------------------------------------------------------------- 1 | --- 2 | layout: default 3 | --- 4 | 5 | ## Supported 6 | 7 | List off all supported hashes for all versions 8 | 9 | For all versions crc32 is supported 10 | 11 | For python2 is 12 | 13 | - md4 14 | - md5 15 | - mdc2 16 | - sha 17 | - sha1 ('DSA-SHA', 'ecdsa-with-SHA1', 'dsaEncryption', 'DSA', 'dsaWithSHA') 18 | - sha224 19 | - sha256 20 | - sha384 21 | - sha512 22 | - ripemd160 23 | - whirlpool 24 | 25 | For python3 is all from python2 and more: 26 | 27 | - blake2s 28 | - blake2b 29 | - sha3_224 30 | - sha3_256 31 | - sha3_384 32 | - sha3_512 33 | 34 | ## Experimental 35 
| 36 | And for python3 a special syntax for the shake hash is used like this 37 | ```shake_(type)_(length)``` 38 | 39 | so 40 | 41 | ```bash 42 | $ touch file 43 | $ python3 -m hashit -H shake_2 file -sp 44 | 49b9 ./file 45 | ``` 46 | 47 | [back](index.md) 48 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | layout: default 3 | --- 4 | 5 | [![](https://raw.githubusercontent.com/cjavad/hashit/master/img/icon.png)](https://pypi.org/project/hashit) 6 | # Hashit, an hashing application 7 | 8 | Hashit is an command line hashing application that supports a large varity of fileformats and hashing algorithms 9 | written in python(3) for hashing an verifing files. 10 | 11 | File Formats it supports: 12 | 13 | - Default: where ```hash [size] filename``` 14 | * used by md5sum 15 | 16 | - Simple File Verification ```filename hash [size]``` 17 | * popular checksum file format 18 | 19 | - BSD-tag style ```hashname (filename) = hash [size]``` 20 | * output from bsd systems hashing commands 21 | 22 | One of the reasons i created this program was because i thougth (from README.rst) that the current naming convention for 23 | hashing and file verification tools on debian based systems (they did better on bsd but still) where seriosly inefficent 24 | by seperating each hash into a different tool (yes i am aware of that these tools are implemented in C and making them into one program could most likely also cause the same kind of confusion) so hashit ships with all hash functions in one program/command where md5 is the default. 25 | 26 | See [docs/hashes.md](hashes.md) for the diffrent types of hashes supported 27 | 28 | [](#usage) 29 | ## Usage 30 | see [docs/usage.md](usage.md) 31 | 32 | 33 | [](#installing) 34 | ## Installing 35 | 36 | I would recommend installing it from pypi like this 37 | 38 | pip(3) install hashit 39 | 40 | But you can also install it from snap (linux only) 41 | 42 | snap install hashit (--devmode or --classic is recommend) 43 | 44 | And if you're using an debian based distro, you can use my ppa 45 | 46 | sudo add-apt-repository ppa:javadsm/javads 47 | sudo apt-get update 48 | sudo apt-get install hashit 49 | 50 | ## Technical Notes 51 | [](#technical) 52 | 53 | There where a memory leak in the check function which caused wrong hashes to be resolved from my observatitions it 54 | has something to do with pythons generators, i fixed it by added an read-all mode (normal) and made it default in 55 | the check function and other systems to, but by given the application ```-m``` you will enable the generator for 56 | the initial hash, which so far hasn't shown any memory leaks but the check function will still be running on the default 57 | read-all mode. 58 | 59 | The check function works by detect/selecting hashtype and file format then applying the data by creating indexes for diffrent values 60 | such as the hash, path and filesize. These indexes can then be used on the lists we create from the line in the file using the file formats 61 | parser. 62 | 63 | Due to some interface problems with snap, is it not posible to access devices than home and external drives. therefore i would recommend you to install it in --devmode but if you want you can also use classic. (Bypass: use sudo) 64 | 65 | Due to the way exclude works it is not needed to use a wildcard '*' to exclude specific extentions for that just do '.ext'. 
66 | it works by doing: 67 | ```py 68 | if 'exclude-string`' in 'path': 69 | remove_from_list('path') 70 | ``` 71 | 72 | ### Extra, see [extra](extra.md) for more 73 | 74 | > 75 | > The hash-classes are built like so 76 | > all classes added needs this kind of 77 | > api due to compatibility. See [plugins](plugins.md) for more 78 | > 79 | >```py 80 | >class hashname: 81 | > name = "hashname" 82 | > 83 | > def __init__(self, data=b''): 84 | > self.data = data 85 | > 86 | > def update(self, data=b''): 87 | > self.data += data 88 | > 89 | > def copy(self): 90 | > # pass data on 91 | > return hashname(self.data) 92 | > 93 | > def digest(self): 94 | > # generate hash 95 | > return GENERATED_HASH 96 | > 97 | > def hexdigest(self): 98 | > # return as hex 99 | > G_HASH = self.digest() 100 | > return convert_to_hex(G_HASH) 101 | >``` 102 | 103 | ## Links 104 | [](#links) 105 | [extra](extra.md) how to setup hashit (plugins & config) 106 | 107 | [pydocmd](pydoc.md) generated with pydocmd 108 | 109 | [pydocs](pydocs/hashit.html) generated with pydocs 110 | 111 | ## Notes: 112 | [](#notes) 113 | 114 | - I interpet N/a as None At All because i can 115 | - Now -s --string (input) is the default option so it is posix compatible 116 | - Detect does not work with shake due to its integration 117 | - I would not recommend using -S --size because then you will have to specify it everytime you check 118 | - Detect format benchmarks (using timeit on python3 ubuntu): 119 | * BSD: 7-5 seconds (10**6 times) 120 | * SFV: 4.2 seconds (10**6 times) 121 | * N/A: 2.4 seconds (10**6 times) 122 | -------------------------------------------------------------------------------- /docs/plugins.md: -------------------------------------------------------------------------------- 1 | --- 2 | layout: default 3 | --- 4 | 5 | With the new release, i have added support for loading thirdparty hashfunction to hashit via load() and GLOBAL 6 | basicly it adds a new entry to GLOBAL.EXTRA with its' name and class. 
The way these plugins work are quite simple, 7 | all it needs is a hashlib compatible api as such: 8 | > plugin.py 9 | 10 | [//]: # (Blank Comment as seperator) 11 | ```py 12 | class thirdpartyhash: 13 | """Api for another hashfunction""" 14 | name = "hashname" # define now or in self.name 15 | 16 | def __init__(self, data=b''): 17 | # self.name = "hashname" 18 | self.data = data 19 | 20 | def update(self, data): 21 | self.data += data 22 | 23 | def digest(self): 24 | # use whatever function you need 25 | return hashname(self.data).raw 26 | 27 | def hexdigest(self): 28 | # convert output to hex 29 | return convert_to_hex(self.digest()) 30 | 31 | # the copy function is optional, not needed 32 | # but is still a part of the api 33 | def copy(self): 34 | return thirdpartyhash(self.data) 35 | 36 | 37 | ``` 38 | > hashit (.py) a new executable 39 | 40 | [//]: # (Blank Comment as seperator) 41 | ```py 42 | import sys 43 | from hashit.__main__ import main, load 44 | from plugin import thirdpartyhash as tph 45 | 46 | load(tph) 47 | main(sys.argv[1:]) 48 | 49 | ``` 50 | or 51 | > pythonprogram.py your own program using hashits' hashing functions 52 | 53 | [//]: # (Blank Comment as seperator) 54 | ```py 55 | from hashit import new, load, hashFile 56 | from plugin import thirdpartyhash as tph 57 | 58 | load(tph) 59 | hasher = new("hashname") 60 | print(hashFile("somefile.ext", hasher, True)) 61 | ``` 62 | 63 | See [extra](extra.md) for more 64 | 65 | [back](index.md) 66 | -------------------------------------------------------------------------------- /docs/pydoc.md: -------------------------------------------------------------------------------- 1 | --- 2 | layout: default 3 | --- 4 | 5 | # hashit 6 | hashit module for hashit command is contaning all the code for hashit 7 | 8 | hashit is an hashing application which main purpose is to replace all the 'default' 9 | hashing commands that comes with linux and also provide a usable hashing program 10 | for windows hence the choice of using python. while hashit supports both python 2 and 3 11 | i would strongly recommend using python3 because that python3 comes with a newer version 12 | of hashlib and therefore many new hash-functions, altough it is posible to add these into 13 | python2 with the load() function which acts like a 'connecter' and enables hashit to use 14 | third-party hashing-functions as long as the have the same api as specified in docs/index.md 15 | 16 | The GLOBAL dict contains all the configurations for this program, translations, error messages 17 | settings, plugins and more. 18 | 19 | __algorithms__ is a list that contains all the builtin algorithms including crc32 20 | 21 | LICENSE: 22 | 23 | MIT License 24 | 25 | Copyright (c) 2020 Javad Shafique 26 | 27 | Permission is hereby granted, free of charge, to any person obtaining a copy 28 | of this software and associated documentation files (the "Software"), to deal 29 | in the Software without restriction, including without limitation the rights 30 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 31 | copies of the Software, and to permit persons to whom the Software is 32 | furnished to do so, subject to the following conditions: 33 | 34 | The above copyright notice and this permission notice shall be included in all 35 | copies or substantial portions of the Software. 
36 | 37 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 38 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 39 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 40 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 41 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 42 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 43 | SOFTWARE. 44 | 45 | NO ONE CAN CLAIM OWNERSHIP OF THIS "SOFTWARE" AND ASSOCIATED DOCUMENTATION FILES. 46 | 47 | ## fixpath 48 | ```python 49 | fixpath(path) 50 | ``` 51 | Fixpath converts the releative path into an absolute path 52 | and if needed can append the path to the snap host-filesystem 53 | which if the application is in devmode gives hashit access to 54 | the hole filesystem, if you're not in devmode and you're still 55 | using snap, then you will need sudo to access the intire system. 56 | Also replaces / with \ on windows 57 | ## reader 58 | ```python 59 | reader(filename, mode='r', comments=True, newlines=False) 60 | ``` 61 | Creates generator for a file or stdin, better for larger files not part of the MEMOPT, 62 | so an standard reader for most uses. Works like readlines but instead of a list it 63 | creates an generator that sortof clean the input before it is parsed by something like 64 | BSD() or SFV(). 65 | ## SFV 66 | ```python 67 | SFV(self, filename=None, size=False) 68 | ``` 69 | Class for parsing and creating sfv strings 70 | SFV() contains all functions needed for parsing, 71 | creating and formating SFV strings 72 | ## BSD 73 | ```python 74 | BSD(self, filename=None, size=False) 75 | ``` 76 | Parser for bsd and formater, also the 77 | same as SFV() but BSD() instead of sfv uses 78 | the bsd checksum output which is like this: 79 | hashname (filename) = hash [size] 80 | ## eprint 81 | ```python 82 | eprint(*args, **kwargs) 83 | ``` 84 | Prints to stderr usefull for warnings and error messages 85 | ## supports_color 86 | ```python 87 | supports_color() 88 | ``` 89 | 90 | Returns True if the running system's terminal supports color, and False 91 | otherwise. 92 | 93 | ## detect_format 94 | ```python 95 | detect_format(hashstr, use_size=False) 96 | ``` 97 | Autodetect hash format, by checking the length and what it contains 98 | ## choose_hash 99 | ```python 100 | choose_hash(hash1, hashit, cli=True) 101 | ``` 102 | 103 | Uses detect.decect to identify hashes with a high accuracy but when 104 | there if some issues it will take user input. CLI-only 105 | 106 | ## new 107 | ```python 108 | new(hashname, data=b'') 109 | ``` 110 | Custom hash-init function that returns the hashes 111 | depends on hashlib.new and GLOBAL["EXTRA"]. 
One of its' 112 | features is it's support for the python3 only shake-hash 113 | scheme were the default hash is shake_256 and the input is 114 | taken like this: 115 | shake_[amount of output] 116 | ## load 117 | ```python 118 | load(hashclass) 119 | ``` 120 | 121 | Add hashes to GLOBAL.EXTRA which is the dict that contains all the "extra" 122 | hash-functions such as Crc32, which allows external hashing algorithms to 123 | be used as long as the have the same api as specified in docs/README.md 124 | 125 | returns True/False based on whether or not the data is loaded 126 | 127 | ## load_all 128 | ```python 129 | load_all(list_of_hashclasses) 130 | ``` 131 | Just for it, a function that loads all plugins in a list 132 | ## hashIter 133 | ```python 134 | hashIter(bytesiter, hasher, ashexstr=True) 135 | ``` 136 | Will hash the blockIter generator and return digest 137 | ## blockIter 138 | ```python 139 | blockIter(afile, blocksize=65536) 140 | ``` 141 | Will create a generator for reading a file 142 | ## hashFile 143 | ```python 144 | hashFile(filename, hasher, memory_opt=False) 145 | ``` 146 | hashFile is a simple way to hash files using diffrent methods 147 | ## check_files 148 | ```python 149 | check_files(file_read, hashit, first_line, sfv=False, size=False, bsdtag=False, dry_run=False) 150 | ``` 151 | Will read an file which have a SFV compatible checksum-file or a standard one and verify the files checksum 152 | by creating an generator which loops over another generator which parses/reads the file and then it will check 153 | if the hash and optionally the size of the files matches the current state of them. For more info on how this work 154 | see docs/index.md#technical. 155 | 156 | ## check 157 | ```python 158 | check(path, hashit, usecolors=False, be_quiet=False, detecthash=True, sfv=False, size=False, bsdtag=False, strict=False, trace=False, dry_run=False) 159 | ``` 160 | Uses check_() to print the error messages and statuses corrent (for CLI) 161 | they are seperated so that you can use the python api, if you so please. 162 | 163 | # hashit.__main__ 164 | Command line application for hashit 165 | 166 | this module "__main__" contains all the code for argparsing, running 167 | and anything needed for an command lin application such as hashit. 
168 | 169 | it uses argc another package by me, but i am considering switching to argparse 170 | 171 | ## Print 172 | ```python 173 | Print(self, nargs=0, **kwargs) 174 | ``` 175 | Print action for argparse, takes one kwarg which is text the varible which contains the string to be printed 176 | ## Execute 177 | ```python 178 | Execute(self, nargs=0, **kwargs) 179 | ``` 180 | Same as Print() but instead of printing an object it calls it takes func (function), and exit (bool) 181 | ## walk 182 | ```python 183 | walk(go_over) 184 | ``` 185 | Goes over a path an finds all files, appends them to a list and returns that list 186 | ## exclude 187 | ```python 188 | exclude(items, excludes) 189 | ``` 190 | Exclude removes all items in a list that is in the excludes list (for dirs) 191 | ## config 192 | ```python 193 | config(parser) 194 | ``` 195 | Sets argvs' config and commands with argparse and returns it for good sake 196 | ## main_ 197 | ```python 198 | main_(args) 199 | ``` 200 | Main function which is the cli parses arguments and runs appropriate commands 201 | ## main 202 | ```python 203 | main(args=None) 204 | ``` 205 | 206 | Main function with error catching, can force-exit with os._exit(1) 207 | 208 | this main function calls main_() and cathes any error while giving the user some "pretty" 209 | errors. 210 | 211 | # hashit.detection 212 | 213 | Copyrigth (c) 2020-present Javad Shafique 214 | 215 | this module using length and connections to find a match 216 | for an hashing algorithem. It's basicly a matching algorigtem 217 | it can be used for almost any pure function in this case for hashes. 218 | 219 | __Copyright (c) 2020-present Javad Shafique__ 220 | 221 | __This 'Software' can't be used without permission__ 222 | 223 | __from Javad Shafique.__ 224 | 225 | 226 | __this module using length and connections to find a match__ 227 | 228 | __for an hashing algorithem. 
It's basicly a matching algorigtem__ 229 | 230 | __it can be used for almost any pure function in this case for hashes.__ 231 | 232 | __basic template:__ 233 | 234 | 235 | 236 | def generate_some_dataset(datatoworkon = "some data"): 237 | dict_for_storing_set = dict() 238 | 239 | for each_element in a_list_of_something_to_compare_with: 240 | data = function_that_uses_data_to_generate_something(each_element, datatoworkon) 241 | 242 | dict_for_storing_set.update({each_element:{"data":data, "size":len(data), "size-as":list(), "connection":list()}}) 243 | 244 | 245 | `find` connection and size 246 | 247 | for each_element in dict_for_storing_set: 248 | elements_data = dict_for_storing_set[each_element]["data"] 249 | elements_size = dict_for_storing_set[each_element]["size"] 250 | 251 | for second_element in dict_for_storing_set: 252 | if dict_for_storing_set[second_element]["size"] == elements_size: 253 | if elements_data == dict_for_storing_set["data"]: 254 | dict_for_storing_set[each_element]["connection"].append(second_element) 255 | else: 256 | dict_for_storing_set[each_element]["size-as"].append(second_element) 257 | else: 258 | continue 259 | 260 | # return finished dataset 261 | 262 | return dict_for_storing_set 263 | 264 | __and for parsing that infomation__ 265 | 266 | __you can use the detect function__ 267 | 268 | __as here:__ 269 | 270 | 271 | 272 | def detect(string, table, maybe = True): 273 | if not (type(string) == str): 274 | return None 275 | 276 | so = list() 277 | so_far = list() 278 | length = len(string) 279 | 280 | for key in table: 281 | dat = table[key] 282 | 283 | if dat["size"] == length: 284 | for i in dat["connection"]: 285 | if i not in so_far: 286 | so_far.append(i) 287 | 288 | for i in so_far: 289 | dat = table[i]["connection"] 290 | 291 | for j in so_far: 292 | if not j in dat: 293 | so_far.remove(j) 294 | 295 | if maybe: 296 | for key in table: 297 | dat = table[key] 298 | 299 | if dat["size"] == length: 300 | so.append(key) 301 | 302 | if len(so_far) >= 0 and len(so) == 1: 303 | 304 | # if there only is one option then use it 305 | 306 | return tup(certain=so, maybe=[]) 307 | else: 308 | return tup(certain=so_far, maybe=so) 309 | 310 | 311 | 312 | __compare hashes for hash-detection__ 313 | 314 | __it can generate data that can compare__ 315 | 316 | __diffrences between the results__ 317 | 318 | 319 | __if works by categorizing the hashes into__ 320 | 321 | __two categorizes. one for thoose who look alike__ 322 | 323 | __and one for thoose who generates the same output__ 324 | 325 | __given the same input. 
And with it a sorted result__ 326 | 327 | __is outputted and is ready to be used be the user.__ 328 | 329 | 330 | __list of which algorithms is most likly used (WIP)__ 331 | 332 | 333 | PRIORITY = { 334 | "md5":["md5"], 335 | "sha1":["dsaEncryption", "DSA", "ecdsa-with-SHA1", "dsaWithSHA", "DSA-SHA"] 336 | } 337 | 338 | ## Closest 339 | ```python 340 | Closest(self, /, *args, **kwargs) 341 | ``` 342 | Closest(certain, maybe) 343 | ## ishex 344 | ```python 345 | ishex(hexstr) 346 | ``` 347 | Checks if string is hexidecimal 348 | ## generate_data_set 349 | ```python 350 | generate_data_set(hashon, algos, hasher_that_takes_new) 351 | ``` 352 | Generates dataset based on data and list of strings that can be used to create objects to use that data 353 | ## detect 354 | ```python 355 | detect(s, table, maybe=True) 356 | ``` 357 | Compares result from datasets, finds connections and eleminates contestants 358 | # hashit.extra 359 | Extra functions and classes for hashit 360 | ## Crc32 361 | ```python 362 | Crc32(self, data=b'') 363 | ``` 364 | This class is an api for the crc32 function that is compatible with mor 365 | ## shake 366 | ```python 367 | shake(self, hashn, data=b'') 368 | ``` 369 | Top-level api for hashlib.shake 370 | 371 | 372 | [back](index.md) -------------------------------------------------------------------------------- /docs/pydocs/hashit.__main__.html: -------------------------------------------------------------------------------- 1 | 2 | Python: module hashit.__main__ 3 | 4 | 5 | 6 | 7 | 8 |
 
9 |  
hashit.__main__ (version 3.5.1)
index
/Users/javad/Dropbox/udvikling/projekter/hashit/hashit/hashit/__main__.py
12 |

Command line application for hashit
13 |  
14 | this module "__main__" contains all the code for argparsing, running
15 | and anything needed for an command lin application such as hashit.
16 |  
17 | it uses argc another package by me, but i am considering switching to argparse

18 |

19 | 20 | 21 | 23 | 24 | 25 |
 
22 | Modules
       
argparse
26 | hashlib
27 |
os
28 | random
29 |
traceback
30 |

31 | 32 | 33 | 35 | 36 | 37 |
 
34 | Classes
       
38 |
argparse.Action(argparse._AttributeHolder) 39 |
40 |
41 |
Execute 42 |
Print 43 |
44 |
45 |
46 |

47 | 48 | 49 | 51 | 52 | 53 | 56 | 57 |
 
50 | class Execute(argparse.Action)
   Execute(nargs=0, **kwargs)
54 |  
55 | Same as Print() but instead of printing an object it calls it takes func (function), and exit (bool)
 
 
Method resolution order:
58 |
Execute
59 |
argparse.Action
60 |
argparse._AttributeHolder
61 |
builtins.object
62 |
63 |
64 | Methods defined here:
65 |
__call__(self, parser, namespace, values, option_string=None)
Call self as a function.
66 | 67 |
__init__(self, nargs=0, **kwargs)
Initialize self.  See help(type(self)) for accurate signature.
68 | 69 |
70 | Methods inherited from argparse._AttributeHolder:
71 |
__repr__(self)
Return repr(self).
72 | 73 |
74 | Data descriptors inherited from argparse._AttributeHolder:
75 |
__dict__
76 |
dictionary for instance variables (if defined)
77 |
78 |
__weakref__
79 |
list of weak references to the object (if defined)
80 |
81 |

82 | 83 | 84 | 86 | 87 | 88 | 91 | 92 |
 
85 | class Print(argparse.Action)
   Print(nargs=0, **kwargs)
89 |  
90 | Print action for argparse, takes one kwarg which is text the varible which contains the string to be printed
 
 
Method resolution order:
93 |
Print
94 |
argparse.Action
95 |
argparse._AttributeHolder
96 |
builtins.object
97 |
98 |
99 | Methods defined here:
100 |
__call__(self, parser, namespace, values, option_string=None)
Call self as a function.
101 | 102 |
__init__(self, nargs=0, **kwargs)
Initialize self.  See help(type(self)) for accurate signature.
103 | 104 |
105 | Methods inherited from argparse._AttributeHolder:
106 |
__repr__(self)
Return repr(self).
107 | 108 |
109 | Data descriptors inherited from argparse._AttributeHolder:
110 |
__dict__
111 |
dictionary for instance variables (if defined)
112 |
113 |
__weakref__
114 |
list of weak references to the object (if defined)
115 |
116 |

117 | 118 | 119 | 121 | 122 | 123 |
 
120 | Functions
       
Exit = exit(...)
exit([status])
124 |  
125 | Exit the interpreter by raising SystemExit(status).
126 | If the status is omitted or None, it defaults to zero (i.e., success).
127 | If the status is an integer, it will be used as the system exit status.
128 | If it is another kind of object, it will be printed and the system
129 | exit status will be one (i.e., failure).
130 |
config(parser)
Sets argvs' config and commands with argparse and returns it for good sake
131 |
exclude(items, excludes)
Exclude removes all items in a list that is in the excludes list (for dirs)
132 |
main(args=None)
Main function with error catching, can force-exit with os._exit(1)
133 |  
134 | this main function calls main_() and cathes any error while giving the user some "pretty"
135 | errors.
136 |
main_(args)
Main function which is the cli parses arguments and runs appropriate commands
137 |
walk(go_over)
Goes over a path an finds all files, appends them to a list and returns that list
138 |

139 | 140 | 141 | 143 | 144 | 145 |
 
142 | Data
       GLOBAL = {'ACCESS': True, 'BLANK': (None, True, False), 'COLORS': {'GREEN': '\x1b[0;32m', 'RED': '\x1b[0;31m', 'RESET': '\x1b[0m', 'YELLOW': '\x1b[0;33m'}, 'DEFAULTS': {'APPEND': False, 'COLORS': True, 'DETECT': False, 'DRYRUN': False, 'HASH': 'md5', 'MEMOPT': False, 'QUIET': False, 'RECURS': False, 'SIZE': False, 'STRICT': False, ...}, 'DEVMODE': True, 'ERRORS': {'FileNotFoundError': "Error, file seems to be missing calling systemd to confirm 'sure you haved checked the MBR?'", 'IndexError': 'Out of range, cause i am not that big :)', 'OSError': {'END': 'JDK, so something happend with your os, message: ', 'linux': 'So {} , to be continued...\n', 'macos': 'Macos (Sierra+) and OSX (El Captain-) thank god for apples naming', 'windows': 'Windows 10, windows 8(.1), windows 7 (sp*), wind...p*), windows 98/95, windows NT *. OK not that bad'}, 'TypeError': 'Wrong type used (in cli-arguments) - please use a static programming language', 'ValueError': 'Wrong type or mood?! :)'}, 'EXTRA': {'crc32': <class 'hashit.extra.Crc32'>}, 'HASH_STR': 'Hello World!', 'IF_NO_ARGS': ['--string'], 'MESSAGES': {'CUR_FORM': 'current format is', 'DRYRUN_NOT': 'Does not support --dry-run', 'EMPTY_CHK': 'checksum file is empty', 'FAIL': 'FAILED', 'FILE_NOT': 'File does not exist', 'HASH_NOT': 'is not a valid hash', 'LENGTH_NOT': 'The files does not have the same length', 'LOAD_FAIL': 'Failed to load', 'MAYBE': 'Maybe', 'MAYBE_M': 'Did you maybe mean:', ...}, ...}
146 | LINUX_LIST = ['Mythbuntu', 'Mac OS X', 'Debian Pure Blend', 'Symphony OS', 'Astra Linux', 'Emdebian Grip', 'Russian Fedora Remix', 'Secure-K', 'Knopperdisk', 'Mobilinux', 'touchscreen', 'MX Linux', 'NepaLinux', 'fli4l', 'Nix', 'Ubuntu Mobile', 'primary', 'Fedora Core', 'ChromeOS', 'rPath', ...]
147 | __algorithms__ = ['md5', 'sha1', 'crc32', 'sha384', 'sha256', 'sha224', 'sha512', 'blake2b', 'blake2s', 'sha3_224', 'sha3_384', 'sha3_256', 'sha3_512']
148 | __help__ = 'Hashit is an hashing program which can be uses t...ot the idea to make such a program using\npython.\n'
149 | __license__ = 'MIT, Copyright (c) 2017-2020 Javad Shafique'

150 | 151 | 152 | 154 | 155 | 156 |
 
153 | Author
       Javad Shafique
157 | -------------------------------------------------------------------------------- /docs/pydocs/hashit.detection.html: -------------------------------------------------------------------------------- 1 | 2 | Python: module hashit.detection 3 | 4 | 5 | 6 | 7 | 8 |
 
9 |  
hashit.detection
index
/Users/javad/Dropbox/udvikling/projekter/hashit/hashit/hashit/detection.py
12 |

Copyrigth (c) 2020-present Javad Shafique
13 |  
14 | this module using length and connections to find a match 
15 | for an hashing algorithem. It's basicly a matching algorigtem
16 | it can be used for almost any pure function in this case for hashes.
17 |  
18 | # Copyright (c) 2020-present Javad Shafique
19 | # This 'Software' can't be used without permission
20 | # from Javad Shafique.
21 |  
22 | # this module using length and connections to find a match 
23 | # for an hashing algorithem. It's basicly a matching algorigtem
24 | # it can be used for almost any pure function in this case for hashes.
25 | # basic template:
26 |  
27 |  
28 | def generate_some_dataset(datatoworkon = "some data"):
29 |     dict_for_storing_set = dict()
30 |  
31 |     for each_element in a_list_of_something_to_compare_with:
32 |         data = function_that_uses_data_to_generate_something(each_element, datatoworkon)
33 |  
34 |         dict_for_storing_set.update({each_element:{"data":data, "size":len(data), "size-as":list(), "connection":list()}})
35 |  
36 |  
37 |     #find connection and size
38 |     
39 |     for each_element in dict_for_storing_set:
40 |         elements_data = dict_for_storing_set[each_element]["data"]
41 |         elements_size = dict_for_storing_set[each_element]["size"]
42 |  
43 |         for second_element in dict_for_storing_set:
44 |             if dict_for_storing_set[second_element]["size"] == elements_size:
45 |                 if elements_data == dict_for_storing_set["data"]:
46 |                     dict_for_storing_set[each_element]["connection"].append(second_element)
47 |                 else:
48 |                     dict_for_storing_set[each_element]["size-as"].append(second_element)
49 |             else:
50 |                 continue
51 |  
52 |     # return finished dataset
53 |     
54 |     return dict_for_storing_set
55 |  
56 | # and for parsing that infomation 
57 | # you can use the detect function
58 | # as here:
59 |  
60 |  
61 | def detect(string, table, maybe = True):
62 |     if not (type(string) == str):
63 |         return None
64 |     
65 |     so = list()
66 |     so_far = list()
67 |     length = len(string)
68 |     
69 |     for key in table:
70 |         dat = table[key]
71 |  
72 |         if dat["size"] == length:
73 |             for i in dat["connection"]:
74 |                 if i not in so_far:
75 |                     so_far.append(i)
76 |     
77 |     for i in so_far:
78 |         dat = table[i]["connection"]
79 |  
80 |         for j in so_far:
81 |             if not j in dat:
82 |                 so_far.remove(j)
83 |  
84 |     if maybe:
85 |         for key in table:
86 |             dat = table[key]
87 |  
88 |             if dat["size"] == length:
89 |                 so.append(key)
90 |  
91 |     if len(so_far) >= 0 and len(so) == 1:
92 |      
93 |         # if there only is one option then use it
94 |      
95 |         return tup(certain=so, maybe=[])
96 |     else:
97 |         return tup(certain=so_far, maybe=so)
98 |  
99 |  
100 |  
101 | # compare hashes for hash-detection
102 | # it can generate data that can compare
103 | # diffrences between the results
104 |  
105 | # if works by categorizing the hashes into 
106 | # two categorizes. one for thoose who look alike
107 | # and one for thoose who generates the same output
108 | # given the same input. And with it a sorted result
109 | # is outputted and is ready to be used be the user.
110 |  
111 | # list of which algorithms is most likly used (WIP)
112 |  
113 | PRIORITY = {
114 |     "md5":["md5"],
115 |     "sha1":["dsaEncryption", "DSA", "ecdsa-with-SHA1", "dsaWithSHA", "DSA-SHA"]
116 | }

117 |

118 | 119 | 120 | 122 | 123 | 124 |
 
121 | Modules
       
string
125 |

126 | 127 | 128 | 130 | 131 | 132 |
 
129 | Classes
       
133 |
builtins.tuple(builtins.object) 134 |
135 |
136 |
Closest 137 |
138 |
139 |
140 |

141 | 142 | 143 | 145 | 146 | 147 | 150 | 151 |
 
144 | NTUPLE = class Closest(builtins.tuple)
   NTUPLE(certain, maybe)
148 |  
149 | Closest(certain, maybe)
 
 
Method resolution order:
152 |
Closest
153 |
builtins.tuple
154 |
builtins.object
155 |
156 |
157 | Methods defined here:
158 |
__getnewargs__(self)
Return self as a plain tuple.  Used by copy and pickle.
159 | 160 |
__repr__(self)
Return a nicely formatted representation string
161 | 162 |
_asdict(self)
Return a new OrderedDict which maps field names to their values.
163 | 164 |
_replace(_self, **kwds)
Return a new Closest object replacing specified fields with new values
165 | 166 |
167 | Class methods defined here:
168 |
_make(iterable) from builtins.type
Make a new Closest object from a sequence or iterable
169 | 170 |
171 | Static methods defined here:
172 |
__new__(_cls, certain, maybe)
Create new instance of Closest(certain, maybe)
173 | 174 |
175 | Data descriptors defined here:
176 |
certain
177 |
Alias for field number 0
178 |
179 |
maybe
180 |
Alias for field number 1
181 |
182 |
183 | Data and other attributes defined here:
184 |
_field_defaults = {}
185 | 186 |
_fields = ('certain', 'maybe')
187 | 188 |
_fields_defaults = {}
189 | 190 |
191 | Methods inherited from builtins.tuple:
192 |
__add__(self, value, /)
Return self+value.
193 | 194 |
__contains__(self, key, /)
Return key in self.
195 | 196 |
__eq__(self, value, /)
Return self==value.
197 | 198 |
__ge__(self, value, /)
Return self>=value.
199 | 200 |
__getattribute__(self, name, /)
Return getattr(self, name).
201 | 202 |
__getitem__(self, key, /)
Return self[key].
203 | 204 |
__gt__(self, value, /)
Return self>value.
205 | 206 |
__hash__(self, /)
Return hash(self).
207 | 208 |
__iter__(self, /)
Implement iter(self).
209 | 210 |
__le__(self, value, /)
Return self<=value.
211 | 212 |
__len__(self, /)
Return len(self).
213 | 214 |
__lt__(self, value, /)
Return self<value.
215 | 216 |
__mul__(self, value, /)
Return self*value.
217 | 218 |
__ne__(self, value, /)
Return self!=value.
219 | 220 |
__rmul__(self, value, /)
Return value*self.
221 | 222 |
count(self, value, /)
Return number of occurrences of value.
223 | 224 |
index(self, value, start=0, stop=9223372036854775807, /)
Return first index of value.
225 |  
226 | Raises ValueError if the value is not present.
227 | 228 |

229 | 230 | 231 | 233 | 234 | 235 |
 
232 | Functions
       
detect(s, table, maybe=True)
Compares result from datasets, finds connections and eleminates contestants
236 |
generate_data_set(hashon, algos, hasher_that_takes_new)
Generates dataset based on data and list of strings that can be used to create objects to use that data
237 |
ishex(hexstr)
Checks if string is hexidecimal
238 |
239 | -------------------------------------------------------------------------------- /docs/pydocs/hashit.extra.html: -------------------------------------------------------------------------------- 1 | 2 | Python: module hashit.extra 3 | 4 | 5 | 6 | 7 | 8 |
 
9 |  
hashit.extra
index
/Users/javad/Dropbox/udvikling/projekter/hashit/hashit/hashit/extra.py
12 |

Extra functions and classes for hashit

13 |

14 | 15 | 16 | 18 | 19 | 20 |
 
17 | Modules
       
binascii
21 |
hashlib
22 |

23 | 24 | 25 | 27 | 28 | 29 |
 
26 | Classes
       
30 |
builtins.object 31 |
32 |
33 |
Crc32 34 |
shake 35 |
36 |
37 |
38 |

39 | 40 | 41 | 43 | 44 | 45 | 48 | 49 |
 
42 | class Crc32(builtins.object)
   Crc32(data=b'')
46 |  
47 | This class is an api for the crc32 function that is compatible with mor
 
 Methods defined here:
50 |
__init__(self, data=b'')
init class, creates data
51 | 52 |
copy(self)
return new Crc32 object with same properties
53 | 54 |
digest(self)
Digest as int
55 | 56 |
hexdigest(self)
Digest as hex
57 | 58 |
update(self, data=b'')
Update self.data with new data
59 | 60 |
61 | Data descriptors defined here:
62 |
__dict__
63 |
dictionary for instance variables (if defined)
64 |
65 |
__weakref__
66 |
list of weak references to the object (if defined)
67 |
68 |

69 | 70 | 71 | 73 | 74 | 75 | 78 | 79 |
 
72 | class shake(builtins.object)
   shake(hashn, data=b'')
76 |  
77 | Top-level api for hashlib.shake
 
 Methods defined here:
80 |
__init__(self, hashn, data=b'')
Init class create hasher and data
81 | 82 |
copy(self)
83 | 84 |
digest(self, length=None)
Digest binary
85 | 86 |
hexdigest(self, length=None)
Digest hex
87 | 88 |
update(self, data=b'')
Update self.data with new data
89 | 90 |
91 | Data descriptors defined here:
92 |
__dict__
93 |
dictionary for instance variables (if defined)
94 |
95 |
__weakref__
96 |
list of weak references to the object (if defined)
97 |
98 |

99 | 100 | 101 | 103 | 104 | 105 |
 
102 | Data
       LINUX_LIST = ['Mythbuntu', 'Mac OS X', 'Debian Pure Blend', 'Symphony OS', 'Astra Linux', 'Emdebian Grip', 'Russian Fedora Remix', 'Secure-K', 'Knopperdisk', 'Mobilinux', 'touchscreen', 'MX Linux', 'NepaLinux', 'fli4l', 'Nix', 'Ubuntu Mobile', 'primary', 'Fedora Core', 'ChromeOS', 'rPath', ...]
106 | -------------------------------------------------------------------------------- /docs/pydocs/hashit.html: -------------------------------------------------------------------------------- 1 | 2 | Python: package hashit 3 | 4 | 5 | 6 | 7 | 8 |
 
9 |  
hashit
index
/Users/javad/Dropbox/udvikling/projekter/hashit/hashit/hashit/__init__.py
12 |

hashit module for hashit command is contaning all the code for hashit
13 |  
14 | hashit is an hashing application which main purpose is to replace all the 'default'
15 | hashing commands that comes with linux and also provide a usable hashing program
16 | for windows hence the choice of using python. while hashit supports both python 2 and 3
17 | i would strongly recommend using python3 because that python3 comes with a newer version
18 | of hashlib and therefore many new hash-functions, altough it is posible to add these into
19 | python2 with the load() function which acts like a 'connecter' and enables hashit to use
20 | third-party hashing-functions as long as the have the same api as specified in docs/index.md
21 |  
22 | The GLOBAL dict contains all the configurations for this program, translations, error messages
23 | settings, plugins and more.
24 |  
25 | __algorithms__ is a list that contains all the builtin algorithms including crc32
26 |  
27 | LICENSE:
28 |  
29 |     MIT License
30 |  
31 |     Copyright (c) 2020 Javad Shafique
32 |  
33 |     Permission is hereby granted, free of charge, to any person obtaining a copy
34 |     of this software and associated documentation files (the "Software"), to deal
35 |     in the Software without restriction, including without limitation the rights
36 |     to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
37 |     copies of the Software, and to permit persons to whom the Software is
38 |     furnished to do so, subject to the following conditions:
39 |  
40 |     The above copyright notice and this permission notice shall be included in all
41 |     copies or substantial portions of the Software.
42 |  
43 |     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
44 |     IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
45 |     FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
46 |     AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
47 |     LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
48 |     OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
49 |     SOFTWARE.
50 |  
51 |     NO ONE CAN CLAIM OWNERSHIP OF THIS "SOFTWARE" AND ASSOCIATED DOCUMENTATION FILES.

52 |

53 | 54 | 55 | 57 | 58 | 59 |
 
56 | Package Contents
       
__main__
60 |
detection
61 |
extra
62 |
version
63 |

64 | 65 | 66 | 68 | 69 | 70 |
 
67 | Classes
       
71 |
builtins.object 72 |
73 |
74 |
BSD 75 |
SFV 76 |
77 |
78 |
79 |

80 | 81 | 82 | 84 | 85 | 86 | 92 | 93 |
 
83 | class BSD(builtins.object)
   BSD(filename=None, size=False)
87 |  
88 | Parser for bsd and formater, also the
89 | same as SFV() but BSD() instead of sfv uses
90 | the bsd checksum output which is like this:
91 |     hashname (filename) = hash [size]
 
 Methods defined here:
94 |
__init__(self, filename=None, size=False)
Inits bsd class with filename and use_size
95 | 96 |
read(self, filename=None, size=False, reader=None)
Creates generator or uses generator that reads and parses bsd strings
97 | 98 |
99 | Static methods defined here:
100 |
format(file_hash, file_path, hashname)
Formats string in a bsd style format
101 | 102 |
parser(line, use_size=False)
Parses bsd string
103 | 104 |
105 | Data descriptors defined here:
106 |
__dict__
107 |
dictionary for instance variables (if defined)
108 |
109 |
__weakref__
110 |
list of weak references to the object (if defined)
111 |
112 |

113 | 114 | 115 | 117 | 118 | 119 | 124 | 125 |
 
116 | class SFV(builtins.object)
   SFV(filename=None, size=False)
120 |  
121 | Class for parsing and creating sfv strings
122 | SFV() contains all functions needed for parsing,
123 | creating and formating SFV strings
 
 Methods defined here:
126 |
__init__(self, filename=None, size=False)
Inits sfv class with file and use_size
127 | 128 |
read(self, filename=None, size=False, reader=None)
Creates generator or uses generator that reads and parses sfv compatible files using reader
129 | 130 |
131 | Static methods defined here:
132 |
format(file_hash, file_path, longest, size='')
calculates the amount of spaces needed in a sfv file
133 | 134 |
parser(line, use_size=False)
135 | 136 |
137 | Data descriptors defined here:
138 |
__dict__
139 |
dictionary for instance variables (if defined)
140 |
141 |
__weakref__
142 |
list of weak references to the object (if defined)
143 |
144 |

145 | 146 | 147 | 149 | 150 | 151 |
 
148 | Functions
       
Exit = exit(...)
exit([status])
152 |  
153 | Exit the interpreter by raising SystemExit(status).
154 | If the status is omitted or None, it defaults to zero (i.e., success).
155 | If the status is an integer, it will be used as the system exit status.
156 | If it is another kind of object, it will be printed and the system
157 | exit status will be one (i.e., failure).
158 |
blockIter(afile, blocksize=65536)
Will create a generator for reading a file
159 |
check(path, hashit, usecolors=False, be_quiet=False, detecthash=True, sfv=False, size=False, bsdtag=False, strict=False, trace=False, dry_run=False)
Uses check_() to print the error messages and statuses corrent (for CLI)
160 | they are seperated so that you can use the python api, if you so please.
161 |
check_files(file_read, hashit, first_line, sfv=False, size=False, bsdtag=False, dry_run=False)
Will read an file which have a SFV compatible checksum-file or a standard one and verify the files checksum
162 | by creating an generator which loops over another generator which parses/reads the file and then it will check
163 | if the hash and optionally the size of the files matches the current state of them. For more info on how this work
164 | see docs/index.md#technical.
165 |
choose_hash(hash1, hashit, cli=True)
Uses detect.decect to identify hashes with a high accuracy but when
166 | there if some issues it will take user input. CLI-only
167 |
detect_format(hashstr, use_size=False)
Autodetect hash format, by checking the length and what it contains
168 |
eprint(*args, **kwargs)
Prints to stderr usefull for warnings and error messages
169 |
fixpath(path)
Fixpath converts the releative path into an absolute path
170 | and if needed can append the path to the snap host-filesystem 
171 | which if the application is in devmode gives hashit access to 
172 | the hole filesystem, if you're not in devmode and you're still
173 | using snap, then you will need sudo to access the intire system.
174 | Also replaces / with \ on windows
175 |
hashFile(filename, hasher, memory_opt=False)
hashFile is a simple way to hash files using diffrent methods
176 |
hashIter(bytesiter, hasher, ashexstr=True)
Will hash the blockIter generator and return digest
177 |
load(hashclass)
Add hashes to GLOBAL.EXTRA which is the dict that contains all the "extra"
178 | hash-functions such as Crc32, which allows external hashing algorithms to 
179 | be used as long as the have the same api as specified in docs/README.md
180 |  
181 | returns True/False based on whether or not the data is loaded
182 |
load_all(list_of_hashclasses)
Just for it, a function that loads all plugins in a list
183 |
new(hashname, data=b'')
Custom hash-init function that returns the hashes
184 | depends on hashlib.new and GLOBAL["EXTRA"]. One of its'
185 | features is it's support for the python3 only shake-hash
186 | scheme were the default hash is shake_256 and the input is
187 | taken like this:
188 |     shake_[amount of output]
189 |
reader(filename, mode='r', comments=True, newlines=False)
Creates generator for a file or stdin, better for larger files not part of the MEMOPT,
190 | so an standard reader for most uses. Works like readlines but instead of a list it
191 | creates an generator that sortof clean the input before it is parsed by something like
192 | BSD() or SFV().
193 |
supports_color()
Returns True if the running system's terminal supports color, and False
194 | otherwise.
195 |

196 | 197 | 198 | 200 | 201 | 202 |
 
199 | Data
       GLOBAL = {'ACCESS': True, 'BLANK': (None, True, False), 'COLORS': {'GREEN': '\x1b[0;32m', 'RED': '\x1b[0;31m', 'RESET': '\x1b[0m', 'YELLOW': '\x1b[0;33m'}, 'DEFAULTS': {'APPEND': False, 'COLORS': True, 'DETECT': False, 'DRYRUN': False, 'HASH': 'md5', 'MEMOPT': False, 'QUIET': False, 'RECURS': False, 'SIZE': False, 'STRICT': False, ...}, 'DEVMODE': True, 'ERRORS': {'FileNotFoundError': "Error, file seems to be missing calling systemd to confirm 'sure you haved checked the MBR?'", 'IndexError': 'Out of range, cause i am not that big :)', 'OSError': {'END': 'JDK, so something happend with your os, message: ', 'linux': 'So {} , to be continued...\n', 'macos': 'Macos (Sierra+) and OSX (El Captain-) thank god for apples naming', 'windows': 'Windows 10, windows 8(.1), windows 7 (sp*), wind...p*), windows 98/95, windows NT *. OK not that bad'}, 'TypeError': 'Wrong type used (in cli-arguments) - please use a static programming language', 'ValueError': 'Wrong type or mood?! :)'}, 'EXTRA': {'crc32': <class 'hashit.extra.Crc32'>}, 'HASH_STR': 'Hello World!', 'IF_NO_ARGS': ['--string'], 'MESSAGES': {'CUR_FORM': 'current format is', 'DRYRUN_NOT': 'Does not support --dry-run', 'EMPTY_CHK': 'checksum file is empty', 'FAIL': 'FAILED', 'FILE_NOT': 'File does not exist', 'HASH_NOT': 'is not a valid hash', 'LENGTH_NOT': 'The files does not have the same length', 'LOAD_FAIL': 'Failed to load', 'MAYBE': 'Maybe', 'MAYBE_M': 'Did you maybe mean:', ...}, ...}
203 | __algorithms__ = ['md5', 'sha1', 'crc32', 'sha256', 'sha384', 'sha512', 'sha224', 'blake2b', 'blake2s', 'sha3_224', 'sha3_384', 'sha3_512', 'sha3_256']
204 | __help__ = 'Hashit is an hashing program which can be uses t...ot the idea to make such a program using\npython.\n'
205 | __license__ = 'MIT, Copyright (c) 2017-2020 Javad Shafique'
206 | print_function = _Feature((2, 6, 0, 'alpha', 2), (3, 0, 0, 'alpha', 0), 65536)
207 | with_statement = _Feature((2, 5, 0, 'alpha', 1), (2, 6, 0, 'alpha', 0), 32768)

208 | 209 | 210 | 212 | 213 | 214 |
 
211 | Author
       Javad Shafique
215 | -------------------------------------------------------------------------------- /docs/pydocs/hashit.version.html: -------------------------------------------------------------------------------- 1 | 2 | Python: module hashit.version 3 | 4 | 5 | 6 | 7 | 8 |
 
9 |  
hashit.version (version 3.5.1)
index
/Users/javad/Dropbox/udvikling/projekter/hashit/hashit/hashit/version.py
12 |

Set global version

13 | 14 | -------------------------------------------------------------------------------- /docs/usage.md: -------------------------------------------------------------------------------- 1 | --- 2 | layout: default 3 | --- 4 | # Hashit Usage 5 | 6 | ### Pretty Gif 7 | ![](https://raw.githubusercontent.com/cjavad/hashit/master/img/demo.gif) 8 | 9 | ## Usage 10 | Hashit takes arguments like this: 11 | ```bash 12 | usage: hashit [-h] [-p] [-V] [-L] [-hl] [-H hashname] 13 | [-e excludes [excludes ...]] [-C] [-sp] [-A] [-q] [-m] [-r] 14 | [-s [string]] [-d [hash]] [-l list] [-cl list list] 15 | [-c filename] [-o filename] [-S] [-sfv] [-bsd] [--dry-run] 16 | [--trace] [--strict] 17 | [path] [files [files ...]] 18 | ``` 19 | 20 | Where the options are at following: 21 | 22 | [//]: # (Nicely generated by argparse) 23 | 24 | ``` 25 | Hashit is an hashing program which can be uses to hash and verify muliple 26 | files on a system. I got the idea from an ubuntu iso image which have this 27 | hash table, so i got the idea to make such a program using python. 28 | 29 | positional arguments: 30 | path 31 | files 32 | 33 | help: 34 | -h, --help show this help message and exit 35 | -p, --page Launch interactive help with python help() (for python 36 | api) 37 | -V, --version Print current version and exit 38 | -L, --license Print license and exit 39 | -hl, --hash-list Prints list of all supported hashes and exits 40 | 41 | formats: 42 | -S, --size Adds the file size to the output 43 | -sfv, --sfv Outputs in a sfv compatible format 44 | -bsd, --bsd output using the bsd checksum-format 45 | 46 | settings: 47 | -H hashname, --hash hashname 48 | Select hash use -hl --hash-list for more info 49 | -e excludes [excludes ...], --exclude excludes [excludes ...] 50 | list of files and directories to exclude 51 | -C, --color Enable colored output where it is supported 52 | -sp, --strip-path Strips fullpath from the results 53 | -A, --append Instead of writing to a file you will append to it 54 | -q, --quiet Reduces output, (silences warnings) 55 | -m, --memory-optimatation 56 | Enables memory optimatation (useful for large files) 57 | -r, --recursive Hash all files in all subdirectories 58 | 59 | other: 60 | -s [string], --string [string] 61 | hash a string or a piece of text 62 | -d [hash], --detect [hash] 63 | Enable hash detection for check 64 | -l list, --list list Takes a file (list) of strings and hashes each of them 65 | -cl list list, --check-list list list 66 | Takes two arguments, hashlist and stringlist 67 | -c filename, --check filename 68 | Verify checksums from a checksum file 69 | -o filename, --output filename 70 | output output to an output (file) 71 | 72 | devtools: 73 | --dry-run prints the list of files that is doing to be hashed 74 | (and how) and the output type 75 | --trace Print traceback of any error cathed and exit 76 | --strict Exit non-zero on any errors 77 | 78 | MIT, Copyrigth (c) 2017-2020 Javad Shafique 79 | ``` 80 | 81 | So if i want to hash a file called fx. 
icon.png in the img dir 82 | ```bash 83 | $ hashit icon.png 84 | eade8f2bb7fcb89d396a850b977740fd img/icon.png 85 | ``` 86 | 87 | Or i wanted to hash the tests directory with lets say blake2s and write it to a file using the bsd format and then verify it 88 | ```bash 89 | $ hashit -H blake2s -o output.txt -bsd tests/ 90 | # no need to specify the files format it detects it automaticly 91 | $ hashit -H blake2s -c output.txt -C # or --color for -C 92 | tests/speed.py: OK 93 | tests/test.py: OK 94 | tests/__init__.py: OK 95 | tests/speed2.py: OK 96 | tests/res/benchmarks2.json: OK 97 | tests/res/file.json: OK 98 | tests/res/crc_hashcollisions.txt: OK 99 | tests/res/benchmarks.json: OK 100 | tests/res/pycrypto_vs_hashlib.json: OK 101 | tests/__pycache__/speed.cpython-36.pyc: OK 102 | tests/__pycache__/speed2.cpython-36.pyc: OK 103 | tests/__pycache__/test.cpython-36.pyc: OK 104 | tests/__pycache__/__init__.cpython-36.pyc: OK 105 | ``` 106 | 107 | But maybe you don't want to hash an file but a password or a piece of text then use the -s option 108 | ```bash 109 | $ hashit -s secret_key 110 | 73eeac3fa1a0ce48f381ca1e6d71f077 111 | # if you just use -s it will read until you use ctrl+D (^D) 112 | $ hashit -s 113 | secret_key^D 114 | 73eeac3fa1a0ce48f381ca1e6d71f077 115 | ``` 116 | If you want to hash multiple files with forexample a wildcard(*): 117 | ```bash 118 | $ hashit *.txt 119 | d41d8cd98f00b204e9800998ecf8427e empty.txt 120 | d41d8cd98f00b204e9800998ecf8427e another_0bytefile.txt 121 | d41d8cd98f00b204e9800998ecf8427e no_data.txt 122 | ``` 123 | 124 | Fun fact the -l --list option can also be used if you really want to check an file, because it 125 | reads a file line by line can it be used for checking each and every line in any file like this: 126 | 127 | > Chances of hash collisions = 0% (JDK, haven't done any statitics but 128 | > technicly a lesser chances for bigger files (more lines more security!) 129 | 130 | ``` 131 | $ hashit -l LICENSE -o license.chk 132 | $ hashit -cl license.chk LICENSE 133 | \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\: OK 134 | ©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©: OK 135 | ©-------------------------------------------------------------------------------©: OK 136 | ©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©: OK 137 | MIT License : OK 138 | : OK 139 | Copyright (c) 2020 Javad Shafique: OK 140 | : OK 141 | Permission is hereby granted, free of charge, to any person obtaining a copy: OK 142 | of this software and associated documentation files (the "Software"), to deal: OK 143 | in the Software without restriction, including without limitation the rights: OK 144 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell: OK 145 | copies of the Software, and to permit persons to whom the Software is: OK 146 | furnished to do so, subject to the following conditions:: OK 147 | : OK 148 | The above copyright notice and this permission notice shall be included in all: OK 149 | copies or substantial portions of the Software.: OK 150 | : OK 151 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR: OK 152 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,: OK 153 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE: OK 154 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER: OK 155 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,: OK 156 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE: OK 157 | SOFTWARE.: OK 158 | : OK 159 | NO ONE CAN CLAIM OWNERSHIP OF THIS "SOFTWARE" AND ASSOCIATED DOCUMENTATION FILES.: OK 160 | : OK 161 | Icon from freepik.com all rights reserved: OK 162 | ©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©: OK 163 | ©-------------------------------------------------------------------------------©: OK 164 | ``` 165 | 166 | 167 | > TIP: add quotes around multi length strings to make them a single argument 168 | 169 | ## From python 170 | It also got a python-level api for those you want to integrate this into your own application 171 | ```py 172 | from hashit import hashFile, new 173 | 174 | # init hashing class 175 | hasher = new("md5") 176 | hash_from_file = hashFile("file.txt", hasher, True) # (True) Activate memory optimatation, faster for larger files 177 | print(hash_from_file) 178 | 179 | print(new("sha3_256", b'DATA').hexdigest()) 180 | print(new("crc32", b'DATA').hexdigest()) # custom hashes is also supported 181 | ``` 182 | See [extra](extra.md) for more customization and extended api usage from python 183 | 184 | 185 | [back](index.md) 186 | -------------------------------------------------------------------------------- /hashit.spec: -------------------------------------------------------------------------------- 1 | Name: hashit 2 | Version: 3.5.3 3 | Release: 1 4 | Summary: hashit, a hashing application. 5 | License: LICENSE 6 | URL: https://cjavad.github.io/hashit 7 | Requires: python3 8 | 9 | %description 10 | Hashing Application with muliple modes, settings and more! 11 | Hashit, is an hashing application used as an verification tool, intendet to replace the "standard" linux hashing utilities such as 12 | md5sum, sha1sum and so on. One of the main reasons why this program was develop was to create an easy-to-use command line tool for 13 | newcomers and professionals alike to hash/verify files and other data. For more see our homepage at https://cjavad.github.io/hashit. 14 | 15 | 16 | %prep 17 | python3 setup.py clean -a 18 | 19 | %build 20 | python3 setup.py build --force 21 | 22 | %install 23 | python3 setup.py install --force 24 | 25 | %files 26 | # python takes care of this 27 | 28 | %changelog 29 | # removed changelog :cry: 30 | -------------------------------------------------------------------------------- /hashit/__main__.py: -------------------------------------------------------------------------------- 1 | """Command line application for hashit 2 | 3 | this module "__main__" contains all the code for argparsing, running 4 | and anything needed for an command lin application such as hashit. 
5 | 6 | it uses argc another package by me, but i am considering switching to argparse 7 | """ 8 | 9 | import random 10 | import traceback 11 | import argparse 12 | # Import all from hashit 13 | from .__init__ import os, hashlib, eprint, hashFile, new, BSD, load, \ 14 | GLOBAL, Exit, check, generate_data_set, detect, SFV, fixpath, reader, \ 15 | __algorithms__, __author__, __help__, __license__, supports_color 16 | 17 | from .extra import LINUX_LIST 18 | from .version import __version__ 19 | 20 | class Print(argparse.Action): 21 | """Print action for argparse, takes one kwarg which is text the varible which contains the string to be printed""" 22 | def __init__(self, nargs=0, **kwargs): 23 | if nargs != 0: 24 | raise ValueError('nargs for Print must be 0; it is just a flag.') 25 | elif "text" in kwargs: 26 | self.data = kwargs.pop("text") 27 | 28 | if "exit" in kwargs: 29 | self.exit = True if kwargs.pop("exit") else False 30 | 31 | super(Print, self).__init__(nargs=nargs, **kwargs) 32 | 33 | def __call__(self, parser, namespace, values, option_string=None): 34 | print(self.data) 35 | 36 | if self.exit: 37 | Exit(0) 38 | 39 | class Execute(argparse.Action): 40 | """Same as Print() but instead of printing an object it calls it takes func (function), and exit (bool)""" 41 | def __init__(self, nargs=0, **kwargs): 42 | if nargs != 0: 43 | raise ValueError('nargs for Execute must be 0; it is just a flag.') 44 | 45 | if "func" in kwargs: 46 | self.data = kwargs.pop("func") 47 | 48 | if "exit" in kwargs: 49 | self.exit = True if kwargs.pop("exit") else False 50 | 51 | super(Execute, self).__init__(nargs=nargs, **kwargs) 52 | 53 | def __call__(self, parser, namespace, values, option_string=None): 54 | print(self.data()) 55 | 56 | if self.exit: 57 | Exit(0) 58 | 59 | def walk(go_over): 60 | """Goes over a path an finds all files, appends them to a list and returns that list""" 61 | walked = [] 62 | for path, _subdirs, files in os.walk(go_over): 63 | # if the path does not exist skip it (What) 64 | if not os.path.exists(path): 65 | continue 66 | # for each file 67 | for name in files: 68 | # add it to in_files list() if it does exist 69 | p = (path + "/" + name).replace("\\", "/").replace("//", "/") 70 | if os.path.exists(p): 71 | walked.append(p) 72 | 73 | # return list with file names 74 | return walked 75 | 76 | # exclude function faster then last implementation 77 | def exclude(items, excludes): 78 | """Exclude removes all items in a list that is in the excludes list (for dirs)""" 79 | 80 | for ex in excludes: 81 | items = [x for x in items if not ex in x] 82 | # return items 83 | return items 84 | 85 | def config(parser): 86 | """Sets argvs' config and commands with argparse and returns it for good sake""" 87 | 88 | def hash_list(): 89 | """Generates an easy-to-read list""" 90 | algos = set((__algorithms__ + list(GLOBAL["EXTRA"].keys()))) # add extras 91 | # sort set 92 | s = [sorted(algos)[x:x+2] for x in range(0, len(algos), 2)] 93 | for c, l in enumerate(s): 94 | s[c] = ', '.join(l) 95 | 96 | return "\n" + '\n'.join(s) + "\n" 97 | 98 | def help_self(): 99 | """Launches help() for module""" 100 | # get info from self 101 | help(os.sys.modules["hashit"]) 102 | help(os.sys.modules[__name__]) # current 103 | help(os.sys.modules["hashit.detection"]) 104 | help(os.sys.modules["hashit.extra"]) 105 | help(os.sys.modules["hashit.version"]) 106 | 107 | return __help__ 108 | 109 | # create groups 110 | ghelp = parser.add_argument_group("help") 111 | formats = parser.add_argument_group("formats") 112 | 
settings = parser.add_argument_group("settings") 113 | other = parser.add_argument_group("other") 114 | dev = parser.add_argument_group("devtools") 115 | 116 | # set commands 117 | parser.add_argument('path', nargs="?", default=os.getcwd()) # for directroy 118 | parser.add_argument("files", nargs="*", default=[]) # for a list of files 119 | 120 | # add all the helping arguments 121 | ghelp.add_argument("-h", "--help", help="show this help message and exit", action=Execute, func=parser.format_help, exit=True) 122 | ghelp.add_argument("-p", "--page", help="Launch interactive help with python help() (for python api)", action=Execute, func=help_self, exit=True) 123 | ghelp.add_argument("-V", "--version", help="Print current version and exit", action="version", version="%(prog)s " + __version__) 124 | ghelp.add_argument("-L", "--license", help="Print license and exit", action=Print, text=__license__, exit=True) 125 | ghelp.add_argument("-hl", "--hash-list", help="Prints list of all supported hashes and exits", action=Execute, func=hash_list, exit=True) 126 | 127 | # all the options that sets something 128 | settings.add_argument("-H", "--hash", help="Select hash use -hl --hash-list for more info", metavar="hashname", default=GLOBAL["DEFAULTS"]["HASH"]) 129 | settings.add_argument("-e", "--exclude", help="list of files and directories to exclude", default=[], metavar="excludes", nargs="+") 130 | settings.add_argument("-C", "--color", help="Enable colored output where it is supported", action="store_true", default=GLOBAL["DEFAULTS"]["COLORS"]) 131 | settings.add_argument("-sp", "--strip-path", help="Strips fullpath from the results", action="store_true", default=GLOBAL["DEFAULTS"]["STRIP"]) 132 | settings.add_argument("-A", "--append", help="Instead of writing to a file you will append to it", action="store_true", default=GLOBAL["DEFAULTS"]["APPEND"]) 133 | settings.add_argument("-q", "--quiet", help="Reduces output, (silences warnings)", action="store_true") 134 | settings.add_argument("-m", "--memory-optimatation", help="Enables memory optimatation (useful for large files)", action="store_true", default=GLOBAL["DEFAULTS"]["MEMOPT"]) 135 | settings.add_argument("-r", "--recursive", help="Hash all files in all subdirectories", action="store_true", default=GLOBAL["DEFAULTS"]["RECURS"]) 136 | 137 | # other, things that are optinional such as detect and string hashes 138 | # other.add_argument("-a", "--all", help="Calculate all hashes for a single file", metavar="filename") NOTE: Removed for now 139 | other.add_argument("-s", "--string", nargs="?", help="hash a string or a piece of text", default=False, metavar="string") 140 | other.add_argument("-d", "--detect", nargs="?", help="Enable hash detection for check, or it can take a hash and decect hash algorithm", metavar="hash", default=GLOBAL["DEFAULTS"]["DETECT"]) 141 | other.add_argument("-l", "--list", help="Takes a file (list) of strings and hashes each of them", metavar="list") 142 | other.add_argument("-cl", "--check-list", help="Takes two arguments, hashlist and stringlist", nargs=2, metavar="list") 143 | # ~ More important ~ 144 | other.add_argument("-c", "--check", help="Verify checksums from a checksum file", nargs="?", const=1337, metavar="filename") 145 | other.add_argument("-o", "--output", help="output output to an output (file)", metavar="filename") 146 | 147 | # ~ Formatting ~ 148 | formats.add_argument("-S", "--size", help="Adds the file size to the output", action="store_true", default=GLOBAL["DEFAULTS"]["SIZE"]) 149 | 
formats.add_argument("-sfv", "--sfv", help="Outputs in a sfv compatible format", action="store_true") 150 | formats.add_argument("-bsd", "--bsd", help="output using the bsd checksum-format", action="store_true") 151 | 152 | # ~ Devtools ~ 153 | dev.add_argument("--dry-run", help="prints the list of files that is doing to be hashed (and how) and the output type", action="store_true", default=GLOBAL["DEFAULTS"]["DRYRUN"]) 154 | dev.add_argument("--trace", help="Print traceback of any error cathed and exit", action="store_true", default=GLOBAL["DEFAULTS"]["TRACE"]) 155 | dev.add_argument("--strict", help="Exit non-zero on any errors", action="store_true", default=GLOBAL["DEFAULTS"]["STRICT"]) 156 | 157 | # return parser 158 | return parser 159 | 160 | def main_(args): 161 | """Main function which is the cli parses arguments and runs appropriate commands""" 162 | # using argparse instead of argc for portability 163 | parser = argparse.ArgumentParser("hashit", description=__help__, epilog=__license__, add_help=False) 164 | # set commands and config with config 165 | parser = config(parser) 166 | 167 | # check for amount of arguments 168 | if not args: 169 | # if there is not arguments show help 170 | args = GLOBAL["IF_NO_ARGS"] 171 | 172 | # parse args 173 | argv = parser.parse_args(args) 174 | # Varibles 175 | 176 | # set colors 177 | RED = "" 178 | GREEN = "" 179 | YELLOW = "" 180 | RESET = "" 181 | 182 | # check if we should use colors 183 | if supports_color() and argv.color: 184 | # if yes enable them 185 | RED = GLOBAL["COLORS"]["RED"] 186 | GREEN = GLOBAL["COLORS"]["GREEN"] 187 | YELLOW = GLOBAL["COLORS"]["YELLOW"] 188 | RESET = GLOBAL["COLORS"]["RESET"] 189 | 190 | # file list, and path 191 | in_files = list() # list of all files 192 | my_path = os.getcwd() # path to search in 193 | 194 | # use md5 by default 195 | hash_is = new(GLOBAL["DEFAULTS"]["HASH"]) 196 | 197 | # check if its an valid hashing 198 | if argv.hash in hashlib.algorithms_available or argv.hash in __algorithms__ or argv.hash in list(GLOBAL["EXTRA"].keys()) or str(argv.hash)[:5] == "shake": 199 | # check if it's in guaranteed 200 | if not argv.hash in hashlib.algorithms_guaranteed and argv.hash in hashlib.algorithms_available: 201 | # if not print an warning 202 | if not argv.quiet: 203 | eprint(YELLOW + str(argv.hash), GLOBAL["MESSAGES"]["WORKS_ON"] + RESET) 204 | # and use the hash 205 | hash_is = new(argv.hash) 206 | 207 | elif not argv.hash in GLOBAL["BLANK"]: 208 | # then print error messageh 209 | eprint(RED + str(argv.hash), GLOBAL["MESSAGES"]["HASH_NOT"], RESET) 210 | 211 | # select output 212 | use_out = False 213 | output = None 214 | 215 | # check if out is set and it has a value 216 | if not argv.output in GLOBAL["BLANK"]: 217 | # if it is open file 218 | use_out = True 219 | # if dryrun dont open file 220 | if not argv.dry_run: 221 | output = open(fixpath(argv.output), GLOBAL["WRITE_MODE"]) 222 | else: 223 | # else set it to false 224 | use_out = False 225 | 226 | 227 | 228 | # check for new path 229 | if os.path.isdir(argv.path): 230 | new_path = argv.path 231 | # check if argument is path else do not change path 232 | if os.path.exists(new_path) and os.path.isdir(new_path): 233 | my_path = new_path 234 | 235 | # ~ Argument taking options ~ 236 | 237 | # check for string in args needed because argparse 238 | # does not support both store_true and store same for detect 239 | if "-s" in args or "--string" in args: 240 | # Check if dryrun is true 241 | if argv.dry_run and not argv.quiet: 242 | # if it is, 
print warning 243 | eprint(YELLOW + "-s --string, {}".format(GLOBAL["MESSAGES"]["DRYRUN_NOT"]) + RESET) 244 | 245 | # exit if strict 246 | if argv.strict: 247 | return 1 248 | 249 | data = argv.string 250 | if not data: 251 | # reed from stdin like md5sum 252 | data = os.sys.stdin.read() 253 | 254 | # check if data ends with newline 255 | if not data.endswith("\n"): 256 | # else print one 257 | print("") 258 | 259 | # if the data isn't bytes 260 | if not isinstance(data, bytes): 261 | # encode it 262 | data = data.encode() 263 | 264 | # then hash-it 265 | hash_is.update(data) 266 | 267 | # check for output methods 268 | if use_out and output != None: 269 | output.write(hash_is.hexdigest()) 270 | else: 271 | print(hash_is.hexdigest()) 272 | 273 | return 0 274 | 275 | # if detect is choosen use it 276 | elif not argv.detect in GLOBAL["BLANK"]: 277 | # Check if dryrun is true 278 | if argv.dry_run and not argv.quiet: 279 | # if it is, print warning 280 | eprint(YELLOW + "-d --detect, {}".format(GLOBAL["MESSAGES"]["DRYRUN_NOT"]) + RESET) 281 | 282 | # exit if strict 283 | if argv.strict: 284 | return 1 285 | 286 | hashes = detect(argv.detect, generate_data_set("Hallo", __algorithms__, new)) 287 | if hashes != None: 288 | for item in hashes.certain: 289 | print(GREEN + GLOBAL["MESSAGES"]["RESULTS_AS"], item + RESET) 290 | 291 | # print sepetator if there is a need for one 292 | if hashes.maybe and hashes.certain: 293 | print("") 294 | 295 | for item in hashes.maybe: 296 | print(YELLOW + GLOBAL["MESSAGES"]["MAYBE"], item + RESET) 297 | else: 298 | print(RED + str(argv.detect) + " " + GLOBAL["MESSAGES"]["HASH_NOT"] + RESET) 299 | 300 | # ~ Check functions ~ 301 | # if to check use that 302 | elif argv.check: 303 | # set argv.detect to true 304 | if "-d" in args or "--detect" in args: 305 | argv.detect = True 306 | # check for file or alternativly if no argument was provided 307 | if os.path.exists(argv.check) or type(argv.check) == int: 308 | # then check (return exitcode) 309 | return check( 310 | "" if type(argv.check) == int else argv.check, 311 | hash_is, 312 | argv.color, 313 | argv.quiet, 314 | argv.detect, 315 | argv.sfv, 316 | argv.size, 317 | argv.bsd, 318 | argv.strict, 319 | argv.trace, 320 | argv.dry_run 321 | ) 322 | 323 | else: 324 | # if the file does not exist 325 | # print error message 326 | eprint(RED + GLOBAL["MESSAGES"]["FILE_NOT"] + RESET) 327 | # check if strict 328 | if argv.strict: 329 | return 1 # if so then exit non-zero 330 | 331 | # Else exit 0 332 | return 0 333 | # ~ Check for list ~ 334 | elif not argv.list in GLOBAL["BLANK"]: 335 | # check for dry_run 336 | if argv.dry_run: 337 | print("Reading {} and hashing strings".format(argv.list)) 338 | 339 | elif os.path.exists(argv.list) and os.path.isfile(argv.list): 340 | for line in reader(argv.list, "r", False): 341 | hashstr = new(hash_is.name, line.encode()).hexdigest() 342 | 343 | if use_out and output != None: 344 | output.write(hashstr + "\n") 345 | else: 346 | print(hashstr) 347 | 348 | 349 | else: 350 | # if the file does not exist 351 | # print error message 352 | eprint(RED + GLOBAL["MESSAGES"]["FILE_NOT"] + RESET) 353 | # check if strict 354 | if argv.strict: 355 | return 1 # if so then exit non-zero 356 | 357 | # Else exit 0 358 | return 0 359 | 360 | elif not argv.check_list in GLOBAL["BLANK"]: 361 | # if check list is true then find the listnames 362 | hash_list = argv.check_list[0] 363 | cstr_list = argv.check_list[1] 364 | # first check for dry_run 365 | if argv.dry_run: 366 | print("Checking if {} 
matches {}".format(cstr_list, hash_list)) 367 | 368 | # else check if the exists 369 | elif os.path.exists(hash_list) and os.path.exists(cstr_list): 370 | # if they do read both files 371 | hash_list = [s.replace("\n", "") for s in open(hash_list, "r").readlines()] 372 | cstr_list = [s.replace("\n", "") for s in open(cstr_list, "r").readlines()] 373 | # and set count to 0 374 | count = 0 375 | 376 | # check if they have the same length 377 | if len(hash_list) != len(cstr_list): 378 | eprint(RED + GLOBAL["MESSAGES"]["LENGTH_NOT"] + RESET) 379 | # print error if needed and check for strict 380 | if argv.strict: 381 | return 1 382 | # loop over files 383 | while len(hash_list) > count: 384 | # check if there is an error 385 | if count > len(cstr_list): 386 | break 387 | 388 | # get last hash 389 | hashstr = hash_list[count] 390 | # get current string 391 | s = cstr_list[count] 392 | # hash current string 393 | newhashstr = new(hash_is.name, s.encode()).hexdigest() 394 | # set base print_str 395 | print_str = s + ": {}" 396 | 397 | # print correct results 398 | if hashstr == newhashstr and not argv.quiet: 399 | print(print_str.format(GREEN + GLOBAL["MESSAGES"]["OK"] + RESET)) 400 | 401 | elif hashstr != newhashstr: 402 | print(print_str.format(RED + GLOBAL["MESSAGES"]["FAIL"] + RESET)) 403 | 404 | # add 1 to count 405 | count += 1 406 | 407 | 408 | else: 409 | # if the files does not exist 410 | # print error message 411 | eprint(RED + GLOBAL["MESSAGES"]["FILE_NOT"] + RESET) 412 | # check if strict 413 | if argv.strict: 414 | return 1 # if so then exit non-zero 415 | 416 | # Exit 0 417 | return 0 418 | 419 | 420 | # ~ Check for files ~ 421 | 422 | # check the argv.files argument, and the path var 423 | # which can be a file. 424 | elif argv.files or os.path.isfile(argv.path): 425 | for fname in argv.files + [argv.path]: 426 | path = fixpath(fname) # use fixpath 427 | if os.path.exists(path): 428 | # if path is file 429 | if os.path.isfile(path): 430 | # append to in_files 431 | in_files.append(path) 432 | else: 433 | # if file not exist then print error 434 | eprint(RED + "{}, ".format(path) + GLOBAL["MESSAGES"]["FILE_NOT"] + RESET) 435 | # if strict exit non-zero 436 | if argv.strict: 437 | return 1 438 | 439 | # else return zero 440 | return 0 441 | 442 | # else if my_path is a dir and r is true 443 | elif argv.recursive and os.path.isdir(my_path): 444 | # walk directory and add files to in_files (use fixpath) 445 | in_files = [fixpath(fname) for fname in walk(my_path)] 446 | 447 | # else if my_path is a dir then just 448 | elif os.path.isdir(my_path): 449 | # hash all of the files in this directory 450 | in_files = [os.path.join(my_path, f) for f in os.listdir(my_path) if os.path.isfile(os.path.join(my_path, f))] 451 | 452 | # if there is any files in in_files 453 | if in_files: 454 | # check if we should remove any files 455 | if argv.exclude: 456 | # exclude files and fix paths 457 | in_files = exclude([fixpath(f) for f in in_files], argv.exclude) 458 | 459 | 460 | if not in_files: 461 | # no more files in in_files 462 | return 0 463 | 464 | # find the longest filename 465 | longest_filename = max(in_files, key=len) 466 | 467 | # go over files and hash them all 468 | for fname in in_files: 469 | # if dry run just print filename and hash 470 | if argv.dry_run: 471 | print("Hashing {} with {} and outputting to {}".format(fname, hash_is.name, ("stdout" if not use_out else argv.output))) 472 | # and continue 473 | continue 474 | try: 475 | # hash file 476 | current_hash = hashFile(fname, 
hash_is, argv.memory_optimatation) 477 | 478 | except (FileNotFoundError, PermissionError) as Error: 479 | # if the file does not exist print a error message 480 | if isinstance(Error, FileNotFoundError): 481 | eprint(RED + fname + ", " + GLOBAL["MESSAGES"]["FILE_NOT"] + RESET) 482 | 483 | # check if we have access to the file 484 | elif isinstance(Error, PermissionError): 485 | eprint(RED + fname + ", " + GLOBAL["MESSAGES"]["PERM_ERR"] + RESET) 486 | 487 | # print stack and trace if needed 488 | if argv.trace: 489 | eprint(YELLOW, end="") 490 | traceback.print_stack(file=os.sys.stderr) 491 | traceback.print_exc(file=os.sys.stderr) 492 | eprint(RESET, end="") 493 | 494 | continue 495 | 496 | # set print_str 497 | print_str = current_hash 498 | size = "" 499 | 500 | # size override size as string 501 | if argv.size: 502 | size = str(os.stat(fname).st_size) 503 | 504 | # if sfv format string 505 | if argv.sfv: 506 | print_str = SFV.format(current_hash, fname, len(longest_filename), size) 507 | # is bsd format string 508 | elif argv.bsd: 509 | print_str = BSD.format(current_hash, fname, hash_is.name) + (size if len(size) <= 0 else " " + size) 510 | # else use N/A 511 | else: 512 | print_str = current_hash + " " + str(size + " " + fname) 513 | 514 | # check if fullpath path shall be stripped 515 | if argv.strip_path: 516 | # then replace current path with 517 | print_str = print_str.replace(os.path.join(os.getcwd(), ""), "") 518 | print_str = print_str.replace("./", "") # if the file is in the current dir 519 | 520 | # if we should output the result to a file 521 | if use_out and output != None: 522 | # write result to an file 523 | output.write(print_str + "\n") 524 | 525 | else: 526 | # else print it 527 | print(print_str) 528 | 529 | # return ExitCode 530 | return 0 531 | 532 | """ 533 | Hashit __main__.py can be executed directly with python(3) -m hashit "commands" 534 | and via snap 535 | """ 536 | 537 | def main(args=None): 538 | """ 539 | Main function with error catching, can force-exit with os._exit(1) 540 | 541 | this main function calls main_() and cathes any error while giving the user some "pretty" 542 | errors. 
543 | """ 544 | # switch args if needed 545 | if args is None: 546 | # to sys.args 547 | args = os.sys.argv[1:] 548 | try: 549 | # execute main application 550 | Exit(main_(args)) # Exit with return code 551 | except Exception as error: 552 | # define colors 553 | RD = "" 554 | YL = "" 555 | RE = "" 556 | # check if term supports color 557 | if supports_color(): 558 | YL = GLOBAL["COLORS"]["YELLOW"] 559 | RD = GLOBAL["COLORS"]["RED"] 560 | RE = GLOBAL["COLORS"]["RESET"] 561 | 562 | if isinstance(error, TypeError): 563 | eprint(YL + GLOBAL["ERRORS"]["TypeError"] + RE) 564 | 565 | elif isinstance(error, ValueError): 566 | eprint(YL + GLOBAL["ERRORS"]["ValueError"] + RE) 567 | 568 | elif isinstance(error, FileNotFoundError): 569 | eprint(YL + GLOBAL["ERRORS"]["FileNotFoundError"] + RE) 570 | 571 | elif isinstance(error, OSError): 572 | eprint(YL + GLOBAL["ERRORS"]["OSError"]["windows"]) 573 | eprint(GLOBAL["ERRORS"]["OSError"]["macos"]) 574 | eprint(GLOBAL["ERRORS"]["OSError"]["linux"].format(', '.join(random.sample(LINUX_LIST, 10)))) 575 | eprint(GLOBAL["ERRORS"]["OSError"]["END"] + RE) 576 | 577 | # print stack and trace if needed 578 | if "--trace" in args or "-t" in args: 579 | eprint(RD, end="") 580 | traceback.print_stack(file=os.sys.stderr) 581 | traceback.print_exc(file=os.sys.stderr) 582 | eprint(RE, end="") 583 | else: 584 | # else print error 585 | eprint(RD + str(error) + RE) 586 | 587 | os._exit(1) # force exit 588 | 589 | # if the program is being called 590 | if __name__ == "__main__": 591 | # Exit 0 on KeyboardInterruptExit 592 | try: 593 | main() # then execute main function 594 | except KeyboardInterrupt: 595 | Exit(130) # According to the posix standard 596 | -------------------------------------------------------------------------------- /hashit/detection.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyrigth (c) 2020-present Javad Shafique 3 | 4 | this module using length and connections to find a match 5 | for an hashing algorithem. It's basicly a matching algorigtem 6 | it can be used for almost any pure function in this case for hashes. 7 | 8 | # Copyright (c) 2020-present Javad Shafique 9 | # This 'Software' can't be used without permission 10 | # from Javad Shafique. 11 | 12 | # this module using length and connections to find a match 13 | # for an hashing algorithem. It's basicly a matching algorigtem 14 | # it can be used for almost any pure function in this case for hashes. 
15 | # basic template: 16 | 17 | 18 | def generate_some_dataset(datatoworkon = "some data"): 19 | dict_for_storing_set = dict() 20 | 21 | for each_element in a_list_of_something_to_compare_with: 22 | data = function_that_uses_data_to_generate_something(each_element, datatoworkon) 23 | 24 | dict_for_storing_set.update({each_element:{"data":data, "size":len(data), "size-as":list(), "connection":list()}}) 25 | 26 | 27 | #find connection and size 28 | 29 | for each_element in dict_for_storing_set: 30 | elements_data = dict_for_storing_set[each_element]["data"] 31 | elements_size = dict_for_storing_set[each_element]["size"] 32 | 33 | for second_element in dict_for_storing_set: 34 | if dict_for_storing_set[second_element]["size"] == elements_size: 35 | if elements_data == dict_for_storing_set["data"]: 36 | dict_for_storing_set[each_element]["connection"].append(second_element) 37 | else: 38 | dict_for_storing_set[each_element]["size-as"].append(second_element) 39 | else: 40 | continue 41 | 42 | # return finished dataset 43 | 44 | return dict_for_storing_set 45 | 46 | # and for parsing that infomation 47 | # you can use the detect function 48 | # as here: 49 | 50 | 51 | def detect(string, table, maybe = True): 52 | if not (type(string) == str): 53 | return None 54 | 55 | so = list() 56 | so_far = list() 57 | length = len(string) 58 | 59 | for key in table: 60 | dat = table[key] 61 | 62 | if dat["size"] == length: 63 | for i in dat["connection"]: 64 | if i not in so_far: 65 | so_far.append(i) 66 | 67 | for i in so_far: 68 | dat = table[i]["connection"] 69 | 70 | for j in so_far: 71 | if not j in dat: 72 | so_far.remove(j) 73 | 74 | if maybe: 75 | for key in table: 76 | dat = table[key] 77 | 78 | if dat["size"] == length: 79 | so.append(key) 80 | 81 | if len(so_far) >= 0 and len(so) == 1: 82 | 83 | # if there only is one option then use it 84 | 85 | return tup(certain=so, maybe=[]) 86 | else: 87 | return tup(certain=so_far, maybe=so) 88 | 89 | 90 | 91 | # compare hashes for hash-detection 92 | # it can generate data that can compare 93 | # diffrences between the results 94 | 95 | # if works by categorizing the hashes into 96 | # two categorizes. one for thoose who look alike 97 | # and one for thoose who generates the same output 98 | # given the same input. And with it a sorted result 99 | # is outputted and is ready to be used be the user. 
100 | 101 | # list of which algorithms is most likly used (WIP) 102 | 103 | PRIORITY = { 104 | "md5":["md5"], 105 | "sha1":["dsaEncryption", "DSA", "ecdsa-with-SHA1", "dsaWithSHA", "DSA-SHA"] 106 | } 107 | """ 108 | 109 | import string 110 | from collections import namedtuple 111 | 112 | # checks if string is hex 113 | def ishex(hexstr): 114 | """Checks if string is hexidecimal""" 115 | return all(char in string.hexdigits for char in hexstr) 116 | 117 | def generate_data_set(hashon, algos, hasher_that_takes_new): 118 | """Generates dataset based on data and list of strings that can be used to create objects to use that data""" 119 | data_dict = dict() 120 | # go over the algorithms 121 | for algo in algos: 122 | hashed = hasher_that_takes_new(algo, hashon.encode()).hexdigest() 123 | # create dict in dict with all infomation stored in a table 124 | data_dict.update({algo:{"data":hashed, "size":len(hashed), "size-as":list(), "connection":list()}}) 125 | 126 | for key in data_dict: 127 | # set default values 128 | hashed = data_dict[key]["data"] 129 | length = data_dict[key]["size"] 130 | 131 | for second in data_dict: 132 | if length == data_dict[second]["size"] and not second == key: 133 | if hashed == data_dict[second]["data"]: 134 | data_dict[key]["connection"].append(second) 135 | else: 136 | data_dict[key]["size-as"].append(second) 137 | else: 138 | continue 139 | 140 | return data_dict 141 | 142 | # return value for detect, a named tuple with two values 143 | NTUPLE = namedtuple("Closest", ["certain", "maybe"]) 144 | 145 | 146 | # detection function returns NTYPLE 147 | def detect(s, table, maybe=True): 148 | """Compares result from datasets, finds connections and eleminates contestants""" 149 | if not (len(s) % 4 == 0 and ishex(s)): 150 | return None 151 | 152 | so = list() 153 | so_far = list() 154 | length = len(s) 155 | 156 | for key in table: 157 | dat = table[key] 158 | 159 | if dat["size"] == length: 160 | for i in dat["connection"]: 161 | if i not in so_far: 162 | so_far.append(i) 163 | 164 | for i in so_far: 165 | dat = table[i]["connection"] 166 | 167 | for j in so_far: 168 | if not j in dat: 169 | so_far.remove(j) 170 | 171 | if maybe: 172 | for key in table: 173 | dat = table[key] 174 | if dat["size"] == length: 175 | so.append(key) 176 | 177 | if len(so_far) >= 0 and len(so) == 1: 178 | # if there only is one option then use it 179 | return NTUPLE(certain=so, maybe=[]) 180 | else: 181 | return NTUPLE(certain=so_far, maybe=so) 182 | -------------------------------------------------------------------------------- /hashit/extra.py: -------------------------------------------------------------------------------- 1 | """Extra functions and classes for hashit""" 2 | import binascii 3 | import hashlib 4 | 5 | # final class 6 | class Crc32: 7 | """This class is an api for the crc32 function that is compatible with mor""" 8 | def __init__(self, data=b''): 9 | """init class, creates data""" 10 | self.name = "crc32" 11 | self.data = data 12 | 13 | def update(self, data=b''): 14 | """Update self.data with new data""" 15 | self.data += data 16 | 17 | def copy(self): 18 | """return new Crc32 object with same properties""" 19 | return Crc32(self.data) 20 | 21 | def digest(self): 22 | """Digest as int""" 23 | return binascii.crc32(self.data) & 0xFFFFFFFF 24 | 25 | def hexdigest(self): 26 | """Digest as hex""" 27 | buf = (binascii.crc32(self.data) & 0xFFFFFFFF) 28 | return ("%08X" % buf).lower() 29 | 30 | # class for shake hash 31 | class shake: 32 | """Top-level api for hashlib.shake""" 
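# Usage sketch (comments only, nothing here executes). Like Crc32 above,
# this wrapper mimics the hashlib interface (name, update, digest, hexdigest),
# which is what lets hashit pick it up like any other hasher. The constructor
# expects a name of the form "shake_<128|256>_<length>":
#
#   s = shake("shake_256_32", b"hello")
#   s.update(b" world")
#   s.hexdigest()   # digest of 32 bytes (64 hex chars), per the trailing "_32"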
33 | def __init__(self, hashn, data=b''): 34 | """Init class create hasher and data""" 35 | # split hashname with _ 36 | hashname = hashn.split("_") 37 | 38 | if len(hashname) == 3: 39 | if hashname[1] in ("256", "128"): 40 | self.hash = hashlib.new("shake_{}".format(hashname[1]), data) 41 | else: 42 | raise ValueError("{} is not a valid hash".format(hashn)) 43 | 44 | self.name = hashn 45 | self.length = int(hashname[2]) 46 | else: 47 | raise ValueError 48 | 49 | def update(self, data=b''): 50 | """Update self.data with new data""" 51 | self.hash.update(data) 52 | 53 | def copy(self): 54 | return self.hash.copy() 55 | 56 | def digest(self, length=None): 57 | """Digest binary""" 58 | length = length or self.length 59 | return self.hash.digest(length) 60 | 61 | def hexdigest(self, length=None): 62 | """Digest hex""" 63 | length = length or self.length 64 | return self.hash.hexdigest(length) 65 | 66 | 67 | LINUX_LIST = ['Mythbuntu', 'Mac OS X', 'Debian Pure Blend', 'Symphony OS', 'Astra Linux', 'Emdebian Grip',\ 68 | 'Russian Fedora Remix', 'Secure-K', 'Knopperdisk', 'Mobilinux', 'touchscreen', 'MX Linux', 'NepaLinux', 'fli4l', 'Nix', 'Ubuntu Mobile', 'primary',\ 69 | 'Fedora Core', 'ChromeOS', 'rPath', 'LEAF Project', 'MuLinux', 'Ubuntu',\ 70 | 'Berry Linux', 'dyne:bolic', 'TurnKey GNU/Linux', 'EasyPeasy', 'Budgie', 'Tin Hat Linux', 'paldo', 'Conary', 'Ubuntu Touch', 'netbooks', 'Emmabuntus',\ 71 | 'Linpus Linux Lite', 'Poseidon Linux', 'Elive', 'Source Mage', 'Skolelinux', 'Ubuntu MATE', 'Ubuntu Kylin', 'Solus', 'Nova', 'MeeGo', 'Pinguy OS', 'Nokia N9',\ 72 | 'Kanotix', 'Korora', 'Linux Mint', 'Billix', 'Linpus Linux', 'Ubuntu JeOS', 'XFCE', 'TinyMe', 'VectorLinux', 'Antergos', 'Asianux', 'BlankOn', 'Netrunner',\ 73 | 'Trisquel GNU/Linux', 'Tinfoil Hat Linux', 'Familiar Linux', 'Sentry Firewall', 'Fedora', 'Parsix', 'MythTV', 'Castile-La Mancha', 'Pardus', 'Austrumi Linux',\ 74 | 'Bodhi Linux', 'OpenZaurus', 'SME Server', 'Mandrake 9.2', 'Frugalware Linux', 'Coyote Linux', 'Sorcerer', 'senior citizens',\ 75 | 'Red Flag Linux', 'Chakra Linux', 'Arch Linux', 'Caldera OpenLinux', 'cAos Linux', 'Red Hat', 'EnGarde Secure Linux', 'Annvix',\ 76 | 'Feather Linux', 'CoreOS', 'Gentoox', 'SUSE Studio', 'Red Hat Linux', 'SmoothWall', 'Goobuntu', 'SystemRescueCD', 'Peppermint OS', 'Wolvix',\ 77 | 'Iskolinux', 'Ubuntu Netbook Edition', 'Lunar Linux', 'Guadalinex', 'bioinformatics', 'Network Security Toolkit', 'The Amnesic Incognito Live System',\ 78 | 'Container Linux', 'ELinOS', 'Aurora', 'LinuxMCE', 'antiX', 'GeeXboX', 'Foresight Linux', 'RXART', 'Prevas Industrial Linux', 'thin client',\ 79 | 'Parabola GNU/Linux-libre', 'Go', 'Ututo', 'Dreamlinux', 'Sunwah Linux', 'LOUD', 'Yellow Dog Linux', 'Trinity Rescue Kit',\ 80 | 'Miracle Linux', 'Hanthana', 'ROSA Linux', 'Munich', 'OpenGEU', 'BackTrack', 'Calculate Linux', 'Sabayon Linux', 'Chromium OS', 'Platypux', 'Xfce', 'ArchBang',\ 81 | 'Baltix', 'Mageia', 'MontaVista Linux', 'SUSE Linux Enterprise Server', 'Joli OS', 'SolydXK', 'DNALinux', 'SalineOS', 'Fermi Linux LTS', 'SliTaz',\ 82 | 'Android', 'KDE', 'Sacix', 'LliureX', 'Xubuntu', 'musl', 'Univention Corporate Server', 'Red Hat Enterprise Linux', 'Ubuntu for Android', 'ALT Linux',\ 83 | 'Canaima', 'Kurumin', 'Moblin', 'Vyatta', 'Kubuntu', 'Pentoo', 'GIS', 'Topologilinux', 'WinLinux', 'autonomic',\ 84 | 'CentOS', 'CRUX', 'Trustix','Galsoft Linux', 'Sugar-on-a-Stick Linux', 'BackBox', 'simpleLinux', 'Smallfoot', 'BackSlash Linux', 'HandyLinux',\ 85 | 'Funtoo Linux', 'Element OS', 'Ubuntu Budgie', 
'YOPER', 'Xbox', 'Corel Linux', 'Webconverger', 'PelicanHPC', 'HostGIS',\ 86 | 'Yggdrasil Linux/GNU/X', 'BLAG Linux and GNU', 'LinHES', 'Raspbian', 'gNewSense', 'Slackintosh', 'OpenWrt', 'SalixOS', 'Qubes OS', 'One-Laptop-Per-Child project',\ 87 | 'Unity Linux', 'Mezzo', 'MythDora', 'Gobuntu', 'Fuduntu', 'CrunchBang Linux', 'Bharat Operating System Solutions', 'Italy', 'Enlightenment','Aurora SPARC Linux',\ 88 | 'Sabily', 'GNU Guix', 'PowerPC', 'MAX', 'SteamOS', 'Raspberry Pi Foundation', 'Mandriva Linux', 'Ubuntu GNOME', 'MkLinux', 'Frozen', 'Karoshi', 'Damn Small Linux',\ 89 | 'ZipSlack', 'MEPIS', 'Scientific Linux', 'Kuki Linux', 'LiMux', 'Finnix', 'SuperGamer', 'NimbleX', 'Slamd64', 'grml', 'Ubuntu Server', 'Alpine Linux', 'Dragora GNU/Linux-Libre',\ 90 | 'Fermi National Accelerator Laboratory', 'Porteus', 'NixOS', 'Generalitat Valenciana', 'Jlime', 'Puppy Linux', 'Tiny Core Linux', 'tomsrtbt', 'Edubuntu', 'OpenMandriva',\ 91 | 'Thinstation', 'elementary OS', 'Void Linux', 'Rocks Cluster Distribution', 'Lubuntu', 'gOS', 'Ubuntu TV', 'Openbox', 'Sharp Zaurus', 'PS2 Linux', 'MintPPC', 'Kali Linux',\ 92 | 'Qimo 4 Kids', 'Nitix', 'SUSE Linux Enterprise Desktop', 'GendBuntu', 'Buildix', 'Impi Linux', 'Linux Lite', 'Guix System Distribution', 'Turbolinux', 'Maemo',\ 93 | 'Softlanding Linux System', 'SUSE', 'EduLinux', 'Debian Live', 'OpenTV', 'Daylight Linux', 'Manjaro Linux', 'Nagra', 'Slax', 'Caldera', 'UberStudent',\ 94 | 'MCC Interim Linux', 'Oracle Linux', 'K12LTSP', 'Devuan', 'OjubaLinux', 'Xandros', 'Molinux', 'openSUSE', 'SparkyLinux', 'DSLinux', 'GoboLinux',\ 95 | 'LinuxTLE', 'MATE', 'Zenwalk', 'Andalucia', 'LinuxBBQ', 'Slackware', 'Vine Linux', 'PCLinuxOS', 'Vinux', 'Musix GNU+Linux',\ 96 | 'Ubuntu Studio', 'Knoppix', 'ClearOS', 'Hikarunix', 'NASLite', 'KateOS', 'LTSP', 'Mandrake Linux', 'Nokia N800'] 97 | -------------------------------------------------------------------------------- /hashit/version.py: -------------------------------------------------------------------------------- 1 | """ Set global version """ 2 | __version__ = "3.5.3" 3 | -------------------------------------------------------------------------------- /img/demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cjavad/hashit/b4f0fd3f84267f3a330a6aa34abd14adb31bc8d0/img/demo.gif -------------------------------------------------------------------------------- /img/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cjavad/hashit/b4f0fd3f84267f3a330a6aa34abd14adb31bc8d0/img/icon.png -------------------------------------------------------------------------------- /release/hashit.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cjavad/hashit/b4f0fd3f84267f3a330a6aa34abd14adb31bc8d0/release/hashit.tar.gz -------------------------------------------------------------------------------- /release/hashit.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cjavad/hashit/b4f0fd3f84267f3a330a6aa34abd14adb31bc8d0/release/hashit.zip -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | from hashit.version import __version__ 3 | 4 | setup( 5 | name = "hashit", 6 | author = "Javad Shafique", 7 | 
author_email = "javadshafique@hotmail.com", 8 | version=__version__, 9 | license="MIT", 10 | include_package_data=True, 11 | test_suite="tests", 12 | zip_safe=True, 13 | entry_points = { 14 | "console_scripts":[ 15 | "hashit = hashit.__main__:main" 16 | ] 17 | }, 18 | url="https://github.com/cjavad/hashit", 19 | packages=["hashit"], 20 | description = "Hashing Application with muliple modes, settings and more! Hashit, is an hashing application used as an verification tool, intendet to replace the 'standard' linux hashing utilities such as md5sum, sha1sum and so on. One of the main reasons why this program was develop was to create an easy-to-use command line tool for newcomers and professionals alike to hash/verify files and other data. For more see our homepage at https://cjavad.github.io/hashit", 21 | long_description = open("README.rst", "r").read(), 22 | classifiers = [ 23 | "Programming Language :: Python :: 2", 24 | "Programming Language :: Python :: 3", 25 | "Topic :: Security :: Cryptography", 26 | "License :: OSI Approved :: MIT License", 27 | "Environment :: Console", 28 | "Intended Audience :: Developers", 29 | "Intended Audience :: System Administrators" 30 | ] 31 | ) 32 | -------------------------------------------------------------------------------- /snapcraft.yaml: -------------------------------------------------------------------------------- 1 | name: hashit 2 | version: 3.5.3 3 | summary: hashit is an replacement for (md5sum, sha1sum, and so on) 4 | icon: img/icon.png 5 | description: | 6 | Hashit, is an hashing application used as an verification tool, intendet to replace the "standard" linux hashing utilities such as 7 | md5sum, sha1sum and so on. One of the main reasons why this program was develop was to create an easy-to-use command line tool for 8 | newcomers and professionals alike to hash/verify files and other data. For more see our homepage at https://cjavad.github.io/hashit 9 | 10 | 11 | grade: stable 12 | confinement: strict 13 | 14 | apps: 15 | hashit: 16 | command: bin/hashit 17 | plugs: [home, removable-media] 18 | 19 | parts: 20 | hashit: 21 | plugin: python 22 | python-version: python3 23 | source: . 24 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Runner for unittests""" 2 | import unittest 3 | from . 
import unit 4 | 5 | def create_suite(): 6 | """Create test suite for unittest""" 7 | suite = unittest.TestSuite() 8 | suite.addTest(unit.Test()) 9 | suite.addTest(unit.TestLoad()) 10 | # return configured testsuite 11 | return suite 12 | 13 | if __name__ == "__main__": 14 | # run all 15 | unittest.TextTestRunner().run(create_suite()) 16 | -------------------------------------------------------------------------------- /tests/benchmarks/res/benchmarks.json: -------------------------------------------------------------------------------- 1 | { 2 | "DSA": { 3 | "algo": "DSA", 4 | "file-time": 2.8450868749996516, 5 | "number": 100000, 6 | "str-time": 0.18207929300024261 7 | }, 8 | "DSA-SHA": { 9 | "algo": "DSA-SHA", 10 | "file-time": 2.666441807999945, 11 | "number": 100000, 12 | "str-time": 0.22197757800040563 13 | }, 14 | "blake2b": { 15 | "algo": "blake2b", 16 | "file-time": 2.8445470799997565, 17 | "number": 100000, 18 | "str-time": 0.20133252900041043 19 | }, 20 | "blake2s": { 21 | "algo": "blake2s", 22 | "file-time": 3.2005310449999342, 23 | "number": 100000, 24 | "str-time": 0.20474589700006618 25 | }, 26 | "crc32": { 27 | "algo": "crc32", 28 | "file-time": 3.0626791010004126, 29 | "number": 100000, 30 | "str-time": 0.19565401399995608 31 | }, 32 | "dsaEncryption": { 33 | "algo": "dsaEncryption", 34 | "file-time": 3.06814845100007, 35 | "number": 100000, 36 | "str-time": 0.18941209899958267 37 | }, 38 | "dsaWithSHA": { 39 | "algo": "dsaWithSHA", 40 | "file-time": 2.65577638700006, 41 | "number": 100000, 42 | "str-time": 0.18819310099979702 43 | }, 44 | "ecdsa-with-SHA1": { 45 | "algo": "ecdsa-with-SHA1", 46 | "file-time": 2.604442813999867, 47 | "number": 100000, 48 | "str-time": 0.1899897069997678 49 | }, 50 | "md4": { 51 | "algo": "md4", 52 | "file-time": 2.5014770049997423, 53 | "number": 100000, 54 | "str-time": 0.1810867640001561 55 | }, 56 | "md5": { 57 | "algo": "md5", 58 | "file-time": 2.858168186000057, 59 | "number": 100000, 60 | "str-time": 0.20138439300035316 61 | }, 62 | "ripemd160": { 63 | "algo": "ripemd160", 64 | "file-time": 3.7956181730000935, 65 | "number": 100000, 66 | "str-time": 0.2105701600003158 67 | }, 68 | "sha": { 69 | "algo": "sha", 70 | "file-time": 3.1197131930002797, 71 | "number": 100000, 72 | "str-time": 0.1953218310000011 73 | }, 74 | "sha1": { 75 | "algo": "sha1", 76 | "file-time": 2.605248396999741, 77 | "number": 100000, 78 | "str-time": 0.17578223699956652 79 | }, 80 | "sha224": { 81 | "algo": "sha224", 82 | "file-time": 3.1550695630003247, 83 | "number": 100000, 84 | "str-time": 0.19930047200023182 85 | }, 86 | "sha256": { 87 | "algo": "sha256", 88 | "file-time": 3.120442401999753, 89 | "number": 100000, 90 | "str-time": 0.2023992099998395 91 | }, 92 | "sha384": { 93 | "algo": "sha384", 94 | "file-time": 3.145929054000135, 95 | "number": 100000, 96 | "str-time": 0.2327170609996756 97 | }, 98 | "sha3_224": { 99 | "algo": "sha3_224", 100 | "file-time": 3.8057815829997708, 101 | "number": 100000, 102 | "str-time": 0.3033928019999621 103 | }, 104 | "sha3_256": { 105 | "algo": "sha3_256", 106 | "file-time": 3.690774378000242, 107 | "number": 100000, 108 | "str-time": 0.2946057000003748 109 | }, 110 | "sha3_384": { 111 | "algo": "sha3_384", 112 | "file-time": 4.387827305000428, 113 | "number": 100000, 114 | "str-time": 0.38775761599981706 115 | }, 116 | "sha3_512": { 117 | "algo": "sha3_512", 118 | "file-time": 4.753955307000069, 119 | "number": 100000, 120 | "str-time": 0.3283711169997332 121 | }, 122 | "sha512": { 123 | "algo": "sha512", 124 | 
"file-time": 2.940883205999853, 125 | "number": 100000, 126 | "str-time": 0.22776917900000626 127 | }, 128 | "whirlpool": { 129 | "algo": "whirlpool", 130 | "file-time": 4.990728630999911, 131 | "number": 100000, 132 | "str-time": 0.25389273800010415 133 | } 134 | } -------------------------------------------------------------------------------- /tests/benchmarks/res/benchmarks2.json: -------------------------------------------------------------------------------- 1 | { 2 | "DSA": { 3 | "algo": "DSA", 4 | "file-time": 2.587571631999708, 5 | "number": 100000, 6 | "str-time": 0.1804981029999908 7 | }, 8 | "DSA-SHA": { 9 | "algo": "DSA-SHA", 10 | "file-time": 2.621822691000034, 11 | "number": 100000, 12 | "str-time": 0.18724435099966286 13 | }, 14 | "blake2b": { 15 | "algo": "blake2b", 16 | "file-time": 2.5808818010000323, 17 | "number": 100000, 18 | "str-time": 0.1994517729999643 19 | }, 20 | "blake2s": { 21 | "algo": "blake2s", 22 | "file-time": 2.9960495519999313, 23 | "number": 100000, 24 | "str-time": 0.18518551200031652 25 | }, 26 | "crc32": { 27 | "algo": "crc32", 28 | "file-time": 3.0182085370001914, 29 | "number": 100000, 30 | "str-time": 0.1889544890000252 31 | }, 32 | "dsaEncryption": { 33 | "algo": "dsaEncryption", 34 | "file-time": 2.721187908999582, 35 | "number": 100000, 36 | "str-time": 0.1898579060002703 37 | }, 38 | "dsaWithSHA": { 39 | "algo": "dsaWithSHA", 40 | "file-time": 2.625476805999824, 41 | "number": 100000, 42 | "str-time": 0.18663340800003425 43 | }, 44 | "ecdsa-with-SHA1": { 45 | "algo": "ecdsa-with-SHA1", 46 | "file-time": 2.702637606999815, 47 | "number": 100000, 48 | "str-time": 0.19142900599990753 49 | }, 50 | "md4": { 51 | "algo": "md4", 52 | "file-time": 2.468673462999959, 53 | "number": 100000, 54 | "str-time": 0.17610469600003853 55 | }, 56 | "md5": { 57 | "algo": "md5", 58 | "file-time": 3.536771564999981, 59 | "number": 100000, 60 | "str-time": 0.18716412799994941 61 | }, 62 | "ripemd160": { 63 | "algo": "ripemd160", 64 | "file-time": 4.072462933000224, 65 | "number": 100000, 66 | "str-time": 0.21529122499987352 67 | }, 68 | "sha": { 69 | "algo": "sha", 70 | "file-time": 3.537468500999694, 71 | "number": 100000, 72 | "str-time": 0.20911539300004733 73 | }, 74 | "sha1": { 75 | "algo": "sha1", 76 | "file-time": 2.6043913959997553, 77 | "number": 100000, 78 | "str-time": 0.17510244300001432 79 | }, 80 | "sha224": { 81 | "algo": "sha224", 82 | "file-time": 3.0966381100001854, 83 | "number": 100000, 84 | "str-time": 0.2003057900001295 85 | }, 86 | "sha256": { 87 | "algo": "sha256", 88 | "file-time": 3.107855021999967, 89 | "number": 100000, 90 | "str-time": 0.20576528700030394 91 | }, 92 | "sha384": { 93 | "algo": "sha384", 94 | "file-time": 3.0349763379999786, 95 | "number": 100000, 96 | "str-time": 0.22604391000004398 97 | }, 98 | "sha3_224": { 99 | "algo": "sha3_224", 100 | "file-time": 3.527630315999886, 101 | "number": 100000, 102 | "str-time": 0.3003184279996276 103 | }, 104 | "sha3_256": { 105 | "algo": "sha3_256", 106 | "file-time": 4.620477275999747, 107 | "number": 100000, 108 | "str-time": 0.2987421979996725 109 | }, 110 | "sha3_384": { 111 | "algo": "sha3_384", 112 | "file-time": 4.095533193999927, 113 | "number": 100000, 114 | "str-time": 0.2997056720000728 115 | }, 116 | "sha3_512": { 117 | "algo": "sha3_512", 118 | "file-time": 5.690221998999732, 119 | "number": 100000, 120 | "str-time": 0.35634653699980845 121 | }, 122 | "sha512": { 123 | "algo": "sha512", 124 | "file-time": 3.3216979710000487, 125 | "number": 100000, 126 | "str-time": 
0.23402784100017016 127 | }, 128 | "whirlpool": { 129 | "algo": "whirlpool", 130 | "file-time": 4.943534676000127, 131 | "number": 100000, 132 | "str-time": 0.2533912399999281 133 | } 134 | } -------------------------------------------------------------------------------- /tests/benchmarks/speed.py: -------------------------------------------------------------------------------- 1 | """Benchmarking for hashits hashing functions and algorithms""" 2 | from __future__ import print_function 3 | import timeit, os, json, hashlib 4 | from memory_profiler import profile 5 | os.sys.path.insert(0, "..") 6 | import hashit 7 | 8 | 9 | # do not use, at least 10 times slower than any other method 10 | def easy_hash(filename, hasher): 11 | """Slow but easy to use self-contained hasher""" 12 | filename = filename 13 | # openfile 14 | with open(filename, "rb") as afile: 15 | for block in (line for line in afile.readlines()): 16 | hasher.update(block) 17 | # return hash 18 | return hasher.hexdigest() 19 | 20 | if os.sys.version_info[0] == 2: 21 | global input 22 | input = raw_input 23 | 24 | # takes algorithem 25 | def hashfile(file, algo): 26 | return hashit.hashIter(hashit.blockIter(open(file, "rb")), hashit.new(algo)) 27 | 28 | def hashstr(string, algo): 29 | return hashit.new(algo, string.encode()).hexdigest() 30 | 31 | 32 | def slow_hashfile(file, algo): 33 | return hashit.new(algo, open(file, "rb").read()).hexdigest() 34 | 35 | 36 | def easy_hashfile(file, algo): 37 | return easy_hash(file, hashit.new(algo)) 38 | 39 | 40 | 41 | def gen(n=timeit.default_number): 42 | for algo in hashit.__algorithms__: 43 | x = timeit.timeit("hashfile('speed.py', '" + algo + "')", setup="from __main__ import hashfile", number=n) 44 | x2 = timeit.timeit("hashstr('"+ str(x) + "', '" + algo + "')", setup="from __main__ import hashstr", number=n) 45 | yield {"algo":algo, "file-time":x,"str-time":x2,"number":n} 46 | 47 | 48 | def test1(n=timeit.default_number, filename=None): 49 | o = dict() 50 | for i in gen(n): 51 | o.update({i["algo"]:i}) 52 | 53 | open((filename or input("Output to: ")), "w").write(json.dumps(o, indent=4, sort_keys=True)) 54 | 55 | 56 | def parse_test1(jsonfile): 57 | data = json.loads(open(jsonfile, "r").read()) 58 | 59 | def findt(s): 60 | sorted_list = sorted(data, key=lambda key: data[key][s]) 61 | for c, i in enumerate(sorted_list): 62 | sorted_list[c] = i + ": " + str(data[i][s]) 63 | 64 | return sorted_list 65 | 66 | print("Fastest to slowest file\n ", '\n '.join(findt("file-time")), end='\n\n') 67 | print("Fastest to slowest string\n ", '\n '.join(findt("str-time")), end='\n\n') 68 | 69 | 70 | 71 | # where default big file is an 512M file 72 | #@profile 73 | def test2(algo, n=1000, bigfile="/home/javad/filename"): 74 | if algo in hashit.__algorithms__: 75 | fast = timeit.timeit("hashfile('" + bigfile + "', '"+algo+"')", setup="from __main__ import hashfile", number=n) 76 | print("Fast:", fast) 77 | all_in = timeit.timeit("slow_hashfile('" + bigfile + "', '"+algo+"')", setup="from __main__ import slow_hashfile", number=n) 78 | print("All in:", all_in) 79 | easy = timeit.timeit("easy_hashfile('" + bigfile + "', '"+algo+"')", setup="from __main__ import easy_hashfile", number=n) 80 | print("Easy:", easy) 81 | ''' 82 | 10000: 83 | Fast: 5.277344635996997 84 | All in: 3.604332027996861 85 | Easy: 35.06488174900005 86 | Filename: speed.py 87 | 88 | 1000000: 89 | Fast: 54.695157744000085 90 | All in: 40.071381821000045 91 | Easy: 880.830499345 92 | Filename: dataset_from_detect.json 93 | 94 | ''' 95 | 
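# Summary of the three file-hashing strategies timed by test2() above
# (comments only):
#
#   hashfile(f, algo)       - hashit.blockIter/hashIter, fixed-size blocks,
#                             constant memory
#   slow_hashfile(f, algo)  - open(f, "rb").read(), whole file in memory at once
#   easy_hashfile(f, algo)  - easy_hash() over readlines(), at least 10x slower
#                             per the note at the top of this file
#
# Example call, mirroring the one in __main__ below:
#   test2("sha256", n=1000, bigfile="./res/file.json")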
96 | #test2("md5", 1000000, path_to_large_file) 97 | 98 | if __name__ == "__main__": 99 | # hash with a bunch of algorigthms a million times each and compare results 100 | if not (os.path.exists("./res/benchmarks.json") and os.path.exists("./res/benchmarks2.json")): 101 | test1(100000, "./res/benchmarks.json") 102 | test1(100000, "./res/benchmarks2.json") 103 | 104 | parse_test1("./res/benchmarks.json") 105 | parse_test1("./res/benchmarks2.json") 106 | 107 | test2("sha256", bigfile="./res/file.json") -------------------------------------------------------------------------------- /tests/benchmarks/speed2.py: -------------------------------------------------------------------------------- 1 | """Speed tests comparing pycrypto and hashlib's hash functions in terms of performance""" 2 | from __future__ import print_function 3 | import timeit, os, json, hashlib 4 | from Crypto.Hash import MD4, MD5, SHA224, SHA256, SHA384, SHA512, SHA, RIPEMD 5 | os.sys.path.insert(0, "..") 6 | import hashit 7 | 8 | 9 | if os.sys.version_info[0] == 2: 10 | global input 11 | input = raw_input 12 | 13 | # dict with hashers 14 | hashers = { 15 | "md4":{"hashlib_hash":hashlib.new("md4"), "crypto_hash":MD4.MD4Hash()}, 16 | "md5":{"hashlib_hash":hashlib.new("md5"), "crypto_hash":MD5.MD5Hash()}, 17 | "sha224":{"hashlib_hash":hashlib.new("sha224"), "crypto_hash":SHA224.SHA224Hash()}, 18 | "sha256":{"hashlib_hash":hashlib.new("sha256"), "crypto_hash":SHA256.SHA256Hash()}, 19 | "sha384":{"hashlib_hash":hashlib.new("sha384"), "crypto_hash":SHA384.SHA384Hash()}, 20 | "sha512":{"hashlib_hash":hashlib.new("sha512"), "crypto_hash":SHA512.SHA512Hash()}, 21 | "sha1":{"hashlib_hash":hashlib.new("sha1"), "crypto_hash":SHA.SHA1Hash()}, 22 | "ripemd160":{"hashlib_hash":hashlib.new("ripemd160"), "crypto_hash":RIPEMD.RIPEMD160Hash()} 23 | } 24 | 25 | # takes hasher 26 | def hashFile(filename, hasher): 27 | return hashit.hashIter(hashit.blockIter(open(filename, "rb")), hasher) 28 | 29 | def hashStr(binary, hasher): 30 | hasher.update(binary) 31 | return hasher.hexdigest() 32 | 33 | def NoMemFile(filename, hasher): 34 | hasher.update(open(filename, "rb").read()) 35 | return hasher.hexdigest() 36 | 37 | def RawCompare(algo, file="speed2.py", n=100000): 38 | # create command 39 | cC = lambda key, filename, command, algorihtm=algo: "{}('{}', hashers['{}']['{}'])".format(command, filename, algorihtm, key) 40 | setup = "from __main__ import hashFile, NoMemFile, hashers" 41 | 42 | Mem_H, Mem_C = timeit.timeit(cC("hashlib_hash", file, "hashFile"), setup=setup, number=n), \ 43 | timeit.timeit(cC("crypto_hash", file, "hashFile"), setup=setup, number=n) 44 | 45 | NoMem_H, NoMem_C = timeit.timeit(cC("hashlib_hash", file, "NoMemFile"), setup=setup, number=n), \ 46 | timeit.timeit(cC("crypto_hash", file, "NoMemFile"), setup=setup, number=n) 47 | 48 | print(Mem_H, NoMem_H) 49 | print(Mem_C, NoMem_C) 50 | 51 | # generate dataset for CompareCnH 52 | def CryptoVsHashlib(file_to_hash="speed.py", data_to_hash="Hello World!", n=timeit.default_number): 53 | # dict for results 54 | results = { 55 | "md4":{}, 56 | "md5":{}, 57 | "sha224":{}, 58 | "sha256":{}, 59 | "sha384":{}, 60 | "sha512":{}, 61 | "sha1":{}, 62 | "ripemd160":{} 63 | } 64 | for algo in hashers: 65 | # hashlib_hash for hashlib and crypto_hash for crypto (pycrypto(dome)) 66 | # first hash an file 67 | h_file = timeit.timeit("hashFile('{}', hashers['{}']['hashlib_hash'])".format(file_to_hash, algo), setup="from __main__ import hashFile, hashers", number=n) 68 | c_file = 
timeit.timeit("hashFile('{}', hashers['{}']['crypto_hash'])".format(file_to_hash, algo), setup="from __main__ import hashFile, hashers", number=n) 69 | 70 | h_str = timeit.timeit("hashStr(b'{}', hashers['{}']['hashlib_hash'])".format(data_to_hash, algo), setup="from __main__ import hashStr, hashers", number=n) 71 | c_str = timeit.timeit("hashStr(b'{}', hashers['{}']['crypto_hash'])".format(data_to_hash, algo), setup="from __main__ import hashStr, hashers", number=n) 72 | 73 | results[algo]["hashlib_hash"] = {"file":h_file, "str":h_str} 74 | results[algo]["crypto_hash"] = {"file":c_file, "str":c_str} 75 | 76 | return results 77 | 78 | # use CryptoVsHashlib to compare speed 79 | 80 | def CompareCnH(output=None, amount_of_datasets=100, n=1000): 81 | res = list() 82 | for _ in range(amount_of_datasets): 83 | res.append(CryptoVsHashlib(n=n)) 84 | 85 | res2 = dict() 86 | 87 | for algo in hashers: 88 | res2[algo] = {} 89 | res2[algo]["crypto_hash"] = {"file":0, "str":0, "amount-file":[], "amount-str":[]} 90 | res2[algo]["hashlib_hash"] = {"file":0, "str":0, "amount-file":[], "amount-str":[]} 91 | 92 | other = lambda x: "crypto_hash" if x == "hashlib_hash" else "hashlib_hash" # switch to the opposite 93 | 94 | for ds in res: 95 | ff = max(ds[algo], key=lambda key: ds[algo][key]["file"]) 96 | fs = max(ds[algo], key=lambda key: ds[algo][key]["str"]) 97 | res2[algo][ff]["file"] += 1 98 | res2[algo][ff]["amount-file"].append(ds[algo][ff]["file"] - ds[algo][other(ff)]["file"]) 99 | res2[algo][fs]["str"] += 1 100 | res2[algo][fs]["amount-str"].append(ds[algo][fs]["str"] - ds[algo][other(fs)]["str"]) 101 | 102 | # write output to file 103 | open((output or str(input("Output to: "))), "w").write(json.dumps({"datasets":res, "results":res2}, indent=4, sort_keys=True)) 104 | 105 | def ReadCnH(filename): 106 | res2 = json.loads(open(filename, "r").read())["results"] 107 | # print data 108 | for algo in hashers: 109 | f_file = max(res2[algo], key=lambda key: res2[algo][key]["file"]) 110 | f_str = max(res2[algo], key=lambda key: res2[algo][key]["str"]) 111 | 112 | print("Fastest", algo, "For files", f_file, ) 113 | print("Fastest", algo, "For strings", f_str) 114 | 115 | if __name__ == "__main__": 116 | 117 | if not os.path.exists("./res/pycrypto_vs_hashlib.json"): 118 | CompareCnH(output="./res/pycrypto_vs_hashlib.json") 119 | 120 | ReadCnH("./res/pycrypto_vs_hashlib.json") -------------------------------------------------------------------------------- /tests/config.py: -------------------------------------------------------------------------------- 1 | # check sums for file 2 | FILE = "LICENSE" 3 | FILE_SUM = "c11869fc956d819d2a336c74f4cc6000" 4 | 5 | FILE_SUMS = { 6 | "DSA": "05d9842ff5ab98ea012b1cbe2693a0714a33547a", 7 | "DSA-SHA": "05d9842ff5ab98ea012b1cbe2693a0714a33547a", 8 | "blake2b": "a64b7235b81d307b919c0d74ded6c86b823e2b9b2a9c1e50e55e273daedd5417027f2a2a1b4abc5d72be5170b462979867cae4b8c3fcf8a7d8a09a1c93fc9d11", 9 | "blake2s": "1ecfc726c59ec5cd52a24730e3345a650d4a2554b1b1dc50ed9c1faf9ebd8179", 10 | "crc32": "3371bb00", 11 | "dsaEncryption": "05d9842ff5ab98ea012b1cbe2693a0714a33547a", 12 | "dsaWithSHA": "05d9842ff5ab98ea012b1cbe2693a0714a33547a", 13 | "ecdsa-with-SHA1": "05d9842ff5ab98ea012b1cbe2693a0714a33547a", 14 | "md4": "1901cf76521dfb68b0a88df72c995345", 15 | "md5": "c11869fc956d819d2a336c74f4cc6000", 16 | "mdc2":"e8746a342b753d68e2c44dbdfdc52950", 17 | "ripemd160": "7fbabc556593e015495d752a0f8ba1d99eee0f8a", 18 | "sha": "597018a568e01f2434ce967be416beaefff02536", 19 | "sha1": 
"05d9842ff5ab98ea012b1cbe2693a0714a33547a", 20 | "sha224": "16ac30faa8d42524bc70f3f52412680ada5993d401ca057edfe3cdec", 21 | "sha256": "81be97a4c17e703ddce3cfe0bd774aba4d67d4e3f225da4b4071a75388132aca", 22 | "sha384": "b4efcc718ecf169bddbaeb023694071193a255d57674144220f9880544da1feaee0a218043ae00cbd3fbe2e84900e771", 23 | "sha512": "70ef754d5a3f87b7a545bce7360f20327b17f094fc75f3fc095551d6ea9e2459b1bbc7d22f26971d7716a8d204e83b33b169099544bc7c32feac26a31090cc39", 24 | "sha3_224":"ed9514941e44182fa51414d87a9b0866aedc1a9a114cf82f001d2213", 25 | "sha3_256":"6b39f585427dd5ef205dbef1560e390ec4e413641c59255b361cf6daca8160eb", 26 | "sha3_384":"0571ed8233e5203fae819095a6802a7a5fbc566c96381cfcf710f2d338c3a6d3bf81f29128a4757c73f7e0a43c31a9e4", 27 | "sha3_512":"b02b6e54b0b44d54dce348d745f718233ee74ef6d95abf45cd76054b85c413b71d4de5dac8e77bbb3a48011bbf0806db7c431a43a7ca2a4fc6d35328575c1c2b", 28 | "whirlpool": "f6f74448a5ea9553387678f68146d0f38dd639e644e547840077cd39a6c20a23452d28d8758aa2aba03bcb2eba38b350050ec5fecc52d1f813ae0e1892994ce8" 29 | } -------------------------------------------------------------------------------- /tests/spec/arg.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | 3 | __version__ = "3.3.3a0" 4 | 5 | class Print(argparse.Action): 6 | def __init__(self, nargs=0, **kwargs): 7 | if nargs != 0: 8 | raise ValueError('nargs for StartAction must be 0; it is just a flag.') 9 | elif "text" in kwargs: 10 | self.data = kwargs.pop("text") 11 | 12 | super().__init__(nargs=nargs, **kwargs) 13 | 14 | def __call__(self, parser, namespace, values, option_string=None): 15 | print(self.data) 16 | 17 | class Execute(argparse.Action): 18 | def __init__(self, nargs=0, **kwargs): 19 | if nargs != 0: 20 | raise ValueError('nargs for StartAction must be 0; it is just a flag.') 21 | 22 | if "func" in kwargs: 23 | self.data = kwargs.pop("func") 24 | 25 | if "exit" in kwargs: 26 | self.exit = True if kwargs.pop("exit") else False 27 | 28 | super().__init__(nargs=nargs, **kwargs) 29 | 30 | def __call__(self, parser, namespace, values, option_string=None): 31 | print(self.data()) 32 | if self.exit: 33 | exit() 34 | 35 | parser = argparse.ArgumentParser("hashit", "hashit [options] $path", "hashit is an hashing application...", "MIT License, Copyrigth (c) 2020 Javad Shafique") 36 | parser.add_argument("-V", "--version", help="Print current version and exit", action="version", version="%(prog)s " + __version__) 37 | parser.add_argument("-l", "--license", help="Print license and exit", action=Print, text="MIT") 38 | parser.add_argument("-H", "--hash", help="Select hash use -hl --hash-list for more info") 39 | parser.add_argument("-hl", "--hash-list", help="Prints list of all supported hashes and exits", action=Execute, func=lambda: 1, exit=True) 40 | parser.add_argument("-a", "--all", help="Calculate all hashes for a single file") 41 | parser.add_argument("-sfv", "--sfv", help="Outputs in a sfv compatible format", action="store_true") 42 | parser.add_argument("-C", "--color", help="Enable colored output where it is supported", action="store_true") 43 | parser.add_argument("-f", "--file", help="Hash single a file") 44 | parser.add_argument("-S", "--size", help="Adds the file size to the output", action="store_true") 45 | parser.add_argument("-s", "--string", help="hash a string or a piece of text", action="store_const", const=True) 46 | parser.add_argument("-sp", "--strip-path", help="Strips fullpath from the results", action="store_true") 47 | 
parser.add_argument("-A", "--append", help="Instead of writing to a file you will append to it", action="store_true") 48 | parser.add_argument("-d", "--detect", help="Enable hash detection for check (can take argument)", action="store_const", const=True) 49 | parser.add_argument("-m", "--memory-optimatation", help="Enables memory optimatation (useful for large files)", action="store_true") 50 | parser.add_argument("-c", "--check", help="Verify checksums from a checksum file") 51 | parser.add_argument("-q", "--quiet", help="Reduces output", action="store_true") 52 | parser.add_argument("-bsd", "--bsd", help="output using the bsd checksum-format", action="store_true") 53 | parser.add_argument("-o", "--output", help="output output to an output (file)") 54 | 55 | args = parser.parse_args() 56 | print(args) -------------------------------------------------------------------------------- /tests/spec/gui.py: -------------------------------------------------------------------------------- 1 | """Simple GUI for hashit 2 | 3 | This program simulates in what ways you can use hashit 4 | """ 5 | import os 6 | import argparse 7 | import easygui as gui 8 | os.sys.path.insert(0, "../..") 9 | from hashit import new, __algorithms__, __help__, hashFile, check_ 10 | from hashit.__main__ import walk 11 | 12 | def showhelp(): 13 | gui.buttonbox(__help__, "HASHIT - HELP", choices=["OK"], image="../../img/icon.png") 14 | 15 | def selecthash(): 16 | return gui.choicebox("Select an hash", "HASHIT", __algorithms__) 17 | 18 | 19 | def writetofile(): 20 | yn = gui.ynbox("Write to file? (Recommended)") 21 | if yn: 22 | return gui.filesavebox("Save to:", "HASHIT") 23 | else: 24 | return False 25 | 26 | def readfromfile(): 27 | return gui.fileopenbox("Read from:", "HASHIT") 28 | 29 | def main_(): 30 | COMMANDS = ["hash an file", "hash files from a directory" , "hash all files and folders in a directory", "check a checksum file", "help", "exit"] 31 | command = gui.choicebox("Select command:", "HASHIT", COMMANDS) 32 | 33 | if command == COMMANDS[0]: 34 | filename = gui.fileopenbox("Choose a file to hash", "HASHIT") 35 | hashres = hashFile(filename, new(selecthash()), False) 36 | file = writetofile() 37 | 38 | gui.msgbox(hashres, "HASHIT") 39 | 40 | elif command == COMMANDS[1]: 41 | my_path = gui.diropenbox("select directory:", "HASHIT") 42 | files = [my_path + "/" + f for f in os.listdir(my_path) if os.path.isfile(os.path.join(my_path, f))] 43 | files_to_hash = gui.multchoicebox("Select files to hash:", "HASHIT", files) 44 | hasher = selecthash() 45 | HASHED = [] 46 | 47 | for fname in files_to_hash: 48 | HASHED.append(str(hashFile(fname, new(hasher), False)) + " " + fname) 49 | 50 | file = writetofile() 51 | 52 | if file: 53 | open(file, "w").write("\n".join(HASHED)) 54 | else: 55 | gui.msgbox('\n\n'.join(HASHED)) 56 | 57 | elif command == COMMANDS[2]: 58 | my_path = gui.diropenbox("select directory:", "HASHIT") 59 | files = walk(my_path) 60 | hasher = selecthash() 61 | HASHED = [] 62 | 63 | for fname in files: 64 | HASHED.append(str(hashFile(fname, new(hasher), False)) + " " + fname) 65 | file = writetofile() 66 | 67 | if file: 68 | open(file, "w").write("\n".join(HASHED)) 69 | else: 70 | gui.msgbox('\n\n'.join(HASHED)) 71 | 72 | elif command == COMMANDS[3]: 73 | file = readfromfile() 74 | hasher = new(selecthash()) 75 | DONE = [] 76 | 77 | for c in check_(file, hasher, open(file, "r").readline(), False, False, False): 78 | 79 | if isinstance(c, str): 80 | gui.exceptionbox("An Error has occured:\n\n {}".format(c), 
"HASHIT") 81 | else: 82 | if not c["hash_check"]: 83 | DONE.append("{}: FAILED".format(c["filename"])) 84 | else: 85 | DONE.append("{}: OK".format(c["filename"])) 86 | 87 | gui.msgbox('\n'.join(DONE)) 88 | 89 | elif command == COMMANDS[4]: 90 | showhelp() 91 | 92 | elif command == COMMANDS[5]: 93 | exit() 94 | 95 | def main(): 96 | while 1: 97 | try: 98 | main_() 99 | except Exception: 100 | break 101 | exit() 102 | 103 | if __name__ == "__main__": 104 | main() -------------------------------------------------------------------------------- /tests/spec/oldmain.py: -------------------------------------------------------------------------------- 1 | """Command line program for hashit 2 | 3 | this module "__main__" contains all the code for argparsing, running 4 | and anything needed for an command lin application such as hashit. 5 | 6 | it uses argc another package by me, but i am considering switching to argparse 7 | """ 8 | import json 9 | import random 10 | from argc import argc 11 | # Import all from hashit 12 | from .__init__ import os, hashlib, eprint, hashFile, new, bsd_tag, load, \ 13 | GLOBAL, Exit, check, generate_data_set, detect, sfv_max, fixpath, \ 14 | __algorithms__, __author__, __help__, __license__, supports_color 15 | 16 | from .extra import LINUX_LIST 17 | from .version import __version__ 18 | 19 | def walk(goover): 20 | """Goes over a path an finds all files, appends them to a list and returns that list""" 21 | walked = [] 22 | for path, _subdirs, files in os.walk(goover): 23 | # for each file 24 | for name in files: 25 | # add it to in_files list() 26 | walked.append((path + "/" + name).replace("\\", "/").replace("//", "/")) 27 | 28 | # return list with file names 29 | return walked 30 | 31 | def config(argv): 32 | """Sets argvs' config and commands""" 33 | 34 | def hash_list(): 35 | """Generates an easy-to-read list""" 36 | algos = set((__algorithms__ + ["sha3_224", "sha3_256", "sha3_384", "sha3_512"] if os.sys.version_info[0] == 3 else __algorithms__)\ 37 | + list(GLOBAL["EXTRA"].keys())) # add extras 38 | # sort set 39 | s = [sorted(algos)[x:x+2] for x in range(0, len(algos), 2)] 40 | for c, l in enumerate(s): 41 | s[c] = ', '.join(l) 42 | 43 | return [""]+s+[""] 44 | 45 | # set commands 46 | argv.set("-h", "--help", "help", "Print help message and exit", None, __help__(argv.generate_docs), True) 47 | argv.set("-v", "--version", "version", "Print current version and exit", None, __version__, True) 48 | argv.set("-l", "--license", "license", "Print license and exit", None, __license__, True) 49 | argv.set("-hl", "--hash-list", "hashlist", "Prints list of all supported hashes and exits", None, hash_list(), True) 50 | # set arguments 51 | argv.set("-H", "--hash", "hash", "Select hash use -hl --hash-list for more info", GLOBAL["DEFAULTS"]["HASH"]) 52 | argv.set("-a", "--all", "all", "Calculate all hashes posible for a single file and output as json") 53 | argv.set("-s", "--string", "str", "hash a string/text", False) 54 | argv.set("-sp", "--strip-path", "spath", "Strips fullpath from results", GLOBAL["DEFAULTS"]["STRIP"]) 55 | argv.set("-c", "--check", "check", "Check checksum-file (sfv or standard)") 56 | argv.set("-o", "--output", "output", "Output data to file (in->do->out)") 57 | argv.set("-C", "--color", "color", "Enable colored output where it is supported", GLOBAL["DEFAULTS"]["COLORS"]) 58 | argv.set("-d", "--detect", "detect", "Enable hash detection for check and if you pass it and hash it will detect that", GLOBAL["DEFAULTS"]["DETECT"]) 59 | argv.set("-f", 
"--file", "file", "Hash single a file") 60 | argv.set("-q", "--quiet", "quiet", "Minimal output", GLOBAL["DEFAULTS"]["QUIET"]) 61 | argv.set("-bsd", "--bsd-tag", "bsd", "create a BSD-style checksum", False) 62 | argv.set("-m", "--memory-optimatation", "memopt", "Enables memory optimatation only useful for large files", GLOBAL["DEFAULTS"]["MEMOPT"]) 63 | argv.set("-sfv", "--simple-file-verification", "sfv", "Outputs in a sfv compatible format", False) 64 | argv.set("-S", "--size", "size", "Adds a size to output", GLOBAL["DEFAULTS"]["SIZE"]) 65 | argv.set("-A", "--append", "append", "Instead of writing to a file you will append to it", GLOBAL["DEFAULTS"]["APPEND"]) 66 | 67 | def main_(args=None): 68 | """Main function which is the cli parses arguments and runs appropriate commands""" 69 | # switch args if needed 70 | if args is None: 71 | # to sys.args 72 | args = os.sys.argv[1:] 73 | 74 | # using argc module by me (support for python2) 75 | argv = argc(args, False) 76 | # set commands and config with config 77 | config(argv) 78 | 79 | if len(args) == 0: 80 | # if there is not arguments show help 81 | argv.args["--help"] = True 82 | # run (can raise SystemExit) 83 | argv.run() 84 | 85 | 86 | # Varibles 87 | 88 | # set colors 89 | RED = "" 90 | GREEN = "" 91 | YELLOW = "" 92 | RESET = "" 93 | 94 | # file list, and path 95 | in_files = list() # list of all files 96 | my_path = os.getcwd() # path to search in 97 | 98 | Config = {} 99 | # get hash from arguments 100 | # default is md5 for now 101 | Config["hash"] = argv.get("hash") 102 | # get all other options and parse them 103 | Config["detect?"] = argv.get("detect") # to detect or not 104 | Config["check?"] = argv.get("check") # to check or not 105 | Config["single"] = argv.get("file") # only hash a single file (md5sum behavior) 106 | Config["all_single"] = argv.get("all") 107 | Config["colors?"] = argv.get("color", True) # use colors (True for detect type) 108 | Config["quiet?"] = argv.get("quiet") # silent output 109 | Config["strip-path?"] = argv.get("spath") # strip fullpath 110 | Config["writeToFile"] = argv.get("output") # output output to output (in->do->out) 111 | Config["SimpleFileVerification"] = argv.get("sfv") # use simple file verification compatible format 112 | Config["BSDTag"] = argv.get("bsd") # create a BSD-style checksum 113 | Config["MemoryOptimatation"] = argv.get("memopt") # use memory optimatations 114 | Config["AddSize"] = argv.get("size") # get size of file in bytes 115 | Config["String?"] = argv.get("str") # get string/setting 116 | 117 | # use md5 by default 118 | hash_is = new(GLOBAL["DEFAULTS"]["HASH"]) 119 | 120 | # check if its an valid hashing 121 | if Config["hash"] in hashlib.algorithms_available or Config["hash"] in __algorithms__ or Config["hash"] in list(GLOBAL["EXTRA"].keys()) or str(Config["hash"])[:5] == "shake": 122 | # check if it's in guaranteed 123 | if not Config["hash"] in hashlib.algorithms_guaranteed and Config["hash"] in hashlib.algorithms_available: 124 | # if not print an warning 125 | if not Config["quiet?"]: 126 | eprint(YELLOW + str(Config["hash"]), GLOBAL["MESSAGES"]["WORKS_ON"] + RESET) 127 | # and use the hash 128 | hash_is = new(Config["hash"]) 129 | 130 | else: 131 | if not Config["hash"] in GLOBAL["BLANK"] and not Config["quiet?"]: 132 | # then print error message 133 | eprint(RED + str(Config["hash"]), GLOBAL["MESSAGES"]["HASH_NOT"], RESET) 134 | 135 | # select output 136 | use_out = False 137 | output = None 138 | 139 | # check if out is set and it has a value 140 | if not 
Config["writeToFile"] in GLOBAL["BLANK"]: 141 | # if it is open file 142 | use_out = True 143 | output = open(fixpath(Config["writeToFile"]), GLOBAL["WRITE_MODE"]) 144 | else: 145 | # else set it to false 146 | use_out = False 147 | 148 | 149 | # check if we should use colors 150 | if supports_color() and Config["colors?"]: 151 | # if yes enable them 152 | RED = GLOBAL["COLORS"]["RED"] 153 | GREEN = GLOBAL["COLORS"]["GREEN"] 154 | YELLOW = GLOBAL["COLORS"]["YELLOW"] 155 | RESET = GLOBAL["COLORS"]["RESET"] 156 | 157 | 158 | # check for new path 159 | if len(args) >= 1: 160 | new_path = args[len(args) - 1].replace("\\", "/") 161 | # check if argument is path else do not change path 162 | if os.path.exists(new_path) and ("/" in new_path or new_path in (".", "..")): 163 | my_path = new_path 164 | 165 | # check for string 166 | if not Config["String?"] == False: 167 | data = Config["String?"] 168 | if data == True: 169 | # reed from stdin like md5sum 170 | data = os.sys.stdin.read() 171 | 172 | # check if data ends with newline 173 | if not data.endswith("\n"): 174 | # else print one 175 | print("") 176 | 177 | # if the data isn't bytes 178 | if not isinstance(data, bytes): 179 | # encode it 180 | data = data.encode() 181 | 182 | # then hash-it 183 | hash_is.update(data) 184 | 185 | # check for output methods 186 | if use_out and output != None: 187 | output.write(hash_is.hexdigest()) 188 | else: 189 | print(hash_is.hexdigest()) 190 | 191 | # check for hash one file 192 | elif not Config["all_single"] in GLOBAL["BLANK"]: 193 | if os.path.exists(Config["all_single"]): 194 | data = open(Config["all_single"], "rb").read() 195 | results = {} 196 | for algo in __algorithms__: 197 | results[algo] = new(algo, data).hexdigest() 198 | 199 | out = json.dumps(results, indent=4, sort_keys=True) 200 | 201 | if use_out and output != None: 202 | output.write(out) 203 | else: 204 | print(out) 205 | else: 206 | eprint(RED + GLOBAL["MESSAGES"]["FILE_NOT"] + RESET) 207 | 208 | # if detect is choosen use it 209 | elif not Config["detect?"] in GLOBAL["BLANK"]: 210 | hashes = detect(Config["detect?"], generate_data_set("Hallo", __algorithms__, new)) 211 | if hashes != None: 212 | for item in hashes.certain: 213 | print(GREEN + "Same results as", item + RESET) 214 | 215 | # print sepetator 216 | print("") 217 | 218 | for item in hashes.maybe: 219 | print(YELLOW + "Maybe", item + RESET) 220 | else: 221 | print(RED + "Not valid hash" + RESET) 222 | # exit when done 223 | Exit(0) 224 | 225 | # if to check use that 226 | elif not Config["check?"] in GLOBAL["BLANK"]: 227 | # check for file 228 | if os.path.exists(Config["check?"]): 229 | # then check 230 | check( 231 | Config["check?"], 232 | hash_is, 233 | Config["colors?"], 234 | Config["quiet?"], 235 | Config["detect?"], 236 | Config["SimpleFileVerification"], 237 | Config["AddSize"], 238 | Config["BSDTag"] 239 | ) 240 | 241 | else: 242 | # if the file does not exist 243 | # print error message 244 | eprint(RED + GLOBAL["MESSAGES"]["FILE_NOT"] + RESET) 245 | Exit(1) # and exit 246 | 247 | # check the Config["single"] argument 248 | elif not Config["single"] in GLOBAL["BLANK"]: 249 | in_files = [Config["single"]] 250 | 251 | else: 252 | # walk directory and add files to my_path 253 | in_files = walk(my_path) 254 | 255 | # if there is any files in in_files 256 | if in_files: 257 | # find the longest filename 258 | longest_filename = max(in_files, key=len) 259 | 260 | # go over files and hash them all 261 | for fname in in_files: 262 | try: 263 | # hash file 264 | 
current_hash = hashFile(fname, hash_is, Config["MemoryOptimatation"]) 265 | 266 | except (FileNotFoundError, PermissionError) as Error: 267 | # if the file does not exist print a error message 268 | if isinstance(Error, FileNotFoundError): 269 | eprint(RED + fname + ", " + GLOBAL["MESSAGES"]["FILE_NOT"] + RESET) 270 | 271 | # check if we have access to the file 272 | elif isinstance(Error, PermissionError): 273 | eprint(RED + fname + " " + GLOBAL["MESSAGES"] + RESET) 274 | # and continue 275 | continue 276 | 277 | # set print_str 278 | print_str = current_hash 279 | size = "" 280 | 281 | if Config["AddSize"]: 282 | size = str(os.stat(fname).st_size) 283 | 284 | if Config["SimpleFileVerification"]: 285 | print_str = sfv_max(current_hash, fname, len(longest_filename), size + " ") 286 | 287 | elif Config["BSDTag"]: 288 | print_str = bsd_tag(current_hash, fname, hash_is.name) + " " + size 289 | 290 | else: 291 | print_str = current_hash + " " + str(size + " " + fname) 292 | 293 | # check if fullpath path shall be stripped 294 | if Config["strip-path?"]: 295 | print_str = print_str.replace(my_path, ".") 296 | 297 | # if we should output the result to a file 298 | if use_out and output != None: 299 | # write result to an file 300 | output.write(print_str + "\n") 301 | 302 | else: 303 | # else print it 304 | print(print_str) 305 | 306 | # Exit when done 307 | Exit(0) 308 | else: 309 | # Else exit 310 | Exit(1) 311 | 312 | """ 313 | Hashit __main__.py can be executed directly with python(3) -m hashit "commands" 314 | and via snap 315 | """ 316 | 317 | def main(args=None): 318 | """ 319 | Main function with error catching, can force-exit with os._exit(1) 320 | 321 | this main function calls main_() and cathes any error while giving the user a "pretty" 322 | error. 
323 | """ 324 | try: 325 | # execute main application 326 | main_(args) 327 | except Exception as error: 328 | # define colors 329 | RD = "" 330 | YL = "" 331 | RE = "" 332 | # check if term supports color 333 | if supports_color(): 334 | YL = GLOBAL["COLORS"]["YELLOW"] 335 | RD = GLOBAL["COLORS"]["RED"] 336 | RE = GLOBAL["COLORS"]["RESET"] 337 | 338 | if isinstance(error, TypeError): 339 | eprint(YL + GLOBAL["ERRORS"]["TypeError"] + RE) 340 | 341 | elif isinstance(error, FileNotFoundError): 342 | eprint(YL + GLOBAL["ERRORS"]["FileNotFoundError"] + RE) 343 | 344 | elif isinstance(error, OSError): 345 | eprint(YL + GLOBAL["ERRORS"]["OSError"]["windows"]) 346 | eprint(GLOBAL["ERRORS"]["OSError"]["macos"]) 347 | eprint(GLOBAL["ERRORS"]["OSError"]["linux"].format(', '.join(random.sample(LINUX_LIST, 10)))) 348 | eprint(GLOBAL["ERRORS"]["OSError"]["END"] + RE) 349 | 350 | # and print error 351 | eprint(RD + str(error) + RE) 352 | 353 | os._exit(1) # force exit 354 | 355 | # if the program is being called 356 | if __name__ == "__main__": 357 | main() # then execute main function 358 | -------------------------------------------------------------------------------- /tests/spec/test.py: -------------------------------------------------------------------------------- 1 | import timeit 2 | import binascii 3 | import sys 4 | import json 5 | sys.path.insert(0, "..") 6 | import hashit 7 | 8 | # create a crc32 hash 9 | def hex_crc32(buf): 10 | buf = (binascii.crc32(buf) & 0xFFFFFFFF) 11 | return ("%08X" % buf).lower() 12 | 13 | 14 | # final class 15 | class crc32: 16 | def __init__(self, data = b''): 17 | self.data = data 18 | 19 | def update(self, data): 20 | self.data += data 21 | 22 | def hexdigest(self): 23 | return hex_crc32(self.data) 24 | 25 | da = str() 26 | 27 | def test_speed(): 28 | return crc32(da).hexdigest() 29 | 30 | ''' 31 | s = timeit.timeit("test_speed()", setup="from __main__ import test_speed", number=10) 32 | print(s) 33 | ''' 34 | 35 | def collision(): 36 | crc = lambda d=b'': hashit.new("crc32", d).hexdigest() 37 | done = {} 38 | 39 | for n in range(1000**3*4 + 1): 40 | n = str(n) 41 | h = crc(n.encode()) 42 | if h in done: 43 | print("ERROR collision found in CRC32", h, n, "and", done[h]) 44 | else: 45 | try: 46 | done[h] = n 47 | except MemoryError: 48 | done.clear() 49 | 50 | 51 | if __name__ == "__main__": 52 | da = open("speed.py", "rb").read() 53 | collision() 54 | 55 | ''' 56 | back = [] 57 | data = open("./res/crc_hashcollisions.txt", "r").readlines() 58 | 59 | for l in data: 60 | l = l.split(" ") 61 | back.append(' '.join([l[0] + ":", l[1], l[2]])) 62 | 63 | open("file.yaml", "w").writelines(back) 64 | ''' -------------------------------------------------------------------------------- /tests/test_load.py: -------------------------------------------------------------------------------- 1 | class load_api_1: 2 | name="hash1" 3 | def __init__(self, data=b''): 4 | self.data = data 5 | 6 | def update(self, data=b''): 7 | self.data += data 8 | 9 | def digest(self): 10 | return 1516152524156352132515252551426 11 | 12 | def hexdigest(self): 13 | return hex(self.digest()) 14 | 15 | class load_api_2: 16 | name="hash2" 17 | def __init__(self, data=b''): 18 | self.data = data 19 | 20 | def update(self, data=b''): 21 | self.data += data 22 | 23 | def digest(self): 24 | return 1234567876543234567897654324562 25 | 26 | def hexdigest(self): 27 | return hex(self.digest()) 28 | 29 | class load_api_3: 30 | name="hash3" 31 | def __init__(self, data=b''): 32 | self.data = data 33 | 34 | def 
update(self, data=b''): 35 | self.data += data 36 | 37 | def digest(self): 38 | return 5232348239489234823948203294829 39 | 40 | def hexdigest(self): 41 | return hex(self.digest()) 42 | -------------------------------------------------------------------------------- /tests/unit.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, with_statement 2 | import unittest 3 | import os, sys 4 | import random, string 5 | from .test_load import load_api_1, load_api_2, load_api_3 6 | from .config import FILE, FILE_SUM, FILE_SUMS 7 | from binascii import unhexlify # python3 support 8 | sys.path.insert(0, "..") 9 | import hashit 10 | import hashit.__main__ 11 | 12 | if not os.path.exists(FILE): 13 | os.chdir("..") 14 | 15 | # use with for this, to disable stdout and stderr 16 | class DisablePrint: 17 | def __enter__(self): 18 | self._original_stdout = sys.stdout 19 | self._original_stderr = sys.stderr 20 | sys.stdout = open(os.devnull, 'w') 21 | sys.stderr = open(os.devnull, 'w') 22 | 23 | def __exit__(self, exc_type, exc_val, exc_tb): 24 | # close os.devnull 25 | sys.stderr.close() 26 | sys.stdout.close() 27 | # set default 28 | sys.stdout = self._original_stdout 29 | sys.stderr = self._original_stderr 30 | 31 | class Test(unittest.TestCase): 32 | def test_hasher(self): 33 | file = open(FILE, "rb") 34 | data = file.read() 35 | 36 | for algo in hashit.__algorithms__: 37 | h1 = hashit.new(algo, data).hexdigest() 38 | h2 = hashit.hashFile(FILE, hashit.new(algo), True) 39 | self.assertEqual(h1, h2) 40 | self.assertEqual(h1, FILE_SUMS[algo]) 41 | 42 | h1 = hashit.new("md5", data).hexdigest() 43 | self.assertEqual(h1, FILE_SUM) 44 | file.close() 45 | 46 | def test_detect(self): 47 | # generate data set 48 | ds = hashit.generate_data_set("Hallo", hashit.__algorithms__, hashit.new) 49 | 50 | # hash file three times 51 | h1 = hashit.hashIter(hashit.blockIter(open(FILE, "rb")), hashit.new("md5")) 52 | h2 = hashit.hashIter(hashit.blockIter(open(FILE, "rb")), hashit.new("sha224")) 53 | h3 = hashit.hashIter(hashit.blockIter(open(FILE, "rb")), hashit.new("sha1")) 54 | h4 = hashit.hashIter(hashit.blockIter(open(FILE, "rb")), hashit.new("crc32")) 55 | 56 | # detect for three algorigthms 57 | cl1 = hashit.detect(h1, ds) 58 | cl2 = hashit.detect(h2, ds) 59 | cl3 = hashit.detect(h3, ds) 60 | cl4 = hashit.detect(h4, ds) 61 | 62 | # correct hash names 63 | correct1 = "md5" 64 | correct2 = "sha224" 65 | correct4 = "crc32" 66 | 67 | # md5 or md4 68 | self.assertTrue(correct1 in cl1.certain or correct1 in cl1.maybe) 69 | # only one left should be true 70 | self.assertTrue(correct2 in (cl2.certain if cl2.certain else cl2.maybe)) 71 | self.assertTrue(correct4 in cl4.certain) 72 | # and if it is to check hash with it 73 | self.assertEqual(hashit.new(correct2, b'Hallo').hexdigest(), hashit.new(cl2.certain[0] if cl2.certain else cl2.maybe[0], b'Hallo').hexdigest()) 74 | 75 | # for sha1 more options should be avaible 76 | self.assertTrue(len(cl3.certain) >= 1) 77 | # and work 78 | self.assertEqual(h3, hashit.hashIter(hashit.blockIter(open(FILE, "rb")), hashit.new(cl3.certain[0]))) 79 | 80 | def test_detect_format(self): 81 | # create shortcut 82 | df = lambda s: hashit.detect_format(s) 83 | 84 | bsdstr = hashit.BSD.format("12345678", "/path/to/file.txt", "crc32") 85 | sfvstr = hashit.SFV.format("12345678", "/path/to/file.txt", 18, "") 86 | nonata = "{} {}".format("12345678", "/path/to/file.txt") 87 | 88 | self.assertEqual(df(bsdstr), "bsd") 89 | 
self.assertEqual(df(sfvstr), "sfv") 90 | self.assertEqual(df(nonata), "N/A") 91 | 92 | def test_multi(self): 93 | # test all hashing functions 94 | algo = "md5" 95 | 96 | h1 = hashit.hashFile(FILE, hashit.new(algo), True) 97 | h2 = hashit.hashFile(FILE, hashit.new(algo)) 98 | h3 = hashit.hashIter(hashit.blockIter(open(FILE, "rb")), hashit.new(algo)) 99 | 100 | # just checking 101 | d = hashit.detect(hashit.hashFile(FILE, hashit.new("sha224"), False), hashit.generate_data_set("HALLO", hashit.__algorithms__, hashit.new)) 102 | self.assertEqual(d.certain[0] if d.certain else d.maybe[0], "sha224") 103 | 104 | self.assertTrue(h1 == h2 == h3 == FILE_SUM) 105 | 106 | 107 | def test_systemrandom(self): 108 | # test system random 109 | generate = lambda k=2: ''.join([random.SystemRandom().choice(string.hexdigits) for i in range(k)]) 110 | 111 | all_gen = list() 112 | c_str = generate() 113 | 114 | while not c_str in all_gen: 115 | all_gen.append(c_str) 116 | c_str = generate() 117 | 118 | print(c_str, "in list (COLLISION FOUND) after", len(all_gen), "Tries, which translates into", int(c_str, 16)) 119 | self.assertTrue(c_str in all_gen) 120 | 121 | def test_crc32(self): 122 | """ 123 | crc = lambda d=b'': hashit.new("crc32", d).hexdigest() 124 | done = {} 125 | 126 | for n in range(100000000): 127 | n = str(n) 128 | h = crc(n.encode()) 129 | if h in done: 130 | print("ERROR collision found in CRC32", h, n, "and", done[h]) 131 | break 132 | try: 133 | done[h] = n 134 | except MemoryError: 135 | done.clear() 136 | """ 137 | 138 | def test_format(self): 139 | s = hashit.BSD.format(FILE_SUM, FILE, "md5") 140 | self.assertEqual(s, "md5 ({}) = {}".format(FILE, FILE_SUM)) 141 | self.assertEqual(hashit.BSD.parser(s), ["md5", FILE, FILE_SUM]) 142 | 143 | # check the sfv parser 144 | self.assertEqual(hashit.SFV.format("abc", "def", 4), "def abc") 145 | 146 | 147 | def test_other(self): 148 | self.assertIsInstance(hashit.supports_color(), bool) 149 | 150 | with DisablePrint(): 151 | with self.assertRaises(SystemExit): 152 | hashit.__main__.main(["--help"]) 153 | 154 | with self.assertRaises(SystemExit): 155 | hashit.__main__.main(["--check", "file_name"]) 156 | 157 | # just checking 158 | self.assertEqual(hashit.__author__, "Javad Shafique") 159 | 160 | def test_exclude(self): 161 | list_with_paths = ["/home/file.sh", "/home/compact.min.js", "/only/file/left.py", "/only/path/left"] 162 | excludes = [".sh", ".min."] 163 | o = hashit.__main__.exclude(list_with_paths, excludes) 164 | self.assertEqual(o, ["/only/file/left.py", "/only/path/left"]) 165 | 166 | class TestLoad(unittest.TestCase): 167 | def test_load(self): 168 | hashit.load(load_api_1) 169 | h1 = hashit.new("hash1", b'data') 170 | self.assertEqual(hex(h1.digest()), h1.hexdigest()) 171 | 172 | def test_load_all(self): 173 | hashit.load_all([load_api_2, load_api_3]) 174 | h2 = hashit.new("hash2", b'data') 175 | h3 = hashit.new("hash3", b'data') 176 | self.assertEqual(hex(h2.digest()), h2.hexdigest()) 177 | self.assertEqual(hex(h3.digest()), h3.hexdigest()) --------------------------------------------------------------------------------
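Illustrative sketch (not part of the repository): a minimal example of how the
plugin API exercised by TestLoad above can be used to register a custom hasher.
hashit.load() and hashit.new() are the calls used in tests/unit.py; the MyHash
class and the name "myhash" are made up for the example, and its digest is a
toy byte sum, not a real hash.

    import hashit

    class MyHash:
        """Hypothetical hasher exposing the same interface as tests/test_load.py."""
        name = "myhash"

        def __init__(self, data=b''):
            self.data = data

        def update(self, data=b''):
            self.data += data

        def digest(self):
            # toy "digest": sum of byte values, just to have an integer to return
            return sum(self.data)

        def hexdigest(self):
            return hex(self.digest())

    # register the class so hashit.new() can construct it by name
    hashit.load(MyHash)
    print(hashit.new("myhash", b"data").hexdigest())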