{{ site.description | default: site.github.project_tagline }}
22 | 23 |This project is maintained by {{ site.github.owner_name }}
33 | {% endif %} 34 | 35 | {% if site.github.is_user_page %} 36 | 9 | hashit.__main__ (version 3.5.1) | index /Users/javad/Dropbox/udvikling/projekter/hashit/hashit/hashit/__main__.py |
Command line application for hashit
13 |
14 | this module "__main__" contains all the code for argparsing, running
15 | and anything needed for a command line application such as hashit.
16 |
17 | it uses argc, another package by me, but I am considering switching to argparse
19 |
22 | Modules | ||||||
25 | |
|
31 |
34 | Classes | ||||||||||||||||||
37 | |
47 |
82 |
|
117 |
120 | Functions | ||
123 | |
|
139 |
142 | Data | ||
145 | | GLOBAL = {'ACCESS': True, 'BLANK': (None, True, False), 'COLORS': {'GREEN': '\x1b[0;32m', 'RED': '\x1b[0;31m', 'RESET': '\x1b[0m', 'YELLOW': '\x1b[0;33m'}, 'DEFAULTS': {'APPEND': False, 'COLORS': True, 'DETECT': False, 'DRYRUN': False, 'HASH': 'md5', 'MEMOPT': False, 'QUIET': False, 'RECURS': False, 'SIZE': False, 'STRICT': False, ...}, 'DEVMODE': True, 'ERRORS': {'FileNotFoundError': "Error, file seems to be missing calling systemd to confirm 'sure you haved checked the MBR?'", 'IndexError': 'Out of range, cause i am not that big :)', 'OSError': {'END': 'JDK, so something happend with your os, message: ', 'linux': 'So {} , to be continued...\n', 'macos': 'Macos (Sierra+) and OSX (El Captain-) thank god for apples naming', 'windows': 'Windows 10, windows 8(.1), windows 7 (sp*), wind...p*), windows 98/95, windows NT *. OK not that bad'}, 'TypeError': 'Wrong type used (in cli-arguments) - please use a static programming language', 'ValueError': 'Wrong type or mood?! :)'}, 'EXTRA': {'crc32': <class 'hashit.extra.Crc32'>}, 'HASH_STR': 'Hello World!', 'IF_NO_ARGS': ['--string'], 'MESSAGES': {'CUR_FORM': 'current format is', 'DRYRUN_NOT': 'Does not support --dry-run', 'EMPTY_CHK': 'checksum file is empty', 'FAIL': 'FAILED', 'FILE_NOT': 'File does not exist', 'HASH_NOT': 'is not a valid hash', 'LENGTH_NOT': 'The files does not have the same length', 'LOAD_FAIL': 'Failed to load', 'MAYBE': 'Maybe', 'MAYBE_M': 'Did you maybe mean:', ...}, ...} 146 | LINUX_LIST = ['Mythbuntu', 'Mac OS X', 'Debian Pure Blend', 'Symphony OS', 'Astra Linux', 'Emdebian Grip', 'Russian Fedora Remix', 'Secure-K', 'Knopperdisk', 'Mobilinux', 'touchscreen', 'MX Linux', 'NepaLinux', 'fli4l', 'Nix', 'Ubuntu Mobile', 'primary', 'Fedora Core', 'ChromeOS', 'rPath', ...] 
147 | __algorithms__ = ['md5', 'sha1', 'crc32', 'sha384', 'sha256', 'sha224', 'sha512', 'blake2b', 'blake2s', 'sha3_224', 'sha3_384', 'sha3_256', 'sha3_512'] 148 | __help__ = 'Hashit is an hashing program which can be uses t...ot the idea to make such a program using\npython.\n' 149 | __license__ = 'MIT, Copyright (c) 2017-2020 Javad Shafique' |
150 |
153 | Author | ||
156 | | Javad Shafique |
9 | hashit.detection | index /Users/javad/Dropbox/udvikling/projekter/hashit/hashit/hashit/detection.py |
Copyright (c) 2020-present Javad Shafique
13 |
14 | this module uses length and connections to find a match
15 | for a hashing algorithm. It's basically a matching algorithm;
16 | it can be used for almost any pure function, in this case for hashes.
17 |
18 | # Copyright (c) 2020-present Javad Shafique
19 | # This 'Software' can't be used without permission
20 | # from Javad Shafique.
21 |
22 | # this module uses length and connections to find a match
23 | # for a hashing algorithm. It's basically a matching algorithm;
24 | # it can be used for almost any pure function, in this case for hashes.
25 | # basic template:
26 |
27 |
28 | def generate_some_dataset(datatoworkon = "some data"):
29 | dict_for_storing_set = dict()
30 |
31 | for each_element in a_list_of_something_to_compare_with:
32 | data = function_that_uses_data_to_generate_something(each_element, datatoworkon)
33 |
34 | dict_for_storing_set.update({each_element:{"data":data, "size":len(data), "size-as":list(), "connection":list()}})
35 |
36 |
37 | #find connection and size
38 |
39 | for each_element in dict_for_storing_set:
40 | elements_data = dict_for_storing_set[each_element]["data"]
41 | elements_size = dict_for_storing_set[each_element]["size"]
42 |
43 | for second_element in dict_for_storing_set:
44 | if dict_for_storing_set[second_element]["size"] == elements_size:
45 | if elements_data == dict_for_storing_set[second_element]["data"]:
46 | dict_for_storing_set[each_element]["connection"].append(second_element)
47 | else:
48 | dict_for_storing_set[each_element]["size-as"].append(second_element)
49 | else:
50 | continue
51 |
52 | # return finished dataset
53 |
54 | return dict_for_storing_set
55 |
56 | # and for parsing that information
57 | # you can use the detect function
58 | # as here:
59 |
60 |
61 | def detect(string, table, maybe = True):
62 | if not (type(string) == str):
63 | return None
64 |
65 | so = list()
66 | so_far = list()
67 | length = len(string)
68 |
69 | for key in table:
70 | dat = table[key]
71 |
72 | if dat["size"] == length:
73 | for i in dat["connection"]:
74 | if i not in so_far:
75 | so_far.append(i)
76 |
77 | for i in so_far:
78 | dat = table[i]["connection"]
79 |
80 | for j in so_far:
81 | if not j in dat:
82 | so_far.remove(j)
83 |
84 | if maybe:
85 | for key in table:
86 | dat = table[key]
87 |
88 | if dat["size"] == length:
89 | so.append(key)
90 |
91 | if len(so_far) >= 0 and len(so) == 1:
92 |
93 | # if there only is one option then use it
94 |
95 | return tup(certain=so, maybe=[])
96 | else:
97 | return tup(certain=so_far, maybe=so)
98 |
99 |
100 |
101 | # compare hashes for hash-detection
102 | # it can generate data that can compare
103 | # differences between the results
104 |
105 | # it works by categorizing the hashes into
106 | # two categories: one for those that look alike
107 | # and one for those that generate the same output
108 | # given the same input. And with it a sorted result
109 | # is outputted and is ready to be used by the user.
110 |
111 | # list of which algorithms are most likely used (WIP)
112 |
113 | PRIORITY = {
114 | "md5":["md5"],
115 | "sha1":["dsaEncryption", "DSA", "ecdsa-with-SHA1", "dsaWithSHA", "DSA-SHA"]
116 | }
118 |
121 | Modules | ||||||
124 | |
|
126 |
129 | Classes | ||||||||||
132 | |
141 |
|
229 |
232 | Functions | ||
235 | |
|
9 | hashit.extra | index /Users/javad/Dropbox/udvikling/projekter/hashit/hashit/hashit/extra.py |
Extra functions and classes for hashit
13 |14 |
17 | Modules | ||||||
20 | |
|
23 |
26 | Classes | ||||||||||||||||||
29 | |
39 |
69 |
|
99 |
102 | Data | ||
105 | | LINUX_LIST = ['Mythbuntu', 'Mac OS X', 'Debian Pure Blend', 'Symphony OS', 'Astra Linux', 'Emdebian Grip', 'Russian Fedora Remix', 'Secure-K', 'Knopperdisk', 'Mobilinux', 'touchscreen', 'MX Linux', 'NepaLinux', 'fli4l', 'Nix', 'Ubuntu Mobile', 'primary', 'Fedora Core', 'ChromeOS', 'rPath', ...] |
9 | hashit | index /Users/javad/Dropbox/udvikling/projekter/hashit/hashit/hashit/__init__.py |
hashit module for hashit command, containing all the code for hashit
13 |
14 | hashit is a hashing application whose main purpose is to replace all the 'default'
15 | hashing commands that comes with linux and also provide a usable hashing program
16 | for windows hence the choice of using python. while hashit supports both python 2 and 3
17 | i would strongly recommend using python3 because that python3 comes with a newer version
18 | of hashlib and therefore many new hash-functions, although it is possible to add these into
19 | python2 with the load() function which acts like a 'connecter' and enables hashit to use
20 | third-party hashing-functions as long as they have the same api as specified in docs/index.md
21 |
22 | The GLOBAL dict contains all the configurations for this program, translations, error messages
23 | settings, plugins and more.
24 |
25 | __algorithms__ is a list that contains all the builtin algorithms including crc32
26 |
27 | LICENSE:
28 |
29 | MIT License
30 |
31 | Copyright (c) 2020 Javad Shafique
32 |
33 | Permission is hereby granted, free of charge, to any person obtaining a copy
34 | of this software and associated documentation files (the "Software"), to deal
35 | in the Software without restriction, including without limitation the rights
36 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
37 | copies of the Software, and to permit persons to whom the Software is
38 | furnished to do so, subject to the following conditions:
39 |
40 | The above copyright notice and this permission notice shall be included in all
41 | copies or substantial portions of the Software.
42 |
43 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
44 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
45 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
46 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
47 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
48 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
49 | SOFTWARE.
50 |
51 | NO ONE CAN CLAIM OWNERSHIP OF THIS "SOFTWARE" AND ASSOCIATED DOCUMENTATION FILES.
53 |
56 | Package Contents | ||||||
59 | |
|
64 |
67 | Classes | ||||||||||||||||||
70 | |
80 |
113 |
|
145 |
148 | Functions | ||
151 | |
|
196 |
199 | Data | ||
202 | | GLOBAL = {'ACCESS': True, 'BLANK': (None, True, False), 'COLORS': {'GREEN': '\x1b[0;32m', 'RED': '\x1b[0;31m', 'RESET': '\x1b[0m', 'YELLOW': '\x1b[0;33m'}, 'DEFAULTS': {'APPEND': False, 'COLORS': True, 'DETECT': False, 'DRYRUN': False, 'HASH': 'md5', 'MEMOPT': False, 'QUIET': False, 'RECURS': False, 'SIZE': False, 'STRICT': False, ...}, 'DEVMODE': True, 'ERRORS': {'FileNotFoundError': "Error, file seems to be missing calling systemd to confirm 'sure you haved checked the MBR?'", 'IndexError': 'Out of range, cause i am not that big :)', 'OSError': {'END': 'JDK, so something happend with your os, message: ', 'linux': 'So {} , to be continued...\n', 'macos': 'Macos (Sierra+) and OSX (El Captain-) thank god for apples naming', 'windows': 'Windows 10, windows 8(.1), windows 7 (sp*), wind...p*), windows 98/95, windows NT *. OK not that bad'}, 'TypeError': 'Wrong type used (in cli-arguments) - please use a static programming language', 'ValueError': 'Wrong type or mood?! :)'}, 'EXTRA': {'crc32': <class 'hashit.extra.Crc32'>}, 'HASH_STR': 'Hello World!', 'IF_NO_ARGS': ['--string'], 'MESSAGES': {'CUR_FORM': 'current format is', 'DRYRUN_NOT': 'Does not support --dry-run', 'EMPTY_CHK': 'checksum file is empty', 'FAIL': 'FAILED', 'FILE_NOT': 'File does not exist', 'HASH_NOT': 'is not a valid hash', 'LENGTH_NOT': 'The files does not have the same length', 'LOAD_FAIL': 'Failed to load', 'MAYBE': 'Maybe', 'MAYBE_M': 'Did you maybe mean:', ...}, ...} 203 | __algorithms__ = ['md5', 'sha1', 'crc32', 'sha256', 'sha384', 'sha512', 'sha224', 'blake2b', 'blake2s', 'sha3_224', 'sha3_384', 'sha3_512', 'sha3_256'] 204 | __help__ = 'Hashit is an hashing program which can be uses t...ot the idea to make such a program using\npython.\n' 205 | __license__ = 'MIT, Copyright (c) 2017-2020 Javad Shafique' 206 | print_function = _Feature((2, 6, 0, 'alpha', 2), (3, 0, 0, 'alpha', 0), 65536) 207 | with_statement = _Feature((2, 5, 0, 'alpha', 1), (2, 6, 0, 'alpha', 0), 32768) |
208 |
211 | Author | ||
214 | | Javad Shafique |
9 | hashit.version (version 3.5.1) | index /Users/javad/Dropbox/udvikling/projekter/hashit/hashit/hashit/version.py |
Set global version
13 | 14 | -------------------------------------------------------------------------------- /docs/usage.md: -------------------------------------------------------------------------------- 1 | --- 2 | layout: default 3 | --- 4 | # Hashit Usage 5 | 6 | ### Pretty Gif 7 |  8 | 9 | ## Usage 10 | Hashit takes arguments like this: 11 | ```bash 12 | usage: hashit [-h] [-p] [-V] [-L] [-hl] [-H hashname] 13 | [-e excludes [excludes ...]] [-C] [-sp] [-A] [-q] [-m] [-r] 14 | [-s [string]] [-d [hash]] [-l list] [-cl list list] 15 | [-c filename] [-o filename] [-S] [-sfv] [-bsd] [--dry-run] 16 | [--trace] [--strict] 17 | [path] [files [files ...]] 18 | ``` 19 | 20 | Where the options are at following: 21 | 22 | [//]: # (Nicely generated by argparse) 23 | 24 | ``` 25 | Hashit is an hashing program which can be uses to hash and verify muliple 26 | files on a system. I got the idea from an ubuntu iso image which have this 27 | hash table, so i got the idea to make such a program using python. 28 | 29 | positional arguments: 30 | path 31 | files 32 | 33 | help: 34 | -h, --help show this help message and exit 35 | -p, --page Launch interactive help with python help() (for python 36 | api) 37 | -V, --version Print current version and exit 38 | -L, --license Print license and exit 39 | -hl, --hash-list Prints list of all supported hashes and exits 40 | 41 | formats: 42 | -S, --size Adds the file size to the output 43 | -sfv, --sfv Outputs in a sfv compatible format 44 | -bsd, --bsd output using the bsd checksum-format 45 | 46 | settings: 47 | -H hashname, --hash hashname 48 | Select hash use -hl --hash-list for more info 49 | -e excludes [excludes ...], --exclude excludes [excludes ...] 
50 | list of files and directories to exclude 51 | -C, --color Enable colored output where it is supported 52 | -sp, --strip-path Strips fullpath from the results 53 | -A, --append Instead of writing to a file you will append to it 54 | -q, --quiet Reduces output, (silences warnings) 55 | -m, --memory-optimatation 56 | Enables memory optimatation (useful for large files) 57 | -r, --recursive Hash all files in all subdirectories 58 | 59 | other: 60 | -s [string], --string [string] 61 | hash a string or a piece of text 62 | -d [hash], --detect [hash] 63 | Enable hash detection for check 64 | -l list, --list list Takes a file (list) of strings and hashes each of them 65 | -cl list list, --check-list list list 66 | Takes two arguments, hashlist and stringlist 67 | -c filename, --check filename 68 | Verify checksums from a checksum file 69 | -o filename, --output filename 70 | output output to an output (file) 71 | 72 | devtools: 73 | --dry-run prints the list of files that is doing to be hashed 74 | (and how) and the output type 75 | --trace Print traceback of any error cathed and exit 76 | --strict Exit non-zero on any errors 77 | 78 | MIT, Copyrigth (c) 2017-2020 Javad Shafique 79 | ``` 80 | 81 | So if i want to hash a file called fx. 
icon.png in the img dir 82 | ```bash 83 | $ hashit icon.png 84 | eade8f2bb7fcb89d396a850b977740fd img/icon.png 85 | ``` 86 | 87 | Or i wanted to hash the tests directory with lets say blake2s and write it to a file using the bsd format and then verify it 88 | ```bash 89 | $ hashit -H blake2s -o output.txt -bsd tests/ 90 | # no need to specify the files format it detects it automaticly 91 | $ hashit -H blake2s -c output.txt -C # or --color for -C 92 | tests/speed.py: OK 93 | tests/test.py: OK 94 | tests/__init__.py: OK 95 | tests/speed2.py: OK 96 | tests/res/benchmarks2.json: OK 97 | tests/res/file.json: OK 98 | tests/res/crc_hashcollisions.txt: OK 99 | tests/res/benchmarks.json: OK 100 | tests/res/pycrypto_vs_hashlib.json: OK 101 | tests/__pycache__/speed.cpython-36.pyc: OK 102 | tests/__pycache__/speed2.cpython-36.pyc: OK 103 | tests/__pycache__/test.cpython-36.pyc: OK 104 | tests/__pycache__/__init__.cpython-36.pyc: OK 105 | ``` 106 | 107 | But maybe you don't want to hash an file but a password or a piece of text then use the -s option 108 | ```bash 109 | $ hashit -s secret_key 110 | 73eeac3fa1a0ce48f381ca1e6d71f077 111 | # if you just use -s it will read until you use ctrl+D (^D) 112 | $ hashit -s 113 | secret_key^D 114 | 73eeac3fa1a0ce48f381ca1e6d71f077 115 | ``` 116 | If you want to hash multiple files with forexample a wildcard(*): 117 | ```bash 118 | $ hashit *.txt 119 | d41d8cd98f00b204e9800998ecf8427e empty.txt 120 | d41d8cd98f00b204e9800998ecf8427e another_0bytefile.txt 121 | d41d8cd98f00b204e9800998ecf8427e no_data.txt 122 | ``` 123 | 124 | Fun fact the -l --list option can also be used if you really want to check an file, because it 125 | reads a file line by line can it be used for checking each and every line in any file like this: 126 | 127 | > Chances of hash collisions = 0% (JDK, haven't done any statitics but 128 | > technicly a lesser chances for bigger files (more lines more security!) 
129 | 130 | ``` 131 | $ hashit -l LICENSE -o license.chk 132 | $ hashit -cl license.chk LICENSE 133 | \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\: OK 134 | ©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©: OK 135 | ©-------------------------------------------------------------------------------©: OK 136 | ©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©: OK 137 | MIT License : OK 138 | : OK 139 | Copyright (c) 2020 Javad Shafique: OK 140 | : OK 141 | Permission is hereby granted, free of charge, to any person obtaining a copy: OK 142 | of this software and associated documentation files (the "Software"), to deal: OK 143 | in the Software without restriction, including without limitation the rights: OK 144 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell: OK 145 | copies of the Software, and to permit persons to whom the Software is: OK 146 | furnished to do so, subject to the following conditions:: OK 147 | : OK 148 | The above copyright notice and this permission notice shall be included in all: OK 149 | copies or substantial portions of the Software.: OK 150 | : OK 151 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR: OK 152 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,: OK 153 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE: OK 154 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER: OK 155 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,: OK 156 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE: OK 157 | SOFTWARE.: OK 158 | : OK 159 | NO ONE CAN CLAIM OWNERSHIP OF THIS "SOFTWARE" AND ASSOCIATED DOCUMENTATION FILES.: OK 160 | : OK 161 | Icon from freepik.com all rights reserved: OK 162 | ©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©©: OK 163 | ©-------------------------------------------------------------------------------©: OK 164 | ``` 165 | 166 | 167 | > TIP: add quotes around multi length strings to make them a single argument 168 | 169 | ## From python 170 | It also got a python-level api for those you want to integrate this into your own application 171 | ```py 172 | from hashit import hashFile, new 173 | 174 | # init hashing class 175 | hasher = new("md5") 176 | hash_from_file = hashFile("file.txt", hasher, True) # (True) Activate memory optimatation, faster for larger files 177 | print(hash_from_file) 178 | 179 | print(new("sha3_256", b'DATA').hexdigest()) 180 | print(new("crc32", b'DATA').hexdigest()) # custom hashes is also supported 181 | ``` 182 | See [extra](extra.md) for more customization and extended api usage from python 183 | 184 | 185 | [back](index.md) 186 | -------------------------------------------------------------------------------- /hashit.spec: -------------------------------------------------------------------------------- 1 | Name: hashit 2 | Version: 3.5.3 3 | Release: 1 4 | Summary: hashit, a hashing application. 5 | License: LICENSE 6 | URL: https://cjavad.github.io/hashit 7 | Requires: python3 8 | 9 | %description 10 | Hashing Application with muliple modes, settings and more! 
11 | Hashit, is an hashing application used as an verification tool, intendet to replace the "standard" linux hashing utilities such as 12 | md5sum, sha1sum and so on. One of the main reasons why this program was develop was to create an easy-to-use command line tool for 13 | newcomers and professionals alike to hash/verify files and other data. For more see our homepage at https://cjavad.github.io/hashit. 14 | 15 | 16 | %prep 17 | python3 setup.py clean -a 18 | 19 | %build 20 | python3 setup.py build --force 21 | 22 | %install 23 | python3 setup.py install --force 24 | 25 | %files 26 | # python takes care of this 27 | 28 | %changelog 29 | # removed changelog :cry: 30 | -------------------------------------------------------------------------------- /hashit/__main__.py: -------------------------------------------------------------------------------- 1 | """Command line application for hashit 2 | 3 | this module "__main__" contains all the code for argparsing, running 4 | and anything needed for an command lin application such as hashit. 
5 | 6 | it uses argc another package by me, but i am considering switching to argparse 7 | """ 8 | 9 | import random 10 | import traceback 11 | import argparse 12 | # Import all from hashit 13 | from .__init__ import os, hashlib, eprint, hashFile, new, BSD, load, \ 14 | GLOBAL, Exit, check, generate_data_set, detect, SFV, fixpath, reader, \ 15 | __algorithms__, __author__, __help__, __license__, supports_color 16 | 17 | from .extra import LINUX_LIST 18 | from .version import __version__ 19 | 20 | class Print(argparse.Action): 21 | """Print action for argparse, takes one kwarg which is text the varible which contains the string to be printed""" 22 | def __init__(self, nargs=0, **kwargs): 23 | if nargs != 0: 24 | raise ValueError('nargs for Print must be 0; it is just a flag.') 25 | elif "text" in kwargs: 26 | self.data = kwargs.pop("text") 27 | 28 | if "exit" in kwargs: 29 | self.exit = True if kwargs.pop("exit") else False 30 | 31 | super(Print, self).__init__(nargs=nargs, **kwargs) 32 | 33 | def __call__(self, parser, namespace, values, option_string=None): 34 | print(self.data) 35 | 36 | if self.exit: 37 | Exit(0) 38 | 39 | class Execute(argparse.Action): 40 | """Same as Print() but instead of printing an object it calls it takes func (function), and exit (bool)""" 41 | def __init__(self, nargs=0, **kwargs): 42 | if nargs != 0: 43 | raise ValueError('nargs for Execute must be 0; it is just a flag.') 44 | 45 | if "func" in kwargs: 46 | self.data = kwargs.pop("func") 47 | 48 | if "exit" in kwargs: 49 | self.exit = True if kwargs.pop("exit") else False 50 | 51 | super(Execute, self).__init__(nargs=nargs, **kwargs) 52 | 53 | def __call__(self, parser, namespace, values, option_string=None): 54 | print(self.data()) 55 | 56 | if self.exit: 57 | Exit(0) 58 | 59 | def walk(go_over): 60 | """Goes over a path an finds all files, appends them to a list and returns that list""" 61 | walked = [] 62 | for path, _subdirs, files in os.walk(go_over): 63 | # if the path does not 
exist skip it (What) 64 | if not os.path.exists(path): 65 | continue 66 | # for each file 67 | for name in files: 68 | # add it to in_files list() if it does exist 69 | p = (path + "/" + name).replace("\\", "/").replace("//", "/") 70 | if os.path.exists(p): 71 | walked.append(p) 72 | 73 | # return list with file names 74 | return walked 75 | 76 | # exclude function faster then last implementation 77 | def exclude(items, excludes): 78 | """Exclude removes all items in a list that is in the excludes list (for dirs)""" 79 | 80 | for ex in excludes: 81 | items = [x for x in items if not ex in x] 82 | # return items 83 | return items 84 | 85 | def config(parser): 86 | """Sets argvs' config and commands with argparse and returns it for good sake""" 87 | 88 | def hash_list(): 89 | """Generates an easy-to-read list""" 90 | algos = set((__algorithms__ + list(GLOBAL["EXTRA"].keys()))) # add extras 91 | # sort set 92 | s = [sorted(algos)[x:x+2] for x in range(0, len(algos), 2)] 93 | for c, l in enumerate(s): 94 | s[c] = ', '.join(l) 95 | 96 | return "\n" + '\n'.join(s) + "\n" 97 | 98 | def help_self(): 99 | """Launches help() for module""" 100 | # get info from self 101 | help(os.sys.modules["hashit"]) 102 | help(os.sys.modules[__name__]) # current 103 | help(os.sys.modules["hashit.detection"]) 104 | help(os.sys.modules["hashit.extra"]) 105 | help(os.sys.modules["hashit.version"]) 106 | 107 | return __help__ 108 | 109 | # create groups 110 | ghelp = parser.add_argument_group("help") 111 | formats = parser.add_argument_group("formats") 112 | settings = parser.add_argument_group("settings") 113 | other = parser.add_argument_group("other") 114 | dev = parser.add_argument_group("devtools") 115 | 116 | # set commands 117 | parser.add_argument('path', nargs="?", default=os.getcwd()) # for directroy 118 | parser.add_argument("files", nargs="*", default=[]) # for a list of files 119 | 120 | # add all the helping arguments 121 | ghelp.add_argument("-h", "--help", help="show this help 
message and exit", action=Execute, func=parser.format_help, exit=True) 122 | ghelp.add_argument("-p", "--page", help="Launch interactive help with python help() (for python api)", action=Execute, func=help_self, exit=True) 123 | ghelp.add_argument("-V", "--version", help="Print current version and exit", action="version", version="%(prog)s " + __version__) 124 | ghelp.add_argument("-L", "--license", help="Print license and exit", action=Print, text=__license__, exit=True) 125 | ghelp.add_argument("-hl", "--hash-list", help="Prints list of all supported hashes and exits", action=Execute, func=hash_list, exit=True) 126 | 127 | # all the options that sets something 128 | settings.add_argument("-H", "--hash", help="Select hash use -hl --hash-list for more info", metavar="hashname", default=GLOBAL["DEFAULTS"]["HASH"]) 129 | settings.add_argument("-e", "--exclude", help="list of files and directories to exclude", default=[], metavar="excludes", nargs="+") 130 | settings.add_argument("-C", "--color", help="Enable colored output where it is supported", action="store_true", default=GLOBAL["DEFAULTS"]["COLORS"]) 131 | settings.add_argument("-sp", "--strip-path", help="Strips fullpath from the results", action="store_true", default=GLOBAL["DEFAULTS"]["STRIP"]) 132 | settings.add_argument("-A", "--append", help="Instead of writing to a file you will append to it", action="store_true", default=GLOBAL["DEFAULTS"]["APPEND"]) 133 | settings.add_argument("-q", "--quiet", help="Reduces output, (silences warnings)", action="store_true") 134 | settings.add_argument("-m", "--memory-optimatation", help="Enables memory optimatation (useful for large files)", action="store_true", default=GLOBAL["DEFAULTS"]["MEMOPT"]) 135 | settings.add_argument("-r", "--recursive", help="Hash all files in all subdirectories", action="store_true", default=GLOBAL["DEFAULTS"]["RECURS"]) 136 | 137 | # other, things that are optinional such as detect and string hashes 138 | # other.add_argument("-a", "--all", 
help="Calculate all hashes for a single file", metavar="filename") NOTE: Removed for now 139 | other.add_argument("-s", "--string", nargs="?", help="hash a string or a piece of text", default=False, metavar="string") 140 | other.add_argument("-d", "--detect", nargs="?", help="Enable hash detection for check, or it can take a hash and decect hash algorithm", metavar="hash", default=GLOBAL["DEFAULTS"]["DETECT"]) 141 | other.add_argument("-l", "--list", help="Takes a file (list) of strings and hashes each of them", metavar="list") 142 | other.add_argument("-cl", "--check-list", help="Takes two arguments, hashlist and stringlist", nargs=2, metavar="list") 143 | # ~ More important ~ 144 | other.add_argument("-c", "--check", help="Verify checksums from a checksum file", nargs="?", const=1337, metavar="filename") 145 | other.add_argument("-o", "--output", help="output output to an output (file)", metavar="filename") 146 | 147 | # ~ Formatting ~ 148 | formats.add_argument("-S", "--size", help="Adds the file size to the output", action="store_true", default=GLOBAL["DEFAULTS"]["SIZE"]) 149 | formats.add_argument("-sfv", "--sfv", help="Outputs in a sfv compatible format", action="store_true") 150 | formats.add_argument("-bsd", "--bsd", help="output using the bsd checksum-format", action="store_true") 151 | 152 | # ~ Devtools ~ 153 | dev.add_argument("--dry-run", help="prints the list of files that is doing to be hashed (and how) and the output type", action="store_true", default=GLOBAL["DEFAULTS"]["DRYRUN"]) 154 | dev.add_argument("--trace", help="Print traceback of any error cathed and exit", action="store_true", default=GLOBAL["DEFAULTS"]["TRACE"]) 155 | dev.add_argument("--strict", help="Exit non-zero on any errors", action="store_true", default=GLOBAL["DEFAULTS"]["STRICT"]) 156 | 157 | # return parser 158 | return parser 159 | 160 | def main_(args): 161 | """Main function which is the cli parses arguments and runs appropriate commands""" 162 | # using argparse instead of 
argc for portability 163 | parser = argparse.ArgumentParser("hashit", description=__help__, epilog=__license__, add_help=False) 164 | # set commands and config with config 165 | parser = config(parser) 166 | 167 | # check for amount of arguments 168 | if not args: 169 | # if there is not arguments show help 170 | args = GLOBAL["IF_NO_ARGS"] 171 | 172 | # parse args 173 | argv = parser.parse_args(args) 174 | # Varibles 175 | 176 | # set colors 177 | RED = "" 178 | GREEN = "" 179 | YELLOW = "" 180 | RESET = "" 181 | 182 | # check if we should use colors 183 | if supports_color() and argv.color: 184 | # if yes enable them 185 | RED = GLOBAL["COLORS"]["RED"] 186 | GREEN = GLOBAL["COLORS"]["GREEN"] 187 | YELLOW = GLOBAL["COLORS"]["YELLOW"] 188 | RESET = GLOBAL["COLORS"]["RESET"] 189 | 190 | # file list, and path 191 | in_files = list() # list of all files 192 | my_path = os.getcwd() # path to search in 193 | 194 | # use md5 by default 195 | hash_is = new(GLOBAL["DEFAULTS"]["HASH"]) 196 | 197 | # check if its an valid hashing 198 | if argv.hash in hashlib.algorithms_available or argv.hash in __algorithms__ or argv.hash in list(GLOBAL["EXTRA"].keys()) or str(argv.hash)[:5] == "shake": 199 | # check if it's in guaranteed 200 | if not argv.hash in hashlib.algorithms_guaranteed and argv.hash in hashlib.algorithms_available: 201 | # if not print an warning 202 | if not argv.quiet: 203 | eprint(YELLOW + str(argv.hash), GLOBAL["MESSAGES"]["WORKS_ON"] + RESET) 204 | # and use the hash 205 | hash_is = new(argv.hash) 206 | 207 | elif not argv.hash in GLOBAL["BLANK"]: 208 | # then print error messageh 209 | eprint(RED + str(argv.hash), GLOBAL["MESSAGES"]["HASH_NOT"], RESET) 210 | 211 | # select output 212 | use_out = False 213 | output = None 214 | 215 | # check if out is set and it has a value 216 | if not argv.output in GLOBAL["BLANK"]: 217 | # if it is open file 218 | use_out = True 219 | # if dryrun dont open file 220 | if not argv.dry_run: 221 | output = 
open(fixpath(argv.output), GLOBAL["WRITE_MODE"]) 222 | else: 223 | # else set it to false 224 | use_out = False 225 | 226 | 227 | 228 | # check for new path 229 | if os.path.isdir(argv.path): 230 | new_path = argv.path 231 | # check if argument is path else do not change path 232 | if os.path.exists(new_path) and os.path.isdir(new_path): 233 | my_path = new_path 234 | 235 | # ~ Argument taking options ~ 236 | 237 | # check for string in args needed because argparse 238 | # does not support both store_true and store same for detect 239 | if "-s" in args or "--string" in args: 240 | # Check if dryrun is true 241 | if argv.dry_run and not argv.quiet: 242 | # if it is, print warning 243 | eprint(YELLOW + "-s --string, {}".format(GLOBAL["MESSAGES"]["DRYRUN_NOT"]) + RESET) 244 | 245 | # exit if strict 246 | if argv.strict: 247 | return 1 248 | 249 | data = argv.string 250 | if not data: 251 | # reed from stdin like md5sum 252 | data = os.sys.stdin.read() 253 | 254 | # check if data ends with newline 255 | if not data.endswith("\n"): 256 | # else print one 257 | print("") 258 | 259 | # if the data isn't bytes 260 | if not isinstance(data, bytes): 261 | # encode it 262 | data = data.encode() 263 | 264 | # then hash-it 265 | hash_is.update(data) 266 | 267 | # check for output methods 268 | if use_out and output != None: 269 | output.write(hash_is.hexdigest()) 270 | else: 271 | print(hash_is.hexdigest()) 272 | 273 | return 0 274 | 275 | # if detect is choosen use it 276 | elif not argv.detect in GLOBAL["BLANK"]: 277 | # Check if dryrun is true 278 | if argv.dry_run and not argv.quiet: 279 | # if it is, print warning 280 | eprint(YELLOW + "-d --detect, {}".format(GLOBAL["MESSAGES"]["DRYRUN_NOT"]) + RESET) 281 | 282 | # exit if strict 283 | if argv.strict: 284 | return 1 285 | 286 | hashes = detect(argv.detect, generate_data_set("Hallo", __algorithms__, new)) 287 | if hashes != None: 288 | for item in hashes.certain: 289 | print(GREEN + GLOBAL["MESSAGES"]["RESULTS_AS"], item + 
RESET) 290 | 291 | # print sepetator if there is a need for one 292 | if hashes.maybe and hashes.certain: 293 | print("") 294 | 295 | for item in hashes.maybe: 296 | print(YELLOW + GLOBAL["MESSAGES"]["MAYBE"], item + RESET) 297 | else: 298 | print(RED + str(argv.detect) + " " + GLOBAL["MESSAGES"]["HASH_NOT"] + RESET) 299 | 300 | # ~ Check functions ~ 301 | # if to check use that 302 | elif argv.check: 303 | # set argv.detect to true 304 | if "-d" in args or "--detect" in args: 305 | argv.detect = True 306 | # check for file or alternativly if no argument was provided 307 | if os.path.exists(argv.check) or type(argv.check) == int: 308 | # then check (return exitcode) 309 | return check( 310 | "" if type(argv.check) == int else argv.check, 311 | hash_is, 312 | argv.color, 313 | argv.quiet, 314 | argv.detect, 315 | argv.sfv, 316 | argv.size, 317 | argv.bsd, 318 | argv.strict, 319 | argv.trace, 320 | argv.dry_run 321 | ) 322 | 323 | else: 324 | # if the file does not exist 325 | # print error message 326 | eprint(RED + GLOBAL["MESSAGES"]["FILE_NOT"] + RESET) 327 | # check if strict 328 | if argv.strict: 329 | return 1 # if so then exit non-zero 330 | 331 | # Else exit 0 332 | return 0 333 | # ~ Check for list ~ 334 | elif not argv.list in GLOBAL["BLANK"]: 335 | # check for dry_run 336 | if argv.dry_run: 337 | print("Reading {} and hashing strings".format(argv.list)) 338 | 339 | elif os.path.exists(argv.list) and os.path.isfile(argv.list): 340 | for line in reader(argv.list, "r", False): 341 | hashstr = new(hash_is.name, line.encode()).hexdigest() 342 | 343 | if use_out and output != None: 344 | output.write(hashstr + "\n") 345 | else: 346 | print(hashstr) 347 | 348 | 349 | else: 350 | # if the file does not exist 351 | # print error message 352 | eprint(RED + GLOBAL["MESSAGES"]["FILE_NOT"] + RESET) 353 | # check if strict 354 | if argv.strict: 355 | return 1 # if so then exit non-zero 356 | 357 | # Else exit 0 358 | return 0 359 | 360 | elif not argv.check_list in 
GLOBAL["BLANK"]: 361 | # if check list is true then find the listnames 362 | hash_list = argv.check_list[0] 363 | cstr_list = argv.check_list[1] 364 | # first check for dry_run 365 | if argv.dry_run: 366 | print("Checking if {} matches {}".format(cstr_list, hash_list)) 367 | 368 | # else check if the exists 369 | elif os.path.exists(hash_list) and os.path.exists(cstr_list): 370 | # if they do read both files 371 | hash_list = [s.replace("\n", "") for s in open(hash_list, "r").readlines()] 372 | cstr_list = [s.replace("\n", "") for s in open(cstr_list, "r").readlines()] 373 | # and set count to 0 374 | count = 0 375 | 376 | # check if they have the same length 377 | if len(hash_list) != len(cstr_list): 378 | eprint(RED + GLOBAL["MESSAGES"]["LENGTH_NOT"] + RESET) 379 | # print error if needed and check for strict 380 | if argv.strict: 381 | return 1 382 | # loop over files 383 | while len(hash_list) > count: 384 | # check if there is an error 385 | if count > len(cstr_list): 386 | break 387 | 388 | # get last hash 389 | hashstr = hash_list[count] 390 | # get current string 391 | s = cstr_list[count] 392 | # hash current string 393 | newhashstr = new(hash_is.name, s.encode()).hexdigest() 394 | # set base print_str 395 | print_str = s + ": {}" 396 | 397 | # print correct results 398 | if hashstr == newhashstr and not argv.quiet: 399 | print(print_str.format(GREEN + GLOBAL["MESSAGES"]["OK"] + RESET)) 400 | 401 | elif hashstr != newhashstr: 402 | print(print_str.format(RED + GLOBAL["MESSAGES"]["FAIL"] + RESET)) 403 | 404 | # add 1 to count 405 | count += 1 406 | 407 | 408 | else: 409 | # if the files does not exist 410 | # print error message 411 | eprint(RED + GLOBAL["MESSAGES"]["FILE_NOT"] + RESET) 412 | # check if strict 413 | if argv.strict: 414 | return 1 # if so then exit non-zero 415 | 416 | # Exit 0 417 | return 0 418 | 419 | 420 | # ~ Check for files ~ 421 | 422 | # check the argv.files argument, and the path var 423 | # which can be a file. 
424 | elif argv.files or os.path.isfile(argv.path): 425 | for fname in argv.files + [argv.path]: 426 | path = fixpath(fname) # use fixpath 427 | if os.path.exists(path): 428 | # if path is file 429 | if os.path.isfile(path): 430 | # append to in_files 431 | in_files.append(path) 432 | else: 433 | # if file not exist then print error 434 | eprint(RED + "{}, ".format(path) + GLOBAL["MESSAGES"]["FILE_NOT"] + RESET) 435 | # if strict exit non-zero 436 | if argv.strict: 437 | return 1 438 | 439 | # else return zero 440 | return 0 441 | 442 | # else if my_path is a dir and r is true 443 | elif argv.recursive and os.path.isdir(my_path): 444 | # walk directory and add files to in_files (use fixpath) 445 | in_files = [fixpath(fname) for fname in walk(my_path)] 446 | 447 | # else if my_path is a dir then just 448 | elif os.path.isdir(my_path): 449 | # hash all of the files in this directory 450 | in_files = [os.path.join(my_path, f) for f in os.listdir(my_path) if os.path.isfile(os.path.join(my_path, f))] 451 | 452 | # if there is any files in in_files 453 | if in_files: 454 | # check if we should remove any files 455 | if argv.exclude: 456 | # exclude files and fix paths 457 | in_files = exclude([fixpath(f) for f in in_files], argv.exclude) 458 | 459 | 460 | if not in_files: 461 | # no more files in in_files 462 | return 0 463 | 464 | # find the longest filename 465 | longest_filename = max(in_files, key=len) 466 | 467 | # go over files and hash them all 468 | for fname in in_files: 469 | # if dry run just print filename and hash 470 | if argv.dry_run: 471 | print("Hashing {} with {} and outputting to {}".format(fname, hash_is.name, ("stdout" if not use_out else argv.output))) 472 | # and continue 473 | continue 474 | try: 475 | # hash file 476 | current_hash = hashFile(fname, hash_is, argv.memory_optimatation) 477 | 478 | except (FileNotFoundError, PermissionError) as Error: 479 | # if the file does not exist print a error message 480 | if isinstance(Error, 
FileNotFoundError): 481 | eprint(RED + fname + ", " + GLOBAL["MESSAGES"]["FILE_NOT"] + RESET) 482 | 483 | # check if we have access to the file 484 | elif isinstance(Error, PermissionError): 485 | eprint(RED + fname + ", " + GLOBAL["MESSAGES"]["PERM_ERR"] + RESET) 486 | 487 | # print stack and trace if needed 488 | if argv.trace: 489 | eprint(YELLOW, end="") 490 | traceback.print_stack(file=os.sys.stderr) 491 | traceback.print_exc(file=os.sys.stderr) 492 | eprint(RESET, end="") 493 | 494 | continue 495 | 496 | # set print_str 497 | print_str = current_hash 498 | size = "" 499 | 500 | # size override size as string 501 | if argv.size: 502 | size = str(os.stat(fname).st_size) 503 | 504 | # if sfv format string 505 | if argv.sfv: 506 | print_str = SFV.format(current_hash, fname, len(longest_filename), size) 507 | # is bsd format string 508 | elif argv.bsd: 509 | print_str = BSD.format(current_hash, fname, hash_is.name) + (size if len(size) <= 0 else " " + size) 510 | # else use N/A 511 | else: 512 | print_str = current_hash + " " + str(size + " " + fname) 513 | 514 | # check if fullpath path shall be stripped 515 | if argv.strip_path: 516 | # then replace current path with 517 | print_str = print_str.replace(os.path.join(os.getcwd(), ""), "") 518 | print_str = print_str.replace("./", "") # if the file is in the current dir 519 | 520 | # if we should output the result to a file 521 | if use_out and output != None: 522 | # write result to an file 523 | output.write(print_str + "\n") 524 | 525 | else: 526 | # else print it 527 | print(print_str) 528 | 529 | # return ExitCode 530 | return 0 531 | 532 | """ 533 | Hashit __main__.py can be executed directly with python(3) -m hashit "commands" 534 | and via snap 535 | """ 536 | 537 | def main(args=None): 538 | """ 539 | Main function with error catching, can force-exit with os._exit(1) 540 | 541 | this main function calls main_() and cathes any error while giving the user some "pretty" 542 | errors. 
543 | """ 544 | # switch args if needed 545 | if args is None: 546 | # to sys.args 547 | args = os.sys.argv[1:] 548 | try: 549 | # execute main application 550 | Exit(main_(args)) # Exit with return code 551 | except Exception as error: 552 | # define colors 553 | RD = "" 554 | YL = "" 555 | RE = "" 556 | # check if term supports color 557 | if supports_color(): 558 | YL = GLOBAL["COLORS"]["YELLOW"] 559 | RD = GLOBAL["COLORS"]["RED"] 560 | RE = GLOBAL["COLORS"]["RESET"] 561 | 562 | if isinstance(error, TypeError): 563 | eprint(YL + GLOBAL["ERRORS"]["TypeError"] + RE) 564 | 565 | elif isinstance(error, ValueError): 566 | eprint(YL + GLOBAL["ERRORS"]["ValueError"] + RE) 567 | 568 | elif isinstance(error, FileNotFoundError): 569 | eprint(YL + GLOBAL["ERRORS"]["FileNotFoundError"] + RE) 570 | 571 | elif isinstance(error, OSError): 572 | eprint(YL + GLOBAL["ERRORS"]["OSError"]["windows"]) 573 | eprint(GLOBAL["ERRORS"]["OSError"]["macos"]) 574 | eprint(GLOBAL["ERRORS"]["OSError"]["linux"].format(', '.join(random.sample(LINUX_LIST, 10)))) 575 | eprint(GLOBAL["ERRORS"]["OSError"]["END"] + RE) 576 | 577 | # print stack and trace if needed 578 | if "--trace" in args or "-t" in args: 579 | eprint(RD, end="") 580 | traceback.print_stack(file=os.sys.stderr) 581 | traceback.print_exc(file=os.sys.stderr) 582 | eprint(RE, end="") 583 | else: 584 | # else print error 585 | eprint(RD + str(error) + RE) 586 | 587 | os._exit(1) # force exit 588 | 589 | # if the program is being called 590 | if __name__ == "__main__": 591 | # Exit 0 on KeyboardInterruptExit 592 | try: 593 | main() # then execute main function 594 | except KeyboardInterrupt: 595 | Exit(130) # According to the posix standard 596 | -------------------------------------------------------------------------------- /hashit/detection.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyrigth (c) 2020-present Javad Shafique 3 | 4 | this module using length and connections to find 
a match 5 | for an hashing algorithem. It's basicly a matching algorigtem 6 | it can be used for almost any pure function in this case for hashes. 7 | 8 | # Copyright (c) 2020-present Javad Shafique 9 | # This 'Software' can't be used without permission 10 | # from Javad Shafique. 11 | 12 | # this module using length and connections to find a match 13 | # for an hashing algorithem. It's basicly a matching algorigtem 14 | # it can be used for almost any pure function in this case for hashes. 15 | # basic template: 16 | 17 | 18 | def generate_some_dataset(datatoworkon = "some data"): 19 | dict_for_storing_set = dict() 20 | 21 | for each_element in a_list_of_something_to_compare_with: 22 | data = function_that_uses_data_to_generate_something(each_element, datatoworkon) 23 | 24 | dict_for_storing_set.update({each_element:{"data":data, "size":len(data), "size-as":list(), "connection":list()}}) 25 | 26 | 27 | #find connection and size 28 | 29 | for each_element in dict_for_storing_set: 30 | elements_data = dict_for_storing_set[each_element]["data"] 31 | elements_size = dict_for_storing_set[each_element]["size"] 32 | 33 | for second_element in dict_for_storing_set: 34 | if dict_for_storing_set[second_element]["size"] == elements_size: 35 | if elements_data == dict_for_storing_set["data"]: 36 | dict_for_storing_set[each_element]["connection"].append(second_element) 37 | else: 38 | dict_for_storing_set[each_element]["size-as"].append(second_element) 39 | else: 40 | continue 41 | 42 | # return finished dataset 43 | 44 | return dict_for_storing_set 45 | 46 | # and for parsing that infomation 47 | # you can use the detect function 48 | # as here: 49 | 50 | 51 | def detect(string, table, maybe = True): 52 | if not (type(string) == str): 53 | return None 54 | 55 | so = list() 56 | so_far = list() 57 | length = len(string) 58 | 59 | for key in table: 60 | dat = table[key] 61 | 62 | if dat["size"] == length: 63 | for i in dat["connection"]: 64 | if i not in so_far: 65 | 
so_far.append(i) 66 | 67 | for i in so_far: 68 | dat = table[i]["connection"] 69 | 70 | for j in so_far: 71 | if not j in dat: 72 | so_far.remove(j) 73 | 74 | if maybe: 75 | for key in table: 76 | dat = table[key] 77 | 78 | if dat["size"] == length: 79 | so.append(key) 80 | 81 | if len(so_far) >= 0 and len(so) == 1: 82 | 83 | # if there only is one option then use it 84 | 85 | return tup(certain=so, maybe=[]) 86 | else: 87 | return tup(certain=so_far, maybe=so) 88 | 89 | 90 | 91 | # compare hashes for hash-detection 92 | # it can generate data that can compare 93 | # diffrences between the results 94 | 95 | # if works by categorizing the hashes into 96 | # two categorizes. one for thoose who look alike 97 | # and one for thoose who generates the same output 98 | # given the same input. And with it a sorted result 99 | # is outputted and is ready to be used be the user. 100 | 101 | # list of which algorithms is most likly used (WIP) 102 | 103 | PRIORITY = { 104 | "md5":["md5"], 105 | "sha1":["dsaEncryption", "DSA", "ecdsa-with-SHA1", "dsaWithSHA", "DSA-SHA"] 106 | } 107 | """ 108 | 109 | import string 110 | from collections import namedtuple 111 | 112 | # checks if string is hex 113 | def ishex(hexstr): 114 | """Checks if string is hexidecimal""" 115 | return all(char in string.hexdigits for char in hexstr) 116 | 117 | def generate_data_set(hashon, algos, hasher_that_takes_new): 118 | """Generates dataset based on data and list of strings that can be used to create objects to use that data""" 119 | data_dict = dict() 120 | # go over the algorithms 121 | for algo in algos: 122 | hashed = hasher_that_takes_new(algo, hashon.encode()).hexdigest() 123 | # create dict in dict with all infomation stored in a table 124 | data_dict.update({algo:{"data":hashed, "size":len(hashed), "size-as":list(), "connection":list()}}) 125 | 126 | for key in data_dict: 127 | # set default values 128 | hashed = data_dict[key]["data"] 129 | length = data_dict[key]["size"] 130 | 131 | for 
second in data_dict: 132 | if length == data_dict[second]["size"] and not second == key: 133 | if hashed == data_dict[second]["data"]: 134 | data_dict[key]["connection"].append(second) 135 | else: 136 | data_dict[key]["size-as"].append(second) 137 | else: 138 | continue 139 | 140 | return data_dict 141 | 142 | # return value for detect, a named tuple with two values 143 | NTUPLE = namedtuple("Closest", ["certain", "maybe"]) 144 | 145 | 146 | # detection function returns NTYPLE 147 | def detect(s, table, maybe=True): 148 | """Compares result from datasets, finds connections and eleminates contestants""" 149 | if not (len(s) % 4 == 0 and ishex(s)): 150 | return None 151 | 152 | so = list() 153 | so_far = list() 154 | length = len(s) 155 | 156 | for key in table: 157 | dat = table[key] 158 | 159 | if dat["size"] == length: 160 | for i in dat["connection"]: 161 | if i not in so_far: 162 | so_far.append(i) 163 | 164 | for i in so_far: 165 | dat = table[i]["connection"] 166 | 167 | for j in so_far: 168 | if not j in dat: 169 | so_far.remove(j) 170 | 171 | if maybe: 172 | for key in table: 173 | dat = table[key] 174 | if dat["size"] == length: 175 | so.append(key) 176 | 177 | if len(so_far) >= 0 and len(so) == 1: 178 | # if there only is one option then use it 179 | return NTUPLE(certain=so, maybe=[]) 180 | else: 181 | return NTUPLE(certain=so_far, maybe=so) 182 | -------------------------------------------------------------------------------- /hashit/extra.py: -------------------------------------------------------------------------------- 1 | """Extra functions and classes for hashit""" 2 | import binascii 3 | import hashlib 4 | 5 | # final class 6 | class Crc32: 7 | """This class is an api for the crc32 function that is compatible with mor""" 8 | def __init__(self, data=b''): 9 | """init class, creates data""" 10 | self.name = "crc32" 11 | self.data = data 12 | 13 | def update(self, data=b''): 14 | """Update self.data with new data""" 15 | self.data += data 16 | 17 | 
def copy(self): 18 | """return new Crc32 object with same properties""" 19 | return Crc32(self.data) 20 | 21 | def digest(self): 22 | """Digest as int""" 23 | return binascii.crc32(self.data) & 0xFFFFFFFF 24 | 25 | def hexdigest(self): 26 | """Digest as hex""" 27 | buf = (binascii.crc32(self.data) & 0xFFFFFFFF) 28 | return ("%08X" % buf).lower() 29 | 30 | # class for shake hash 31 | class shake: 32 | """Top-level api for hashlib.shake""" 33 | def __init__(self, hashn, data=b''): 34 | """Init class create hasher and data""" 35 | # split hashname with _ 36 | hashname = hashn.split("_") 37 | 38 | if len(hashname) == 3: 39 | if hashname[1] in ("256", "128"): 40 | self.hash = hashlib.new("shake_{}".format(hashname[1]), data) 41 | else: 42 | raise ValueError("{} is not a valid hash".format(hashn)) 43 | 44 | self.name = hashn 45 | self.length = int(hashname[2]) 46 | else: 47 | raise ValueError 48 | 49 | def update(self, data=b''): 50 | """Update self.data with new data""" 51 | self.hash.update(data) 52 | 53 | def copy(self): 54 | return self.hash.copy() 55 | 56 | def digest(self, length=None): 57 | """Digest binary""" 58 | length = length or self.length 59 | return self.hash.digest(length) 60 | 61 | def hexdigest(self, length=None): 62 | """Digest hex""" 63 | length = length or self.length 64 | return self.hash.hexdigest(length) 65 | 66 | 67 | LINUX_LIST = ['Mythbuntu', 'Mac OS X', 'Debian Pure Blend', 'Symphony OS', 'Astra Linux', 'Emdebian Grip',\ 68 | 'Russian Fedora Remix', 'Secure-K', 'Knopperdisk', 'Mobilinux', 'touchscreen', 'MX Linux', 'NepaLinux', 'fli4l', 'Nix', 'Ubuntu Mobile', 'primary',\ 69 | 'Fedora Core', 'ChromeOS', 'rPath', 'LEAF Project', 'MuLinux', 'Ubuntu',\ 70 | 'Berry Linux', 'dyne:bolic', 'TurnKey GNU/Linux', 'EasyPeasy', 'Budgie', 'Tin Hat Linux', 'paldo', 'Conary', 'Ubuntu Touch', 'netbooks', 'Emmabuntus',\ 71 | 'Linpus Linux Lite', 'Poseidon Linux', 'Elive', 'Source Mage', 'Skolelinux', 'Ubuntu MATE', 'Ubuntu Kylin', 'Solus', 'Nova', 'MeeGo', 
'Pinguy OS', 'Nokia N9',\ 72 | 'Kanotix', 'Korora', 'Linux Mint', 'Billix', 'Linpus Linux', 'Ubuntu JeOS', 'XFCE', 'TinyMe', 'VectorLinux', 'Antergos', 'Asianux', 'BlankOn', 'Netrunner',\ 73 | 'Trisquel GNU/Linux', 'Tinfoil Hat Linux', 'Familiar Linux', 'Sentry Firewall', 'Fedora', 'Parsix', 'MythTV', 'Castile-La Mancha', 'Pardus', 'Austrumi Linux',\ 74 | 'Bodhi Linux', 'OpenZaurus', 'SME Server', 'Mandrake 9.2', 'Frugalware Linux', 'Coyote Linux', 'Sorcerer', 'senior citizens',\ 75 | 'Red Flag Linux', 'Chakra Linux', 'Arch Linux', 'Caldera OpenLinux', 'cAos Linux', 'Red Hat', 'EnGarde Secure Linux', 'Annvix',\ 76 | 'Feather Linux', 'CoreOS', 'Gentoox', 'SUSE Studio', 'Red Hat Linux', 'SmoothWall', 'Goobuntu', 'SystemRescueCD', 'Peppermint OS', 'Wolvix',\ 77 | 'Iskolinux', 'Ubuntu Netbook Edition', 'Lunar Linux', 'Guadalinex', 'bioinformatics', 'Network Security Toolkit', 'The Amnesic Incognito Live System',\ 78 | 'Container Linux', 'ELinOS', 'Aurora', 'LinuxMCE', 'antiX', 'GeeXboX', 'Foresight Linux', 'RXART', 'Prevas Industrial Linux', 'thin client',\ 79 | 'Parabola GNU/Linux-libre', 'Go', 'Ututo', 'Dreamlinux', 'Sunwah Linux', 'LOUD', 'Yellow Dog Linux', 'Trinity Rescue Kit',\ 80 | 'Miracle Linux', 'Hanthana', 'ROSA Linux', 'Munich', 'OpenGEU', 'BackTrack', 'Calculate Linux', 'Sabayon Linux', 'Chromium OS', 'Platypux', 'Xfce', 'ArchBang',\ 81 | 'Baltix', 'Mageia', 'MontaVista Linux', 'SUSE Linux Enterprise Server', 'Joli OS', 'SolydXK', 'DNALinux', 'SalineOS', 'Fermi Linux LTS', 'SliTaz',\ 82 | 'Android', 'KDE', 'Sacix', 'LliureX', 'Xubuntu', 'musl', 'Univention Corporate Server', 'Red Hat Enterprise Linux', 'Ubuntu for Android', 'ALT Linux',\ 83 | 'Canaima', 'Kurumin', 'Moblin', 'Vyatta', 'Kubuntu', 'Pentoo', 'GIS', 'Topologilinux', 'WinLinux', 'autonomic',\ 84 | 'CentOS', 'CRUX', 'Trustix','Galsoft Linux', 'Sugar-on-a-Stick Linux', 'BackBox', 'simpleLinux', 'Smallfoot', 'BackSlash Linux', 'HandyLinux',\ 85 | 'Funtoo Linux', 'Element OS', 'Ubuntu Budgie', 
'YOPER', 'Xbox', 'Corel Linux', 'Webconverger', 'PelicanHPC', 'HostGIS',\ 86 | 'Yggdrasil Linux/GNU/X', 'BLAG Linux and GNU', 'LinHES', 'Raspbian', 'gNewSense', 'Slackintosh', 'OpenWrt', 'SalixOS', 'Qubes OS', 'One-Laptop-Per-Child project',\ 87 | 'Unity Linux', 'Mezzo', 'MythDora', 'Gobuntu', 'Fuduntu', 'CrunchBang Linux', 'Bharat Operating System Solutions', 'Italy', 'Enlightenment','Aurora SPARC Linux',\ 88 | 'Sabily', 'GNU Guix', 'PowerPC', 'MAX', 'SteamOS', 'Raspberry Pi Foundation', 'Mandriva Linux', 'Ubuntu GNOME', 'MkLinux', 'Frozen', 'Karoshi', 'Damn Small Linux',\ 89 | 'ZipSlack', 'MEPIS', 'Scientific Linux', 'Kuki Linux', 'LiMux', 'Finnix', 'SuperGamer', 'NimbleX', 'Slamd64', 'grml', 'Ubuntu Server', 'Alpine Linux', 'Dragora GNU/Linux-Libre',\ 90 | 'Fermi National Accelerator Laboratory', 'Porteus', 'NixOS', 'Generalitat Valenciana', 'Jlime', 'Puppy Linux', 'Tiny Core Linux', 'tomsrtbt', 'Edubuntu', 'OpenMandriva',\ 91 | 'Thinstation', 'elementary OS', 'Void Linux', 'Rocks Cluster Distribution', 'Lubuntu', 'gOS', 'Ubuntu TV', 'Openbox', 'Sharp Zaurus', 'PS2 Linux', 'MintPPC', 'Kali Linux',\ 92 | 'Qimo 4 Kids', 'Nitix', 'SUSE Linux Enterprise Desktop', 'GendBuntu', 'Buildix', 'Impi Linux', 'Linux Lite', 'Guix System Distribution', 'Turbolinux', 'Maemo',\ 93 | 'Softlanding Linux System', 'SUSE', 'EduLinux', 'Debian Live', 'OpenTV', 'Daylight Linux', 'Manjaro Linux', 'Nagra', 'Slax', 'Caldera', 'UberStudent',\ 94 | 'MCC Interim Linux', 'Oracle Linux', 'K12LTSP', 'Devuan', 'OjubaLinux', 'Xandros', 'Molinux', 'openSUSE', 'SparkyLinux', 'DSLinux', 'GoboLinux',\ 95 | 'LinuxTLE', 'MATE', 'Zenwalk', 'Andalucia', 'LinuxBBQ', 'Slackware', 'Vine Linux', 'PCLinuxOS', 'Vinux', 'Musix GNU+Linux',\ 96 | 'Ubuntu Studio', 'Knoppix', 'ClearOS', 'Hikarunix', 'NASLite', 'KateOS', 'LTSP', 'Mandrake Linux', 'Nokia N800'] 97 | -------------------------------------------------------------------------------- /hashit/version.py: 
-------------------------------------------------------------------------------- 1 | """ Set global version """ 2 | __version__ = "3.5.3" 3 | -------------------------------------------------------------------------------- /img/demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cjavad/hashit/b4f0fd3f84267f3a330a6aa34abd14adb31bc8d0/img/demo.gif -------------------------------------------------------------------------------- /img/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cjavad/hashit/b4f0fd3f84267f3a330a6aa34abd14adb31bc8d0/img/icon.png -------------------------------------------------------------------------------- /release/hashit.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cjavad/hashit/b4f0fd3f84267f3a330a6aa34abd14adb31bc8d0/release/hashit.tar.gz -------------------------------------------------------------------------------- /release/hashit.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cjavad/hashit/b4f0fd3f84267f3a330a6aa34abd14adb31bc8d0/release/hashit.zip -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | from hashit.version import __version__ 3 | 4 | setup( 5 | name = "hashit", 6 | author = "Javad Shafique", 7 | author_email = "javadshafique@hotmail.com", 8 | version=__version__, 9 | license="MIT", 10 | include_package_data=True, 11 | test_suite="tests", 12 | zip_safe=True, 13 | entry_points = { 14 | "console_scripts":[ 15 | "hashit = hashit.__main__:main" 16 | ] 17 | }, 18 | url="https://github.com/cjavad/hashit", 19 | packages=["hashit"], 20 | description = "Hashing Application with muliple 
modes, settings and more! Hashit, is an hashing application used as an verification tool, intendet to replace the 'standard' linux hashing utilities such as md5sum, sha1sum and so on. One of the main reasons why this program was develop was to create an easy-to-use command line tool for newcomers and professionals alike to hash/verify files and other data. For more see our homepage at https://cjavad.github.io/hashit", 21 | long_description = open("README.rst", "r").read(), 22 | classifiers = [ 23 | "Programming Language :: Python :: 2", 24 | "Programming Language :: Python :: 3", 25 | "Topic :: Security :: Cryptography", 26 | "License :: OSI Approved :: MIT License", 27 | "Environment :: Console", 28 | "Intended Audience :: Developers", 29 | "Intended Audience :: System Administrators" 30 | ] 31 | ) 32 | -------------------------------------------------------------------------------- /snapcraft.yaml: -------------------------------------------------------------------------------- 1 | name: hashit 2 | version: 3.5.3 3 | summary: hashit is an replacement for (md5sum, sha1sum, and so on) 4 | icon: img/icon.png 5 | description: | 6 | Hashit, is an hashing application used as an verification tool, intendet to replace the "standard" linux hashing utilities such as 7 | md5sum, sha1sum and so on. One of the main reasons why this program was develop was to create an easy-to-use command line tool for 8 | newcomers and professionals alike to hash/verify files and other data. For more see our homepage at https://cjavad.github.io/hashit 9 | 10 | 11 | grade: stable 12 | confinement: strict 13 | 14 | apps: 15 | hashit: 16 | command: bin/hashit 17 | plugs: [home, removable-media] 18 | 19 | parts: 20 | hashit: 21 | plugin: python 22 | python-version: python3 23 | source: . 
24 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Runner for unittests""" 2 | import unittest 3 | from . import unit 4 | 5 | def create_suite(): 6 | """Create test suite for unittest""" 7 | suite = unittest.TestSuite() 8 | suite.addTest(unit.Test()) 9 | suite.addTest(unit.TestLoad()) 10 | # return configured testsuite 11 | return suite 12 | 13 | if __name__ == "__main__": 14 | # run all 15 | unittest.TextTestRunner().run(create_suite()) 16 | -------------------------------------------------------------------------------- /tests/benchmarks/res/benchmarks.json: -------------------------------------------------------------------------------- 1 | { 2 | "DSA": { 3 | "algo": "DSA", 4 | "file-time": 2.8450868749996516, 5 | "number": 100000, 6 | "str-time": 0.18207929300024261 7 | }, 8 | "DSA-SHA": { 9 | "algo": "DSA-SHA", 10 | "file-time": 2.666441807999945, 11 | "number": 100000, 12 | "str-time": 0.22197757800040563 13 | }, 14 | "blake2b": { 15 | "algo": "blake2b", 16 | "file-time": 2.8445470799997565, 17 | "number": 100000, 18 | "str-time": 0.20133252900041043 19 | }, 20 | "blake2s": { 21 | "algo": "blake2s", 22 | "file-time": 3.2005310449999342, 23 | "number": 100000, 24 | "str-time": 0.20474589700006618 25 | }, 26 | "crc32": { 27 | "algo": "crc32", 28 | "file-time": 3.0626791010004126, 29 | "number": 100000, 30 | "str-time": 0.19565401399995608 31 | }, 32 | "dsaEncryption": { 33 | "algo": "dsaEncryption", 34 | "file-time": 3.06814845100007, 35 | "number": 100000, 36 | "str-time": 0.18941209899958267 37 | }, 38 | "dsaWithSHA": { 39 | "algo": "dsaWithSHA", 40 | "file-time": 2.65577638700006, 41 | "number": 100000, 42 | "str-time": 0.18819310099979702 43 | }, 44 | "ecdsa-with-SHA1": { 45 | "algo": "ecdsa-with-SHA1", 46 | "file-time": 2.604442813999867, 47 | "number": 100000, 48 | "str-time": 0.1899897069997678 49 | }, 50 | "md4": 
{ 51 | "algo": "md4", 52 | "file-time": 2.5014770049997423, 53 | "number": 100000, 54 | "str-time": 0.1810867640001561 55 | }, 56 | "md5": { 57 | "algo": "md5", 58 | "file-time": 2.858168186000057, 59 | "number": 100000, 60 | "str-time": 0.20138439300035316 61 | }, 62 | "ripemd160": { 63 | "algo": "ripemd160", 64 | "file-time": 3.7956181730000935, 65 | "number": 100000, 66 | "str-time": 0.2105701600003158 67 | }, 68 | "sha": { 69 | "algo": "sha", 70 | "file-time": 3.1197131930002797, 71 | "number": 100000, 72 | "str-time": 0.1953218310000011 73 | }, 74 | "sha1": { 75 | "algo": "sha1", 76 | "file-time": 2.605248396999741, 77 | "number": 100000, 78 | "str-time": 0.17578223699956652 79 | }, 80 | "sha224": { 81 | "algo": "sha224", 82 | "file-time": 3.1550695630003247, 83 | "number": 100000, 84 | "str-time": 0.19930047200023182 85 | }, 86 | "sha256": { 87 | "algo": "sha256", 88 | "file-time": 3.120442401999753, 89 | "number": 100000, 90 | "str-time": 0.2023992099998395 91 | }, 92 | "sha384": { 93 | "algo": "sha384", 94 | "file-time": 3.145929054000135, 95 | "number": 100000, 96 | "str-time": 0.2327170609996756 97 | }, 98 | "sha3_224": { 99 | "algo": "sha3_224", 100 | "file-time": 3.8057815829997708, 101 | "number": 100000, 102 | "str-time": 0.3033928019999621 103 | }, 104 | "sha3_256": { 105 | "algo": "sha3_256", 106 | "file-time": 3.690774378000242, 107 | "number": 100000, 108 | "str-time": 0.2946057000003748 109 | }, 110 | "sha3_384": { 111 | "algo": "sha3_384", 112 | "file-time": 4.387827305000428, 113 | "number": 100000, 114 | "str-time": 0.38775761599981706 115 | }, 116 | "sha3_512": { 117 | "algo": "sha3_512", 118 | "file-time": 4.753955307000069, 119 | "number": 100000, 120 | "str-time": 0.3283711169997332 121 | }, 122 | "sha512": { 123 | "algo": "sha512", 124 | "file-time": 2.940883205999853, 125 | "number": 100000, 126 | "str-time": 0.22776917900000626 127 | }, 128 | "whirlpool": { 129 | "algo": "whirlpool", 130 | "file-time": 4.990728630999911, 131 | "number": 
100000, 132 | "str-time": 0.25389273800010415 133 | } 134 | } -------------------------------------------------------------------------------- /tests/benchmarks/res/benchmarks2.json: -------------------------------------------------------------------------------- 1 | { 2 | "DSA": { 3 | "algo": "DSA", 4 | "file-time": 2.587571631999708, 5 | "number": 100000, 6 | "str-time": 0.1804981029999908 7 | }, 8 | "DSA-SHA": { 9 | "algo": "DSA-SHA", 10 | "file-time": 2.621822691000034, 11 | "number": 100000, 12 | "str-time": 0.18724435099966286 13 | }, 14 | "blake2b": { 15 | "algo": "blake2b", 16 | "file-time": 2.5808818010000323, 17 | "number": 100000, 18 | "str-time": 0.1994517729999643 19 | }, 20 | "blake2s": { 21 | "algo": "blake2s", 22 | "file-time": 2.9960495519999313, 23 | "number": 100000, 24 | "str-time": 0.18518551200031652 25 | }, 26 | "crc32": { 27 | "algo": "crc32", 28 | "file-time": 3.0182085370001914, 29 | "number": 100000, 30 | "str-time": 0.1889544890000252 31 | }, 32 | "dsaEncryption": { 33 | "algo": "dsaEncryption", 34 | "file-time": 2.721187908999582, 35 | "number": 100000, 36 | "str-time": 0.1898579060002703 37 | }, 38 | "dsaWithSHA": { 39 | "algo": "dsaWithSHA", 40 | "file-time": 2.625476805999824, 41 | "number": 100000, 42 | "str-time": 0.18663340800003425 43 | }, 44 | "ecdsa-with-SHA1": { 45 | "algo": "ecdsa-with-SHA1", 46 | "file-time": 2.702637606999815, 47 | "number": 100000, 48 | "str-time": 0.19142900599990753 49 | }, 50 | "md4": { 51 | "algo": "md4", 52 | "file-time": 2.468673462999959, 53 | "number": 100000, 54 | "str-time": 0.17610469600003853 55 | }, 56 | "md5": { 57 | "algo": "md5", 58 | "file-time": 3.536771564999981, 59 | "number": 100000, 60 | "str-time": 0.18716412799994941 61 | }, 62 | "ripemd160": { 63 | "algo": "ripemd160", 64 | "file-time": 4.072462933000224, 65 | "number": 100000, 66 | "str-time": 0.21529122499987352 67 | }, 68 | "sha": { 69 | "algo": "sha", 70 | "file-time": 3.537468500999694, 71 | "number": 100000, 72 | "str-time": 
0.20911539300004733 73 | }, 74 | "sha1": { 75 | "algo": "sha1", 76 | "file-time": 2.6043913959997553, 77 | "number": 100000, 78 | "str-time": 0.17510244300001432 79 | }, 80 | "sha224": { 81 | "algo": "sha224", 82 | "file-time": 3.0966381100001854, 83 | "number": 100000, 84 | "str-time": 0.2003057900001295 85 | }, 86 | "sha256": { 87 | "algo": "sha256", 88 | "file-time": 3.107855021999967, 89 | "number": 100000, 90 | "str-time": 0.20576528700030394 91 | }, 92 | "sha384": { 93 | "algo": "sha384", 94 | "file-time": 3.0349763379999786, 95 | "number": 100000, 96 | "str-time": 0.22604391000004398 97 | }, 98 | "sha3_224": { 99 | "algo": "sha3_224", 100 | "file-time": 3.527630315999886, 101 | "number": 100000, 102 | "str-time": 0.3003184279996276 103 | }, 104 | "sha3_256": { 105 | "algo": "sha3_256", 106 | "file-time": 4.620477275999747, 107 | "number": 100000, 108 | "str-time": 0.2987421979996725 109 | }, 110 | "sha3_384": { 111 | "algo": "sha3_384", 112 | "file-time": 4.095533193999927, 113 | "number": 100000, 114 | "str-time": 0.2997056720000728 115 | }, 116 | "sha3_512": { 117 | "algo": "sha3_512", 118 | "file-time": 5.690221998999732, 119 | "number": 100000, 120 | "str-time": 0.35634653699980845 121 | }, 122 | "sha512": { 123 | "algo": "sha512", 124 | "file-time": 3.3216979710000487, 125 | "number": 100000, 126 | "str-time": 0.23402784100017016 127 | }, 128 | "whirlpool": { 129 | "algo": "whirlpool", 130 | "file-time": 4.943534676000127, 131 | "number": 100000, 132 | "str-time": 0.2533912399999281 133 | } 134 | } -------------------------------------------------------------------------------- /tests/benchmarks/speed.py: -------------------------------------------------------------------------------- 1 | """Benchmarking for hashits hashing functions and algorithms""" 2 | from __future__ import print_function 3 | import timeit, os, json, hashlib 4 | from memory_profiler import profile 5 | os.sys.path.insert(0, "..") 6 | import hashit 7 | 8 | 9 | # do not use, at least 
10 times slower than any other method 10 | def easy_hash(filename, hasher): 11 | """Slow but easy to use self-contained hasher""" 12 | filename = filename 13 | # openfile 14 | with open(filename, "rb") as afile: 15 | for block in (line for line in afile.readlines()): 16 | hasher.update(block) 17 | # return hash 18 | return hasher.hexdigest() 19 | 20 | if os.sys.version_info[0] == 2: 21 | global input 22 | input = raw_input 23 | 24 | # takes algorithem 25 | def hashfile(file, algo): 26 | return hashit.hashIter(hashit.blockIter(open(file, "rb")), hashit.new(algo)) 27 | 28 | def hashstr(string, algo): 29 | return hashit.new(algo, string.encode()).hexdigest() 30 | 31 | 32 | def slow_hashfile(file, algo): 33 | return hashit.new(algo, open(file, "rb").read()).hexdigest() 34 | 35 | 36 | def easy_hashfile(file, algo): 37 | return easy_hash(file, hashit.new(algo)) 38 | 39 | 40 | 41 | def gen(n=timeit.default_number): 42 | for algo in hashit.__algorithms__: 43 | x = timeit.timeit("hashfile('speed.py', '" + algo + "')", setup="from __main__ import hashfile", number=n) 44 | x2 = timeit.timeit("hashstr('"+ str(x) + "', '" + algo + "')", setup="from __main__ import hashstr", number=n) 45 | yield {"algo":algo, "file-time":x,"str-time":x2,"number":n} 46 | 47 | 48 | def test1(n=timeit.default_number, filename=None): 49 | o = dict() 50 | for i in gen(n): 51 | o.update({i["algo"]:i}) 52 | 53 | open((filename or input("Output to: ")), "w").write(json.dumps(o, indent=4, sort_keys=True)) 54 | 55 | 56 | def parse_test1(jsonfile): 57 | data = json.loads(open(jsonfile, "r").read()) 58 | 59 | def findt(s): 60 | sorted_list = sorted(data, key=lambda key: data[key][s]) 61 | for c, i in enumerate(sorted_list): 62 | sorted_list[c] = i + ": " + str(data[i][s]) 63 | 64 | return sorted_list 65 | 66 | print("Fastest to slowest file\n ", '\n '.join(findt("file-time")), end='\n\n') 67 | print("Fastest to slowest string\n ", '\n '.join(findt("str-time")), end='\n\n') 68 | 69 | 70 | 71 | # where 
default big file is an 512M file 72 | #@profile 73 | def test2(algo, n=1000, bigfile="/home/javad/filename"): 74 | if algo in hashit.__algorithms__: 75 | fast = timeit.timeit("hashfile('" + bigfile + "', '"+algo+"')", setup="from __main__ import hashfile", number=n) 76 | print("Fast:", fast) 77 | all_in = timeit.timeit("slow_hashfile('" + bigfile + "', '"+algo+"')", setup="from __main__ import slow_hashfile", number=n) 78 | print("All in:", all_in) 79 | easy = timeit.timeit("easy_hashfile('" + bigfile + "', '"+algo+"')", setup="from __main__ import easy_hashfile", number=n) 80 | print("Easy:", easy) 81 | ''' 82 | 10000: 83 | Fast: 5.277344635996997 84 | All in: 3.604332027996861 85 | Easy: 35.06488174900005 86 | Filename: speed.py 87 | 88 | 1000000: 89 | Fast: 54.695157744000085 90 | All in: 40.071381821000045 91 | Easy: 880.830499345 92 | Filename: dataset_from_detect.json 93 | 94 | ''' 95 | 96 | #test2("md5", 1000000, path_to_large_file) 97 | 98 | if __name__ == "__main__": 99 | # hash with a bunch of algorigthms a million times each and compare results 100 | if not (os.path.exists("./res/benchmarks.json") and os.path.exists("./res/benchmarks2.json")): 101 | test1(100000, "./res/benchmarks.json") 102 | test1(100000, "./res/benchmarks2.json") 103 | 104 | parse_test1("./res/benchmarks.json") 105 | parse_test1("./res/benchmarks2.json") 106 | 107 | test2("sha256", bigfile="./res/file.json") -------------------------------------------------------------------------------- /tests/benchmarks/speed2.py: -------------------------------------------------------------------------------- 1 | """Speed tests comparing pycrypto and hashlib's hash functions in terms of performance""" 2 | from __future__ import print_function 3 | import timeit, os, json, hashlib 4 | from Crypto.Hash import MD4, MD5, SHA224, SHA256, SHA384, SHA512, SHA, RIPEMD 5 | os.sys.path.insert(0, "..") 6 | import hashit 7 | 8 | 9 | if os.sys.version_info[0] == 2: 10 | global input 11 | input = raw_input 12 | 
13 | # dict with hashers 14 | hashers = { 15 | "md4":{"hashlib_hash":hashlib.new("md4"), "crypto_hash":MD4.MD4Hash()}, 16 | "md5":{"hashlib_hash":hashlib.new("md5"), "crypto_hash":MD5.MD5Hash()}, 17 | "sha224":{"hashlib_hash":hashlib.new("sha224"), "crypto_hash":SHA224.SHA224Hash()}, 18 | "sha256":{"hashlib_hash":hashlib.new("sha256"), "crypto_hash":SHA256.SHA256Hash()}, 19 | "sha384":{"hashlib_hash":hashlib.new("sha384"), "crypto_hash":SHA384.SHA384Hash()}, 20 | "sha512":{"hashlib_hash":hashlib.new("sha512"), "crypto_hash":SHA512.SHA512Hash()}, 21 | "sha1":{"hashlib_hash":hashlib.new("sha1"), "crypto_hash":SHA.SHA1Hash()}, 22 | "ripemd160":{"hashlib_hash":hashlib.new("ripemd160"), "crypto_hash":RIPEMD.RIPEMD160Hash()} 23 | } 24 | 25 | # takes hasher 26 | def hashFile(filename, hasher): 27 | return hashit.hashIter(hashit.blockIter(open(filename, "rb")), hasher) 28 | 29 | def hashStr(binary, hasher): 30 | hasher.update(binary) 31 | return hasher.hexdigest() 32 | 33 | def NoMemFile(filename, hasher): 34 | hasher.update(open(filename, "rb").read()) 35 | return hasher.hexdigest() 36 | 37 | def RawCompare(algo, file="speed2.py", n=100000): 38 | # create command 39 | cC = lambda key, filename, command, algorihtm=algo: "{}('{}', hashers['{}']['{}'])".format(command, filename, algorihtm, key) 40 | setup = "from __main__ import hashFile, NoMemFile, hashers" 41 | 42 | Mem_H, Mem_C = timeit.timeit(cC("hashlib_hash", file, "hashFile"), setup=setup, number=n), \ 43 | timeit.timeit(cC("crypto_hash", file, "hashFile"), setup=setup, number=n) 44 | 45 | NoMem_H, NoMem_C = timeit.timeit(cC("hashlib_hash", file, "NoMemFile"), setup=setup, number=n), \ 46 | timeit.timeit(cC("crypto_hash", file, "NoMemFile"), setup=setup, number=n) 47 | 48 | print(Mem_H, NoMem_H) 49 | print(Mem_C, NoMem_C) 50 | 51 | # generate dataset for CompareCnH 52 | def CryptoVsHashlib(file_to_hash="speed.py", data_to_hash="Hello World!", n=timeit.default_number): 53 | # dict for results 54 | results = { 55 | 
"md4":{}, 56 | "md5":{}, 57 | "sha224":{}, 58 | "sha256":{}, 59 | "sha384":{}, 60 | "sha512":{}, 61 | "sha1":{}, 62 | "ripemd160":{} 63 | } 64 | for algo in hashers: 65 | # hashlib_hash for hashlib and crypto_hash for crypto (pycrypto(dome)) 66 | # first hash an file 67 | h_file = timeit.timeit("hashFile('{}', hashers['{}']['hashlib_hash'])".format(file_to_hash, algo), setup="from __main__ import hashFile, hashers", number=n) 68 | c_file = timeit.timeit("hashFile('{}', hashers['{}']['crypto_hash'])".format(file_to_hash, algo), setup="from __main__ import hashFile, hashers", number=n) 69 | 70 | h_str = timeit.timeit("hashStr(b'{}', hashers['{}']['hashlib_hash'])".format(data_to_hash, algo), setup="from __main__ import hashStr, hashers", number=n) 71 | c_str = timeit.timeit("hashStr(b'{}', hashers['{}']['crypto_hash'])".format(data_to_hash, algo), setup="from __main__ import hashStr, hashers", number=n) 72 | 73 | results[algo]["hashlib_hash"] = {"file":h_file, "str":h_str} 74 | results[algo]["crypto_hash"] = {"file":c_file, "str":c_str} 75 | 76 | return results 77 | 78 | # use CryptoVsHashlib to compare speed 79 | 80 | def CompareCnH(output=None, amount_of_datasets=100, n=1000): 81 | res = list() 82 | for _ in range(amount_of_datasets): 83 | res.append(CryptoVsHashlib(n=n)) 84 | 85 | res2 = dict() 86 | 87 | for algo in hashers: 88 | res2[algo] = {} 89 | res2[algo]["crypto_hash"] = {"file":0, "str":0, "amount-file":[], "amount-str":[]} 90 | res2[algo]["hashlib_hash"] = {"file":0, "str":0, "amount-file":[], "amount-str":[]} 91 | 92 | other = lambda x: "crypto_hash" if x == "hashlib_hash" else "hashlib_hash" # switch to the opposite 93 | 94 | for ds in res: 95 | ff = max(ds[algo], key=lambda key: ds[algo][key]["file"]) 96 | fs = max(ds[algo], key=lambda key: ds[algo][key]["str"]) 97 | res2[algo][ff]["file"] += 1 98 | res2[algo][ff]["amount-file"].append(ds[algo][ff]["file"] - ds[algo][other(ff)]["file"]) 99 | res2[algo][fs]["str"] += 1 100 | 
res2[algo][fs]["amount-str"].append(ds[algo][fs]["str"] - ds[algo][other(fs)]["str"]) 101 | 102 | # write output to file 103 | open((output or str(input("Output to: "))), "w").write(json.dumps({"datasets":res, "results":res2}, indent=4, sort_keys=True)) 104 | 105 | def ReadCnH(filename): 106 | res2 = json.loads(open(filename, "r").read())["results"] 107 | # print data 108 | for algo in hashers: 109 | f_file = max(res2[algo], key=lambda key: res2[algo][key]["file"]) 110 | f_str = max(res2[algo], key=lambda key: res2[algo][key]["str"]) 111 | 112 | print("Fastest", algo, "For files", f_file, ) 113 | print("Fastest", algo, "For strings", f_str) 114 | 115 | if __name__ == "__main__": 116 | 117 | if not os.path.exists("./res/pycrypto_vs_hashlib.json"): 118 | CompareCnH(output="./res/pycrypto_vs_hashlib.json") 119 | 120 | ReadCnH("./res/pycrypto_vs_hashlib.json") -------------------------------------------------------------------------------- /tests/config.py: -------------------------------------------------------------------------------- 1 | # check sums for file 2 | FILE = "LICENSE" 3 | FILE_SUM = "c11869fc956d819d2a336c74f4cc6000" 4 | 5 | FILE_SUMS = { 6 | "DSA": "05d9842ff5ab98ea012b1cbe2693a0714a33547a", 7 | "DSA-SHA": "05d9842ff5ab98ea012b1cbe2693a0714a33547a", 8 | "blake2b": "a64b7235b81d307b919c0d74ded6c86b823e2b9b2a9c1e50e55e273daedd5417027f2a2a1b4abc5d72be5170b462979867cae4b8c3fcf8a7d8a09a1c93fc9d11", 9 | "blake2s": "1ecfc726c59ec5cd52a24730e3345a650d4a2554b1b1dc50ed9c1faf9ebd8179", 10 | "crc32": "3371bb00", 11 | "dsaEncryption": "05d9842ff5ab98ea012b1cbe2693a0714a33547a", 12 | "dsaWithSHA": "05d9842ff5ab98ea012b1cbe2693a0714a33547a", 13 | "ecdsa-with-SHA1": "05d9842ff5ab98ea012b1cbe2693a0714a33547a", 14 | "md4": "1901cf76521dfb68b0a88df72c995345", 15 | "md5": "c11869fc956d819d2a336c74f4cc6000", 16 | "mdc2":"e8746a342b753d68e2c44dbdfdc52950", 17 | "ripemd160": "7fbabc556593e015495d752a0f8ba1d99eee0f8a", 18 | "sha": "597018a568e01f2434ce967be416beaefff02536", 19 
| "sha1": "05d9842ff5ab98ea012b1cbe2693a0714a33547a", 20 | "sha224": "16ac30faa8d42524bc70f3f52412680ada5993d401ca057edfe3cdec", 21 | "sha256": "81be97a4c17e703ddce3cfe0bd774aba4d67d4e3f225da4b4071a75388132aca", 22 | "sha384": "b4efcc718ecf169bddbaeb023694071193a255d57674144220f9880544da1feaee0a218043ae00cbd3fbe2e84900e771", 23 | "sha512": "70ef754d5a3f87b7a545bce7360f20327b17f094fc75f3fc095551d6ea9e2459b1bbc7d22f26971d7716a8d204e83b33b169099544bc7c32feac26a31090cc39", 24 | "sha3_224":"ed9514941e44182fa51414d87a9b0866aedc1a9a114cf82f001d2213", 25 | "sha3_256":"6b39f585427dd5ef205dbef1560e390ec4e413641c59255b361cf6daca8160eb", 26 | "sha3_384":"0571ed8233e5203fae819095a6802a7a5fbc566c96381cfcf710f2d338c3a6d3bf81f29128a4757c73f7e0a43c31a9e4", 27 | "sha3_512":"b02b6e54b0b44d54dce348d745f718233ee74ef6d95abf45cd76054b85c413b71d4de5dac8e77bbb3a48011bbf0806db7c431a43a7ca2a4fc6d35328575c1c2b", 28 | "whirlpool": "f6f74448a5ea9553387678f68146d0f38dd639e644e547840077cd39a6c20a23452d28d8758aa2aba03bcb2eba38b350050ec5fecc52d1f813ae0e1892994ce8" 29 | } -------------------------------------------------------------------------------- /tests/spec/arg.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | 3 | __version__ = "3.3.3a0" 4 | 5 | class Print(argparse.Action): 6 | def __init__(self, nargs=0, **kwargs): 7 | if nargs != 0: 8 | raise ValueError('nargs for StartAction must be 0; it is just a flag.') 9 | elif "text" in kwargs: 10 | self.data = kwargs.pop("text") 11 | 12 | super().__init__(nargs=nargs, **kwargs) 13 | 14 | def __call__(self, parser, namespace, values, option_string=None): 15 | print(self.data) 16 | 17 | class Execute(argparse.Action): 18 | def __init__(self, nargs=0, **kwargs): 19 | if nargs != 0: 20 | raise ValueError('nargs for StartAction must be 0; it is just a flag.') 21 | 22 | if "func" in kwargs: 23 | self.data = kwargs.pop("func") 24 | 25 | if "exit" in kwargs: 26 | self.exit = True if 
kwargs.pop("exit") else False 27 | 28 | super().__init__(nargs=nargs, **kwargs) 29 | 30 | def __call__(self, parser, namespace, values, option_string=None): 31 | print(self.data()) 32 | if self.exit: 33 | exit() 34 | 35 | parser = argparse.ArgumentParser("hashit", "hashit [options] $path", "hashit is an hashing application...", "MIT License, Copyrigth (c) 2020 Javad Shafique") 36 | parser.add_argument("-V", "--version", help="Print current version and exit", action="version", version="%(prog)s " + __version__) 37 | parser.add_argument("-l", "--license", help="Print license and exit", action=Print, text="MIT") 38 | parser.add_argument("-H", "--hash", help="Select hash use -hl --hash-list for more info") 39 | parser.add_argument("-hl", "--hash-list", help="Prints list of all supported hashes and exits", action=Execute, func=lambda: 1, exit=True) 40 | parser.add_argument("-a", "--all", help="Calculate all hashes for a single file") 41 | parser.add_argument("-sfv", "--sfv", help="Outputs in a sfv compatible format", action="store_true") 42 | parser.add_argument("-C", "--color", help="Enable colored output where it is supported", action="store_true") 43 | parser.add_argument("-f", "--file", help="Hash single a file") 44 | parser.add_argument("-S", "--size", help="Adds the file size to the output", action="store_true") 45 | parser.add_argument("-s", "--string", help="hash a string or a piece of text", action="store_const", const=True) 46 | parser.add_argument("-sp", "--strip-path", help="Strips fullpath from the results", action="store_true") 47 | parser.add_argument("-A", "--append", help="Instead of writing to a file you will append to it", action="store_true") 48 | parser.add_argument("-d", "--detect", help="Enable hash detection for check (can take argument)", action="store_const", const=True) 49 | parser.add_argument("-m", "--memory-optimatation", help="Enables memory optimatation (useful for large files)", action="store_true") 50 | parser.add_argument("-c", 
"--check", help="Verify checksums from a checksum file") 51 | parser.add_argument("-q", "--quiet", help="Reduces output", action="store_true") 52 | parser.add_argument("-bsd", "--bsd", help="output using the bsd checksum-format", action="store_true") 53 | parser.add_argument("-o", "--output", help="output output to an output (file)") 54 | 55 | args = parser.parse_args() 56 | print(args) -------------------------------------------------------------------------------- /tests/spec/gui.py: -------------------------------------------------------------------------------- 1 | """Simple GUI for hashit 2 | 3 | This program simulates in what ways you can use hashit 4 | """ 5 | import os 6 | import argparse 7 | import easygui as gui 8 | os.sys.path.insert(0, "../..") 9 | from hashit import new, __algorithms__, __help__, hashFile, check_ 10 | from hashit.__main__ import walk 11 | 12 | def showhelp(): 13 | gui.buttonbox(__help__, "HASHIT - HELP", choices=["OK"], image="../../img/icon.png") 14 | 15 | def selecthash(): 16 | return gui.choicebox("Select an hash", "HASHIT", __algorithms__) 17 | 18 | 19 | def writetofile(): 20 | yn = gui.ynbox("Write to file? 
def main_():
    """Show the command picker once and run the selected action.

    Each branch drives an easygui dialog flow; raises whatever the
    dialogs raise when cancelled (caught by main() below).
    """
    COMMANDS = ["hash an file", "hash files from a directory", "hash all files and folders in a directory", "check a checksum file", "help", "exit"]
    command = gui.choicebox("Select command:", "HASHIT", COMMANDS)

    if command == COMMANDS[0]:
        # hash a single file
        filename = gui.fileopenbox("Choose a file to hash", "HASHIT")
        hashres = hashFile(filename, new(selecthash()), False)
        file = writetofile()

        # BUGFIX: the original asked for a target file but never wrote to it;
        # honour the user's choice before showing the result.
        if file:
            with open(file, "w") as out:
                out.write(str(hashres))

        gui.msgbox(hashres, "HASHIT")

    elif command == COMMANDS[1]:
        # hash user-selected files from one directory (non-recursive)
        my_path = gui.diropenbox("select directory:", "HASHIT")
        files = [my_path + "/" + f for f in os.listdir(my_path) if os.path.isfile(os.path.join(my_path, f))]
        files_to_hash = gui.multchoicebox("Select files to hash:", "HASHIT", files)
        hasher = selecthash()
        HASHED = [str(hashFile(fname, new(hasher), False)) + " " + fname for fname in files_to_hash]

        file = writetofile()

        if file:
            # use a context manager instead of leaking the handle
            with open(file, "w") as out:
                out.write("\n".join(HASHED))
        else:
            gui.msgbox('\n\n'.join(HASHED))

    elif command == COMMANDS[2]:
        # hash everything under a directory, recursively (via walk)
        my_path = gui.diropenbox("select directory:", "HASHIT")
        files = walk(my_path)
        hasher = selecthash()
        HASHED = [str(hashFile(fname, new(hasher), False)) + " " + fname for fname in files]

        file = writetofile()

        if file:
            with open(file, "w") as out:
                out.write("\n".join(HASHED))
        else:
            gui.msgbox('\n\n'.join(HASHED))

    elif command == COMMANDS[3]:
        # verify an existing checksum file
        file = readfromfile()
        hasher = new(selecthash())
        DONE = []

        # read the first line once, closing the file afterwards
        with open(file, "r") as checkfile:
            first_line = checkfile.readline()

        for c in check_(file, hasher, first_line, False, False, False):
            if isinstance(c, str):
                # check_ yields plain strings for errors
                gui.exceptionbox("An Error has occured:\n\n {}".format(c), "HASHIT")
            elif not c["hash_check"]:
                DONE.append("{}: FAILED".format(c["filename"]))
            else:
                DONE.append("{}: OK".format(c["filename"]))

        gui.msgbox('\n'.join(DONE))

    elif command == COMMANDS[4]:
        showhelp()

    elif command == COMMANDS[5]:
        exit()

def main():
    """Loop main_ until the user exits or a dialog is cancelled."""
    while 1:
        try:
            main_()
        except Exception:
            # easygui dialogs raise on cancel; treat any failure as "quit"
            break
    exit()

if __name__ == "__main__":
    main()
list(GLOBAL["EXTRA"].keys())) # add extras 38 | # sort set 39 | s = [sorted(algos)[x:x+2] for x in range(0, len(algos), 2)] 40 | for c, l in enumerate(s): 41 | s[c] = ', '.join(l) 42 | 43 | return [""]+s+[""] 44 | 45 | # set commands 46 | argv.set("-h", "--help", "help", "Print help message and exit", None, __help__(argv.generate_docs), True) 47 | argv.set("-v", "--version", "version", "Print current version and exit", None, __version__, True) 48 | argv.set("-l", "--license", "license", "Print license and exit", None, __license__, True) 49 | argv.set("-hl", "--hash-list", "hashlist", "Prints list of all supported hashes and exits", None, hash_list(), True) 50 | # set arguments 51 | argv.set("-H", "--hash", "hash", "Select hash use -hl --hash-list for more info", GLOBAL["DEFAULTS"]["HASH"]) 52 | argv.set("-a", "--all", "all", "Calculate all hashes posible for a single file and output as json") 53 | argv.set("-s", "--string", "str", "hash a string/text", False) 54 | argv.set("-sp", "--strip-path", "spath", "Strips fullpath from results", GLOBAL["DEFAULTS"]["STRIP"]) 55 | argv.set("-c", "--check", "check", "Check checksum-file (sfv or standard)") 56 | argv.set("-o", "--output", "output", "Output data to file (in->do->out)") 57 | argv.set("-C", "--color", "color", "Enable colored output where it is supported", GLOBAL["DEFAULTS"]["COLORS"]) 58 | argv.set("-d", "--detect", "detect", "Enable hash detection for check and if you pass it and hash it will detect that", GLOBAL["DEFAULTS"]["DETECT"]) 59 | argv.set("-f", "--file", "file", "Hash single a file") 60 | argv.set("-q", "--quiet", "quiet", "Minimal output", GLOBAL["DEFAULTS"]["QUIET"]) 61 | argv.set("-bsd", "--bsd-tag", "bsd", "create a BSD-style checksum", False) 62 | argv.set("-m", "--memory-optimatation", "memopt", "Enables memory optimatation only useful for large files", GLOBAL["DEFAULTS"]["MEMOPT"]) 63 | argv.set("-sfv", "--simple-file-verification", "sfv", "Outputs in a sfv compatible format", False) 64 | 
def main_(args=None):
    """CLI entry point: parse arguments and run the appropriate command.

    Args:
        args: argument list to parse; defaults to sys.argv[1:].

    May raise SystemExit (via argv.run() / Exit()).
    """
    # switch args if needed
    if args is None:
        args = os.sys.argv[1:]

    # using argc module (supports python2); set commands and config
    argv = argc(args, False)
    config(argv)

    if len(args) == 0:
        # no arguments: show help
        argv.args["--help"] = True
    # run (can raise SystemExit)
    argv.run()

    # color escape codes; empty strings when color is off/unsupported
    RED = ""
    GREEN = ""
    YELLOW = ""
    RESET = ""

    # file list and search path
    in_files = list()
    my_path = os.getcwd()

    Config = {}
    # get hash from arguments; default is md5 for now
    Config["hash"] = argv.get("hash")
    # remaining options
    Config["detect?"] = argv.get("detect")              # to detect or not
    Config["check?"] = argv.get("check")                # to check or not
    Config["single"] = argv.get("file")                 # hash a single file (md5sum behavior)
    Config["all_single"] = argv.get("all")              # all algorithms for one file
    Config["colors?"] = argv.get("color", True)         # use colors (True for detect type)
    Config["quiet?"] = argv.get("quiet")                # silent output
    Config["strip-path?"] = argv.get("spath")           # strip fullpath
    Config["writeToFile"] = argv.get("output")          # output to file (in->do->out)
    Config["SimpleFileVerification"] = argv.get("sfv")  # sfv compatible format
    Config["BSDTag"] = argv.get("bsd")                  # BSD-style checksum
    Config["MemoryOptimatation"] = argv.get("memopt")   # memory optimatations
    Config["AddSize"] = argv.get("size")                # file size in bytes
    Config["String?"] = argv.get("str")                 # string/stdin mode

    # BUGFIX: colors must be configured BEFORE the hash-validity warnings
    # below, otherwise those warnings always printed uncolored.
    if supports_color() and Config["colors?"]:
        RED = GLOBAL["COLORS"]["RED"]
        GREEN = GLOBAL["COLORS"]["GREEN"]
        YELLOW = GLOBAL["COLORS"]["YELLOW"]
        RESET = GLOBAL["COLORS"]["RESET"]

    # use md5 by default
    hash_is = new(GLOBAL["DEFAULTS"]["HASH"])

    # check if the requested hash is valid
    if Config["hash"] in hashlib.algorithms_available or Config["hash"] in __algorithms__ \
            or Config["hash"] in GLOBAL["EXTRA"] or str(Config["hash"])[:5] == "shake":
        # warn when available but not guaranteed on every platform
        if Config["hash"] not in hashlib.algorithms_guaranteed and Config["hash"] in hashlib.algorithms_available:
            if not Config["quiet?"]:
                eprint(YELLOW + str(Config["hash"]), GLOBAL["MESSAGES"]["WORKS_ON"] + RESET)
        # and use the hash
        hash_is = new(Config["hash"])
    else:
        if Config["hash"] not in GLOBAL["BLANK"] and not Config["quiet?"]:
            eprint(RED + str(Config["hash"]), GLOBAL["MESSAGES"]["HASH_NOT"], RESET)

    # select output target
    use_out = False
    output = None

    if Config["writeToFile"] not in GLOBAL["BLANK"]:
        use_out = True
        output = open(fixpath(Config["writeToFile"]), GLOBAL["WRITE_MODE"])

    # last argument may be a path to search instead of cwd
    if len(args) >= 1:
        new_path = args[len(args) - 1].replace("\\", "/")
        if os.path.exists(new_path) and ("/" in new_path or new_path in (".", "..")):
            my_path = new_path

    # string mode (-s): hash a literal string, or stdin when no value given
    if Config["String?"] is not False:
        data = Config["String?"]
        if data is True:
            # read from stdin like md5sum
            data = os.sys.stdin.read()

        # keep terminal output tidy when the input lacks a trailing newline
        if not data.endswith("\n"):
            print("")

        if not isinstance(data, bytes):
            data = data.encode()

        hash_is.update(data)

        if use_out and output is not None:
            output.write(hash_is.hexdigest())
        else:
            print(hash_is.hexdigest())

    # all algorithms for one file, emitted as json
    elif Config["all_single"] not in GLOBAL["BLANK"]:
        if os.path.exists(Config["all_single"]):
            # close the file after reading (original leaked the handle)
            with open(Config["all_single"], "rb") as single_file:
                data = single_file.read()

            results = {}
            for algo in __algorithms__:
                results[algo] = new(algo, data).hexdigest()

            out = json.dumps(results, indent=4, sort_keys=True)

            if use_out and output is not None:
                output.write(out)
            else:
                print(out)
        else:
            eprint(RED + GLOBAL["MESSAGES"]["FILE_NOT"] + RESET)

    # hash-type detection mode
    elif Config["detect?"] not in GLOBAL["BLANK"]:
        hashes = detect(Config["detect?"], generate_data_set("Hallo", __algorithms__, new))
        if hashes is not None:
            for item in hashes.certain:
                print(GREEN + "Same results as", item + RESET)

            # separator between certain and maybe
            print("")

            for item in hashes.maybe:
                print(YELLOW + "Maybe", item + RESET)
        else:
            print(RED + "Not valid hash" + RESET)
        # exit when done
        Exit(0)

    # checksum verification mode
    elif Config["check?"] not in GLOBAL["BLANK"]:
        if os.path.exists(Config["check?"]):
            check(
                Config["check?"],
                hash_is,
                Config["colors?"],
                Config["quiet?"],
                Config["detect?"],
                Config["SimpleFileVerification"],
                Config["AddSize"],
                Config["BSDTag"]
            )
        else:
            eprint(RED + GLOBAL["MESSAGES"]["FILE_NOT"] + RESET)
            Exit(1)

    # single-file mode
    elif Config["single"] not in GLOBAL["BLANK"]:
        in_files = [Config["single"]]

    else:
        # walk the directory and hash everything found
        in_files = walk(my_path)

    if in_files:
        # longest filename, used for sfv column alignment
        longest_filename = max(in_files, key=len)

        for fname in in_files:
            try:
                current_hash = hashFile(fname, hash_is, Config["MemoryOptimatation"])
            except (FileNotFoundError, PermissionError) as Error:
                if isinstance(Error, FileNotFoundError):
                    eprint(RED + fname + ", " + GLOBAL["MESSAGES"]["FILE_NOT"] + RESET)
                elif isinstance(Error, PermissionError):
                    # BUGFIX: original concatenated the whole MESSAGES dict
                    # (str + dict -> TypeError); use a concrete message.
                    # NOTE(review): confirm the intended GLOBAL["MESSAGES"] key.
                    eprint(RED + fname + " " + GLOBAL["MESSAGES"].get("PERM_ERR", "Permission denied") + RESET)
                # skip unreadable/missing files
                continue

            print_str = current_hash
            size = ""

            if Config["AddSize"]:
                size = str(os.stat(fname).st_size)

            if Config["SimpleFileVerification"]:
                print_str = sfv_max(current_hash, fname, len(longest_filename), size + " ")
            elif Config["BSDTag"]:
                print_str = bsd_tag(current_hash, fname, hash_is.name) + " " + size
            else:
                print_str = current_hash + " " + str(size + " " + fname)

            # strip the fullpath if requested
            if Config["strip-path?"]:
                print_str = print_str.replace(my_path, ".")

            if use_out and output is not None:
                output.write(print_str + "\n")
            else:
                print(print_str)

        # Exit when done
        Exit(0)
    else:
        # NOTE(review): string/all/check modes leave in_files empty and thus
        # exit 1 here — preserved, as callers may depend on the exit code.
        Exit(1)

"""
Hashit __main__.py can be executed directly with python(3) -m hashit "commands"
and via snap
"""

def main(args=None):
    """
    Main function with error catching, can force-exit with os._exit(1).

    Calls main_() and catches any error while giving the user a "pretty"
    error message.
    """
    try:
        # execute main application
        main_(args)
    except Exception as error:
        # colors for the error text (only when the terminal supports them)
        RD = ""
        YL = ""
        RE = ""
        if supports_color():
            YL = GLOBAL["COLORS"]["YELLOW"]
            RD = GLOBAL["COLORS"]["RED"]
            RE = GLOBAL["COLORS"]["RESET"]

        if isinstance(error, TypeError):
            eprint(YL + GLOBAL["ERRORS"]["TypeError"] + RE)
        elif isinstance(error, FileNotFoundError):
            eprint(YL + GLOBAL["ERRORS"]["FileNotFoundError"] + RE)
        elif isinstance(error, OSError):
            eprint(YL + GLOBAL["ERRORS"]["OSError"]["windows"])
            eprint(GLOBAL["ERRORS"]["OSError"]["macos"])
            eprint(GLOBAL["ERRORS"]["OSError"]["linux"].format(', '.join(random.sample(LINUX_LIST, 10))))
            eprint(GLOBAL["ERRORS"]["OSError"]["END"] + RE)

        # finally print the raw error
        eprint(RD + str(error) + RE)

        os._exit(1)  # force exit

# if the program is being called
if __name__ == "__main__":
    main()  # then execute main function
class load_api_1:
    """Dummy hash object (fixed digest) for exercising hashit's load() API."""
    name = "hash1"

    def __init__(self, data=b''):
        self.data = data

    def update(self, data=b''):
        # accumulate raw bytes like a real hash object
        self.data = self.data + data

    def digest(self):
        # constant digest: only the plug-in wiring is under test
        return 1516152524156352132515252551426

    def hexdigest(self):
        return hex(self.digest())

class load_api_2:
    """Second dummy hash object, used by load_all() tests."""
    name = "hash2"

    def __init__(self, data=b''):
        self.data = data

    def update(self, data=b''):
        self.data = self.data + data

    def digest(self):
        return 1234567876543234567897654324562

    def hexdigest(self):
        return hex(self.digest())

class load_api_3:
    """Third dummy hash object, used by load_all() tests."""
    name = "hash3"

    def __init__(self, data=b''):
        self.data = data

    def update(self, data=b''):
        self.data = self.data + data

    def digest(self):
        return 5232348239489234823948203294829

    def hexdigest(self):
        return hex(self.digest())
hashit.hashIter(hashit.blockIter(open(FILE, "rb")), hashit.new("sha224")) 53 | h3 = hashit.hashIter(hashit.blockIter(open(FILE, "rb")), hashit.new("sha1")) 54 | h4 = hashit.hashIter(hashit.blockIter(open(FILE, "rb")), hashit.new("crc32")) 55 | 56 | # detect for three algorigthms 57 | cl1 = hashit.detect(h1, ds) 58 | cl2 = hashit.detect(h2, ds) 59 | cl3 = hashit.detect(h3, ds) 60 | cl4 = hashit.detect(h4, ds) 61 | 62 | # correct hash names 63 | correct1 = "md5" 64 | correct2 = "sha224" 65 | correct4 = "crc32" 66 | 67 | # md5 or md4 68 | self.assertTrue(correct1 in cl1.certain or correct1 in cl1.maybe) 69 | # only one left should be true 70 | self.assertTrue(correct2 in (cl2.certain if cl2.certain else cl2.maybe)) 71 | self.assertTrue(correct4 in cl4.certain) 72 | # and if it is to check hash with it 73 | self.assertEqual(hashit.new(correct2, b'Hallo').hexdigest(), hashit.new(cl2.certain[0] if cl2.certain else cl2.maybe[0], b'Hallo').hexdigest()) 74 | 75 | # for sha1 more options should be avaible 76 | self.assertTrue(len(cl3.certain) >= 1) 77 | # and work 78 | self.assertEqual(h3, hashit.hashIter(hashit.blockIter(open(FILE, "rb")), hashit.new(cl3.certain[0]))) 79 | 80 | def test_detect_format(self): 81 | # create shortcut 82 | df = lambda s: hashit.detect_format(s) 83 | 84 | bsdstr = hashit.BSD.format("12345678", "/path/to/file.txt", "crc32") 85 | sfvstr = hashit.SFV.format("12345678", "/path/to/file.txt", 18, "") 86 | nonata = "{} {}".format("12345678", "/path/to/file.txt") 87 | 88 | self.assertEqual(df(bsdstr), "bsd") 89 | self.assertEqual(df(sfvstr), "sfv") 90 | self.assertEqual(df(nonata), "N/A") 91 | 92 | def test_multi(self): 93 | # test all hashing functions 94 | algo = "md5" 95 | 96 | h1 = hashit.hashFile(FILE, hashit.new(algo), True) 97 | h2 = hashit.hashFile(FILE, hashit.new(algo)) 98 | h3 = hashit.hashIter(hashit.blockIter(open(FILE, "rb")), hashit.new(algo)) 99 | 100 | # just checking 101 | d = hashit.detect(hashit.hashFile(FILE, hashit.new("sha224"), 
False), hashit.generate_data_set("HALLO", hashit.__algorithms__, hashit.new)) 102 | self.assertEqual(d.certain[0] if d.certain else d.maybe[0], "sha224") 103 | 104 | self.assertTrue(h1 == h2 == h3 == FILE_SUM) 105 | 106 | 107 | def test_systemrandom(self): 108 | # test system random 109 | generate = lambda k=2: ''.join([random.SystemRandom().choice(string.hexdigits) for i in range(k)]) 110 | 111 | all_gen = list() 112 | c_str = generate() 113 | 114 | while not c_str in all_gen: 115 | all_gen.append(c_str) 116 | c_str = generate() 117 | 118 | print(c_str, "in list (COLLISION FOUND) after", len(all_gen), "Tries, which translates into", int(c_str, 16)) 119 | self.assertTrue(c_str in all_gen) 120 | 121 | def test_crc32(self): 122 | """ 123 | crc = lambda d=b'': hashit.new("crc32", d).hexdigest() 124 | done = {} 125 | 126 | for n in range(100000000): 127 | n = str(n) 128 | h = crc(n.encode()) 129 | if h in done: 130 | print("ERROR collision found in CRC32", h, n, "and", done[h]) 131 | break 132 | try: 133 | done[h] = n 134 | except MemoryError: 135 | done.clear() 136 | """ 137 | 138 | def test_format(self): 139 | s = hashit.BSD.format(FILE_SUM, FILE, "md5") 140 | self.assertEqual(s, "md5 ({}) = {}".format(FILE, FILE_SUM)) 141 | self.assertEqual(hashit.BSD.parser(s), ["md5", FILE, FILE_SUM]) 142 | 143 | # check the sfv parser 144 | self.assertEqual(hashit.SFV.format("abc", "def", 4), "def abc") 145 | 146 | 147 | def test_other(self): 148 | self.assertIsInstance(hashit.supports_color(), bool) 149 | 150 | with DisablePrint(): 151 | with self.assertRaises(SystemExit): 152 | hashit.__main__.main(["--help"]) 153 | 154 | with self.assertRaises(SystemExit): 155 | hashit.__main__.main(["--check", "file_name"]) 156 | 157 | # just checking 158 | self.assertEqual(hashit.__author__, "Javad Shafique") 159 | 160 | def test_exclude(self): 161 | list_with_paths = ["/home/file.sh", "/home/compact.min.js", "/only/file/left.py", "/only/path/left"] 162 | excludes = [".sh", ".min."] 163 | o 
class TestLoad(unittest.TestCase):
    """Exercise hashit's plug-in loaders with the dummy hash classes."""

    def test_load(self):
        """A single registered algorithm is reachable via hashit.new()."""
        hashit.load(load_api_1)
        registered = hashit.new("hash1", b'data')
        self.assertEqual(hex(registered.digest()), registered.hexdigest())

    def test_load_all(self):
        """Several algorithms can be registered with one load_all() call."""
        hashit.load_all([load_api_2, load_api_3])
        second = hashit.new("hash2", b'data')
        third = hashit.new("hash3", b'data')
        self.assertEqual(hex(second.digest()), second.hexdigest())
        self.assertEqual(hex(third.digest()), third.hexdigest())