├── .gitignore ├── README.md └── enpass-to-keepass.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | # enpass-to-keepass 2 | Convert an Enpass csv export so it can be imported to a KeePass database using KeePassXC 3 | 4 | ## Enpass Version 5 | This version of the script converts an Enpass JSON export (Enpass version >=6). Since Enpass now uses folders as "labels" and can have multiple, the first item in the folder list is chosen as the Keepass "group". 6 | 7 | **If you need to convert an older (Enpass < v6) CSV export, use the [`v1.0`](https://github.com/jsphpl/enpass-to-keepass/releases/tag/v1.0) tag of this git repository.** 8 | 9 | ## Background 10 | Read this blog article for some background on this tool: [https://jsph.pl/migrating-from-enpass-to-keepass/](https://jsph.pl/migrating-from-enpass-to-keepass/) 11 | 12 | ## Usage 13 | ``` 14 | $ ./enpass-to-keepass.py --help 15 | usage: enpass-to-keepass.py [-h] input_file output_file 16 | 17 | Convert an Enpass export file so it can be imported to a KeePass database using KeePassXC 18 | 19 | Documentation & Issues: https://github.com/jsphpl/enpass-to-keepass 20 | 21 | License: Public Domain 22 | Author: Joseph Paul 23 | 24 | positional arguments: 25 | input_file Path to Enpass JSON export file 26 | output_file Path to output file (CSV) 27 | 28 | optional arguments: 29 | -h, --help show this help message and exit 30 | ``` 31 | 32 | ## License 33 | Public Domain 34 | -------------------------------------------------------------------------------- /enpass-to-keepass.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """Convert an Enpass export file so it can be imported to a KeePass database using KeePassXC 4 | 5 | Documentation & Issues: https://github.com/jsphpl/enpass-to-keepass 6 | 7 | License: Public Domain 8 | Author: Joseph Paul 9 | """ 10 | 11 | import argparse 12 | import csv 13 | import json 14 | 15 | DIRECTLY_MAPPED_FIELDS = 
# Enpass field aliases (after FIELD_ALIASES normalisation) that map 1:1
# onto a CSV column of the same name.
DIRECTLY_MAPPED_FIELDS = ["url", "username", "password", "totp"]

# Column order of the generated CSV; KeePassXC maps these on import.
CSV_HEADERS = ["title", "url", "username", "password", "group", "updated_at", "notes", "totp", "created_at"]

# Normalise common (including German) Enpass field labels to canonical names.
FIELD_ALIASES = {
    "website": "url",
    "e-mail": "email",
    "login": "username",
    "benutzername": "username",
    "kennwort": "password",
    "one-time code": "totp",
}

# Labels of fields that had no direct mapping and were folded into the
# "notes" column; reported at the end so the user can verify nothing was lost.
extra_keys = set()


def _parse_timestamp(value):
    """Return *value* coerced to an int Unix timestamp, or None if it is
    missing or not a valid integer."""
    try:
        return int(value)
    except (TypeError, ValueError):
        return None


def process_item(item, folders):
    """Convert a single Enpass item into a flat row dict keyed by CSV_HEADERS.

    Args:
        item: One entry from the Enpass JSON export's "items" list.
        folders: The export's "folders" list (may be None or empty).

    Returns:
        dict whose keys are a subset of CSV_HEADERS. Fields without a direct
        mapping are joined into the "notes" value and their labels are
        recorded in the module-level ``extra_keys`` set.
    """
    print(f"Reading item: {item['title']}")
    result = {"title": item["title"]}

    # Enpass >= 6 allows multiple folders per item; the first one becomes
    # the KeePass group.
    if folders and item.get("folders"):
        first_folder_uuid = item["folders"][0]
        for folder in folders:
            if folder["uuid"] == first_folder_uuid:
                result["group"] = folder["title"]
                break

    email = None
    username = None

    # Robustness fix: use .get() so items lacking these keys don't raise
    # KeyError (the original try blocks only caught TypeError/ValueError).
    updated_at = _parse_timestamp(item.get("updated_at"))
    created_at = _parse_timestamp(item.get("createdAt"))

    extra_fields = {}
    if item.get("note"):
        extra_fields["Note"] = item["note"]

    for field in item.get("fields", []):
        field_name = field.get("label", "").lower()
        field_alias = FIELD_ALIASES.get(field_name, field_name)

        # Track the most recent modification time across the item itself
        # and all of its fields.
        field_updated_at = _parse_timestamp(field.get("updated_at"))
        if field_updated_at is not None and (updated_at is None or field_updated_at > updated_at):
            updated_at = field_updated_at

        if field_alias in DIRECTLY_MAPPED_FIELDS:
            result[field_alias] = field["value"]
            if field_alias == "username":
                username = field["value"]
        else:
            if field_alias == "email":
                # Deferred: becomes the username if no explicit username
                # field exists, otherwise lands in the notes.
                email = field["value"]
                continue

            if len(str(field["value"])) > 0:
                extra_fields[field["label"]] = field["value"]
                extra_keys.add(field["label"])

    if email:
        if username:
            extra_fields["E-mail"] = email
        else:
            result["username"] = email

    if updated_at is not None:
        result["updated_at"] = updated_at

    if created_at is not None:
        result["created_at"] = created_at

    result["notes"] = "\n".join(f"{key}: {value}" for key, value in extra_fields.items())

    return result


def convert_enpass_to_keypass(input_file, output_file):
    """Read an Enpass JSON export and write a KeePassXC-importable CSV.

    Args:
        input_file: Open text file handle containing the Enpass JSON export.
        output_file: Open text file handle the CSV rows are written to.
    """
    with input_file as json_file:
        data = json.load(json_file)

    if not data:
        # Bug fix: the original line was missing the f-string prefix (it
        # printed the braces literally) and referenced `args`, which is not
        # in scope inside this function.
        print(f"No JSON data loaded from {input_file.name}")
        return

    folders = data.get("folders")
    # Guard against an export without an "items" key (was a TypeError).
    items = data.get("items") or []

    results = [process_item(item, folders) for item in items]

    if not results:
        print("No rows to write (empty input file?)")
        return

    print(f"{len(results)} rows processed")
    print(f"Writing to {output_file.name}")
    if extra_keys:
        print(f"Found extra keys: {', '.join(extra_keys)}")

    writer = csv.DictWriter(output_file, CSV_HEADERS)
    writer.writeheader()
    writer.writerows(results)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawTextHelpFormatter
    )
    parser.add_argument(
        "input_file",
        type=argparse.FileType("r"),
        help="Path to Enpass JSON export file",
    )
    parser.add_argument(
        "output_file", type=argparse.FileType("w"), help="Path to output file (CSV)"
    )
    args = parser.parse_args()
    convert_enpass_to_keypass(args.input_file, args.output_file)