├── to_json.py
├── LICENSE
├── README.md
├── app.py
└── .gitignore

--------------------------------------------------------------------------------
/to_json.py:
--------------------------------------------------------------------------------

import pandas as pd


def convert_to_json():
    # Read the scraped dictionary data, keeping only the columns we need.
    kamusi = pd.read_csv(
        "words.csv", usecols=["Index", "Word", "Meaning", "Synonyms", "Conjugation"]
    )
    # Use the "Index" column as the key so the JSON maps index -> entry.
    kamusi = kamusi.set_index("Index")
    kamusi.to_json("kamusi.json", orient="index")


if __name__ == "__main__":
    convert_to_json()

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------

MIT License

Copyright (c) 2021 Jordan Kalebu

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------

# [kamusi](https://github.com/Kalebu/kamusi)

JSON and CSV data for a Swahili dictionary with over 16,600 words.

This repo contains data from a **Swahili dictionary** with 16,683 words together with their meanings, synonyms, and conjugations.

This repo couldn't exist without [Kamusi-Mobile](https://github.com/jacksiro254/Kamusi-Mobile/). Thanks to the great effort by [Jack Siro](https://github.com/jacksiro254).

## So how was this data generated?

This data is the result of web scraping [kamusi](http://kamusi.appsmata.com) with the help of Selenium and BeautifulSoup.

There are two scripts: **app.py** does the scraping, while **to_json.py** converts the scraped CSV data into JSON that others can easily use (see the *Loading the data* section below for a quick example of reading the JSON).

## Gathering data

I'm currently gathering and organizing Swahili data, mainly for NLP purposes. If you know of any other places where we can scrape useful Swahili data, please raise an issue.

Looking forward to seeing what you're going to build with it.

## Give it a star

Was this useful to you? Then give it a star so that more people can make use of it.
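
## Loading the data

If you just want to use the data, here is a minimal sketch of loading the `kamusi.json` file produced by `to_json.py`; the word `habari` is only an illustrative lookup, and the exact field contents may differ from entry to entry.

```python
import json

# kamusi.json is written by to_json.py with orient="index": a mapping of
# Index -> {"Word": ..., "Meaning": ..., "Synonyms": ..., "Conjugation": ...}.
with open("kamusi.json", encoding="utf-8") as f:
    kamusi = json.load(f)

# Build a simple word -> entry lookup table.
by_word = {entry["Word"]: entry for entry in kamusi.values()}

entry = by_word.get("habari")  # hypothetical example lookup
if entry:
    print(entry["Meaning"])
    print(entry["Synonyms"])
```

The same table can also be read back into pandas with `pd.read_json("kamusi.json", orient="index")` if you prefer working with a DataFrame.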

## Credits

All credits to:

- [kalebu](https://github.com/kalebu)
- [Jack Siro](https://github.com/jacksiro254)
- and all the contributors

--------------------------------------------------------------------------------
/app.py:
--------------------------------------------------------------------------------

import time

import pandas as pd
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.common.by import By


driver = webdriver.Chrome()
base_url = "http://kamusi.appsmata.com"


def login(email, password):
    # Log in to the website.
    driver.get(f"{base_url}/login")
    time.sleep(5)
    driver.find_element(
        By.XPATH, "/html/body/div/div[2]/div/div/div[1]/div/form/div[1]/div/input"
    ).send_keys(email)
    driver.find_element(
        By.XPATH, "/html/body/div/div[2]/div/div/div[1]/div/form/div[2]/div/input"
    ).send_keys(password)
    driver.find_element(
        By.XPATH, "/html/body/div/div[2]/div/div/div[1]/div/form/div[4]/button"
    ).click()
    print("Logged in")
    return driver


def get_soup(url):
    # Get the parsed HTML of the page.
    driver.get(url)
    html = driver.page_source
    soup = BeautifulSoup(html, "html.parser")
    return soup


def generate_all_urls(max_page=833):
    # Generate the URLs of all paginated word listings.
    urls = []
    for i in range(1, max_page + 1):
        urls.append(f"{base_url}/words?page={i}")
    return urls


def scrape_page(url):
    # Scrape the words table on a single page into a DataFrame.
    soup = get_soup(url)
    table = soup.find_all("table")
    df = pd.read_html(str(table))[0]
    return df


def scrape_all_urls(urls):
    # Scrape every page and collect the resulting DataFrames.
    all_df = []
    for url in urls:
        df = scrape_page(url)
        all_df.append(df)
    return all_df


if __name__ == "__main__":
    # Log in to the website
    driver = login("email", "password")
    # Generate the URLs of all pages
    urls = generate_all_urls()
    # Scrape all the URLs
    all_df = scrape_all_urls(urls)
    # Combine all the dataframes
    df = pd.concat(all_df)
    # Save the dataframe
    df.to_csv("words.csv", index=False)

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

--------------------------------------------------------------------------------