├── CPNATAL2.pdf ├── models └── mlp.save ├── dataset └── example.png ├── drivers └── linux │ ├── geckodriver │ └── chromedriver ├── README.md ├── .gitignore ├── captcha.yml ├── notebooks ├── Breaking captchas - Neural Networks.ipynb └── Breaking captchas - Computer Vision.ipynb └── LICENSE /CPNATAL2.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cs-ufrn/breaking-captchas/master/CPNATAL2.pdf -------------------------------------------------------------------------------- /models/mlp.save: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cs-ufrn/breaking-captchas/master/models/mlp.save -------------------------------------------------------------------------------- /dataset/example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cs-ufrn/breaking-captchas/master/dataset/example.png -------------------------------------------------------------------------------- /drivers/linux/geckodriver: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cs-ufrn/breaking-captchas/master/drivers/linux/geckodriver -------------------------------------------------------------------------------- /drivers/linux/chromedriver: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cs-ufrn/breaking-captchas/master/drivers/linux/chromedriver -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Breaking captchas 2 | 3 | Repository destined to workshop of **Breaking captchas** in CPNatal2. So, here is shown the basic use of OpenCV, Selenium and scikit-learn to navigate on web and obtain data from websites that use CAPTCHAs as access control. 
4 | 5 | ## Authors 6 | 7 | [![Vinicius Campos](https://avatars.githubusercontent.com/vinihcampos?s=100)
Vinicius Campos](https://github.com/vinihcampos) | [![Vitor Greati](https://avatars.githubusercontent.com/greati?s=100)
Vitor Greati](https://github.com/greati) 8 | ------------ | ------------- 9 | 10 | ## License 11 | 12 | This project is licensed under the Apache License 2.0 - see the [LICENSE.md](LICENSE) file for details. 13 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | 
.ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | -------------------------------------------------------------------------------- /captcha.yml: -------------------------------------------------------------------------------- 1 | name: captcha 2 | channels: 3 | - defaults 4 | dependencies: 5 | - _libgcc_mutex=0.1=main 6 | - asn1crypto=0.24.0=py37_0 7 | - attrs=19.1.0=py37_1 8 | - backcall=0.1.0=py37_0 9 | - blas=1.0=mkl 10 | - bleach=3.1.0=py37_0 11 | - bzip2=1.0.8=h7b6447c_0 12 | - ca-certificates=2019.5.15=0 13 | - cairo=1.14.12=h8948797_3 14 | - certifi=2019.6.16=py37_1 15 | - cffi=1.12.3=py37h2e261b9_0 16 | - chardet=3.0.4=py37_1003 17 | - cryptography=2.7=py37h1ba5d50_0 18 | - cycler=0.10.0=py37_0 19 | - dbus=1.13.6=h746ee38_0 20 | - decorator=4.4.0=py37_1 21 | - defusedxml=0.6.0=py_0 22 | - entrypoints=0.3=py37_0 23 | - expat=2.2.6=he6710b0_0 24 | - ffmpeg=4.0=hcdf2ecd_0 25 | - fontconfig=2.13.0=h9420a91_0 26 | - freeglut=3.0.0=hf484d3e_5 27 | - freetype=2.9.1=h8a8886c_1 28 | - glib=2.56.2=hd408876_0 29 | - gmp=6.1.2=h6c8ec71_1 30 | - graphite2=1.3.13=h23475e2_0 31 | - gst-plugins-base=1.14.0=hbbd80ab_1 32 | - gstreamer=1.14.0=hb453b48_1 33 | - harfbuzz=1.8.8=hffaf4a1_0 34 | - hdf5=1.10.2=hba1933b_1 35 | - icu=58.2=h9c2bf20_1 36 | - idna=2.8=py37_0 37 | - intel-openmp=2019.4=243 38 | - ipykernel=5.1.1=py37h39e3cac_0 39 | - ipython=7.7.0=py37h39e3cac_0 40 | - ipython_genutils=0.2.0=py37_0 41 | - jasper=2.0.14=h07fcdf6_1 42 | - jedi=0.13.3=py37_0 43 | - jinja2=2.10.1=py37_0 44 | - joblib=0.13.2=py37_0 45 | - jpeg=9b=h024ee3a_2 46 | - json5=0.8.4=py_0 47 | - jsonschema=3.0.1=py37_0 48 | - jupyter_client=5.3.1=py_0 49 | - jupyter_core=4.5.0=py_0 50 | - jupyterlab=1.0.2=py37hf63ae98_0 51 | - jupyterlab_server=1.0.0=py_0 52 | - kiwisolver=1.1.0=py37he6710b0_0 53 | - libedit=3.1.20181209=hc058e9b_0 54 | - libffi=3.2.1=hd88cf55_4 55 | - libgcc-ng=9.1.0=hdf63c60_0 56 | - libgfortran-ng=7.3.0=hdf63c60_0 57 | - 
libglu=9.0.0=hf484d3e_1 58 | - libopencv=3.4.2=hb342d67_1 59 | - libopus=1.3=h7b6447c_0 60 | - libpng=1.6.37=hbc83047_0 61 | - libsodium=1.0.16=h1bed415_0 62 | - libstdcxx-ng=9.1.0=hdf63c60_0 63 | - libtiff=4.0.10=h2733197_2 64 | - libuuid=1.0.3=h1bed415_2 65 | - libvpx=1.7.0=h439df22_0 66 | - libxcb=1.13=h1bed415_1 67 | - libxml2=2.9.9=hea5a465_1 68 | - markupsafe=1.1.1=py37h7b6447c_0 69 | - matplotlib=3.1.0=py37h5429711_0 70 | - mistune=0.8.4=py37h7b6447c_0 71 | - mkl=2019.4=243 72 | - mkl-service=2.0.2=py37h7b6447c_0 73 | - mkl_fft=1.0.12=py37ha843d7b_0 74 | - mkl_random=1.0.2=py37hd81dba3_0 75 | - nbconvert=5.5.0=py_0 76 | - nbformat=4.4.0=py37_0 77 | - ncurses=6.1=he6710b0_1 78 | - notebook=6.0.0=py37_0 79 | - numpy=1.16.4=py37h7e9f1db_0 80 | - numpy-base=1.16.4=py37hde5b4d6_0 81 | - opencv=3.4.2=py37h6fd60c2_1 82 | - openssl=1.1.1c=h7b6447c_1 83 | - pandoc=2.2.3.2=0 84 | - pandocfilters=1.4.2=py37_1 85 | - parso=0.5.0=py_0 86 | - pcre=8.43=he6710b0_0 87 | - pexpect=4.7.0=py37_0 88 | - pickleshare=0.7.5=py37_0 89 | - pip=19.1.1=py37_0 90 | - pixman=0.38.0=h7b6447c_0 91 | - prometheus_client=0.7.1=py_0 92 | - prompt_toolkit=2.0.9=py37_0 93 | - ptyprocess=0.6.0=py37_0 94 | - py-opencv=3.4.2=py37hb342d67_1 95 | - pycparser=2.19=py37_0 96 | - pygments=2.4.2=py_0 97 | - pyopenssl=19.0.0=py37_0 98 | - pyparsing=2.4.0=py_0 99 | - pyqt=5.9.2=py37h05f1152_2 100 | - pyrsistent=0.14.11=py37h7b6447c_0 101 | - pysocks=1.7.0=py37_0 102 | - python=3.7.3=h0371630_0 103 | - python-dateutil=2.8.0=py37_0 104 | - pytz=2019.1=py_0 105 | - pyzmq=18.0.0=py37he6710b0_0 106 | - qt=5.9.7=h5867ecd_1 107 | - readline=7.0=h7b6447c_5 108 | - requests=2.22.0=py37_0 109 | - scikit-learn=0.21.2=py37hd81dba3_0 110 | - scipy=1.3.0=py37h7c811a0_0 111 | - selenium=3.141.0=py37h7b6447c_0 112 | - send2trash=1.5.0=py37_0 113 | - setuptools=41.0.1=py37_0 114 | - sip=4.19.8=py37hf484d3e_0 115 | - six=1.12.0=py37_0 116 | - sqlite=3.29.0=h7b6447c_0 117 | - terminado=0.8.2=py37_0 118 | - 
testpath=0.4.2=py37_0 119 | - tk=8.6.8=hbc83047_0 120 | - tornado=6.0.3=py37h7b6447c_0 121 | - traitlets=4.3.2=py37_0 122 | - urllib3=1.24.2=py37_0 123 | - wcwidth=0.1.7=py37_0 124 | - webencodings=0.5.1=py37_1 125 | - wheel=0.33.4=py37_0 126 | - xz=5.2.4=h14c3975_4 127 | - zeromq=4.3.1=he6710b0_3 128 | - zlib=1.2.11=h7b6447c_3 129 | - zstd=1.3.7=h0b5b093_0 130 | prefix: /home/vinihcampos/miniconda3/envs/captcha 131 | 132 | -------------------------------------------------------------------------------- /notebooks/Breaking captchas - Neural Networks.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"Breaking captchas - Neural Networks.ipynb","version":"0.3.2","provenance":[],"collapsed_sections":[]},"kernelspec":{"name":"python3","display_name":"Python 3"}},"cells":[{"cell_type":"markdown","metadata":{"id":"onQpC3qjh4uA","colab_type":"text"},"source":["# OCR com Rede Neural\n","\n","O problema de reconhecer caracteres expressos em imagens é conhecido como\n","Optical Character Recognition (OCR).\n","\n","Mostraremos aqui como treinar uma Rede Neural em Python para\n","reconhecer dígitos, para ser aplicada no nosso problema\n","de quebrar CAPTCHAS vulneráveis!"]},{"cell_type":"markdown","metadata":{"id":"-7bisj0Vid4G","colab_type":"text"},"source":["## Dependências"]},{"cell_type":"code","metadata":{"id":"U_ynPzVPcqM3","colab_type":"code","colab":{}},"source":["# Processamento de dados\n","import pandas as pd\n","# Processamento numérico\n","import numpy as np\n","# Visualização\n","import matplotlib.pyplot as plt\n","# Aprendizado de máquina\n","from sklearn.model_selection import train_test_split\n","from sklearn.neural_network import MLPClassifier\n","from sklearn.externals import joblib\n","from sklearn.metrics import precision_score\n","from sklearn.metrics import recall_score\n","from sklearn.metrics import f1_score\n","from sklearn.metrics import 
accuracy_score\n","# Processamento de imagens\n","import cv2"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"EPSDxYB_jP9b","colab_type":"text"},"source":["## Coleta e engenharia de *features*"]},{"cell_type":"markdown","metadata":{"id":"CD6qiOwfjgxV","colab_type":"text"},"source":["Para treinar a rede neural, precisamos de **exemplos**. No nosso\n","caso, são imagens de dígitos.\n","\n","Vamos assumir aqui que temos um banco de dados\n","com várias imagens contendo dígitos, e veremos\n","como extraí-los e prepará-los para o treino da rede,\n","usando uma imagem de exemplo.\n","\n","Primeiro, ganharemos acesso à imagem, armazenada no drive:"]},{"cell_type":"code","metadata":{"id":"76dSs6ZzdF8N","colab_type":"code","colab":{}},"source":["from google.colab import drive\n","drive.mount('/content/drive')\n","\n","# Definindo o caminho dos arquivos para o nosso projeto.\n","root_path = \"drive/My Drive/campus-party-captchas\""],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"L7KJk82YkO8a","colab_type":"text"},"source":["Agora, vamos carregar essa imagem, e exibi-la para vermos o resultado:"]},{"cell_type":"code","metadata":{"id":"trJ48qdueuom","colab_type":"code","colab":{}},"source":["# Criando o dataset\n","example = cv2.imread(root_path + '/dataset/example.png', 0)\n","\n","plt.imshow(example, cmap='gray')\n","plt.show"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"vv_KjrJDkePr","colab_type":"text"},"source":["Vamos agora limpar essa imagem, e binarizá-la, para isolar os dígitos:"]},{"cell_type":"code","metadata":{"id":"c-7PnfsMfMMK","colab_type":"code","colab":{}},"source":["binary = cv2.adaptiveThreshold(example,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY_INV,75,10)\n","plt.figure()\n","plt.imshow(binary, cmap='gray')\n","plt.show\n","\n","binary = cv2.medianBlur(binary, 3)\n","binary = cv2.medianBlur(binary, 3)\n","binary = cv2.medianBlur(binary, 3)\n","binary = 
cv2.medianBlur(binary, 3)\n","\n","plt.figure()\n","plt.imshow(binary, cmap='gray')\n","plt.show\n"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"WkaOjA8KknxE","colab_type":"text"},"source":["Agora, repetimos o mesmo processamento para\n","segmentar os dígitos, desde a busca de contornos, \n","ordenação e correção de inclinação.\n","\n","Ao final, produzimos os vetores de características\n","que representam os dígitos."]},{"cell_type":"code","metadata":{"id":"43JplH5ojMKN","colab_type":"code","colab":{}},"source":["SZ_W = 15\n","SZ_H = 20"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"hQVw-ITQfpvP","colab_type":"code","colab":{}},"source":["_, contours, _ = cv2.findContours(binary, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) # Obtendo os contornos\n","\n","# Definição da função que retorna o primeiro ponto a esquerda do contorno\n","def top_left(c):\n"," x,y,_,_ = cv2.boundingRect(c)\n"," return (x,y)\n","\n","contours.sort(key=len,reverse=True) # Realizando a ordenação pelo tamanho (de maneira decrescente)\n","contours = contours[:min(6, len(contours))] # Obtendo os seis primeiros contornos\n","contours.sort(key=top_left) # Sorteando os 6 (ou menos) contornos pela posição\n","\n","# Definindo a função que corrige a orientação dos dígitos\n","def deskew(img):\n"," m = cv2.moments(img) \n"," if abs(m['mu02']) < 1e-2:\n"," return img.copy() \n"," skew = m['mu11']/m['mu02'] \n"," M = np.float32([[1, skew, -0.5*SZ_W*skew], [0, 1, 0]]) \n"," img = cv2.warpAffine(img, M, (SZ_W, SZ_H), flags=cv2.WARP_INVERSE_MAP | cv2.INTER_CUBIC)\n"," return img\n","\n","fig_chars = plt.figure()\n","number_images = []\n","\n","# Itera sobre os contornos e aplica a correção\n","for c in range(len(contours)):\n"," x,y,w,h = cv2.boundingRect(contours[c])\n"," crop = np.zeros(binary.shape, dtype=np.uint8)\n"," cv2.drawContours(crop, contours, c, 255, -1)\n"," crop = cv2.bitwise_and(crop, binary)\n"," crop = crop[y:y+h, 
x:x+w]\n","\n"," crop = cv2.resize(crop, (SZ_W, SZ_H), interpolation = cv2.INTER_CUBIC) # Redimensionando o contorno para (15x20)\n"," skewed = deskew(crop) # Aplicação da correção\n"," fig_chars.add_subplot(1, 6, c+1)\n"," plt.imshow(skewed, cmap = 'gray')\n"," number_images.append( skewed.flatten().tolist() ) # Adiciona a lista de números os dígitos selecionados."],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"x5RF32qylO5b","colab_type":"text"},"source":["Vamos ver com mais detalhes, para o primeiro dígito:"]},{"cell_type":"code","metadata":{"id":"2CCj1dMKgTR_","colab_type":"code","colab":{}},"source":["# Mostrando os pixels do primeiro dígito\n","print( np.array(number_images).shape )\n","\n","print( np.array(number_images)[0,:])"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Z_2WdtRVoCCS","colab_type":"text"},"source":["Agora, preparamos o dataset, incluindo a resposta (target) esperada para cada vetor:"]},{"cell_type":"code","metadata":{"id":"HW6sTYc7g7AP","colab_type":"code","colab":{}},"source":["df = pd.DataFrame( np.array(number_images) )\n","df['target'] = [5,4,1,3,3,0]\n","df.head()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"XhhBTuKKohwJ","colab_type":"text"},"source":["## Treinamento da Rede Neural\n","\n","Vimos como geramos os vetores de características\n","para um digito presente em uma imagem. 
\n","\n","Primeiramente, carregamos o datase contendo todos os dígitos\n","que coletamos e transformamos:"]},{"cell_type":"code","metadata":{"id":"A4lyB1nvdIJ_","colab_type":"code","colab":{}},"source":["dataset_path = root_path + '/dataset/dataset.csv'\n","dataset = pd.read_csv(dataset_path, index_col=0)\n","dataset.head()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"M24c06QrhxZe","colab_type":"text"},"source":["Vamos explorar rapidamente esse dataset, verificando quantos exemplos de cada dígito nós temos:"]},{"cell_type":"code","metadata":{"id":"88EkxTY8dhK7","colab_type":"code","colab":{}},"source":["# Obtendo informações sobre o dataset\n","dataset.target.value_counts()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"N1CIudP5pAvd","colab_type":"text"},"source":["Criaremos agora conjuntos de treino e teste. Com o primeiro, treinaremos a rede neural, e, com o segundo, avaliaremos a capacidade dela de reconhecer os dígitos:"]},{"cell_type":"code","metadata":{"id":"hP-ha5T1djF7","colab_type":"code","colab":{}},"source":["# Separando os dados\n","train_data, test_data, train_target, test_target = train_test_split(dataset.iloc[:,:-1], dataset['target'], test_size=0.2)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"6gT69OTqd4iI","colab_type":"code","colab":{}},"source":["print('[INFO] train: ', len(train_data))\n","print(train_target.value_counts() / len(train_target))\n","print('[INFO] test: ', len(test_data))\n","print(test_target.value_counts() / len(test_target))"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"FHo6JcNupQTV","colab_type":"text"},"source":["Finalmente, vamos criar a nossa Rede Neural e treiná-la utilizando o dataset de treino."]},{"cell_type":"code","metadata":{"id":"BJ5O3cFGd61T","colab_type":"code","colab":{}},"source":["mlp = MLPClassifier(max_iter=1000)\n","mlp.fit(train_data, 
train_target)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"N0rRu6VtpV5o","colab_type":"text"},"source":["Por fim, avaliaremos o quão bom está o modelo, usando o dataset de teste e algumas métricas:"]},{"cell_type":"code","metadata":{"id":"eFON2rEZd--Q","colab_type":"code","colab":{}},"source":["output = mlp.predict(test_data)\n","\n","print('[INFO] scores')\n","print('[accuracy score]:', accuracy_score(np.array(test_target), output))"],"execution_count":0,"outputs":[]}]} -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /notebooks/Breaking captchas - Computer Vision.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"kernelspec":{"display_name":"Python 3","language":"python","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.7.3"},"colab":{"name":"Breaking captchas - Computer Vision.ipynb","version":"0.3.2","provenance":[],"collapsed_sections":[]}},"cells":[{"cell_type":"markdown","metadata":{"id":"GKTuV427QW_r","colab_type":"text"},"source":["# Quebrando captchas vulneráveis - Visão computacional\n","\n","---\n","\n","Nesse notebook veremos os princípios utilizados e os passos necessários para *quebrar CAPTCHAs*, desde o processamento de imagens até extração de informações . 
Para isso, utilizaremos as bibliotecas: Selenium para extrair dados de páginas web, OpenCV para o processamento de imagens e scikit-learn para converter os dados das imagens em informações importantes, que nesse caso é o conteúdo dos *CAPTCHAs*."]},{"cell_type":"markdown","metadata":{"id":"wlUpUewxY_LY","colab_type":"text"},"source":["
\"computer_vision\" \"computer_vision\"
Fonte: https://towardsdatascience.com/how-to-do-everything-in-computer-vision-2b442c469928 e https://tulip.co/blog/tulip/give-your-apps-the-power-of-vision/\n","
"]},{"cell_type":"markdown","metadata":{"id":"aYpYTUH0StYX","colab_type":"text"},"source":["## Dependências\n","\n","Nesse momento iremos instalar e importar as dependências necessárias para executar nosso notebook."]},{"cell_type":"code","metadata":{"id":"Xd_FdrjyMh7D","colab_type":"code","colab":{}},"source":["# Vamos instalar algumas bibliotecas e configurar algumas informações para o google colab. \n","\n","!pip install selenium\n","!apt-get update\n","!apt install chromium-chromedriver\n","!cp /usr/lib/chromium-browser/chromedriver /usr/bin\n","import sys\n","sys.path.insert(0,'/usr/lib/chromium-browser/chromedriver')\n","\n","# Importando as bibliotecas necessárias para o projeto\n","from selenium import webdriver # Para realizar o webscraping\n","from io import BytesIO # Para conversão das imagens na página para OpenCV\n","import matplotlib.pyplot as plt # Para mostrar os resultados dos processamentos\n","import cv2, numpy as np # Para manipular e realizar o processamento de imagens\n","import random # Para gerar número aleatórios\n","from joblib import load # Para importar o modelo de rede neural\n","\n","SZ_W = 15\n","SZ_H = 20\n","\n","chrome_options = webdriver.ChromeOptions()\n","chrome_options.add_argument('--headless')\n","chrome_options.add_argument('--no-sandbox')\n","chrome_options.add_argument('--disable-dev-shm-usage')"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"kRp6x86lNWgT","colab_type":"code","colab":{}},"source":["# Montando o drive para poder acessar nossos arquivos.\n","\n","from google.colab import drive\n","drive.mount('/content/drive')\n","\n","# Definindo o caminho dos arquivos para o nosso projeto.\n","root_path = \"drive/My Drive/campus-party-captchas\""],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"cLB3mCV5W8_u","colab_type":"text"},"source":["---\n","\n","\"creating\n","\n","## 
Codificação"]},{"cell_type":"markdown","metadata":{"id":"CsWroapWYxrM","colab_type":"text"},"source":["

Selenium + Chrome webdriver

\"webdriver-architecture\" Fonte: https://www.edureka.co/blog/selenium-tutorial/\n","
\n","\n","\n","Selenium é uma biblioteca que nos permite navegar por páginas web e realizar ações **humanas** predefinidas, como preencher campos, ações de cliques e busca de termos na página. O que facilita as ações de *web crawling*, cujo propósito é criar scripts ou programas que automatizem tarefas."]},{"cell_type":"markdown","metadata":{"id":"_II1q9e2fFZR","colab_type":"text"},"source":["### Inicializando o webdriver"]},{"cell_type":"code","metadata":{"id":"eyXHE0tYMWiG","colab_type":"code","colab":{}},"source":["# Instanciando nosso driver\n","driver = webdriver.Chrome('chromedriver',options=chrome_options)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"xTXnArPjMWiW","colab_type":"text"},"source":["### Realizando acesso a página\n","\n","Nesse momento é realizado o acesso a página para obter a imagem do captcha."]},{"cell_type":"code","metadata":{"id":"CDqj9t05MWia","colab_type":"code","colab":{}},"source":["# Definindo o caminho\n","url = 'http://captchas-generator.herokuapp.com/'"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"gVfnYALnMWig","colab_type":"code","colab":{}},"source":["# Carregando a página solicitada\n","driver.get(url)\n","\n","# Obtendo print da página\n","fullPage = driver.get_screenshot_as_png()\n","\n","# Convertendo para Mat(OpenCV)\n","img_stream = BytesIO(fullPage)\n","img = cv2.imdecode(np.frombuffer(img_stream.read(), np.uint8), 1)\n","img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"jY0KudPPfPKH","colab_type":"code","colab":{}},"source":["# Mostrando a imagem carregada\n","plt.figure(figsize=(15,10))\n","plt.imshow(img)\n","plt.show"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"sPABMHA0fPi3","colab_type":"code","colab":{}},"source":["captcha = driver.find_element_by_id(\"captcha_image\") # Encontrando o captcha na página pelo id\n","loc = captcha.location # Obtendo localização em 
pixels\n","size = captcha.size # Obtendo o tamanho da imagem\n","img = img[ int(loc['y']): int(loc['y'] + size['height']) , int(loc['x']): int(loc['x'] + size['width']) ] # Delimitando a imagem ao captcha\n","gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) # Convertendo para escala de cinza"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"wDsE9TcnMWio","colab_type":"code","colab":{}},"source":["# Mostrando o captcha obtido\n","plt.imshow(gray, cmap='gray')\n","plt.show"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"_vjk6Qt1MWix","colab_type":"text"},"source":["### Estágio de processamento de imagens\n","\n","A partir desse ponto é iniciado a etapa de processamento."]},{"cell_type":"markdown","metadata":{"id":"XM-3YYWNhhdB","colab_type":"text"},"source":["#### Binarização"]},{"cell_type":"markdown","metadata":{"id":"AtEsB0yVfT1c","colab_type":"text"},"source":["

Limiarização Adaptativa

\"opencv_thresholding\"
Fonte: https://docs.opencv.org/3.4.3/d7/d4d/tutorial_py_thresholding.html\n","
\n","\n","O processo de limiarização consiste na transformação de uma imagem em escala de cinza para uma imagem binária. A limiarização mais simples é a definida por um **limite** predefinido, onde tudo que for menor que esse limite é transformado para um valor, e maior que ele para outro valor, ilustrado no topo esquerdo da Figura acima. \n","\n","Outros métodos mais complexos e mais eficientes também podem ser aplicados, como: Otsu's method e limiarização adaptativa. Este utiliza-se da vizinhaça para descobrir o limite, ao invés de um valor predefinido."]},{"cell_type":"code","metadata":{"id":"EP2nK371MWi0","colab_type":"code","colab":{}},"source":["# Conversão da imagem em escala de cinza para preto e branco.\n","binary = cv2.adaptiveThreshold(gray,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY_INV,75,10)\n","plt.imshow(binary, cmap='gray')\n","plt.show"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"0hV_-GWihoBo","colab_type":"text"},"source":["#### Filtro de mediana\n","\n","

Aplicação do filtro

\"opencv_median\"
Fonte: https://dev.to/enzoftware/how-to-build-amazing-image-filters-with-python-median-filter---sobel-filter---5h7\n","
\n","\n","O filtro de mediana é uns dos pré-processamentos mais utilizados na literatura quando o assunto é rúidos, principalmente os de sal e pipenta. \n","\n","
\"lena_normal\" \"lena_noise\"\n","
\n","\n","Através de uma janela deslizante, o algoritmo busca o valor central (mediano) entre todos os valores dessa janela e o utiliza como valor do pixel para nova imagem. "]},{"cell_type":"code","metadata":{"id":"5ED2W-mSMWi7","colab_type":"code","colab":{}},"source":["plt.figure(figsize=(20,5))\n","filtered = cv2.medianBlur(binary, 3) # Primeira aplicação\n","ax1 = plt.subplot(221)\n","ax1.imshow(filtered, cmap='gray')\n","\n","filtered = cv2.medianBlur(filtered, 3) # Segunda aplicação\n","ax2 = plt.subplot(222)\n","ax2.imshow(filtered, cmap='gray')\n","\n","filtered = cv2.medianBlur(filtered, 3) # Terceira aplicação\n","ax3 = plt.subplot(223)\n","ax3.imshow(filtered, cmap='gray')\n","\n","filtered = cv2.medianBlur(filtered, 3) # Quarta aplicação\n","ax4 = plt.subplot(224)\n","ax4.imshow(filtered, cmap='gray')\n","\n","ax1.axis('off')\n","ax2.axis('off')\n","ax3.axis('off')\n","ax4.axis('off')\n","\n","ax1.text(0.5,-0.1, \"Median blur 1\", ha=\"center\", transform=ax1.transAxes)\n","ax2.text(0.5,-0.1, \"Median blur 2\", ha=\"center\", transform=ax2.transAxes)\n","ax3.text(0.5,-0.1, \"Median blur 3\", ha=\"center\", transform=ax3.transAxes)\n","ax4.text(0.5,-0.1, \"Final result\", ha=\"center\", transform=ax4.transAxes)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"nsVsUCjwMWjE","colab_type":"text"},"source":["#### Buscando contornos"]},{"cell_type":"markdown","metadata":{"id":"NqfSMdm1BA7J","colab_type":"text"},"source":["

Encontrando contornos com OpenCV

\"opencv_contour\"
Fonte: https://www.pyimagesearch.com/2016/02/01/opencv-center-of-contour/\n","
\n","\n","O processo de buscar contornos,consiste em analisar os pixels nas imagens e realizar o agrupamento desses pixels por meio da proximidade (distância e intensidade). Bastante utilizados na análise de silhuetas e na busca por objetos."]},{"cell_type":"code","metadata":{"id":"a7Dakk_pMWjI","colab_type":"code","colab":{}},"source":["_, contours, _ = cv2.findContours(filtered, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) # Obtendo os contornos\n","\n","img_cp = img.copy()\n","\n","# Mostrando os contornos obtidos pelo algoritmo\n","for c in range(len(contours)):\n"," x,y,w,h = cv2.boundingRect(contours[c])\n"," cv2.rectangle(img_cp,(x,y),(x+w,y+h),\n"," (random.randint(0, 255), \n"," random.randint(0, 255), \n"," random.randint(0, 255)),1)\n","\n","plt.figure(figsize=(15,10))\n","plt.imshow(img_cp)\n","plt.show()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"6V40u69wMWjP","colab_type":"text"},"source":["#### Ordenando pelo tamanho do contorno (número de pixels) e pela posição"]},{"cell_type":"code","metadata":{"id":"t78LMcwVMWjR","colab_type":"code","colab":{}},"source":["# Definição da função que retorna o primeiro ponto a esquerda do contorno\n","def top_left(c):\n"," x,y,_,_ = cv2.boundingRect(c)\n"," return (x,y)\n","\n","contours.sort(key=len,reverse=True) # Realizando a ordenação pelo tamanho (de maneira decrescente)\n","contours = contours[:min(6, len(contours))] # Obtendo os seis primeiros contornos\n","contours.sort(key=top_left) # Sorteando os 6 (ou menos) contornos pela posição\n","\n","img_cp = img.copy()\n","\n","# Mostrando os resultados finais\n","for c in range(len(contours)):\n"," x,y,w,h = cv2.boundingRect(contours[c])\n"," cv2.rectangle(img_cp,(x,y),(x+w,y+h),(random.randint(0, 255), random.randint(0, 255), random.randint(0, 
255)),1)\n","\n","plt.figure(figsize=(15,10))\n","plt.imshow(img_cp)\n","plt.show()"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"E0ePssgLMWjb","colab_type":"code","colab":{}},"source":["fig_chars = plt.figure()\n","\n","# Separando e mostrando os candidatos a digitos\n","for c in range(len(contours)):\n"," x,y,w,h = cv2.boundingRect(contours[c])\n"," crop = np.zeros(filtered.shape, dtype=np.uint8)\n"," cv2.drawContours(crop, contours, c, 255, -1)\n"," crop = cv2.bitwise_and(crop, filtered)\n"," crop = crop[y:y+h, x:x+w]\n","\n"," crop = cv2.resize(crop, (SZ_W, SZ_H), interpolation = cv2.INTER_CUBIC)\n"," fig_chars.add_subplot(1, 6, c+1)\n"," plt.imshow(crop, cmap = 'gray')"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"fMnO6QXbMWjh","colab_type":"text"},"source":["#### Corrigindo a orientação dos dígitos"]},{"cell_type":"markdown","metadata":{"id":"4SSRXMn4GxLn","colab_type":"text"},"source":["

Detectando inclinações

\"opencv_skew\"
Fonte: https://www.pyimagesearch.com/2017/02/20/text-skew-correction-opencv-python/\n","
\n","\n","A correção da inclinação textual é corrigida através de 2 momentos centrais, o qual é verificado a razão entre os dois eixos do plano."]},{"cell_type":"code","metadata":{"id":"wtQWS8w1MWjk","colab_type":"code","colab":{}},"source":["# Definindo a função que corrige a orientação dos dígitos\n","def deskew(img):\n"," m = cv2.moments(img)\n"," \n"," if abs(m['mu02']) < 1e-2:\n"," return img.copy() \n"," skew = m['mu11']/m['mu02'] \n"," M = np.float32([[1, skew, -0.5*SZ_W*skew], [0, 1, 0]]) \n"," img = cv2.warpAffine(img, M, (SZ_W, SZ_H), flags=cv2.WARP_INVERSE_MAP | cv2.INTER_CUBIC)\n"," return img"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"6QWvZ75bMWjr","colab_type":"code","colab":{}},"source":["fig_chars = plt.figure()\n","number_images = []\n","\n","# Itera sobre os contornos e aplica a correção\n","for c in range(len(contours)):\n"," x,y,w,h = cv2.boundingRect(contours[c])\n"," crop = np.zeros(filtered.shape, dtype=np.uint8)\n"," cv2.drawContours(crop, contours, c, 255, -1)\n"," crop = cv2.bitwise_and(crop, filtered)\n"," crop = crop[y:y+h, x:x+w]\n","\n"," crop = cv2.resize(crop, (SZ_W, SZ_H), interpolation = cv2.INTER_CUBIC) # Redimensionando o contorno para (15x20)\n"," skewed = deskew(crop) # Aplicação da correção\n"," fig_chars.add_subplot(1, 6, c+1)\n"," plt.imshow(skewed, cmap = 'gray')\n"," number_images.append( skewed.flatten().tolist() ) # Adiciona a lista de números os dígitos selecionados."],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"SaTAzHYlMWjw","colab_type":"text"},"source":["### Classificação dos dígitos\n","\n","Nessa etapa será realizado a classificação dos dígitos através de um modelo construido com aprendizado de máquina."]},{"cell_type":"markdown","metadata":{"id":"ARz404IhvSx0","colab_type":"text"},"source":["#### Rede neural"]},{"cell_type":"markdown","metadata":{"id":"jwvE2TV8ISHL","colab_type":"text"},"source":["

Arquitetura de uma RN

\"opencv_skew\"
Fonte: https://medium.com/@pallawi.ds/ai-starter-train-and-test-your-first-neural-network-classifier-in-keras-from-scratch-b6a5f3b3ebc4\n","
\n","\n","Redes neurais são modelos computacionais compostos de\n","camadas de elementos de computação interconectados, os neurônios.\n","Elas são capazes de assimilar padrões em dados e, com isso,\n","realizar predições, diagnósticos, reconhecimentos, entre outros.\n"]},{"cell_type":"code","metadata":{"id":"OBj0zoRzMWjy","colab_type":"code","colab":{}},"source":["# Carregando o modelo de rede neural\n","mlp = load(root_path + '/models/mlp.save')"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"7U_LjOotMWj6","colab_type":"code","colab":{}},"source":["# Realizando a predição\n","mlp_result = mlp.predict(number_images)\n","mlp_result = ''.join( [str(x) for x in mlp_result] )\n","print(mlp_result)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"1IMZqI7rMWj_","colab_type":"text"},"source":["## Finalizando o acesso à página\n"]},{"cell_type":"markdown","metadata":{"id":"fqpgdBAKwFxt","colab_type":"text"},"source":["### Preenchendo os campos de texto"]},{"cell_type":"code","metadata":{"id":"sbE89DKbMWkA","colab_type":"code","colab":{}},"source":["captcha_key = driver.find_element_by_id('captcha_input')\n","button = driver.find_element_by_id('captcha_btn')"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"lz2XtUD_wQ0z","colab_type":"text"},"source":["### Submetendo os dados"]},{"cell_type":"code","metadata":{"id":"4YgmniFjMWkH","colab_type":"code","colab":{}},"source":["captcha_key.send_keys(mlp_result)\n","button.click()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"0Gtm1oedTb5c","colab_type":"text"},"source":["## Validando resultados"]},{"cell_type":"code","metadata":{"id":"D5yT5G6mMWkN","colab_type":"code","colab":{}},"source":["# Obtendo página de resultado\n","page = driver.page_source"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"1twiZjBFSvBq","colab_type":"code","colab":{}},"source":["try:\n"," success_message = 
driver.find_element_by_id('success') # Verifica a existência do id com a mensagem de sucesso\n"," print(\"Página carregada com sucesso!\")\n","except:\n"," print(\"CAPTCHA inválida, tente novamente!\") # Caso der errado\n","\n","# Mostrando a página obtida\n","resultPage = driver.get_screenshot_as_png()\n","result_stream = BytesIO(resultPage)\n","result_img = cv2.imdecode(np.frombuffer(result_stream.read(), np.uint8), 1)\n","result_img = cv2.cvtColor(result_img, cv2.COLOR_BGR2RGB)\n","\n","plt.figure(figsize=(15,10))\n","plt.imshow(result_img)\n","plt.show"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"7Hc9Zzgx6BoQ","colab_type":"code","colab":{}},"source":[""],"execution_count":0,"outputs":[]}]} --------------------------------------------------------------------------------