├── .gitignore
├── Procfile
├── README.md
├── app.py
├── images
│   ├── initial_website.jpg
│   ├── results.jpg
│   └── search.jpg
├── requirements.txt
└── templates
    └── index.html

/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

--------------------------------------------------------------------------------
/Procfile:
--------------------------------------------------------------------------------
web: gunicorn app:app

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Web scraping Amazon with Python

## How to run this project
You can use Anaconda for this: open the Anaconda Prompt in the project's root folder, install the dependencies with `pip install -r requirements.txt`, and then start the app with `python .\app.py`.

## How it works
- When you run the project, you will see this website:

![Initial website](images/initial_website.jpg)

- Then you can type any product to search for. For this short sample we typed "computer"; when you press the Find button you will see the results below (only the first 10 results are shown):

![Search results](images/search.jpg)

When you click on any of the result buttons, you are taken to that product on Amazon. The list is based on the same search you would make on the official Amazon website; the app scrapes those results and shows them on this site.

![Amazon results](images/results.jpg)
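
## Example request
Once the app is running locally, the `/amazon` endpoint can also be called directly. Below is a minimal sketch using the `requests` library; the local URL assumes the default port 5000 set in `app.py`, and the search term "computer" is just an example.

```python
import requests

# Send the search term the same way the web page does: as JSON under "sendinfo".
response = requests.post(
    "http://127.0.0.1:5000/amazon",
    json={"sendinfo": "computer"},
)
data = response.json()

# "links" holds the first 10 Amazon product URLs and "texto" their titles.
for title, link in zip(data["texto"], data["links"]):
    print(title, "->", link)
```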

--------------------------------------------------------------------------------
/app.py:
--------------------------------------------------------------------------------
from flask import Flask, jsonify, request, render_template
from bs4 import BeautifulSoup
import requests
import json

app = Flask(__name__)


@app.route('/')
def index():
    # Serve the search page.
    return render_template('index.html')


@app.route('/amazon', methods=["POST"])
def amazon():
    # The page posts JSON shaped like {"sendinfo": "<search term>"}.
    request_data = request.get_json()
    mytext = request_data['sendinfo']
    url = 'https://www.amazon.com/s?k={0}'.format(mytext)
    headers = {"FUser": "XSaint", "user-agent": "XSaint"}

    r = requests.get(url, headers=headers)
    if r.status_code == 200:
        soup = BeautifulSoup(r.content, 'html.parser')
        # Collect the product links from Amazon's search results container.
        urls = soup.find(
            'div', attrs={"class": "s-main-slot s-result-list s-search-results sg-row"}
        ).find_all(
            'a', attrs={"class": "a-link-normal s-underline-text s-underline-link-text s-link-style a-text-normal"}
        )
        urls_2 = ["https://www.amazon.com" + i.get('href') for i in urls[0:10]]
        # For each of the first 10 links, grab the title text; the title span
        # uses one of two classes depending on the result layout.
        aux = []
        for k in urls[0:10]:
            rating = k.find('span', class_='a-size-medium a-color-base a-text-normal')
            rating2 = k.find('span', class_='a-size-base-plus a-color-base a-text-normal')
            if rating:
                aux.append(rating.text)
            elif rating2:
                aux.append(rating2.text)
            else:
                aux.append('None')
        return jsonify({"links": urls_2, "texto": aux})
    return jsonify({"answer": "failed"})


if __name__ == "__main__":
    app.run(debug=True, port=5000)
    # app.run(debug=True, port=5000, host="0.0.0.0")

--------------------------------------------------------------------------------
/images/initial_website.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/XSaintX/webscrapping-python/88fae981d42f25be8ddebf0097328bb3d2eef74a/images/initial_website.jpg

--------------------------------------------------------------------------------
/images/results.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/XSaintX/webscrapping-python/88fae981d42f25be8ddebf0097328bb3d2eef74a/images/results.jpg

--------------------------------------------------------------------------------
/images/search.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/XSaintX/webscrapping-python/88fae981d42f25be8ddebf0097328bb3d2eef74a/images/search.jpg

--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
bs4
requests
flask==2.1.2
gunicorn

--------------------------------------------------------------------------------
/templates/index.html:
--------------------------------------------------------------------------------
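
The markup of `templates/index.html` is not reproduced above. As a rough, hypothetical sketch only: a minimal template that would work with the `/amazon` route in `app.py` could look like the block below. The element ids, layout, and JavaScript are assumptions for illustration, not the repository's actual file.

```html
<!-- Hypothetical sketch, not the original template: a search box that posts to /amazon -->
<!DOCTYPE html>
<html>
  <head>
    <title>Amazon search</title>
  </head>
  <body>
    <input id="sendinfo" type="text" placeholder="Type a product" />
    <button onclick="find()">Find</button>
    <div id="results"></div>

    <script>
      // Post the search term as JSON, then render one button per result
      // using the "links" and "texto" arrays returned by the Flask route.
      async function find() {
        const text = document.getElementById("sendinfo").value;
        const response = await fetch("/amazon", {
          method: "POST",
          headers: { "Content-Type": "application/json" },
          body: JSON.stringify({ sendinfo: text }),
        });
        const data = await response.json();
        const results = document.getElementById("results");
        results.innerHTML = "";
        (data.links || []).forEach((link, i) => {
          const button = document.createElement("button");
          button.textContent = data.texto[i];
          button.onclick = () => window.open(link, "_blank");
          results.appendChild(button);
        });
      }
    </script>
  </body>
</html>
```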