├── deepbnb
│   ├── __init__.py
│   ├── api
│   │   ├── __init__.py
│   │   ├── ApiBase.py
│   │   ├── PdpReviews.py
│   │   ├── ExploreSearch.py
│   │   └── PdpPlatformSections.py
│   ├── spiders
│   │   ├── __init__.py
│   │   └── airbnb.py
│   ├── model.py
│   ├── middlewares.py
│   ├── items.py
│   ├── exporter.py
│   ├── settings.py.dist
│   └── pipelines.py
├── .gitignore
├── requirements.txt
├── scrapy.cfg
├── README.md
└── LICENSE

/deepbnb/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/deepbnb/api/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
__pycache__/
.idea/
/deepbnb/settings.py
/env
/results/*
*.csv
*.xlsx
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
elasticsearch==8.4.3
lxml==4.9.1
openpyxl==3.0.10
requests==2.28.1
Scrapy==2.6.3
scrapy-playwright==0.0.22
--------------------------------------------------------------------------------
/deepbnb/spiders/__init__.py:
--------------------------------------------------------------------------------
# This package will contain the spiders of your Scrapy project
#
# Please refer to the documentation for information on how to create and manage
# your spiders.
--------------------------------------------------------------------------------
/scrapy.cfg:
--------------------------------------------------------------------------------
# Automatically created by: scrapy startproject
#
# For more information about the [deploy] section see:
# https://scrapyd.readthedocs.org/en/latest/deploy.html

[settings]
default = deepbnb.settings

[deploy]
#url = http://localhost:6800/
project = deepbnb
--------------------------------------------------------------------------------
/deepbnb/model.py:
--------------------------------------------------------------------------------
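# A sketch of the imports this commented-out mapping would need if it were
# re-enabled. The field classes used below (Document, Text, Keyword, GeoPoint,
# Nested, ...) match the elasticsearch-dsl package, so the exact import path is
# an assumption:
#
# from elasticsearch_dsl import (
#     Boolean, Document, Float, GeoPoint, Integer, Keyword, Nested, Text)
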
# class Listing(Document):
#     """Base class containing the common fields."""
#     access = Text()
#     additional_house_rules = Text()
#     allows_events = Boolean()
#     amenities = Keyword(multi=True)
#     amenity_ids = Keyword(multi=True)
#     avg_rating = Float()
#     bathrooms = Float()
#     bedrooms = Integer()
#     beds = Integer()
#     business_travel_ready = Boolean()
#     city = Text(fields={'keyword': Keyword()}, required=True)
#     country = Text(fields={'keyword': Keyword()}, required=True)
#     coordinates = GeoPoint()
#     description = Text()
#     host_id = Integer(fields={'keyword': Keyword()})
#     house_rules = Text()
#     interaction = Text()
#     is_hotel = Boolean()
#     monthly_price_factor = Float()
#     name = Text(fields={'keyword': Keyword()}, required=True)
#     neighborhood_overview = Text()
#     person_capacity = Integer()
#     photo_count = Integer()
#     photos = Keyword(multi=True)
#     place_id = Text(fields={'keyword': Keyword()})
#     price_rate = Float()
#     price_rate_type = Text(fields={'keyword': Keyword()}, required=True)
#     province = Text(fields={'keyword': Keyword()})
#     rating_accuracy = Float()
#     rating_checkin = Float()
#     rating_cleanliness = Float()
#     rating_communication = Float()
#     rating_location = Float()
#     rating_value = Float()
#     review_count = Integer()
#     reviews = Nested()
#     room_and_property_type = Text(fields={'keyword': Keyword()}, required=True)
#     room_type = Text(fields={'keyword': Keyword()}, required=True)
#     room_type_category = Text(fields={'keyword': Keyword()}, required=True)
#     satisfaction_guest = Float()
#     star_rating = Float()
#     state = Text(fields={'keyword': Keyword()}, required=True)
#     transit = Text()
#     url = Text(fields={'keyword': Keyword()}, required=True)
#     weekly_price_factor = Float()
--------------------------------------------------------------------------------
/deepbnb/api/ApiBase.py:
--------------------------------------------------------------------------------
import json

from abc import abstractmethod, ABC
from logging import LoggerAdapter
from scrapy.http import Response
from urllib.parse import urlencode, urlunparse


class ApiBase(ABC):

    def __init__(self, api_key: str, logger: LoggerAdapter, currency: str):
        self._api_key = api_key
        self._currency = currency
        self._logger = logger

    @abstractmethod
    def api_request(self, **kwargs):
        raise NotImplementedError(f'{self.__class__.__name__}.api_request method is not defined')

    @staticmethod
    def build_airbnb_url(path, query=None):
        if query is not None:
            query = urlencode(query)

        return urlunparse(['https', 'www.airbnb.com', path, None, query, None])
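
    # For example (illustrative values only):
    #
    #   ApiBase.build_airbnb_url('/api/v3/PdpReviews', {'locale': 'en'})
    #   -> 'https://www.airbnb.com/api/v3/PdpReviews?locale=en'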

    @property
    def api_key(self):
        return self._api_key

    @staticmethod
    def _put_json_param_strings(query: dict):
        """Properly format JSON strings for 'variables' & 'extensions' params."""
        query['variables'] = json.dumps(query['variables'], separators=(',', ':'))
        query['extensions'] = json.dumps(query['extensions'], separators=(',', ':'))

    def read_data(self, response: Response):
        """Read response data as json"""
        self._logger.debug(f"Parsing {response.url}")
        data = json.loads(response.body)

        return data

    def _get_search_headers(self, response=None) -> dict:
        """Get headers for search requests."""
        headers = {
            'Accept': '*/*',
            'Accept-Encoding': 'gzip,deflate',
            'Connection': 'keep-alive',
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36',
            'X-Airbnb-Api-Key': self._api_key
        }
        if response:
            headers['Cookie'] = str(response.headers.get('Set-Cookie'))

        return headers
--------------------------------------------------------------------------------
/deepbnb/middlewares.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Define here the models for your spider middleware
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/spider-middleware.html

from scrapy import signals


class DeepbnbSpiderMiddleware(object):
    # Not all methods need to be defined. If a method is not defined,
    # scrapy acts as if the spider middleware does not modify the
    # passed objects.

    @classmethod
    def from_crawler(cls, crawler):
        # This method is used by Scrapy to create your spiders.
        s = cls()
        crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
        return s

    def process_spider_input(self, response, spider):
        # Called for each response that goes through the spider
        # middleware and into the spider.

        # Should return None or raise an exception.
        return None

    def process_spider_output(self, response, result, spider):
        # Called with the results returned from the Spider, after
        # it has processed the response.

        # Must return an iterable of Request, dict or Item objects.
        for i in result:
            yield i

    def process_spider_exception(self, response, exception, spider):
        # Called when a spider or process_spider_input() method
        # (from other spider middleware) raises an exception.

        # Should return either None or an iterable of Response, dict
        # or Item objects.
        pass

    def process_start_requests(self, start_requests, spider):
        # Called with the start requests of the spider, and works
        # similarly to the process_spider_output() method, except
        # that it doesn't have a response associated.

        # Must return only requests (not items).
        for r in start_requests:
            yield r

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
--------------------------------------------------------------------------------
/deepbnb/items.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html

import scrapy


class DeepbnbItem(scrapy.Item):
    # define the fields for your item here like:
    # name = scrapy.Field()
    access = scrapy.Field()
    additional_house_rules = scrapy.Field()
    allows_events = scrapy.Field()
    amenities = scrapy.Field()
    amenity_ids = scrapy.Field()
    avg_rating = scrapy.Field()
    bathrooms = scrapy.Field()
    bedrooms = scrapy.Field()
    beds = scrapy.Field()
    business_travel_ready = scrapy.Field()
    city = scrapy.Field()
    country = scrapy.Field()
    description = scrapy.Field()
    host_id = scrapy.Field()
    house_rules = scrapy.Field()
    id = scrapy.Field()
    interaction = scrapy.Field()
    is_hotel = scrapy.Field()
    latitude = scrapy.Field()
    listing_expectations = scrapy.Field()
    longitude = scrapy.Field()
    monthly_price_factor = scrapy.Field()
    name = scrapy.Field()
    neighborhood_overview = scrapy.Field()
    person_capacity = scrapy.Field()
    photo_count = scrapy.Field()
    photos = scrapy.Field()
    place_id = scrapy.Field()
    price_rate = scrapy.Field()
    price_rate_type = scrapy.Field()
    province = scrapy.Field()
    rating_accuracy = scrapy.Field()
    rating_checkin = scrapy.Field()
    rating_cleanliness = scrapy.Field()
    rating_communication = scrapy.Field()
    rating_location = scrapy.Field()
    rating_value = scrapy.Field()
    review_count = scrapy.Field()
    reviews = scrapy.Field()
    room_and_property_type = scrapy.Field()
    room_type = scrapy.Field()
    room_type_category = scrapy.Field()
    satisfaction_guest = scrapy.Field()
    star_rating = scrapy.Field()
    state = scrapy.Field()
    total_price = scrapy.Field()
    transit = scrapy.Field()
    url = scrapy.Field()
    weekly_price_factor = scrapy.Field()
--------------------------------------------------------------------------------
/deepbnb/exporter.py:
--------------------------------------------------------------------------------
import openpyxl

from scrapy.exporters import BaseItemExporter


class XlsxItemExporter(BaseItemExporter):
    """Export items to Excel spreadsheet."""

    def __init__(self, file, include_headers_line=True, join_multivalued=',', **kwargs):
        """Class constructor."""
        # fields_to_export = settings.get('FIELDS_TO_EXPORT', [])
        # if fields_to_export:
        #     kwargs['fields_to_export'] = fields_to_export

        super().__init__(**kwargs)

        self.include_headers_line = include_headers_line
        self._workbook = openpyxl.workbook.Workbook()
        self._worksheet = self._workbook.active
        self._headers_not_written = True
        self._join_multivalued = join_multivalued
        # openpyxl writes the whole workbook at once on save, so we only need the
        # target path here; close the file handle Scrapy opened for the feed.
        self._filename = file.name
        file.close()

    def export_item(self, item):
        if self._headers_not_written:
            self._headers_not_written = False
            self._write_headers_and_set_fields_to_export(item)

        # Make name into a hyperlink. Note: no trailing comma here, since a
        # trailing comma would turn the value into a one-element tuple.
        item['name'] = '=HYPERLINK("https://www.airbnb.com/rooms/{}", "{}")'.format(
            item['id'], item.get('name', item['id']))

        fields = self._get_serialized_fields(item, default_value='', include_empty=True)
        values = tuple(self._build_row(x for _, x in fields))
        self._worksheet.append(values)

    def finish_exporting(self):
        self._workbook.save(self._filename)

    def serialize_field(self, field, name, value):
        serializer = field.get('serializer', self._join_if_needed)
        return serializer(value)

    @staticmethod
    def _build_row(values):
        for s in values:
            yield s

    def _join_if_needed(self, value):
        if isinstance(value, (list, tuple)):
            try:
                return self._join_multivalued.join(value)
            except TypeError:  # list in value may not contain strings
                pass
        return value

    def _write_headers_and_set_fields_to_export(self, item):
        if self.include_headers_line:
            if not self.fields_to_export:
                if isinstance(item, dict):
                    # for dicts try using fields of the first item
                    self.fields_to_export = list(item.keys())
                else:
                    # use fields declared in Item
                    self.fields_to_export = list(item.fields.keys())

            row = tuple(self._build_row(self.fields_to_export))
            self._worksheet.append(row)
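
# Hypothetical usage sketch: with this exporter registered under FEED_EXPORTERS
# in settings.py (as in settings.py.dist), an .xlsx output path selects it:
#
#   scrapy crawl airbnb -a query="Colorado Springs, CO" -o results.xlsx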
batches.""" 28 | url = self._get_url(listing_id, limit, offset) 29 | headers = self._get_search_headers() 30 | response = requests.get(url, headers=headers) 31 | data = json.loads(response.text) 32 | pdp_reviews = data['data']['merlin']['pdpReviews'] 33 | n_reviews_total = int(pdp_reviews['metadata']['reviewsCount']) 34 | reviews = [{ 35 | 'comments': r['comments'], 36 | 'created_at': r['createdAt'], 37 | 'language': r['language'], 38 | 'rating': r['rating'], 39 | 'response': r['response'], 40 | } for r in pdp_reviews['reviews']] 41 | 42 | return reviews, n_reviews_total 43 | 44 | def _get_url(self, listing_id: str, limit: int = 7, offset: int = None) -> str: 45 | _api_path = '/api/v3/PdpReviews' 46 | query = { 47 | 'operationName': 'PdpReviews', 48 | 'locale': 'en', 49 | 'currency': self._currency, 50 | 'variables': { 51 | 'request': { 52 | 'fieldSelector': 'for_p3', 53 | 'limit': limit, 54 | 'listingId': listing_id, 55 | 'numberOfAdults': '1', 56 | 'numberOfChildren': '0', 57 | 'numberOfInfants': '0' 58 | } 59 | }, 60 | 'extensions': { 61 | 'persistedQuery': { 62 | 'version': 1, 63 | 'sha256Hash': '4730a25512c4955aa741389d8df80ff1e57e516c469d2b91952636baf6eee3bd' 64 | } 65 | } 66 | } 67 | 68 | if offset: 69 | query['variables']['request']['offset'] = offset 70 | 71 | self._put_json_param_strings(query) 72 | 73 | return self.build_airbnb_url(_api_path, query) 74 | 75 | def _parse_reviews(self, response): 76 | # parse qs 77 | parsed = urlparse(response.request.url) 78 | parsed_qs = parse_qs(parsed.query) 79 | variables = json.loads(parsed_qs['variables'][0]) 80 | 81 | # extract data 82 | listing_id = variables['request']['listingId'] 83 | limit = variables['request']['limit'] 84 | offset = variables['request'].get('offset', 0) 85 | data = self.read_data(response) 86 | pdp_reviews = data['data']['merlin']['pdpReviews'] 87 | n_reviews_total = int(pdp_reviews['metadata']['reviewsCount']) 88 | 89 | if offset == 0: # get all other reviews 90 | for offset in range(limit, n_reviews_total, limit): 91 | url = self._get_url(listing_id, limit, offset) 92 | yield scrapy.Request(url, callback=self._parse_reviews, headers=self._get_search_headers()) 93 | 94 | # return distilled review 95 | yield from ({ 96 | 'comments': r['comments'], 97 | 'created_at': r['createdAt'], 98 | 'language': r['language'], 99 | 'rating': r['rating'], 100 | 'response': r['response'], 101 | } for r in pdp_reviews['reviews']) 102 | -------------------------------------------------------------------------------- /deepbnb/settings.py.dist: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Scrapy settings for deepbnb project 4 | # 5 | # For simplicity, this file contains only settings considered important or 6 | # commonly used. 
--------------------------------------------------------------------------------
/deepbnb/settings.py.dist:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Scrapy settings for deepbnb project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
#     http://doc.scrapy.org/en/latest/topics/settings.html
#     http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#     http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html

BOT_NAME = 'deepbnb'

SPIDER_MODULES = ['deepbnb.spiders']
NEWSPIDER_MODULE = 'deepbnb.spiders'

#
# Scraper config
#

# Elasticsearch (optional)
# ELASTICSEARCH_SERVERS = ['localhost']
# ELASTICSEARCH_INDEX = 'airbnb-listing'
# ELASTICSEARCH_INDEX_DATE_FORMAT = '%Y-%m'
# ELASTICSEARCH_TYPE = 'deepbnb'
# ELASTICSEARCH_UNIQ_KEY = 'url'  # Custom unique key

# Need to create a postgresql database to use this, see http://newcoder.io/scrape/part-3/
# DATABASE = {
#     'drivername': 'postgres',
#     'host': 'localhost',
#     'port': '5432',
#     'username': 'postgres',
#     'password': '',
#     'database': 'scrape'
# }

# Public development key (get this from the 'key' url parameter in async requests to /api/v2/explore_tabs)
AIRBNB_API_KEY = ''

# Crawl responsibly by identifying yourself (and your website) on the user-agent
USER_AGENT = 'deepbnb (https://airbnb-scraper)'

# Obey robots.txt rules
ROBOTSTXT_OBEY = True

# https://docs.python.org/3/library/webbrowser.html
# Open results in web browser
# WEB_BROWSER = 'chromium'

# Configure maximum concurrent requests performed by Scrapy (default: 16)
# CONCURRENT_REQUESTS = 32

# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
DOWNLOAD_DELAY = 10
# The download delay setting will honor only one of:
CONCURRENT_REQUESTS_PER_DOMAIN = 10
# CONCURRENT_REQUESTS_PER_IP = 16

# Disable cookies (enabled by default)
# COOKIES_ENABLED = False

# Disable Telnet Console (enabled by default)
TELNETCONSOLE_ENABLED = False

# Override the default request headers:
# DEFAULT_REQUEST_HEADERS = {
#     'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#     'Accept-Language': 'en',
# }

# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
# SPIDER_MIDDLEWARES = {
#     'deepbnb.middlewares.MyCustomSpiderMiddleware': 543,
# }

# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
# EXTENSIONS = {
#     'scrapy.extensions.telnet.TelnetConsole': None,
# }

# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
ITEM_PIPELINES = {
    'deepbnb.pipelines.DuplicatesPipeline': 299,
    'deepbnb.pipelines.BnbPipeline': 300,
    # 'deepbnb.pipelines.ElasticBnbPipeline': 301  # enable if you want to pipeline results to local elasticsearch
}

# https://docs.scrapy.org/en/latest/topics/feed-exports.html
FEED_EXPORTERS = {
    'xlsx': 'deepbnb.exporter.XlsxItemExporter',
}

FEED_EXPORT_FIELDS = [
    'name',
    'url',
    'price_rate',
    'price_rate_type',
    'total_price',
    'room_and_property_type',
    'latitude',
    'longitude',
    'monthly_price_factor',
    'weekly_price_factor',
    'room_type',
    'person_capacity',
    'amenities',
    'review_count',
    'review_score',
    'rating_accuracy',
    'rating_checkin',
    'rating_cleanliness',
    'rating_communication',
    'rating_location',
    'rating_value',
    'star_rating',
    'satisfaction_guest',
    'description',
    'neighborhood_overview',
    'notes',
    'additional_house_rules',
    'interaction',
    'access',
    'transit',
    'response_rate',
    'response_time',
    'photos',
]

# Minimum monthly discount percent
# MINIMUM_MONTHLY_DISCOUNT = 0

# Minimum weekly discount percent
# MINIMUM_WEEKLY_DISCOUNT = 0

# Minimum photos per listing
MINIMUM_PHOTOS = 2

# Default currency
# DEFAULT_CURRENCY = 'BRL'

# Desired hosting amenities and corresponding IDs. Determined by observing search GET parameters.
PROPERTY_AMENITIES = {
    # 'a/c': 5,
    'kitchen': 8,
    'tv': 58,
    'washer': 33,
    'dryer': 34,
    'wifi': 4,
}

ROOM_TYPES = []

# Blacklisted property types
PROPERTY_TYPE_BLACKLIST = ['Camper/RV', 'Campsite', 'Entire guest suite']

# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
AUTOTHROTTLE_ENABLED = True

# The initial download delay
AUTOTHROTTLE_START_DELAY = 5

# The maximum download delay to be set in case of high latencies
AUTOTHROTTLE_MAX_DELAY = 60

# The average number of requests Scrapy should be sending in parallel to
# each remote server
# AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0

# Enable showing throttling stats for every response received:
AUTOTHROTTLE_DEBUG = False

# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
# HTTPCACHE_ENABLED = True
# HTTPCACHE_EXPIRATION_SECS = 0
# HTTPCACHE_DIR = 'httpcache'
# HTTPCACHE_IGNORE_HTTP_CODES = []
# HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'

# scrapy-playwright settings
DOWNLOAD_HANDLERS = {
    "http": "scrapy_playwright.handler.ScrapyPlaywrightDownloadHandler",
    "https": "scrapy_playwright.handler.ScrapyPlaywrightDownloadHandler",
}
TWISTED_REACTOR = "twisted.internet.asyncioreactor.AsyncioSelectorReactor"
PLAYWRIGHT_DEFAULT_NAVIGATION_TIMEOUT = 60000
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Airbnb Scraper: Advanced Airbnb Search using Scrapy

## Disclaimer: No longer maintained

### This project is not currently maintained, due to the difficulty of using Scrapy to make requests to the Airbnb API. The project is on hold until further notice. Currently exploring a simpler approach here: https://github.com/JoeBashe/stl-scraper

Use Airbnb's unofficial API to efficiently search for rental properties.
Regex matching, ranged search, open matched properties in a browser, save to CSV, xlsx, or Elasticsearch (alpha).

## Notes

- Airbnb's API is subject to change at any moment, which would break this scraper. They've already changed it several
  times in the past. Also, using this probably violates their TOS. Please only use it for educational or research purposes.
- The scraper was recently updated to work with Airbnb's new v3 GraphQL API. Some features are still being updated.
- If you get 403 Forbidden errors when running this scraper, try browsing the Airbnb site in your web browser from the
  same computer first, then try running the script again.

## Requirements

* **Python 3.10+**
* [Scrapy](http://scrapy.org/)
* [openpyxl](https://openpyxl.readthedocs.io/en/default/#installation)
* Elasticsearch 8+ if using the Elasticsearch pipeline
* see [requirements.txt](requirements.txt) for details

## Installation (*nix)

```bash
# Create venv
python3.10 -m venv env

# Enable venv
. env/bin/activate

# Install required packages
pip install -Ur requirements.txt

# Create settings.py
cp deepbnb/settings.py.dist deepbnb/settings.py

# @NOTE: Don't forget to set AIRBNB_API_KEY in settings.py. To find your API key,
# search Airbnb using Chrome, open dev tools, and look for the url parameter
# named "key" in async requests to /api/v2/explore_tabs under the Network tab.
```

## Configuration

Edit `deepbnb/settings.py` for settings. I've created some custom settings which are
documented [below](https://github.com/digital-engineering/airbnb-scraper#settings). The rest are documented
in https://docs.scrapy.org/en/latest/topics/settings.html.

## Example Usage

#### Minimal scraper usage:

    scrapy crawl airbnb -a query="Colorado Springs, CO" -o colorado_springs.csv

#### Advanced examples:

##### Madrid, fixed dates

```
scrapy crawl airbnb \
    -a query="Madrid, Spain" \
    -a checkin=2023-10-01 \
    -a checkout=2023-11-30 \
    -a max_price=1900 \
    -a min_price=1800 \
    -a neighborhoods="Acacias,Almagro,Arganzuela,Argüelles,Centro,Cortes,Embajadores,Imperial,Jerónimos,La Latina,Malasaña,Moncloa,Palacio,Recoletos,Retiro,Salamanca,Sol" \
    -s MUST_HAVE="(atico|attic|balcon|terra|patio|outdoor|roof|view)" \
    -s CANNOT_HAVE="studio" \
    -s MINIMUM_WEEKLY_DISCOUNT=20 \
    -s WEB_BROWSER="/usr/bin/chromium" \
    -o madrid.xlsx
```

##### New York ranged date search

```
scrapy crawl airbnb \
    -a query="New York, NY" \
    -a checkin="2023-01-22+7-0" \
    -a checkout="2023-02-22+14-3" \
    -a max_price=1800 \
    -s CANNOT_HAVE="guest suite" \
    -s MUST_HAVE="(walking distance|short walk|no car needed|walk everywhere|metro close|public transport)" \
    -o newyork.csv
```

## Ranged date queries

If you have flexible checkin / checkout dates, use the ranged search feature to search a range of checkin / checkout
dates.

### Search checkin date range +5 days -2 days

    scrapy crawl airbnb \
        -a query="Minneapolis, MN" \
        -a checkin="2023-10-15+5-2" \
        -a checkout="2023-11-15" \
        -o minneapolis.csv

This search would look for rentals in Minneapolis using Oct 15 2023 as the base check-in date, also searching for
rentals available for check-in 2 days before, up to 5 days after. In other words, check-ins from Oct 13 to Oct 20. This
is specified by the string `+5-2` appended to the checkin date: `2023-10-15+5-2`. The string must always follow the
pattern `+[days_after]-[days_before]`, unless `[days_after]` and `[days_before]` are equal, in which case you can
use `+-[days]`. The numbers may be any integer 0 or greater (large numbers untested).

### Search checkin date +5 days -2 days, checkout date + or - 3 days

    scrapy crawl airbnb \
        -a query="Florence, Italy" \
        -a checkin="2023-10-15+5-2" \
        -a checkout="2023-11-15+-3" \
        -o firenze.csv

## Scraping Description

After running the crawl command, the scraper will start. It will first run the
search query, then determine the quantity of result pages, and finally iterate
through each of those, scraping each of the property listings on each page.

Scraped items (listings) will be passed to the default item pipeline, where,
optionally, the `description`, `name`, and `reviews.description` fields will
be filtered using either or both of the `CANNOT_HAVE` and `MUST_HAVE` regexes.
Filtered items will be dropped. Accepted items can optionally be opened in a
given web browser, so that you can easily view your search results.

Finally, the output can be saved to an xlsx format file for additional
filtering, sorting, and inspection.
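
For a quick sanity check of the results, the saved workbook can be read back
with [openpyxl](https://openpyxl.readthedocs.io/) (already a dependency). A
minimal sketch; `madrid.xlsx` is just the output name from the example above:

```python
from openpyxl import load_workbook

wb = load_workbook('madrid.xlsx', read_only=True)
ws = wb.active

# First row is the header line written by XlsxItemExporter
header = [cell.value for cell in next(ws.iter_rows(max_row=1))]
for row in ws.iter_rows(min_row=2, values_only=True):
    listing = dict(zip(header, row))
    print(listing['url'], listing['price_rate'])
```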

## Parameters

You can find the values for these by first doing a search manually on the
Airbnb site.

* `query`: City and State to search. **(required)**
* `checkin`, `checkout`: Check-in and Check-out dates.
* `min_price`, `max_price`: Minimum and maximum price for the period.
  *The Airbnb search algorithm calculates this based upon search length.
  It will be either the daily or monthly price, depending on the length
  of the stay.*
* `neighborhoods`: Comma-separated list of neighborhoods within the city
  to filter for.
* `output`: Name of output file. Only `xlsx` output is tested.

## Settings

These settings can be edited in the `settings.py` file, or appended to the
command line using the `-s` flag as in the example above.

* `CANNOT_HAVE=""`
  Don't accept listings that match the given regex pattern.
  **(optional)**


* `FEED_EXPORT_FIELDS="['field1', 'field2', ...]"`
  Can be found in settings.py. Contains a list of all possible fields to
  export, i.e. all fields of `DeepbnbItem`. Comment out items to
  remove undesired fields from output. Applies only to `xlsx` output.


* `MINIMUM_MONTHLY_DISCOUNT=30`
  Minimum monthly discount.
  **(optional)**


* `MINIMUM_WEEKLY_DISCOUNT=25`
  Minimum weekly discount.
  **(optional)**


* `MUST_HAVE="()"`
  Only accept listings that match the given regex pattern.
  **(optional)**


* `ROOM_TYPES="['Camper/RV', 'Campsite', 'Entire guest suite']"`
  Room types to filter.
  **(optional)**


* `SKIP_LIST="['12345678', '12345679', '12345680']"`
  Property IDs to filter.
  **(optional)**


* `WEB_BROWSER="/path/to/browser"`
  Web browser executable command (the URL placeholder `%s` is appended
  automatically).
  **(optional)**

  *Examples*:
    - MacOS
      `WEB_BROWSER="open -a /Applications/Google\ Chrome.app"`

    - Windows
      `WEB_BROWSER="C:\Program Files (x86)\Google\Chrome\Application\chrome.exe"`

    - Linux
      `WEB_BROWSER="/usr/bin/google-chrome"`

## Elasticsearch

Enable `deepbnb.pipelines.ElasticBnbPipeline` in `settings.py`
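
A minimal query sketch using the `elasticsearch` client pinned in
[requirements.txt](requirements.txt). The index name assumes the
`ELASTICSEARCH_INDEX` example value from `settings.py.dist`; adjust the host
and index for your setup:

```python
from elasticsearch import Elasticsearch

es = Elasticsearch('http://localhost:9200')

# Find listings whose description mentions a patio, cheapest first
results = es.search(
    index='airbnb-listing',
    query={'match': {'description': 'patio'}},
    sort=[{'price_rate': 'asc'}],
)
for hit in results['hits']['hits']:
    print(hit['_source']['url'], hit['_source']['price_rate'])
```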
The string must always follow the 106 | pattern`+[days_after]-[days_before]` unless `[days_after]` and `[days_before]` are equal, in which case you can 107 | use `+-[days]`. The numbers may be any integer 0 or greater (large numbers untested). 108 | 109 | ### Search checkin date +5 days -2 days, checkout date + or - 3 days 110 | 111 | scrapy crawl airbnb \ 112 | -a query="Florence, Italy" \ 113 | -a checkin="2023-10-15+5-2" \ 114 | -a checkout="2023-11-15+-3" \ 115 | -o firenze.csv 116 | 117 | ## Scraping Description 118 | 119 | After running the crawl command, the scraper will start. It will first run the 120 | search query, then determine the quantity of result pages, and finally iterate 121 | through each of those, scraping each of the property listings on each page. 122 | 123 | Scraped items (listings) will be passed to the default item pipeline, where, 124 | optionally, the `description`, `name`, and `reviews.description` fields will 125 | be filtered using either or both of the `CANNOT_HAVE` and `MUST_HAVE` regexes. 126 | Filtered items will be dropped. Accepted items can be optionally opened in a 127 | given web browser, so that you can easily view your search results. 128 | 129 | Finally, the output can be saved to an xlsx format file for additional 130 | filtering, sorting, and inspection. 131 | 132 | ## Parameters 133 | 134 | You can find the values for these by first doing a search manually on the 135 | Airbnb site. 136 | 137 | * `query`: City and State to search. **(required)** 138 | * `checkin`, `checkout`: Check-in and Check-out dates. 139 | * `min_price`, `max_price`: Minimum and maximum price for the period. 140 | *The Airbnb search algorithm calculates this based upon search length. 141 | It will be either the daily or monthly price, depending on the length 142 | of the stay.* 143 | * `neighborhoods`: Comma-separated list of neighborhoods within the city 144 | to filter for. 145 | * `output`: Name of output file. Only `xlsx` output is tested. 146 | 147 | ## Settings 148 | 149 | These settings can be edited in the `settings.py` file, or appended to the 150 | command line using the `-s` flag as in the example above. 151 | 152 | * `CANNOT_HAVE=""` 153 | Don't accept listings that match the given regex pattern. 154 | **(optional)** 155 | 156 | 157 | * `FIELDS_TO_EXPORT="['field1', 'field2', ...]"` 158 | Can be found in settings.py. Contains a list of all possible fields to 159 | export, i.e. all fields of `AirbnbScraperItem`. Comment items to 160 | remove undesired fields from output. Applies only to `xlsx` output. 161 | 162 | 163 | * `MINIMUM_MONTHLY_DISCOUNT=30` 164 | Minimum monthly discount. 165 | **(optional)** 166 | 167 | 168 | * `MINIMUM_WEEKLY_DISCOUNT=25` 169 | Minimum weekly discount. 170 | **(optional)** 171 | 172 | 173 | * `MUST_HAVE="()"` 174 | Only accept listings that match the given regex pattern. 175 | **(optional)** 176 | 177 | 178 | * `ROOM_TYPES="['Camper/RV', 'Campsite', 'Entire guest suite']"` 179 | Room Types to filter. 180 | **(optional)** 181 | 182 | 183 | * `SKIP_LIST="['12345678', '12345679', '12345680']"` 184 | Property IDs to filter. 185 | **(optional)** 186 | 187 | 188 | * `WEB_BROWSER="/path/to/browser %s"` 189 | Web browser executable command. 

    def process_item(self, item, spider):
        """Drop items not fitting parameters. Open in browser if specified. Return accepted items."""
        if self._skip_list and str(item.get('id')) in self._skip_list:
            raise DropItem('Item in skip list: {}'.format(item['id']))

        if self._property_type_blacklist and item['room_and_property_type'] in self._property_type_blacklist:
            raise DropItem('Skipping property type: {}'.format(item['room_and_property_type']))

        if self._minimum_monthly_discount and 'monthly_discount' in item:
            if item['monthly_discount'] < self._minimum_monthly_discount:
                raise DropItem('Monthly discount too low: {}'.format(item['monthly_discount']))

        if self._minimum_weekly_discount and 'weekly_discount' in item:
            # compare against the *weekly* minimum (previously compared against the monthly one)
            if item['weekly_discount'] < self._minimum_weekly_discount:
                raise DropItem('Weekly discount too low: {}'.format(item['weekly_discount']))

        if self._minimum_photos and item['photo_count'] < self._minimum_photos:
            raise DropItem('Photos too low: {} photos'.format(item['photo_count']))

        # check regexes
        if self._cannot_have_regex:
            for f in self._fields_to_check:
                field_val = item[f]
                if field_val is None:
                    continue
                v = str(field_val.encode('ASCII', 'replace'))
                if self._cannot_have_regex.search(v):
                    raise DropItem('Found: {}'.format(self._cannot_have_regex.pattern))

        if self._must_have_regex:
            has_must_haves = False
            for f in self._fields_to_check:
                field_val = item[f]
                if field_val is None:
                    continue
                v = str(field_val.encode('ASCII', 'replace'))
                if self._must_have_regex.search(v):
                    has_must_haves = True
                    break

            if not has_must_haves:
                raise DropItem('Not Found: {}'.format(self._must_have_regex.pattern))

        if self._web_browser:  # open in browser
            self._web_browser.open_new_tab(item['url'])

        return item


class ElasticBnbPipeline:
    _datetime_scrape = datetime.now()

    @classmethod
    def from_crawler(cls, crawler):
        return cls(elasticsearch_index=crawler.settings.get('ELASTICSEARCH_INDEX'))

    def __init__(self, elasticsearch_index):
        """Class constructor."""
        self._elasticsearch_index = elasticsearch_index

    def process_item(self, item, spider):
        """Insert / update items in ElasticSearch."""
        properties = {
            'access': item['access'],
            'additional_house_rules': item['additional_house_rules'],
            'allows_events': item['allows_events'],
            'amenities': item['amenities'],
            'amenity_ids': item['amenity_ids'],
            'avg_rating': item['avg_rating'],
            'bathrooms': item['bathrooms'],
            'bedrooms': item['bedrooms'],
            'beds': item['beds'],
            'business_travel_ready': item['business_travel_ready'],
            'city': item['city'],
            'country': item['country'],
            'coordinates': {'lon': item['longitude'], 'lat': item['latitude']},
            'description': item['description'],
            'host_id': item['host_id'],
            'house_rules': item['house_rules'],
            'interaction': item.get('interaction'),
            'is_hotel': item['is_hotel'],
            'monthly_price_factor': item['monthly_price_factor'],
            'name': item['name'],
            'neighborhood_overview': item['neighborhood_overview'],
            'person_capacity': item['person_capacity'],
            'photo_count': item['photo_count'],
            'photos': item['photos'],
            'place_id': item['place_id'],
            'price_rate': item['price_rate'],
            'price_rate_type': item['price_rate_type'],
            'province': item['province'],
            'rating_accuracy': item['rating_accuracy'],
            'rating_checkin': item['rating_checkin'],
            'rating_cleanliness': item['rating_cleanliness'],
            'rating_communication': item['rating_communication'],
            'rating_location': item['rating_location'],
            'rating_value': item['rating_value'],
            'review_count': item['review_count'],
            'review_score': item.get('review_score'),
            'reviews': item.get('reviews'),
            'room_and_property_type': item['room_and_property_type'],
            'room_type': item['room_type'],
            'room_type_category': item['room_type_category'],
            'satisfaction_guest': item['satisfaction_guest'],
            'datetime_scrape': self._datetime_scrape,
            'star_rating': item['star_rating'],
            'state': item['state'],
            'transit': item.get('transit'),
            'url': item['url'],
            'weekly_price_factor': item['weekly_price_factor']
        }

        # update if exists, else insert new
        try:
            listing = Listing.get(id=item['id'], index=self._elasticsearch_index)
            listing.update(**properties)
        except elasticsearch.exceptions.NotFoundError:
            properties['meta'] = {'id': item['id']}
            listing = Listing(**properties)
            listing.save(index=self._elasticsearch_index)

        return item


class DuplicatesPipeline:
    """Looks for duplicate items, and drops those items that were already processed.

    @ref: https://docs.scrapy.org/en/latest/topics/item-pipeline.html#duplicates-filter
    """

    def __init__(self):
        self.ids_seen = set()

    def process_item(self, item, spider):
        if item['id'] in self.ids_seen:
            raise DropItem("Duplicate item found: %s" % item)
        else:
            self.ids_seen.add(item['id'])
            return item
--------------------------------------------------------------------------------
/deepbnb/api/ExploreSearch.py:
--------------------------------------------------------------------------------
import json
import scrapy
import re

from datetime import date, timedelta
from logging import LoggerAdapter
from scrapy import Spider
from urllib.parse import parse_qs, urlparse

from deepbnb.api.ApiBase import ApiBase


class ExploreSearch(ApiBase):
    """Airbnb API v3 Search Endpoint"""

    def __init__(
            self,
            api_key: str,
            logger: LoggerAdapter,
            currency: str,
            spider: Spider,
            room_types: list,
            geography: dict,
            query: str
    ):
        super().__init__(api_key, logger, currency)
        self.__geography = geography
        self.__room_types = room_types
        self.__query = query
        self.__spider = spider

    @staticmethod
    def add_search_params(params, response):
        parsed_qs = parse_qs(urlparse(response.request.url).query)
        variables = json.loads(parsed_qs['variables'][0])['request']
        if 'checkin' in variables:
            params['checkin'] = variables['checkin']
            params['checkout'] = variables['checkout']

        if 'priceMax' in variables:
            params['priceMax'] = variables['priceMax']

        if 'priceMin' in variables:
            params['priceMin'] = variables['priceMin']

        if 'ne_lat' in parsed_qs:
            params['ne_lat'] = parsed_qs['ne_lat'][0]

        if 'ne_lng' in parsed_qs:
            params['ne_lng'] = parsed_qs['ne_lng'][0]

        if 'sw_lat' in parsed_qs:
            params['sw_lat'] = parsed_qs['sw_lat'][0]

        if 'sw_lng' in parsed_qs:
            params['sw_lng'] = parsed_qs['sw_lng'][0]

    def api_request(self, query, params=None, callback=None, response=None, headers=None):
        """Perform API request."""
        request = response.follow if response else scrapy.Request
        callback = callback or self.__spider.parse
        url = self._get_url(query, params)
        search_headers = self._get_search_headers(response)
        headers = headers | search_headers if headers else search_headers
        return request(url, callback, headers=headers, meta={'playwright': True}, cb_kwargs={'headers': headers})

    def get_paginated_search_params(self, response, data):
        """Consolidate search parameters and return result."""
        metadata = data['data']['dora']['exploreV3']['metadata']
        pagination = metadata['paginationMetadata']
        filter_state = data['data']['dora']['exploreV3']['filters']['state']

        place_id = self.__geography.get('place_id', metadata['geography']['placeId'])
        query = [fs['value']['stringValue'] for fs in filter_state if fs['key'] == 'query'][0]

        params = {'placeId': place_id, 'query': query}
        if pagination['hasNextPage']:
            params['lastSearchSessionId'] = pagination['searchSessionId']

        self.add_search_params(params, response)

        return params

    def parse_landing_page(self, response):
        """Parse search response and generate URLs for all searches, then perform them."""
        self._logger.debug(f"Parsing {response.url}")
        json_response = response.xpath('body/pre/text()').get()  # remove html wrapper
        data = json.loads(json_response)
        search_params = self.get_paginated_search_params(response, data)

        self.__geography.update(data['data']['dora']['exploreV3']['metadata']['geography'])
        self._logger.info(f"Geography:\n{self.__geography}")

        yield self.api_request(self.__query, search_params, self.__spider.parse, response)

    def perform_checkin_start_requests(
            self,
            checkin: str,
            checkout: str,
            checkin_range_spec: str,
            checkout_range_spec: str,
            params: dict
    ):
        """Perform requests for start URLs.

        :param checkin: base check-in date in ISO format, e.g. '2023-10-15'
        :param checkout: base check-out date in ISO format
        :param checkin_range_spec: check-in range spec, e.g. '+5-2' (5 days after, 2 days before)
        :param checkout_range_spec: check-out range spec, e.g. '+-3' (3 days either side)
        :param params: base search parameters
        :return: generator of search requests
        """
        # single request for static start and end dates
        if not (checkin_range_spec or checkout_range_spec):  # simple start and end date
            params['checkin'] = checkin
            params['checkout'] = checkout
            yield self.api_request(self.__query, params, self.parse_landing_page)

        # multi request for dynamic start and static end date
        if checkin_range_spec and not checkout_range_spec:  # ranged start date, single end date, iterate over checkin range
            checkin_start_date, checkin_range = self._build_date_range(checkin, checkin_range_spec)
            for i in range(checkin_range.days + 1):  # + 1 to include end date
                params['checkin'] = str(checkin_start_date + timedelta(days=i))
                params['checkout'] = checkout
                yield self.api_request(self.__query, params, self.parse_landing_page)

        # multi request for static start and dynamic end date
        if checkout_range_spec and not checkin_range_spec:  # ranged end date, single start date, iterate over checkout range
            checkout_start_date, checkout_range = self._build_date_range(checkout, checkout_range_spec)
            for i in range(checkout_range.days + 1):  # + 1 to include end date
                params['checkout'] = str(checkout_start_date + timedelta(days=i))
                params['checkin'] = checkin
                yield self.api_request(self.__query, params, self.parse_landing_page)

        # double nested multi request, iterate over both start and end date ranges
        if checkout_range_spec and checkin_range_spec:
            checkin_start_date, checkin_range = self._build_date_range(checkin, checkin_range_spec)
            checkout_start_date, checkout_range = self._build_date_range(checkout, checkout_range_spec)
            for i in range(checkin_range.days + 1):  # + 1 to include end date
                params['checkin'] = str(checkin_start_date + timedelta(days=i))
                for j in range(checkout_range.days + 1):  # + 1 to include end date
                    params['checkout'] = str(checkout_start_date + timedelta(days=j))
                    yield self.api_request(self.__query, params, self.parse_landing_page)

    @staticmethod
    def _build_date_range(iso_date: str, range_spec: str):
        """Calculate start and end dates for a range. Return start date and timedelta for number of days."""
        base_date = date.fromisoformat(iso_date)
        if range_spec.startswith('+-'):  # +-7
            days = float(re.match(r'\+-(\d+)', range_spec).group(1))
            start_date = base_date - timedelta(days=days)
            end_date = base_date + timedelta(days=days)
        else:  # +0-3
            result = re.match(r'\+(\d+)-(\d+)', range_spec)
            post_days = float(result.group(1))
            pre_days = float(result.group(2))
            start_date = base_date - timedelta(days=pre_days)
            end_date = base_date + timedelta(days=post_days)

        return start_date, end_date - start_date
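
    # For example (values worked out from the README's '+5-2' spec):
    #
    #   _build_date_range('2023-10-15', '+5-2')
    #   -> (datetime.date(2023, 10, 13), datetime.timedelta(days=7))
    #
    # i.e. check-ins from Oct 13 through Oct 20 inclusive.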

    def _get_url(self, search_string: str, params: dict = None):
        _api_path = '/api/v3/ExploreSearch'
        query = {
            'operationName': 'ExploreSearch',
            'locale': 'en',
            'currency': self._currency,
            '_cb': 'ld7rar1fhh6if',
        }
        data = {
            'variables': {
                'request': {
                    'metadataOnly': False,
                    'version': '1.7.9',
                    'itemsPerGrid': 20,
                    'tabId': 'home_tab',
                    'refinementPaths': ['/homes'],
                    'source': 'structured_search_input_header',
                    'searchType': 'filter_change',
                    'query': search_string,
                    # 'roomTypes': self.__room_types,
                    'cdnCacheSafe': False,
                    'simpleSearchTreatment': 'simple_search_only',
                    'treatmentFlags': [
                        'simple_search_1_1',
                        'simple_search_desktop_v3_full_bleed',
                        'flexible_dates_options_extend_one_three_seven_days'
                    ],
                    'screenSize': 'large'
                }
            },
            'extensions': {
                'persistedQuery': {
                    'version': 1,
                    'sha256Hash': '13aa9971e70fbf5ab888f2a851c765ea098d8ae68c81e1f4ce06e2046d91b6ea'
                }
            }
        }
        if params:
            data['variables']['request'] |= params

        self._put_json_param_strings(data)

        url = self.build_airbnb_url(_api_path, query)
        url += '&variables=%s' % data['variables']
        url += '&extensions=%s' % data['extensions']

        return url
--------------------------------------------------------------------------------
/deepbnb/api/PdpPlatformSections.py:
--------------------------------------------------------------------------------
import lxml.html
import re
import scrapy

from typing import Union
from logging import LoggerAdapter

from deepbnb.api.ApiBase import ApiBase
from deepbnb.api.PdpReviews import PdpReviews
from deepbnb.items import DeepbnbItem


class PdpPlatformSections(ApiBase):
    """Airbnb API v3 Property Display Endpoint"""

    # Unused. This is just a list of the sections we presently pull data from.
    # (@see `parse_listing_contents()`)
    SECTION_IDS = [
        'AMENITIES_DEFAULT',
        'DESCRIPTION_DEFAULT',
        'HOST_PROFILE_DEFAULT',
        'LOCATION_DEFAULT',
        'POLICIES_DEFAULT',
    ]

    def __init__(
            self,
            api_key: str,
            logger: LoggerAdapter,
            currency: str,
            data_cache: dict,
            geography: dict,
            pdp_reviews: PdpReviews
    ):
        super().__init__(api_key, logger, currency)
        self.__data_cache = data_cache
        self.__geography = geography
        self.__regex_amenity_id = re.compile(r'^([a-z0-9]+_)+([0-9]+)_')
        self.__pdp_reviews = pdp_reviews

    def api_request(self, listing_id: str):
        """Generate scrapy.Request for listing page."""
        _api_path = '/api/v3/PdpPlatformSections'
        query = {
            'operationName': 'PdpPlatformSections',
            'locale': 'en',
            'currency': self._currency,
            'variables': {
                'request': {
                    'id': listing_id,
                    'layouts': ['SIDEBAR', 'SINGLE_COLUMN'],
                    'pdpTypeOverride': None,
                    'translateUgc': None,
                    'preview': False,
                    'bypassTargetings': False,
                    'displayExtensions': None,
                    'adults': '1',
                    'children': None,
                    'infants': None,
                    'causeId': None,
                    'disasterId': None,
                    'priceDropSource': None,
                    'promotionUuid': None,
                    'selectedCancellationPolicyId': None,
                    'forceBoostPriorityMessageType': None,
                    'privateBooking': False,
                    'invitationClaimed': False,
                    'discountedGuestFeeVersion': None,
                    'staysBookingMigrationEnabled': False,
                    'useNewSectionWrapperApi': False,
                    'previousStateCheckIn': None,
                    'previousStateCheckOut': None,
                    'federatedSearchId': None,
                    'interactionType': None,
                    'searchId': None,
                    'sectionIds': None,
                    'checkIn': None,
                    'checkOut': None,
                    'p3ImpressionId': 'p3_1608841700_z2VzPeybmBEdZG20'
                }
            },
            'extensions': {
                'persistedQuery': {
                    'version': 1,
                    'sha256Hash': '625a4ba56ba72f8e8585d60078eb95ea0030428cac8772fde09de073da1bcdd0'
                }
            }
        }

        self._put_json_param_strings(query)
        url = self.build_airbnb_url(_api_path, query)

        return scrapy.Request(url, callback=self.parse_listing_contents, headers=self._get_search_headers())

    def parse_listing_contents(self, response):
        """Obtain data from an individual listing page, combine with cached data, and return DeepbnbItem."""
        # Collect base data
        data = self.read_data(response)
        pdp_sections = data['data']['merlin']['pdpSections']
        listing_id = pdp_sections['id']
        sections = pdp_sections['sections']
        metadata = pdp_sections['metadata']
        logging_data = metadata['loggingContext']['eventDataLogging']

        # Get sections
        amenities_section = [s for s in sections if s['sectionId'] == 'AMENITIES_DEFAULT'][0]['section']
        description_section = [s for s in sections if s['sectionId'] == 'DESCRIPTION_DEFAULT'][0]['section']
        host_profile = [s for s in sections if s['sectionId'] == 'HOST_PROFILE_DEFAULT'][0]['section']
        location = [s for s in sections if s['sectionId'] == 'LOCATION_DEFAULT'][0]['section']
        policies = [s for s in sections if s['sectionId'] == 'POLICIES_DEFAULT'][0]['section']

        # Collect amenity data
        amenities_groups = amenities_section['seeAllAmenitiesGroups']
        amenities_access = [g['amenities'] for g in amenities_groups if g['title'] == 'Guest access']
        amenities_avail = [amenity for g in amenities_groups for amenity in g['amenities'] if amenity['available']]

        # Structure data
        listing_data_cached = self.__data_cache[listing_id]
        item = DeepbnbItem(
            id=listing_id,
            access=self._render_titles(amenities_access[0]) if amenities_access else None,
            additional_house_rules=policies['additionalHouseRules'],
            allows_events='No parties or events' in [r['title'] for r in policies['houseRules']],
            amenities=self._render_titles(amenities_avail, sep=' - ', join=False),
            amenity_ids=list(self._get_amenity_ids(amenities_avail)),
            avg_rating=listing_data_cached['avg_rating'],
            bathrooms=listing_data_cached['bathrooms'],
            bedrooms=listing_data_cached['bedrooms'],
            beds=listing_data_cached['beds'],
            business_travel_ready=listing_data_cached['business_travel_ready'],
            city=listing_data_cached.get('city', self.__geography['city']),
            country=self.__geography['country'],
            description=self._html_to_text(
                description_section['htmlDescription']['htmlText']
            ) if description_section.get('htmlDescription') else None,
            host_id=listing_data_cached['host_id'],
            house_rules=[r['title'] for r in policies['houseRules']],
            is_hotel=metadata['bookingPrefetchData']['isHotelRatePlanEnabled'],
            latitude=listing_data_cached['latitude'],
            listing_expectations=self._render_titles(policies['listingExpectations']) if policies else None,
            longitude=listing_data_cached['longitude'],
            # max_nights=listing.get('max_nights'),
            # min_nights=listing['min_nights'],
            monthly_price_factor=listing_data_cached['monthly_price_factor'],
            name=listing_data_cached.get('name', listing_id),
            neighborhood_overview=listing_data_cached.get('neighborhood_overview'),
            # notes=listing['sectioned_description']['notes'],
            person_capacity=listing_data_cached['person_capacity'],
            photo_count=listing_data_cached['photo_count'],
            photos=listing_data_cached['photos'],
            place_id=self.__geography['placeId'],
            price_rate=listing_data_cached['price_rate'],
            price_rate_type=listing_data_cached['price_rate_type'],
            province=self.__geography.get('province'),
            rating_accuracy=logging_data['accuracyRating'],
            rating_checkin=logging_data['checkinRating'],
            rating_cleanliness=logging_data['cleanlinessRating'],
            rating_communication=logging_data['communicationRating'],
            rating_location=logging_data['locationRating'],
            rating_value=logging_data['valueRating'],
            review_count=listing_data_cached['review_count'],
            reviews=self.__pdp_reviews.api_request(listing_id, 50),
            room_and_property_type=listing_data_cached['room_and_property_type'],
            room_type=listing_data_cached['room_type'],
            room_type_category=listing_data_cached['room_type_category'],
            satisfaction_guest=logging_data['guestSatisfactionOverall'],
            star_rating=listing_data_cached['star_rating'],
            state=self.__geography['state'],
            # summary=listing['sectioned_description']['summary'],
            total_price=listing_data_cached['total_price'],
            url="https://www.airbnb.com/rooms/{}".format(listing_id),
            weekly_price_factor=listing_data_cached['weekly_price_factor']
        )

        self._get_detail_property(item, 'transit', 'Getting around', location['seeAllLocationDetails'], 'content')
        self._get_detail_property(item, 'interaction', 'During your stay', host_profile['hostInfos'], 'html')

        return item

    @staticmethod
    def _html_to_text(html: str) -> str:
        """Get plaintext from HTML."""
        return lxml.html.document_fromstring(html).text_content()

    @staticmethod
    def _render_titles(title_list: list, sep: str = ': ', join=True) -> Union[str, list]:
        """Render list of objects with titles and subtitles into string."""
        lines = []
        for t in title_list:
            line = '{}{}{}'.format(t['title'], sep, t['subtitle']) if t.get('subtitle') else t.get('title')
            lines.append(line)

        return '\n'.join(lines) if join else lines
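
    # For example (illustrative input):
    #
    #   _render_titles([{'title': 'Kitchen', 'subtitle': 'Space where guests can cook'}])
    #   -> 'Kitchen: Space where guests can cook'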

    def _get_amenity_ids(self, amenities: list):
        """Extract amenity id from `id` string field."""
        for amenity in amenities:
            match = self.__regex_amenity_id.match(amenity['id'])
            yield int(match.group(match.lastindex))

    def _get_detail_property(self, item, prop, title, prop_list, key):
        """Search for matching title in property list for prop. If it exists, add htmlText for key to item."""
        if title in [i['title'] for i in prop_list]:
            item[prop] = self._html_to_text([i[key]['htmlText'] for i in prop_list if i['title'] == title][0])
--------------------------------------------------------------------------------
/deepbnb/spiders/airbnb.py:
--------------------------------------------------------------------------------
import json
import re
import scrapy

from datetime import date, timedelta
from scrapy.http import HtmlResponse
from scrapy_playwright.page import PageMethod

from deepbnb.api.ExploreSearch import ExploreSearch
from deepbnb.api.PdpPlatformSections import PdpPlatformSections
from deepbnb.api.PdpReviews import PdpReviews


class AirbnbSpider(scrapy.Spider):
    """Airbnb Spider

    Perform a search, collect data from search results, cache that data, then scrape each listing individually to
    obtain additional information, and finally compile the data together into a DeepbnbItem.
    """

    name = 'airbnb'
    allowed_domains = ['airbnb.com']
    default_currency = 'USD'
    default_max_price = 3000
    default_price_increment = 100
    price_range = (0, default_max_price, default_price_increment)
    page_limit = 20

    def __init__(
            self,
            query,
            checkin=None,
            checkout=None,
            currency=default_currency,
            max_price=None,
            min_price=None,
            ne_lat=None,
            ne_lng=None,
            sw_lat=None,
            sw_lng=None,
            **kwargs
    ):
        """Class constructor."""
        super().__init__(**kwargs)
        self.__checkin = checkin
        self.__checkout = checkout
        self.__currency = currency
        self.__data_cache = {}
        self.__explore_search = None
        self.__geography = {}
        self.__ids_seen = set()
        self.__ne_lat = ne_lat
        self.__ne_lng = ne_lng
        self.__pdp_platform_sections = None
        self.__pdp_reviews = None
        self.__query = query
        self.__search_params = {}
        self.__set_price_params(max_price, min_price)
        self.__sw_lat = sw_lat
        self.__sw_lng = sw_lng
    def start_requests(self):
        """Spider entry point. Generate the first search request(s)."""
        self.logger.info(f'starting survey for: {self.__query}')
        if 'deepbnb.pipelines.ElasticBnbPipeline' in self.settings.get('ITEM_PIPELINES'):
            self.__create_index_if_not_exists()

        api_key = self.settings.get('AIRBNB_API_KEY')
        self.__explore_search = ExploreSearch(
            api_key,
            self.logger,
            self.__currency,
            self,
            self.settings.get('ROOM_TYPES'),
            self.__geography,
            self.__query
        )
        self.__pdp_platform_sections = PdpPlatformSections(
            api_key,
            self.logger,
            self.__currency,
            self.__data_cache,
            self.__geography,
            PdpReviews(api_key, self.logger, self.__currency)
        )

        # get params from injected constructor values
        params = {}
        if self.__price_max:
            params['priceMax'] = self.__price_max

        if self.__price_min:
            params['priceMin'] = self.__price_min

        if self.__ne_lat:
            params['ne_lat'] = self.__ne_lat

        if self.__ne_lng:
            params['ne_lng'] = self.__ne_lng

        if self.__sw_lat:
            params['sw_lat'] = self.__sw_lat

        if self.__sw_lng:
            params['sw_lng'] = self.__sw_lng

        if self.__checkin:  # assume self.__checkout is also set
            checkin, checkout, checkin_range_spec, checkout_range_spec = self._process_checkin_vars()
            yield from self.__explore_search.perform_checkin_start_requests(
                checkin, checkout, checkin_range_spec, checkout_range_spec, params)
        else:
            yield from self.__city_search()

    def __city_search(self):
        """Search the entire city given in self.__query."""
        search_path = self.__query.replace(', ', '--').replace(' ', '-') + '/homes'
        url = self.__explore_search.build_airbnb_url('s/' + search_path)
        headers = self.__get_search_headers()
        yield scrapy.Request(url, callback=self.parse_landing_page, headers=headers, meta={
            'playwright': True,
            'playwright_include_page': True,
            'playwright_page_methods': [PageMethod('wait_for_selector', '#data-deferred-state', state='hidden')]
        }, errback=self.errback, cb_kwargs={'headers': headers})

    async def errback(self, failure):
        """Close the Playwright page when a request fails."""
        page = failure.request.meta['playwright_page']
        await page.close()

    async def parse_landing_page(self, response: HtmlResponse, headers: dict):
        """Parse the search landing page response and generate URLs for all searches, then perform them."""
        page = response.meta['playwright_page']
        await page.close()  # the page was only needed to render the deferred state; close it to avoid leaking it

        # debugging: get data from all script data-* attributes
        # script_data = {s.attrib['id']: json.loads(s.css('::text').get()) for s in response.css('script[id^=data-]')}
        data_deferred = json.loads(response.xpath('//script[@id="data-deferred-state"]/text()').get())
        data_deferred['niobeMinimalClientData'][0][0] = json.loads(
            re.sub(r'^StaysSearch:', '', data_deferred['niobeMinimalClientData'][0][0]))

        explore_data = data_deferred['niobeMinimalClientData'][0][1]['data']['presentation']['explore']
        if 'sectionIndependentData' in explore_data['sections']:
            stays_search = explore_data['sections']['sectionIndependentData']['staysSearch']
            remarketing_data = stays_search['loggingMetadata']['remarketingLoggingData']
            listing_data = stays_search['searchResults']
        else:
            remarketing_data = self.__find_section(explore_data['sections']['sections'], 'EXPLORE_REMARKETING')
            listing_data_wrapper = self.__find_section(explore_data['sections']['sections'], 'EXPLORE_SECTION_WRAPPER')
            listing_data = listing_data_wrapper['child']['section']

        yield self.__explore_search.api_request(self.__query, {}, self.parse, response, headers)
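    # For reference, the deferred-state payload parsed above has roughly this shape (abridged and subject to change
    # on Airbnb's side; the key names are taken from the access paths used in parse_landing_page):
    #
    #     {
    #         "niobeMinimalClientData": [[
    #             "StaysSearch:{...serialized request variables...}",
    #             {"data": {"presentation": {"explore": {"sections": {...}}}}}
    #         ]]
    #     }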
    def parse(self, response, **kwargs):
        """Default parse method: queue the next page of results, then request detail sections for each new listing."""
        self.logger.debug(f"Parsing {response.url}")
        json_response = response.xpath('body/pre/text()').get()  # remove the HTML wrapper
        data = json.loads(json_response)

        # Handle pagination
        next_section = {}
        pagination = data['data']['dora']['exploreV3']['metadata']['paginationMetadata']
        if pagination['hasNextPage']:
            items_offset = pagination['itemsOffset']
            self.__explore_search.add_search_params(next_section, response)
            next_section.update({'itemsOffset': items_offset})

            yield self.__explore_search.api_request(self.__query, next_section, response=response)

        # Handle listings
        params = {'key': self.__explore_search.api_key}
        self.__explore_search.add_search_params(params, response)
        listing_ids = self.__get_listings_from_sections(data['data']['dora']['exploreV3']['sections'])
        for listing_id in listing_ids:  # request each property page
            if listing_id in self.__ids_seen:
                continue  # filter duplicates

            self.__ids_seen.add(listing_id)

            yield self.__pdp_platform_sections.api_request(listing_id)

    @staticmethod
    def _get_neighborhoods(data):
        """Get all neighborhoods in an area, if any exist."""
        neighborhoods = {}
        meta = data['explore_tabs'][0]['home_tab_metadata']
        if meta['listings_count'] < 300:
            return neighborhoods

        for section in meta['filters']['sections']:
            if section['filter_section_id'] != 'neighborhoods':
                continue
            for item in section['items']:
                key = item['title']
                neighborhoods[key] = item
                for param in item['params']:
                    if param['key'] == 'neighborhood_ids':
                        neighborhoods[key]['id'] = param['value']
                        break

        return neighborhoods
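    # Pagination sketch: each API response reports hasNextPage and itemsOffset in its paginationMetadata, so
    # successive requests from parse() carry e.g. {'itemsOffset': 20}, then {'itemsOffset': 40}, and so on
    # (offsets illustrative; the real step is whatever the API reports) until hasNextPage is false.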
    def _collect_listing_data(self, listing_item: dict):
        """Collect listing data from a search result and save it in the data cache.

        All listing data is aggregated together in the parse_listing_contents method.
        """
        listing = listing_item['listing']
        pricing = listing_item['pricingQuote'] or {}

        self.__data_cache[listing['id']] = {
            # get general data
            'avg_rating': listing['avgRating'],
            'bathrooms': listing['bathrooms'],
            'bedrooms': listing['bedrooms'],
            'beds': listing['beds'],
            'business_travel_ready': listing['isBusinessTravelReady'],
            'city': listing['city'],
            'host_id': listing['user']['id'],
            'latitude': listing['lat'],
            'longitude': listing['lng'],
            'name': listing['name'],
            'neighborhood_overview': listing['neighborhoodOverview'],
            'person_capacity': listing['personCapacity'],
            'photo_count': listing['pictureCount'],
            'photos': [p['picture'] for p in listing['contextualPictures']],
            'review_count': listing['reviewsCount'],
            'room_and_property_type': listing['roomAndPropertyType'],
            'room_type': listing['roomType'],
            'room_type_category': listing['roomTypeCategory'],
            'star_rating': listing['starRating'],

            # get pricing data
            'monthly_price_factor': pricing.get('monthlyPriceFactor'),
            'weekly_price_factor': pricing.get('weeklyPriceFactor'),
            'price_rate': self.__get_price_rate(pricing),
            'price_rate_type': self.__get_rate_type(pricing),
            # Use the total price if dates were given, the price rate otherwise; a total price requires dates.
            'total_price': self.__get_total_price(pricing)
        }

    def __create_index_if_not_exists(self):
        """Create the Elasticsearch index if needed (currently stubbed out; see the Listing model in deepbnb/model.py)."""
        index_name = self.settings.get('ELASTICSEARCH_INDEX')
        # index = Index(index_name)
        # if not index.exists():
        #     Listing.init(index_name)

    def __get_listings_from_sections(self, sections: list) -> list:
        """Get listings from "sections" (i.e. search results page sections).

        Also collect some data and save it for later. Double-check that prices are correct, because Airbnb switches
        to daily pricing if fewer than 28 days are selected (e.g. during a range search).
        """
        listing_ids = []
        for section in [s for s in sections if s['sectionComponentType'] == 'listings_ListingsGrid_Explore']:
            for listing_item in section.get('items', []):
                pricing = listing_item['pricingQuote']
                if pricing:
                    rate_with_service_fee = pricing['rateWithServiceFee']
                    if rate_with_service_fee is None:  # some properties need dates to show rates
                        rate_with_service_fee_amt = 0
                        pricing['rateWithServiceFee'] = {'amount': None}
                    else:
                        rate_with_service_fee_amt = rate_with_service_fee['amount']

                    # To account for results where price_max was specified as monthly but the quoted rate is
                    # nightly, calculate a monthly rate and drop the listing if it is greater. Use 28 days =
                    # 1 month. Assume a price_max of 1000+ is a monthly price requirement.
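                    # Worked example (figures illustrative): price_max=2000 is treated as a monthly cap because
                    # it exceeds 1000. A $95 nightly quote projects to 95 * 28 = $2,660 > $2,000, so that
                    # listing is dropped; a $65 nightly quote projects to $1,820 and is kept.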
                    if (self.__price_max and self.__price_max > 1000
                            and pricing['structuredStayDisplayPrice']['primaryLine']['qualifier'] != 'month'
                            and (rate_with_service_fee_amt * 28) > self.__price_max):
                        continue

                self._collect_listing_data(listing_item)
                listing_ids.append(listing_item['listing']['id'])

        return listing_ids

    @staticmethod
    def __get_search_headers() -> dict:
        return {
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
            'Accept-Encoding': 'gzip, deflate, br',
            'Accept-Language': 'en-US,en;q=0.9',
            'Cache-Control': 'no-cache',
            'Pragma': 'no-cache',
            'Sec-Ch-Ua': '"Not;A=Brand";v="99", "Chromium";v="106"',
            'Sec-Ch-Ua-Mobile': '?0',
            'Sec-Ch-Ua-Platform': '"Linux"',
            'Sec-Fetch-Dest': 'document',
            'Sec-Fetch-Mode': 'navigate',
            'Sec-Fetch-Site': 'none',
            'Sec-Fetch-User': '?1',
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36'
        }

    @staticmethod
    def __get_price_key(pricing) -> str:
        return 'price' if 'price' in pricing['structuredStayDisplayPrice']['primaryLine'] else 'discountedPrice'

    @staticmethod
    def __get_price_rate(pricing) -> int | None:
        if pricing:
            price_key = AirbnbSpider.__get_price_key(pricing)
            # display prices look like '$1,234'; strip the currency symbol and thousands separators
            return int(pricing['structuredStayDisplayPrice']['primaryLine'][price_key].lstrip('$').replace(',', ''))

        return None

    @staticmethod
    def __get_rate_type(pricing) -> str | None:
        if pricing:
            return pricing['structuredStayDisplayPrice']['primaryLine']['qualifier']

        return None

    def __get_total_price(self, pricing) -> int | None:
        if not self.__checkin:
            return None  # can't have a total price without dates

        if pricing['structuredStayDisplayPrice']['secondaryLine']:
            price = pricing['structuredStayDisplayPrice']['secondaryLine']['price']
            amount_match = re.match(r'\$([\w,]+) total', price)
        else:
            price_key = AirbnbSpider.__get_price_key(pricing)
            price = pricing['structuredStayDisplayPrice']['primaryLine'][price_key]
            amount_match = re.match(r'\$([\w,]+)', price)

        if not amount_match:
            raise ValueError('No amount match found for price: %s' % price)

        return int(amount_match[1].replace(',', ''))

    @staticmethod
    def __find_section(sections: list, section_type: str):
        result = [i for i in sections if i.get('sectionComponentType') == section_type]
        return result.pop().get('section') if result else {}

    def _process_checkin_vars(self) -> tuple:
        """Determine whether a date range is specified; if so, extract the range specs, validate the dates, and
        return them as variables.

        @NOTE: Should only be run once, on crawler initialization.

        :return: checkin and checkout dates plus their range specs
        """
        if not self.__checkin:
            return None, None, None, None

        checkin_range_spec, checkout_range_spec = None, None
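        # Range-spec sketch (dates illustrative): checkin='2024-06-01+5-3' splits into checkin='2024-06-01' with
        # checkin_range_spec='+5-3' (from three days before to five days after), and checkout='2024-06-15+-2'
        # yields checkout_range_spec='+-2' (plus or minus two days).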
        # Handle ranged queries
        checkin_plus_range_position = self.__checkin.find('+')
        if checkin_plus_range_position != -1:  # range spec, e.g. +5-3 means plus five days, minus three days
            checkin_range_spec = self.__checkin[checkin_plus_range_position:]
            self.__checkin = self.__checkin[:checkin_plus_range_position]

        checkout_plus_range_position = self.__checkout.find('+')
        if checkout_plus_range_position != -1:  # range spec, e.g. +-3 means plus or minus three days
            checkout_range_spec = self.__checkout[checkout_plus_range_position:]
            self.__checkout = self.__checkout[:checkout_plus_range_position]

        # Validate checkin / checkout values
        today = date.today()
        if date.fromisoformat(self.__checkin) < today:
            raise ValueError('Checkin cannot be in the past: {}'.format(self.__checkin))
        tomorrow = today + timedelta(days=1)
        if date.fromisoformat(self.__checkout) < tomorrow:
            raise ValueError('Checkout must be tomorrow or later: {}'.format(self.__checkout))

        return self.__checkin, self.__checkout, checkin_range_spec, checkout_range_spec

    def __set_price_params(self, price_max, price_min):
        """Set the price range based on the price_max and price_min input values."""
        self.__price_max = price_max
        self.__price_min = price_min
        if self.__price_min and self.__price_max:
            self.__price_max = int(self.__price_max)
            self.__price_min = int(self.__price_min)
            self.price_range = (self.__price_min, self.__price_max, self.default_price_increment)

        if self.__price_min and not self.__price_max:
            self.__price_min = int(self.__price_min)
            self.price_range = (self.__price_min, self.default_max_price, self.default_price_increment)

        if not self.__price_min and self.__price_max:
            self.__price_max = int(self.__price_max)
            self.price_range = (0, self.__price_max, self.default_price_increment)
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU General Public License is a free, copyleft license for
11 | software and other kinds of works.
12 |
13 | The licenses for most software and other practical works are designed
14 | to take away your freedom to share and change the works. By contrast,
15 | the GNU General Public License is intended to guarantee your freedom to
16 | share and change all versions of a program--to make sure it remains free
17 | software for all its users. We, the Free Software Foundation, use the
18 | GNU General Public License for most of our software; it applies also to
19 | any other work released this way by its authors. You can apply it to
20 | your programs, too.
21 |
22 | When we speak of free software, we are referring to freedom, not
23 | price. Our General Public Licenses are designed to make sure that you
24 | have the freedom to distribute copies of free software (and charge for
25 | them if you wish), that you receive source code or can get it if you
26 | want it, that you can change the software or use pieces of it in new
27 | free programs, and that you know you can do these things.
28 |
29 | To protect your rights, we need to prevent others from denying you
30 | these rights or asking you to surrender the rights.
Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". "Licensees" and 82 | "recipients" may be individuals or organizations. 83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. 
Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. 
This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 
221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. 
If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. 
Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 386 | those licensors and authors. 387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. 
If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. 
If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. "Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 
512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. 
If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 
633 |
634 | {one line to give the program's name and a brief idea of what it does.}
635 | Copyright (C) {year} {name of author}
636 |
637 | This program is free software: you can redistribute it and/or modify
638 | it under the terms of the GNU General Public License as published by
639 | the Free Software Foundation, either version 3 of the License, or
640 | (at your option) any later version.
641 |
642 | This program is distributed in the hope that it will be useful,
643 | but WITHOUT ANY WARRANTY; without even the implied warranty of
644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645 | GNU General Public License for more details.
646 |
647 | You should have received a copy of the GNU General Public License
648 | along with this program. If not, see <https://www.gnu.org/licenses/>.
649 |
650 | Also add information on how to contact you by electronic and paper mail.
651 |
652 | If the program does terminal interaction, make it output a short
653 | notice like this when it starts in an interactive mode:
654 |
655 | {project} Copyright (C) {year} {fullname}
656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657 | This is free software, and you are welcome to redistribute it
658 | under certain conditions; type `show c' for details.
659 |
660 | The hypothetical commands `show w' and `show c' should show the appropriate
661 | parts of the General Public License. Of course, your program's commands
662 | might be different; for a GUI interface, you would use an "about box".
663 |
664 | You should also get your employer (if you work as a programmer) or school,
665 | if any, to sign a "copyright disclaimer" for the program, if necessary.
666 | For more information on this, and how to apply and follow the GNU GPL, see
667 | <https://www.gnu.org/licenses/>.
668 |
669 | The GNU General Public License does not permit incorporating your program
670 | into proprietary programs. If your program is a subroutine library, you
671 | may consider it more useful to permit linking proprietary applications with
672 | the library. If this is what you want to do, use the GNU Lesser General
673 | Public License instead of this License. But first, please read
674 | <https://www.gnu.org/licenses/why-not-lgpl.html>.
675 | --------------------------------------------------------------------------------