├── README.md
├── .github
│   └── workflows
│       └── codeql-analysis.yml
└── main.py
/README.md:
--------------------------------------------------------------------------------
# Scrappp v2
Scrappp is a Python tool that originally just scraped free proxy lists (see the old versions). This release adds a lot of new features, such as an SMTP checker, a who-is-hosting lookup, and much more; check the releases page. The tool is very easy to use, and I will keep updating it with new versions.
Hope you enjoy it.

# Screenshots

# Modules

Install these modules before starting the tool:

```
requests
bs4
```

Or use the following commands:

```
pip install requests
pip install beautifulsoup4
```
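
Both can also be installed with a single command (`beautifulsoup4` is the PyPI package that provides the `bs4` module):

```
pip install requests beautifulsoup4
```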

Install the latest version from the releases page.

# Using the tool
After starting the tool, choose an option from 1 to 12; each option performs one task, described in the table and the sample session below. (The in-tool menu also offers 13 to join the Telegram channel and 14 to exit.)


| Option | Description |
| --- | --- |
| 1 | Scrape HTTP or HTTPS proxies |
| 2 | Scrape SOCKS4 proxies |
| 3 | Scrape SOCKS5 proxies |
| 4 | Scrape all proxy types |
| 5 | Check HTTPS proxies |
| 6 | Open-port checker |
| 7 | Who-is-hosting lookup |
| 8 | Reverse DNS lookup |
| 9 | Ping lookup |
| 10 | Check whether a site is down |
| 11 | Text to hash |
| 12 | SMTP checker |

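For example, scraping HTTP proxies (option 1) looks roughly like this; the banner is omitted, and exact messages may differ between releases:

```
$ python main.py
 Scrappp ..::[ 1
  [-] STARTED WORKING WITH API 1
  [-] [WORKING...]
  [-] SAVED IN http.txt
  [-] STARTED WORKING WITH API 2
  [-] [WORKING...]
  [-] SAVED IN http.txt
  [-] DONE
```
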
# APIs

- `https://api.proxyscrape.com/`
- `https://proxy-daily.com/`
- `https://www.us-proxy.org/`
- `https://proxylist.geonode.com/api/`
- `https://ipasn.com/open-port-check/`
- `https://hooshosting.com/api/`
- `https://api.hackertarget.com/reverseiplookup`
- `https://api.hashify.net`
- `https://www.isitdownrightnow.com/`
- `https://pingability.com/`

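The scrape options hit these endpoints with plain GET requests. A minimal standalone sketch using the same proxyscrape URL that `main.py` uses for the HTTP list (the explicit `timeout=30` is an addition, not in the original code):

```
import requests

url = ("https://api.proxyscrape.com/v2/"
       "?request=displayproxies&protocol=http&timeout=10000"
       "&country=all&ssl=all&anonymity=all")
proxies = requests.get(url, timeout=30).text.splitlines()
print(f"Fetched {len(proxies)} proxies")
```
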
# Contacts

> Discord : Joji#0082

> Telegram : @Jojigodamn

> Telegram channel : https://t.me/jojipydev

> Be an OG in our Discord server : [Join](https://discord.gg/acbeVxY5ra)

--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ main ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ main ]
  schedule:
    - cron: '40 15 * * 2'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Learn more about CodeQL language support at https://git.io/codeql-language-support

    steps:
      - name: Checkout repository
        uses: actions/checkout@v2

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v1
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v1

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      #    and modify them (or add more) to build your code if your project
      #    uses a compiled language

      #- run: |
      #    make bootstrap
      #    make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v1

--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
import os
import requests
import socket
import time
from bs4 import BeautifulSoup as sp

# VARS

msg1 = ' [-] List: '.upper()
http = "https://api.proxyscrape.com/v2/?request=displayproxies&protocol=http&timeout=10000&country=all&ssl=all&anonymity=all"
socks4 = "https://api.proxyscrape.com/v2/?request=displayproxies&protocol=socks4&timeout=10000&country=all&ssl=all&anonymity=all"
socks5 = "https://api.proxyscrape.com/v2/?request=displayproxies&protocol=socks5&timeout=10000&country=all&ssl=all&anonymity=all"
allofthem = "https://api.proxyscrape.com/v2/?request=displayproxies&protocol=all&timeout=10000&country=all&ssl=all&anonymity=all"
proxysite = "https://api.proxyscrape.com/v2/online_check.php"  # currently unused

# The four proxy lists are fetched once at startup and reused by the scrape options.
httpget = requests.get(http)
socks4get = requests.get(socks4)
socks5get = requests.get(socks5)
allofthemget = requests.get(allofthem)
# GREETING


os.system('mode con: cols=200 lines=50')  # resize the console window (Windows)
os.system("color")  # enable ANSI colors in the Windows console
COLOR = {
    "cl": "\033[55m",
    "bgpurple": "\033[41m",
    "BLUE": "\033[94m",
    "GREEN": "\033[92m",
    "RED": "\033[91m",
    "ENDC": "\033[0m",
    "CYAN": "\033[0;34m",
    "YELLOW": "\033[33m"
}


print(" ")
print(" ")
print(COLOR["CYAN"], " ▄████████ ▄████████ ▄████████ ▄████████ ▄███████▄ ▄███████▄ ▄███████▄ ")
print(COLOR["CYAN"], " ███ ███ ███ ███ ███ ███ ███ ███ ███ ███ ███ ███ ███ ███ ")
print(COLOR["CYAN"], " ███ █▀ ███ █▀ ███ ███ ███ ███ ███ ███ ███ ███ ███ ███ ")
print(COLOR["bgpurple"], " ███ ███ ▄███▄▄▄▄██▀ ███ ███ ███ ███ ███ ███ ███ ███ ")
print(COLOR["bgpurple"], " ▀███████████ ███ ▀▀███▀▀▀▀▀ ▀███████████ ▀█████████▀ ▀█████████▀ ▀█████████▀ ")
print(COLOR["bgpurple"], " ███ ███ █▄ ▀███████████ ███ ███ ███ ███ ███ ")
print(COLOR["CYAN"], " ▄█ ███ ███ ███ ███ ███ ███ ███ ███ ███ ███ ")
print(COLOR["CYAN"], " ▄████████▀ ████████▀ ███ ███ ███ █▀ ▄████▀ ▄████▀ ▄████▀ ")
print(COLOR["CYAN"], " ███ ███ ")

print("""

        [Coder] : Joji
        [Version] : 2

""")


print(COLOR["YELLOW"], """
    [1] HTTPS PROXY  |  [5] CHECKER HTTPS      |  [9] PING LOOKUP             |  [13] JOIN TELEGRAM
    [2] SOCKS 4      |  [6] OPEN PORT CHECKER  |  [10] CHECK IF SITE IS DEAD  |  [14] LEAVE
    [3] SOCKS 5      |  [7] WHOIS HOSTING      |  [11] TEXT TO HASH
    [4] ALL          |  [8] REVERSE DNS LOOKUP |  [12] SMTP CHECKER

""")

print(COLOR["ENDC"])


# FUNCTIONS

vl = input(" Scrappp ..::[ ")

def HTTPPRX():
    print(COLOR["CYAN"], " [-] STARTED WORKING WITH API 1")
    passwordhttp = 'http'
    print(COLOR["RED"], " [-] [WORKING...]")
    with open(f"{passwordhttp}.txt", "a") as httpfile:
        httpfile.write(httpget.text)
    print(COLOR["GREEN"], " [-] SAVED IN http.txt")
    print(" ")
    print(COLOR["CYAN"], " [-] STARTED WORKING WITH API 2")
    print(COLOR["RED"], " [-] [WORKING...]")
    site = requests.get("https://proxy-daily.com/").content
    soupsite = sp(site, "html.parser")
    soupsitediv = soupsite.find_all("div", {"class": "centeredProxyList freeProxyStyle"})
    # The first proxy list on the page is the HTTP one; save its text, not the raw HTML tag.
    with open(f"{passwordhttp}.txt", "a") as httpfile:
        httpfile.write(soupsitediv[0].text)
    print(COLOR["GREEN"], " [-] SAVED IN http.txt")
    print(" ")
    print(COLOR["GREEN"], " [-] DONE")

def SOKS4PRX():
    print(COLOR["CYAN"], " [-] STARTED WORKING WITH API 1")
    passwordsocks4 = 'socks4'
    print(COLOR["RED"], " [-] [WORKING...]")
    with open(f"{passwordsocks4}.txt", "a") as socks4file:
        socks4file.write(socks4get.text)
    print(COLOR["GREEN"], " [-] SAVED IN socks4.txt")
    print(" ")
    print(COLOR["CYAN"], " [-] STARTED WORKING WITH API 2")
    print(COLOR["RED"], " [-] [WORKING...]")
    site = requests.get("https://proxy-daily.com/").content
    soupsite = sp(site, "html.parser")
    soupsitediv = soupsite.find_all("div", {"class": "centeredProxyList freeProxyStyle"})
    # The second proxy list on the page is the SOCKS4 one.
    with open(f"{passwordsocks4}.txt", "a") as socks4file:
        socks4file.write(soupsitediv[1].text)
    print(COLOR["GREEN"], " [-] SAVED IN socks4.txt")
    print(" ")
    print(COLOR["GREEN"], " [-] DONE")

def SOKS5PRX():
    print(COLOR["CYAN"], " [-] STARTED WORKING WITH API 1")
    passwordsocks5 = 'socks5'
    print(COLOR["RED"], " [-] [WORKING...]")
    with open(f"{passwordsocks5}.txt", "a") as socks5file:
        socks5file.write(socks5get.text)
    print(COLOR["GREEN"], " [-] SAVED IN socks5.txt")
    print(" ")
    print(COLOR["CYAN"], " [-] STARTED WORKING WITH API 2")
    print(COLOR["RED"], " [-] [WORKING...]")
    site = requests.get("https://proxy-daily.com/").content
    soupsite = sp(site, "html.parser")
    soupsitediv = soupsite.find_all("div", {"class": "centeredProxyList freeProxyStyle"})
    # The third proxy list on the page is the SOCKS5 one.
    with open(f"{passwordsocks5}.txt", "a") as socks5file:
        socks5file.write(soupsitediv[2].text)
    print(COLOR["GREEN"], " [-] SAVED IN socks5.txt")
    print(" ")
    print(COLOR["GREEN"], " [-] DONE")

def ALLPRX():
    print(COLOR["CYAN"], " [-] STARTED WORKING WITH API 1")
    passwordall = 'all'
    print(COLOR["RED"], " [-] [WORKING...]")
    with open(f"{passwordall}.txt", "a") as allofthemfile:
        allofthemfile.write(allofthemget.text)
    print(COLOR["GREEN"], " [-] SAVED IN all.txt")
    print(" ")
    print(COLOR["CYAN"], " [-] STARTED WORKING WITH API 2")
    print(COLOR["RED"], " [-] [WORKING...]")
    site = requests.get("https://www.us-proxy.org/").content
    soupsite = sp(site, "html.parser")
    souptextarea = soupsite.find('textarea')
    with open(f"{passwordall}.txt", "a") as allofthemfile:
        allofthemfile.write(souptextarea.text)
    print(COLOR["GREEN"], " [-] SAVED IN all.txt")
    print(" ")
    print(COLOR["CYAN"], " [-] STARTED WORKING WITH API 3")
    print(COLOR["RED"], " [-] [WORKING...]")
    site = requests.get("https://proxy-daily.com/").content
    soupsite = sp(site, "html.parser")
    soupsitediv = soupsite.find_all("div", {"class": "centeredProxyList freeProxyStyle"})
    # Save the text of every proxy list on the page, not the raw HTML tags.
    with open(f"{passwordall}.txt", "a") as allofthemfile:
        allofthemfile.write("\n".join(div.text for div in soupsitediv))
    print(COLOR["GREEN"], " [-] SAVED IN all.txt")
    print(" ")
    print(COLOR["GREEN"], " [-] DONE")


def CHKHTTP():
    count = 0
    filename = input(msg1)
    with open(filename) as text:
        for line in text:
            line = line.strip()
            if not line:
                continue  # skip blank lines instead of aborting
            proxyip = line.split(":")[0]
            try:
                proxyport = line.split(":")[1]
            except IndexError:
                print(COLOR["RED"], f" [-] SKIPPED ( NO PORT ) : {line}")
                continue

            proxyserver = f'{proxyip}:{proxyport}'

            r = requests.get(f'https://proxylist.geonode.com/api/check-proxy?ip={proxyip}&port={proxyport}&publish_proxy=no&filter_protocol=http%2Chttps').json()

            isworking = r["data"]["is_google"]
            #statusforproxychecking = r["data"]["ip_info"]["status"]
            #countryproxy = r["data"]["ip_info"]["country"]
            #protocols = r["data"]["protocols"]

            if isworking:
                print(COLOR["GREEN"], f" [+] WORKING : {proxyserver}")
                with open("workinghttpsproxy.txt", "a") as workinghttpsproxy:
                    workinghttpsproxy.write(f"{proxyserver}\n")
            else:
                print(COLOR["RED"], f" [-] NOT WORKING : {proxyserver}")

            # Pause after every 8 requests to avoid hammering the API.
            count = count + 1
            if count == 8:
                count = 0
                time.sleep(3)

def OPC():
    ipnum = input(" [-] IP ADDRESS TO CHECK : ")
    ipport = input(" [-] PORT NUMBER: ")

    site = f"https://ipasn.com/open-port-check/?host={ipnum}&port={ipport}"

    prtchkrzlt = requests.get(site).text

    # The result page marks closed ports with the inline style "color:#F00000".
    if "color:#F00000" in prtchkrzlt:
        print(COLOR["RED"], f" [-] DEAD : {ipnum}:{ipport}")

    if "We can see service on port" in prtchkrzlt:
        print(COLOR["GREEN"], f" [+] LIVE : {ipnum}:{ipport}")

    print(COLOR["CYAN"], " [!] DONE")

def WHOISHOSTING():
    sitetochk = input(" [-] WEBSITE URL: ")
    payload = {
        'url': sitetochk
    }
    whoishostingchk = requests.get('https://hooshosting.com/api/hosting', params=payload)
    whoishostingchktxt = whoishostingchk.text
    if '"result":null' in whoishostingchktxt:
        print(COLOR["RED"], " [x] ERROR, RE-CHECK YOUR INFORMATION")
    else:
        whoishostingchk = whoishostingchk.json()
        whoishostingchklist = []
        for item in whoishostingchk["results"]:
            whoishostingchklist.append(item["ip"])
            whoishostingchklist.append(item["isp_name"])
        print(COLOR["GREEN"], " [+] IP : " + whoishostingchklist[0])
        print(COLOR["GREEN"], " [+] HOSTING PROVIDER : " + whoishostingchklist[1])

    print(COLOR["CYAN"], " [!] DONE")

def REVERSDNS():
    api = input(" [-] PLEASE CHOOSE API ( 1 OR 2... READ GITHUB REPO TO KNOW MORE ): ")
    if api == "1":
        fromtext = input(" [+] DO YOU WANT TO GET WEBSITES FROM TEXT FILE ( Y / N ) : ")
        if fromtext == "Y":
            filename = input(" [+] ENTER FILE NAME : ")
            print(" ")
            print(COLOR["RED"], " [-] WORKING...")
            with open(filename) as text:
                for line in text:
                    line = line.strip()
                    if not line:
                        continue  # skip blank lines
                    try:
                        domain_name = socket.gethostbyaddr(line)[0]
                        print(COLOR["GREEN"], f" [+] FOUND FOR {line} : {domain_name}")
                        with open('reversdns.txt', 'a') as file:
                            file.write(domain_name + '\n')
                        print(COLOR["GREEN"], " [+] SAVED IN reversdns.txt")
                    except (socket.herror, socket.gaierror):
                        pass
        elif fromtext == "N":
            iptochk = input(" [+] IP ADDRESS: ")
            try:
                domain_name = socket.gethostbyaddr(iptochk)[0]
                print(COLOR["GREEN"], f" [+] FOUND : {domain_name}")
            except (socket.herror, socket.gaierror):
                pass
    elif api == "2":
        filename = input(" [+] ENTER FILE NAME : ")
        with open(filename) as text:
            for line in text:
                getdomains = requests.get(f"https://api.hackertarget.com/reverseiplookup/?q={line.strip()}").text
                print(COLOR["GREEN"], " [+] DONE I FOUND A LIST")
                time.sleep(3)
                print(COLOR["GREEN"], getdomains)
                with open('reversdns.txt', 'a') as file:
                    file.write(getdomains)
    else:
        print(" ")
        print(COLOR["RED"], " [-] WORKING...")
        try:
            addressip = input(" [+] PLEASE ENTER A VALID IP ADDRESS : ")
            domain_name = socket.gethostbyaddr(addressip)[0]
            print(COLOR["GREEN"], f" [+] FOUND FOR {addressip} : {domain_name}")
            with open('reversdns.txt', 'a') as file:
                file.write(domain_name + '\n')
            print(COLOR["GREEN"], " [+] SAVED IN reversdns.txt")
        except (socket.herror, socket.gaierror):
            pass
    print(COLOR["CYAN"], " [!] DONE")

def TEXTTOMD5():
    wordtomd5 = input(" [+] PLEASE ENTER THE TEXT YOU WANT TO HASH : ")
    hashtype = input(" [+] HASH METHODS ( MD5 , SHA1 , SHA256 ) : ").upper()
    if hashtype == "MD5":
        sitetomd5 = requests.get(f"https://api.hashify.net/hash/md5/hex?value={wordtomd5}")
        sitetomd5content = sitetomd5.json()
        print(COLOR["GREEN"], " [+] MD5 : " + sitetomd5content["Digest"])
    elif hashtype == "SHA1":
        sitetomd5 = requests.get(f"https://api.hashify.net/hash/sha1/hex?value={wordtomd5}")
        sitetomd5content = sitetomd5.json()
        print(COLOR["GREEN"], " [+] SHA1 : " + sitetomd5content["Digest"])
    elif hashtype == "SHA256":
        sitetomd5 = requests.get(f"https://api.hashify.net/hash/sha256/hex?value={wordtomd5}")
        sitetomd5content = sitetomd5.json()
        print(COLOR["GREEN"], " [+] SHA256 : " + sitetomd5content["Digest"])
    else:
        print(COLOR["RED"], " [x] ENTER A VALID HASH TYPE :(")

    print(COLOR["CYAN"], " [!] DONE")

def ISSITEDOWN():
    site = input(" [+] PLEASE ENTER A WEBSITE DOMAIN : ")
    issitedown = requests.get(f"https://www.isitdownrightnow.com/check.php?domain={site}").text
    if "UP" in issitedown:
        soupsite = sp(issitedown, "html.parser")
        soupsitedivsitedomainname = soupsite.find('div', {"class": "tabletrsimple"})
        domainname = soupsitedivsitedomainname.find("span", {"class": "tab"}).text
        print(COLOR["GREEN"], f" [+] WEBSITE CHECKED : {domainname}")
        print(COLOR["GREEN"], " [+] IS IT DOWN : WEBSITE IS UP FOR EVERYONE")
    else:
        print(COLOR["RED"], " [-] WEBSITE IS DOWN FOR EVERYONE !")

    print(COLOR["CYAN"], " [!] DONE")

def SMTPCHK():
    isfromlist = input(" [!] DO YOU WANT TO CHECK SMTP FROM LIST ? ( Y / N ): ")
    if isfromlist == "N":
        smtp = input(" [+] SMTP HOSTNAME : ")
        smtplogin = input(" [+] SMTP USERNAME ( EMAIL ) : ")
        smtploginpswd = input(" [+] SMTP PASSWORD : ")

        payload = {
            "hostname": smtp,
            "from_email": smtplogin,
            "user": smtplogin,
            "password": smtploginpswd,
            "chk:is_try_tls": "N"
        }

        chk = requests.post("https://pingability.com/smtptest.jsp", params=payload).text
        if "message successfully delivered" in chk:
            print(COLOR["GREEN"], f" [+] SMTP IS WORKING : {smtp}")
        else:
            print(COLOR["RED"], " [-] SMTP IS NOT WORKING, SOMETHING WENT WRONG...")

    elif isfromlist == "Y":
        print(COLOR["YELLOW"], " [-] SOON... ")


def JOINTELEGRAM():
    print(COLOR["YELLOW"], " [!] MY TELEGRAM CHANNEL : https://t.me/jojipydev")

def LEAVE():
    exit()
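
# NOTE: option 9 (PING LOOKUP) appears in the menu but had no handler in this
# file. The sketch below is an assumption, not the author's original code: it
# shells out to the system `ping` command (flag -n on Windows, -c elsewhere).
def PINGLOOKUP():
    import platform
    import subprocess
    host = input(" [+] HOSTNAME OR IP TO PING : ")
    flag = "-n" if platform.system() == "Windows" else "-c"
    result = subprocess.run(["ping", flag, "2", host])
    if result.returncode == 0:
        print(COLOR["GREEN"], f" [+] HOST IS REACHABLE : {host}")
    else:
        print(COLOR["RED"], f" [-] HOST IS NOT REACHABLE : {host}")
    print(COLOR["CYAN"], " [!] DONE")
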
# SERVICES


if vl == '1':
    HTTPPRX()
if vl == '2':
    SOKS4PRX()
if vl == '3':
    SOKS5PRX()
if vl == '4':
    ALLPRX()
if vl == '5':
    CHKHTTP()
if vl == '6':
    OPC()
if vl == '7':
    WHOISHOSTING()
if vl == '8':
    REVERSDNS()
if vl == '9':
    PINGLOOKUP()  # option 9 was missing from the original dispatch; see the sketch above
if vl == '10':
    ISSITEDOWN()
if vl == '11':
    TEXTTOMD5()
if vl == '12':
    SMTPCHK()
if vl == '13':
    JOINTELEGRAM()
if vl == '14':
    LEAVE()


input(" ")
--------------------------------------------------------------------------------