├── .gitignore ├── .travis.yml ├── Dockerfile ├── README.md ├── dark_server.py ├── dark_test.py ├── darksearch ├── __init__.py ├── darkmain.py ├── darkspace.py ├── docs │ ├── darksearch.png │ └── pipline.md ├── logs │ └── .gitignore ├── static │ ├── etc │ │ ├── mag.png │ │ ├── search.png │ │ └── searchb.png │ ├── front │ │ ├── css │ │ │ ├── normalize.css │ │ │ └── style.css │ │ ├── images │ │ │ ├── darksearch.png │ │ │ ├── darksearch2.png │ │ │ └── favicon.ico │ │ └── scss │ │ │ └── style.scss │ └── listjs │ │ ├── css │ │ ├── normalize.css │ │ └── style.css │ │ ├── images │ │ └── darksearch.png │ │ └── js │ │ └── index.js ├── templates │ ├── 400.html │ ├── 404.html │ ├── 429.html │ ├── index.html │ └── search.html └── tools │ ├── __init__.py │ ├── elas.py │ └── tk.py └── requirements.txt /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | venv/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | 47 | # Translations 48 | *.mo 49 | *.pot 50 | 51 | # Django stuff: 52 | *.log 53 | 54 | # Sphinx documentation 55 | docs/_build/ 56 | 57 | # PyBuilder 58 | target/ 59 | 60 | # See the .gitignore in logs, it prevents the logs from being uploaded... 
61 | *.json 62 | *.csv 63 | data/ 64 | .DS_Store 65 | icons/ 66 | darksearch-app/ 67 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: required 2 | language: python 3 | python: 4 | 2.7 5 | # Needs sudo... 6 | install: 7 | - sudo apt-get install gunicorn 8 | - sudo pip install -r requirements.txt 9 | script: 10 | sudo python dark_test.py 11 | after_success: 12 | coveralls 13 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:14.04 2 | RUN apt-get update && apt-get install -y \ 3 | python \ 4 | build-essential \ 5 | python-dev \ 6 | python-pip \ 7 | git \ 8 | wget \ 9 | openjdk-7-jre \ 10 | libblas-dev liblapack-dev 11 | 12 | WORKDIR /home 13 | RUN \ 14 | wget "https://download.elasticsearch.org/elasticsearch/release/org/elasticsearch/distribution/deb/elasticsearch/2.2.0/elasticsearch-2.2.0.deb" -O elasticsearch-2.2.0.deb && \ 15 | dpkg -i elasticsearch-2.2.0.deb 16 | RUN git clone https://github.com/vlall/darksearch 17 | RUN pip install -r /home/darksearch/requirements.txt 18 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build Status](https://travis-ci.org/vlall/darksearch.svg?branch=master)](https://travis-ci.org/vlall/darksearch) 2 | 3 | ### About Darksearch 4 | [Darksearch](http://www.darksearch.com) allows you to query cached onion sites, irc chatrooms, various pdfs, game chats, blackhat forums etc... 5 | 6 | ## API 7 | Darksearch also has an API in the works. 
Currently you can't scrape specific data for your queries, but you can retrieve metadata on your searches by using a GET request on darksearch.io/api/YOUR_QUERY/PAGE_NUMBER 8 | ``` 9 | $ curl -XGET darksearch.com/api/spies/1 10 | 11 | { 12 | "Duration": "0.026", 13 | "Query": "spies", 14 | "Results": "{'10': {'10': {'Host': u'http://ss2v6i44b3vy4tdf.onion//advisory-board/', 'Description': u\"live in hiding for a year and face arrest her partner was imprisoned twice for exposing the crimes of the spies she is the author of spies lies and whistleblowers mi5 and the david shayler affair she is now a\", 'Timestamp (scraped)': u'2016-04-24 08:02:57', 'Title': u'Advisory Board Courage Foundation', 'URL': u'ss2v6i44b3vy4tdf.onion/advisory-board.html', 'Tor2Web (.cab)': u'ss2v6i44b3vy4tdf.onion.cab/advisory-board.html', 'Tor2Web (.to)': u'ss2v6i44b3vy4tdf.onion.to/advisory-board.html', 'Size (bytes)': 39006}}, '1': {'1': {'Host': u'http://maskravvbmurcaiz.onion/', 'Description': u\"maskrabbit maskrabbit maskrabbit is an anonymous agency for real world operators we specialize in couriers thieves spies saboteurs hackers and goons maskrabbit only works with professional agents and serious clients to apply use the appropriate form to submit your needs\", 'Timestamp (scraped)': u'2016-04-21 09:53:20', 'Title': u'MaskRabbit', 'URL': u'maskravvbmurcaiz.onion/index.html', 'Tor2Web (.cab)': u'maskravvbmurcaiz.onion.cab/index.html', 'Tor2Web (.to)': u'maskravvbmurcaiz.onion.to/index.html', 'Size (bytes)': 1668}}, '3': {'3': {'Host': u'http://uksfvgmwpiww3n4s.onion/', 'Description': u\"when gov imprison bradly manning and torture him until he says he is guilty in the time when they setup spies and sexual crime to imprison julian assange do you really believe that gov will be stopped in legal way\", 'Timestamp (scraped)': u'2016-04-23 10:50:27', 'Title': u'Our reality', 'URL': u'uksfvgmwpiww3n4s.onion/index.html', 'Tor2Web (.cab)': u'uksfvgmwpiww3n4s.onion.cab/index.html', 'Tor2Web 
(.to)': u'uksfvgmwpiww3n4s.onion.to/index.html', 'Size (bytes)': 3495}}, '2': {'2': {'Host': u'http://ac4jrkjk4ialqkoh.onion/category/revealed-documents/', 'Description': u\"this gchq manual from 2008 explains how analysts would unscramble the video signals from israeli drones see the intercept article spies in the sky israeli drone feeds hacked by british and american intellience 29 january 2016 continue reading s455n israeli\", 'Timestamp (scraped)': u'2016-04-21 08:54:29', 'Title': u'Revealed documents Courage Snowden', 'URL': u'ac4jrkjk4ialqkoh.onion/category/revealed-documents.html', 'Tor2Web (.cab)': u'ac4jrkjk4ialqkoh.onion.cab/category/revealed-documents.html', 'Tor2Web (.to)': u'ac4jrkjk4ialqkoh.onion.to/category/revealed-documents.html', 'Size (bytes)': 36944}}, '4': {'4': {'Host': u'http://lcvkso2t5t3cmy3x.onion/', 'Description': u\"provide his blackberry phone password to canada border services agency cbsa officers at a halifax airport 2015 02 25 canadian spies collect domestic emails in secret security sweep the communications security establishment cse is covertly monitoring canadians emails 2015 01\", 'Timestamp (scraped)': u'2016-04-22 06:45:48', 'Title': u'Hack Canada', 'URL': u'lcvkso2t5t3cmy3x.onion/index.html', 'Tor2Web (.cab)': u'lcvkso2t5t3cmy3x.onion.cab/index.html', 'Tor2Web (.to)': u'lcvkso2t5t3cmy3x.onion.to/index.html', 'Size (bytes)': 9269}}, '6': {'6': {'Host': u'http://h2am5w5ufhvdifrs.onion', 'Description': u\"november 20 2013 2013 1612 htm bis end user certificates for china november 20 2013 2013 1611 pdf fisc usa spies deny release of doc to aclu nobember 19 2013 2013 1610 vid video nsa ddir inglis at ny law\", 'Timestamp (scraped)': u'2016-04-21 11:39:09', 'Title': u'Cryptome', 'URL': u'h2am5w5ufhvdifrs.onion/index.html', 'Tor2Web (.cab)': u'h2am5w5ufhvdifrs.onion.cab/index.html', 'Tor2Web (.to)': u'h2am5w5ufhvdifrs.onion.to/index.html', 'Size (bytes)': 66052}}, '9': {'9': {'Host': 
u'http://swehackmzys2gpmb.onion/./viewforum.php?f=21&sid=51857442da28cbcb1e47336b287b2960', 'Description': u\"33 1 2 3 svar 27 27 svar 2842 visningar senaste inl gget av avlidienbrunn 18 apr 2016 13 14 spies in the skies artikel om fbi s flyg vervakning av chlo 13 apr 2016 03 50 svar 5 5\", 'Timestamp (scraped)': u'2016-04-22 18:45:05', 'Title': u'Lektyr och media swehack org', 'URL': u'swehackmzys2gpmb.onion/./viewforum.php?f=21&sid=51857442da28cbcb1e47336b287b2960', 'Tor2Web (.cab)': u'swehackmzys2gpmb.onion.cab/./viewforum.php?f=21&sid=51857442da28cbcb1e47336b287b2960', 'Tor2Web (.to)': u'swehackmzys2gpmb.onion.to/./viewforum.php?f=21&sid=51857442da28cbcb1e47336b287b2960', 'Size (bytes)': 48200}}, '8': {'8': {'Host': u'http://swehackmzys2gpmb.onion/./viewforum.php?f=21&sid=3941cd89256fc9b6c7561ccaa4d3a9a1', 'Description': u\"33 1 2 3 svar 27 27 svar 2769 visningar senaste inl gget av avlidienbrunn 18 apr 2016 13 14 spies in the skies artikel om fbi s flyg vervakning av chlo 13 apr 2016 03 50 svar 5 5\", 'Timestamp (scraped)': u'2016-04-21 07:22:11', 'Title': u'Lektyr och media swehack org', 'URL': u'swehackmzys2gpmb.onion/./viewforum.php?f=21&sid=3941cd89256fc9b6c7561ccaa4d3a9a1', 'Tor2Web (.cab)': u'swehackmzys2gpmb.onion.cab/./viewforum.php?f=21&sid=3941cd89256fc9b6c7561ccaa4d3a9a1', 'Tor2Web (.to)': u'swehackmzys2gpmb.onion.to/./viewforum.php?f=21&sid=3941cd89256fc9b6c7561ccaa4d3a9a1', 'Size (bytes)': 48175}}}", 15 | "Total Pages": "4", 16 | "Total Results": "40" 17 | } 18 | ``` 19 | 20 | The Darksearch index is growing as more scrapers get built... 
21 | -------------------------------------------------------------------------------- /dark_server.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | from darksearch.darkmain import app as application 4 | 5 | def main(): 6 | application.run( 7 | host='0.0.0.0', 8 | port=80, 9 | threaded=True, 10 | debug=True 11 | ) 12 | 13 | 14 | if __name__ == '__main__': 15 | main() 16 | -------------------------------------------------------------------------------- /dark_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import subprocess 4 | from time import sleep 5 | import requests 6 | 7 | process = subprocess.Popen("sudo gunicorn --bind 0.0.0.0:80 dark_server", shell=True) 8 | print('Darksearch started.') 9 | sleep(5) 10 | # Check maine search screen 11 | darkRequest = requests.get('http://0.0.0.0') 12 | darkRequest.raise_for_status() 13 | # Check cached pages get displayed 14 | process.kill() 15 | print('Darksearch is running in the background...') 16 | -------------------------------------------------------------------------------- /darksearch/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vlall/Darksearch/706b1bfeaaac5ac35672d9836cc65e3444751dab/darksearch/__init__.py -------------------------------------------------------------------------------- /darksearch/darkmain.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import os 4 | import json 5 | import urllib2 6 | import time 7 | import sys 8 | import requests 9 | import logging 10 | import threading 11 | from logging.handlers import RotatingFileHandler 12 | from darkspace import BackCheck 13 | from time import gmtime, strftime 14 | from flask import Flask, url_for, request, render_template 15 | from flask import redirect, Markup, session, abort, 
send_from_directory 16 | from flask.ext.api import FlaskAPI, status, exceptions 17 | from flask_limiter import Limiter 18 | from flask import jsonify 19 | from pympler import tracker 20 | 21 | 22 | app = Flask(__name__) 23 | limiter = Limiter( 24 | app, global_limits=[ 25 | "2000 per day", 26 | "400 per hour", 27 | "60 per minute" 28 | ] 29 | ) 30 | app.secret_key = os.urandom(24) # Creates 24-char cookie 31 | handler = RotatingFileHandler( 32 | 'darksearch/logs/info.log', 33 | maxBytes=100000, 34 | backupCount=10 35 | ) 36 | handler.setLevel(logging.INFO) 37 | app.logger.setLevel(logging.INFO) 38 | app.logger.addHandler(handler) 39 | 40 | 41 | def deFace(alias): 42 | """ 43 | Run query from darkspace.BackCheck. 44 | """ 45 | search = BackCheck(alias) 46 | return search 47 | 48 | 49 | @app.route("/", methods=['POST', 'GET']) 50 | @limiter.limit("3/second") 51 | def index(): 52 | return render_template('index.html') 53 | 54 | 55 | @app.route("/search/", methods=['POST', 'GET']) 56 | @limiter.limit("3/second") 57 | def search(page=1): 58 | start_time = time.time() 59 | try: 60 | alias = request.form['search'] 61 | except: 62 | try: 63 | alias = session['query'] # Check cookies. 64 | except: 65 | abort(400) 66 | alias = deFace(alias) 67 | engineList = alias.darkSites(page) 68 | query = str(alias.query) 69 | session['query'] = query 70 | results = str(alias.numDark) 71 | pageTotal = str(alias.maxPages) 72 | pageBar = alias.pageBar # Do not turn to str. 
73 | dur = ('%.3f') % (time.time() - start_time) 74 | make_logs(query, dur, results, page) 75 | if page > pageTotal: 76 | abort(404) 77 | return render_template( 78 | 'search.html', 79 | dur=dur, 80 | results=results, 81 | query=query, 82 | engineList=engineList, 83 | pageTotal=pageTotal, 84 | pageBar=pageBar 85 | ) 86 | 87 | 88 | @app.route("/", methods=['POST', 'GET']) 89 | def link(onion): 90 | onion = onion.replace('.html', "") 91 | root_dir = os.path.dirname(os.getcwd()) 92 | # print root_dir 93 | return send_from_directory(os.path.join(root_dir, 'darksearch/darksearch/data'), onion+'.html') 94 | 95 | """ 96 | @app.errorhandler(404) 97 | def page_not_found(e): 98 | return render_template('404.html'), 404 99 | 100 | 101 | @app.errorhandler(400) 102 | def bad_request(e): 103 | return render_template('400.html'), 400 104 | 105 | 106 | @app.errorhandler(429) 107 | def ratelimit_handler(e): 108 | return render_template('429.html', notice=e.description), 429 109 | """ 110 | 111 | def make_logs(query, dur, results, page): 112 | """ 113 | Log site search traffic in /logs. 
114 | """ 115 | ip = request.environ.get("REMOTE_ADDR") 116 | clock = strftime("%Y-%m-%d %H:%M:%S", gmtime()) 117 | log = '%s, %s, %s, %s, results:%s, page:%s' % ( 118 | clock, 119 | ip, 120 | query, 121 | dur, 122 | results, 123 | page 124 | ) 125 | app.logger.info(log) 126 | 127 | 128 | # API SECTION 129 | @app.route("/api//", methods=['GET']) 130 | @limiter.limit("3/second") 131 | def user_get(text, page=1): 132 | start_time = time.time() 133 | alias = text 134 | alias = deFace(alias) 135 | engineList = alias.darkSites(page) 136 | query = str(alias.query) 137 | results = str(alias.numDark) 138 | pageTotal = str(alias.maxPages) 139 | dur = ('%.3f') % (time.time() - start_time) 140 | make_logs(query, dur, results, page) 141 | if page > pageTotal: 142 | return '404 Error' 143 | return jsonify( 144 | { 145 | 'query': '%s' % query, 146 | 'size': '%s' % results, 147 | 'total_pages': '%s' % pageTotal, 148 | 'duration': '%s' % dur 149 | } 150 | ) 151 | 152 | 153 | if __name__ == '__main__': 154 | app.run( 155 | host='0.0.0.0', 156 | port=80, 157 | debug=True, 158 | threaded=True 159 | ) 160 | -------------------------------------------------------------------------------- /darksearch/darkspace.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import json 4 | import urllib2 5 | import time 6 | import requests 7 | import re 8 | import sys 9 | import math 10 | import gc 11 | from tools import DarkElastic 12 | from flask import Flask, url_for, request, render_template, redirect, Markup 13 | 14 | reload(sys) 15 | sys.setdefaultencoding('utf8') 16 | 17 | 18 | class BackCheck(object): 19 | """ 20 | Search .onions for keywords 21 | """ 22 | 23 | def __init__(self, query, dob=None): 24 | # Removes all non-alphanumeric, non-white space characters but keeps quotes 25 | query = re.sub(r'[^a-z\'"A-Z\d\s:]', '', query) 26 | # Check if quotes are closed 27 | if query.count('\"') % 2 != 0: 28 | query = query.replace('\"', 
"") 29 | self.query = query 30 | 31 | 32 | 33 | def dark200(self, socialList, username): 34 | pass 35 | 36 | def imageResuts(self, links): 37 | """Scrape all of the profile images on a webpage.""" 38 | pass 39 | 40 | def darkResults(self, socialName, image, description, href, category='website'): 41 | description = str(description) 42 | hrefs = "" 43 | href = href.replace('.html', "") 44 | if description: 45 | nLink = "

%s...

" % (description) 46 | if not description: 47 | hrefs = "

Potential items not found or are hidden

" 48 | self.results = ( 49 | "
  • " 51 | "

    %s

    " 53 | "

    updated: %s

    " 54 | "

    %s

  • " 55 | % (image, href, socialName, category, description) 56 | ) 57 | return self.results 58 | 59 | def make_pageBar(self, current, end): 60 | start = 1 61 | results = "" 62 | back = "" 63 | next = "" 64 | if end >= 5: 65 | end = current + 2 66 | if current >= 3: 67 | start = current - 2 68 | else: 69 | end = current + (5 - current) 70 | if end > self.maxPages: 71 | end = self.maxPages 72 | for page in range(start, end + 1): 73 | if page == current: 74 | line = ( 75 | "
  • " 76 | "%s
  • " 77 | % (page, page) 78 | ) 79 | else: 80 | line = "
  • %s
  • " % (page, page) 81 | results = results + line 82 | if (current - 1) > 0: 83 | back = "
  • Prev
  • " % (current - 1) 84 | if (current + 1) <= end: 85 | next = "
  • Next
  • " % (current + 1) 86 | return (back + results + next) 87 | 88 | def darkSites(self, currentPage, limitResults=10): 89 | # Clean up 90 | gc.collect() 91 | # Start ElasticSearch 92 | elastic = DarkElastic() 93 | elastic.search_index('dark', self.query) 94 | self.numDark = elastic.size 95 | self.maxPages = math.ceil((self.numDark) / float(limitResults)) 96 | self.maxPages = int(self.maxPages) 97 | # Displays 10 results per page 98 | displayStart = int((currentPage * limitResults) - limitResults) 99 | displayEnd = int((currentPage * limitResults)) 100 | elastic.search_index('dark', self.query, displayStart, limitResults) 101 | descTotal = '' 102 | self.pageBar = Markup(self.make_pageBar(currentPage, self.maxPages)) 103 | for val in elastic.briefList: 104 | cat = elastic.check_cat(val) 105 | i = elastic.briefList.index(val) 106 | description = Markup( 107 | self.darkResults( 108 | elastic.titleList[i], 109 | cat, 110 | val, 111 | elastic.namesList[i], 112 | elastic.datesList[i] 113 | ) 114 | ) 115 | descTotal = descTotal + description 116 | elastic.free_mem() # Attempting to free up memory.. 117 | del elastic 118 | return Markup(descTotal) 119 | 120 | 121 | if __name__ == '__main__': 122 | example = BackCheck('John Smith') 123 | print example.output 124 | -------------------------------------------------------------------------------- /darksearch/docs/darksearch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vlall/Darksearch/706b1bfeaaac5ac35672d9836cc65e3444751dab/darksearch/docs/darksearch.png -------------------------------------------------------------------------------- /darksearch/docs/pipline.md: -------------------------------------------------------------------------------- 1 | # Data Pipeline 2 | - Torfka checks seedlist for 200 Response, scrapes page, logs date, size, name, link. 
3 | - Take log, read it, pipe through Apache Tika, gather Tika metadata and generate 200 word description to DataFrame. 4 | - Ingest DataFrame into Elasticsearch index. 5 | - Use Flask to serve content and elas.py to read index 6 | 7 | # Technologies 8 | - Tor and Scrapy for web scraping 9 | - Apache Kafka for streaming messages 10 | - Apache Tika for text translation 11 | - Postgres for the database 12 | - Elasticsearch as an index 13 | - Flask/flask-api/Gunicorn for the server 14 | - Nginx for reverse proxy 15 | -------------------------------------------------------------------------------- /darksearch/logs/.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore everything in this directory 2 | * 3 | # Except this file 4 | !.gitignore 5 | 6 | -------------------------------------------------------------------------------- /darksearch/static/etc/mag.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vlall/Darksearch/706b1bfeaaac5ac35672d9836cc65e3444751dab/darksearch/static/etc/mag.png -------------------------------------------------------------------------------- /darksearch/static/etc/search.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vlall/Darksearch/706b1bfeaaac5ac35672d9836cc65e3444751dab/darksearch/static/etc/search.png -------------------------------------------------------------------------------- /darksearch/static/etc/searchb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vlall/Darksearch/706b1bfeaaac5ac35672d9836cc65e3444751dab/darksearch/static/etc/searchb.png -------------------------------------------------------------------------------- /darksearch/static/front/css/normalize.css: -------------------------------------------------------------------------------- 1 | /*! 
normalize.css v3.0.2 | MIT License | git.io/normalize */html{font-family:sans-serif;-ms-text-size-adjust:100%;-webkit-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:bold}dfn{font-style:italic}h1{font-size:2em;margin:0.67em 0}mark{background:#ff0;color:#000}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-0.5em}sub{bottom:-0.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em 40px}hr{-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box;height:0}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace, monospace;font-size:1em}button,input,optgroup,select,textarea{color:inherit;font:inherit;margin:0}button{overflow:visible}button,select{text-transform:none}button,html input[type="button"],input[type="reset"],input[type="submit"]{-webkit-appearance:button;cursor:pointer}button[disabled],html input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}input{line-height:normal}input[type="checkbox"],input[type="radio"]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0}input[type="number"]::-webkit-inner-spin-button,input[type="number"]::-webkit-outer-spin-button{height:auto}input[type="search"]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}input[type="search"]::-webkit-search-cancel-button,input[type="search"]::-webkit-search-decoration{-webkit-appearance:none}fieldset{border:1px solid #c0c0c0;margin:0 2px;padding:0.35em 0.625em 
0.75em}legend{border:0;padding:0}textarea{overflow:auto}optgroup{font-weight:bold}table{border-collapse:collapse;border-spacing:0}td,th{padding:0} 2 | -------------------------------------------------------------------------------- /darksearch/static/front/css/style.css: -------------------------------------------------------------------------------- 1 | body {background:url(https://s3-us-west-2.amazonaws.com/s.cdpn.io/41294/hero.jpg) no-repeat center center fixed; background-size:cover;} 2 | 3 | .search {width: 500px;height: 40px;margin: 150px auto;background: #444;background: rgba(0,0,0,.2);border-radius: 3px;border: 1px solid #fff;} 4 | 5 | .search input {width: 370px;padding: 10px 5px;float: left;color: #ccc;border: 0;background: transparent;border-radius: 3px 0 0 3px;} 6 | 7 | .search input:focus {outline: 0;background:transparent;} 8 | 9 | .search button {position: relative;float: right;border: 0;padding: 0;cursor: pointer;height: 40px;width: 120px;color: #fff;background: transparent;border-left: 1px solid #fff;border-radius: 0 3px 3px 0;} 10 | 11 | .search button:hover {background: #fff;color:#444;} 12 | .search button:active {box-shadow: 0px 0px 12px 0px rgba(225, 225, 225, 1);} 13 | 14 | .search button:focus {outline: 0;} 15 | -------------------------------------------------------------------------------- /darksearch/static/front/images/darksearch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vlall/Darksearch/706b1bfeaaac5ac35672d9836cc65e3444751dab/darksearch/static/front/images/darksearch.png -------------------------------------------------------------------------------- /darksearch/static/front/images/darksearch2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vlall/Darksearch/706b1bfeaaac5ac35672d9836cc65e3444751dab/darksearch/static/front/images/darksearch2.png 
-------------------------------------------------------------------------------- /darksearch/static/front/images/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vlall/Darksearch/706b1bfeaaac5ac35672d9836cc65e3444751dab/darksearch/static/front/images/favicon.ico -------------------------------------------------------------------------------- /darksearch/static/front/scss/style.scss: -------------------------------------------------------------------------------- 1 | @import "bourbon"; 2 | 3 | $search-input-height: 50px; 4 | $search-input-padding: 0px 20px; 5 | $search-font-size: 20px; 6 | $search-line-height: 1; 7 | $search-input-icon-size: $search-font-size*$search-line-height; 8 | $background-color: #006fc2; 9 | $search-background: lighten($background-color, 35%); 10 | $search-background-active: lighten($background-color, 20%); 11 | $search-color: #fff; 12 | $search-btn-active: $background-color; 13 | 14 | @mixin input-placeholder { 15 | &.placeholder { @content; } 16 | &:-moz-placeholder { @content; } 17 | &::-moz-placeholder { @content; } 18 | &:-ms-input-placeholder { @content; } 19 | &::-webkit-input-placeholder { @content; } 20 | } 21 | 22 | * { 23 | @include box-sizing(border-box); 24 | } 25 | 26 | body { 27 | background-color: $background-color; 28 | } 29 | 30 | input[type="search"] { 31 | @include box-sizing(border-box); 32 | } 33 | 34 | .searchbox-container { 35 | margin: 0 auto; 36 | height: $search-input-height; 37 | display: inline-block; 38 | } 39 | 40 | .searchbutton, .searchbox { 41 | @include appearance(none); 42 | @include box-sizing(border-box); 43 | @include transition(background-color, .3s); 44 | font-size: $search-font-size; 45 | line-height: $search-line-height; 46 | color: $search-color; 47 | border-radius: 0; 48 | float: left; 49 | border: none; 50 | height: $search-input-height; 51 | outline: none; 52 | background: $search-background; 53 | } 54 | 55 | 
input[type="search"] { 56 | border-radius: 0; 57 | -webkit-appearance: none; 58 | } 59 | 60 | .searchbox { 61 | padding: $search-input-padding; 62 | 63 | &:focus, &:hover { 64 | background-color: $search-background-active; 65 | + .searchbutton { 66 | background-color: $search-background-active; 67 | } 68 | } 69 | 70 | @include input-placeholder { 71 | color: $search-color; 72 | font-style: italic; 73 | } 74 | } 75 | 76 | .searchbutton { 77 | width: 50px; 78 | padding: 0; 79 | text-align: center; 80 | 81 | &:hover, &:focus { 82 | color: $search-btn-active; 83 | } 84 | &:active { 85 | color: darken($search-btn-active, 20%); 86 | } 87 | } 88 | 89 | html, body, section { width: 100%; height: 100%; } 90 | 91 | .example { 92 | width: 100%; 93 | position: absolute; 94 | top: 40%; 95 | text-align: center; 96 | } -------------------------------------------------------------------------------- /darksearch/static/listjs/css/normalize.css: -------------------------------------------------------------------------------- 1 | /*! 
normalize.css v3.0.2 | MIT License | git.io/normalize */html{font-family:sans-serif;-ms-text-size-adjust:100%;-webkit-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:bold}dfn{font-style:italic}h1{font-size:2em;margin:0.67em 0}mark{background:#ff0;color:#000}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-0.5em}sub{bottom:-0.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em 40px}hr{-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box;height:0}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace, monospace;font-size:1em}button,input,optgroup,select,textarea{color:inherit;font:inherit;margin:0}button{overflow:visible}button,select{text-transform:none}button,html input[type="button"],input[type="reset"],input[type="submit"]{-webkit-appearance:button;cursor:pointer}button[disabled],html input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}input{line-height:normal}input[type="checkbox"],input[type="radio"]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0}input[type="number"]::-webkit-inner-spin-button,input[type="number"]::-webkit-outer-spin-button{height:auto}input[type="search"]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}input[type="search"]::-webkit-search-cancel-button,input[type="search"]::-webkit-search-decoration{-webkit-appearance:none}fieldset{border:1px solid #c0c0c0;margin:0 2px;padding:0.35em 0.625em 
0.75em}legend{border:0;padding:0}textarea{overflow:auto}optgroup{font-weight:bold}table{border-collapse:collapse;border-spacing:0}td,th{padding:0} 2 | -------------------------------------------------------------------------------- /darksearch/static/listjs/css/style.css: -------------------------------------------------------------------------------- 1 | /* WARNING: This css extreamly unorganized, go away :) */ 2 | body, 3 | div, 4 | dl, 5 | dt, 6 | dd, 7 | ul, 8 | ol, 9 | li, 10 | h1, 11 | h2, 12 | h3, 13 | h4, 14 | h5, 15 | h6, 16 | pre, 17 | form, 18 | fieldset, 19 | input, 20 | textarea, 21 | p, 22 | blockquote, 23 | th, 24 | td { 25 | margin: 0; 26 | padding: 0; 27 | color:#000; 28 | } 29 | table { 30 | border-collapse: collapse; 31 | border-spacing: 0; 32 | } 33 | fieldset, 34 | img { 35 | border: 0; 36 | } 37 | address, 38 | caption, 39 | cite, 40 | code, 41 | dfn, 42 | em, 43 | strong, 44 | th, 45 | var { 46 | font-style: normal; 47 | font-weight: normal; 48 | } 49 | ol, 50 | ul { 51 | list-style: none; 52 | } 53 | caption, 54 | th { 55 | text-align: left; 56 | } 57 | h1, 58 | h2, 59 | h3, 60 | h4, 61 | h5, 62 | h6 { 63 | font-size: 100%; 64 | font-weight: normal; 65 | } 66 | q:before, 67 | q:after { 68 | content: ''; 69 | } 70 | abbr, 71 | acronym { 72 | border: 0; 73 | } 74 | .clearfix:after { 75 | content: "."; 76 | display: block; 77 | clear: both; 78 | visibility: hidden; 79 | line-height: 0; 80 | height: 0; 81 | } 82 | .clearfix { 83 | display: inline-block; 84 | } 85 | html[xmlns] .clearfix { 86 | display: block; 87 | } 88 | * html .clearfix { 89 | height: 1%; 90 | } 91 | /* * * * * * * * * * * * * * * * * * * * * 92 | * General 93 | * * * * * * * * * * * * * * * * * * * * */ 94 | 95 | 96 | /*I ADDED THIS*/ 97 | .main-logo{ 98 | float:right; 99 | } 100 | .note{ 101 | clear:both; 102 | } 103 | .results{ 104 | float: left; 105 | /* color: #fff; */ 106 | font-size: 14px 107 | } 108 | /* Check Codepen.io for list.js to see original. 
I changed the lovely-things-list-nthchild to get the borders different shade of grey */ 109 | 110 | 111 | 112 | body { 113 | background: #000; 114 | font: normal 14px/1.3 'helvetica neue', helvetica, arial; 115 | color: #bbb; 116 | text-shadow: #262729 0 -1px 0; 117 | -webkit-font-smoothing: antialiased; 118 | padding: 50px 0; 119 | } 120 | body.clearfix { 121 | display: block; 122 | } 123 | header { 124 | float: left; 125 | margin-bottom: 20px; 126 | width: 100%; 127 | } 128 | #container { 129 | width: 750px; 130 | margin: 0 auto 20px; 131 | } 132 | #container a { 133 | color: #eee; 134 | } 135 | #container a:hover { 136 | color: #fff; 137 | } 138 | .c1 { 139 | width: 750px; 140 | float: left; 141 | } 142 | .c2 { 143 | width: 355px; 144 | float: left; 145 | } 146 | .c3 { 147 | width: 235px; 148 | float: left; 149 | } 150 | .m10l { 151 | margin-left: 10px; 152 | } 153 | .m20l { 154 | margin-left: 20px; 155 | } 156 | .p10 { 157 | padding: 10px; 158 | } 159 | p { 160 | font-size: 12px; 161 | line-height: 1.3; 162 | } 163 | h2 { 164 | font-size: 18px; 165 | color: #bbb; 166 | text-shadow: #262729 0 -1px 0; 167 | font-weight: 300; 168 | text-align: center; 169 | margin-bottom: 15px; 170 | } 171 | h2.look { 172 | font-size: 24px; 173 | margin-bottom: 45px; 174 | } 175 | h2 strong { 176 | color: #fafafa; 177 | } 178 | h3 { 179 | font-size: 14px; 180 | color: #aaa; 181 | margin-bottom: 8px; 182 | } 183 | input { 184 | font: normal 12px/1.3 'helvetica neue', helvetica, arial; 185 | font-size: 20px; 186 | padding: 2px 10px; 187 | border-radius: 0px; 188 | border: solid 1px #555; 189 | float: left; 190 | margin-right: 10px; 191 | margin-bottom: 15px; 192 | } 193 | input:focus { 194 | border: none; 195 | outline: none; 196 | background-color: #eee; 197 | border-top: solid 1px #aaa; 198 | border-right: solid 1px #e6e6e6; 199 | border-bottom: solid 1px #e6e6e6; 200 | border-left: solid 1px #aaa; 201 | } 202 | .btn { 203 | color: #ccc; 204 | margin-right: 5px; 205 | padding: 5px 
10px; 206 | border-radius: 10px; 207 | border: solid 1px #666; 208 | display: block; 209 | float: left; 210 | background-color: rgba(0, 0, 0, 0.1); 211 | cursor: pointer; 212 | cursor: hand; 213 | } 214 | .btn:hover { 215 | background-color: rgba(255, 255, 255, 0.05); 216 | color: #fafafa; 217 | } 218 | table { 219 | border-collapse: collapse; 220 | width: 100%; 221 | margin-bottom: 10px; 222 | } 223 | th { 224 | background-color: rgba(0, 0, 0, 0.1); 225 | border: solid 1px rgba(255, 255, 255, 0.1); 226 | padding: 10px 10px; 227 | text-align: left; 228 | font-weight: bold; 229 | color: #dddddd; 230 | } 231 | td { 232 | padding: 10px 10px; 233 | border: solid 1px rgba(255, 255, 255, 0.1); 234 | } 235 | /* * * * * * * * * * * * * * * * * * * * * 236 | * Meny 237 | * * * * * * * * * * * * * * * * * * * * */ 238 | .menu { 239 | position: fixed; 240 | left: 0; 241 | top: 0; 242 | margin: 0; 243 | margin-left: -20px; 244 | width: 100%; 245 | padding: 0 20px; 246 | background: #333; 247 | background: rgba(30, 30, 30, 0.8); 248 | border-bottom: solid 1px rgba(0, 0, 0, 0.1); 249 | box-shadow: 0 0 20px #000000; 250 | z-index: 100; 251 | } 252 | .menu .menu-container { 253 | width: 750px; 254 | font-size: 16px; 255 | margin: 0 auto; 256 | } 257 | .menu ul { 258 | float: left; 259 | } 260 | .menu ul li { 261 | display: block; 262 | float: left; 263 | line-height: 60px; 264 | } 265 | .menu ul li.name { 266 | font-size: 20px; 267 | color: #fff; 268 | font-weight: bold; 269 | } 270 | .menu ul li.name a { 271 | color: #eee; 272 | } 273 | .menu ul li a, 274 | .menu ul li.name { 275 | float: left; 276 | display: block; 277 | text-decoration: none; 278 | } 279 | .menu ul li a { 280 | padding: 0 20px; 281 | color: #aaa; 282 | } 283 | .menu ul li a:hover { 284 | background-color: rgba(255, 255, 255, 0.05); 285 | color: #fff; 286 | } 287 | .menu ul li a:hover b { 288 | color: #fff; 289 | } 290 | .menu ul.secondary { 291 | border: solid 1px #444; 292 | border-width: 1px 1px 0 1px; 293 | 
width: 750px; 294 | } 295 | .menu ul.secondary li { 296 | line-height: 30px; 297 | } 298 | .menu ul.secondary li a { 299 | padding: 0 20px; 300 | font-size: 14px; 301 | } 302 | .code { 303 | background-color: rgba(0, 0, 0, 0.2); 304 | border: solid 1px rgba(255, 255, 255, 0.15); 305 | border-radius: 10px; 306 | padding: 20px 20px 0; 307 | margin-bottom: 50px; 308 | font-size: 14px; 309 | } 310 | .code h3 { 311 | font-size: 18px; 312 | color: #aaa; 313 | text-shadow: #262729 0 -1px 0; 314 | font-weight: 300; 315 | margin-bottom: 15px; 316 | } 317 | /* * * * * * * * * * * * * * * * * * * * * 318 | * Start page 319 | * * * * * * * * * * * * * * * * * * * * */ 320 | .logo { 321 | background: url('http://listjs.com/images/graphics/listjs-logo.png') no-repeat; 322 | width: 328px; 323 | height: 334px; 324 | float: left; 325 | margin: 0 0 0 215px; 326 | } 327 | .slogan { 328 | display: none; 329 | } 330 | .logo span, 331 | .slogan span { 332 | display: block; 333 | margin-left: -10000px; 334 | } 335 | .download { 336 | clear: left; 337 | float: left; 338 | width: 600px; 339 | margin-bottom: 20px; 340 | } 341 | .download-btn { 342 | background: url('http://listjs.com/images/bg/download.png'); 343 | width: 217px; 344 | height: 76px; 345 | float: left; 346 | display: block; 347 | color: #fff; 348 | text-decoration: none; 349 | font-weight: bold; 350 | font-size: 26px; 351 | text-shadow: #047e0c 1px 1px 0; 352 | line-height: 1.1; 353 | } 354 | .download-btn:hover { 355 | color: #eee; 356 | background-position: 0 -76px; 357 | } 358 | .download-btn span { 359 | display: block; 360 | padding: 13px 0 0 70px; 361 | } 362 | .download-btn small { 363 | display: block; 364 | font-size: 13px; 365 | color: #b6e0f5; 366 | font-weight: normal; 367 | } 368 | .download p { 369 | color: #d8d5d5; 370 | font-size: 11px; 371 | margin-bottom: 5px; 372 | } 373 | .download p a { 374 | color: #54e45e; 375 | } 376 | .download p a:hover { 377 | color: #73f57c; 378 | } 379 | .share { 380 | padding: 
13px 20px 5px; 381 | clear: both; 382 | border-radius: 10px; 383 | margin: 0 0 50px 24px; 384 | width: 604px; 385 | border-bottom: solid 1px #666; 386 | background-color: rgba(255, 255, 255, 0.1); 387 | } 388 | p.preamble { 389 | font-size: 22px; 390 | color: #cecece; 391 | text-shadow: #262729 0 -1px 0; 392 | font-weight: 300; 393 | } 394 | p.preamble strong { 395 | color: #fafafa; 396 | } 397 | p.preamble .bugwarning { 398 | color: #959494; 399 | font-weight: 300; 400 | font-size: 16px; 401 | } 402 | div.info { 403 | margin-bottom: 40px; 404 | clear: both; 405 | } 406 | #example-list { 407 | margin-bottom: 30px; 408 | } 409 | #example-list .list { 410 | clear: both; 411 | } 412 | #example-list li { 413 | font-size: 20px; 414 | color: #cecece; 415 | text-shadow: #262729 0 -1px 0; 416 | font-weight: 300; 417 | border-top: solid 1px #666; 418 | padding: 10px 0; 419 | } 420 | #example-list li strong { 421 | color: #fafafa; 422 | } 423 | /* * * * * * * * * * * * * * * * * * * * * 424 | * Examples 425 | * * * * * * * * * * * * * * * * * * * * */ 426 | #container.example-page { 427 | margin-top: 30px; 428 | } 429 | h1 { 430 | font: 100 40px "helvetica neue", helvetica, arial; 431 | letter-spacing: 1px; 432 | color: #cecece; 433 | text-align: center; 434 | margin-bottom: 20px; 435 | text-shadow: #262729 0 -1px 0; 436 | } 437 | .note { 438 | text-shadow: #262729 0 -1px 0; 439 | text-align: center; 440 | color: #999; 441 | font-size: 11px; 442 | } 443 | .note b { 444 | color: #eee; 445 | } 446 | .note a { 447 | color: #eee; 448 | } 449 | #lovely-things-list { 450 | margin-bottom: 40px; 451 | overflow: hidden; 452 | } 453 | #lovely-things-list .thumb { 454 | width: 60px; 455 | height: 60px; 456 | float: left; 457 | margin-right: 20px; 458 | border: solid 1px #333; 459 | } 460 | #lovely-things-list .list { 461 | clear: both; 462 | margin-bottom: 10px; 463 | } 464 | #lovely-things-list .list li { 465 | padding: 20px; 466 | } 467 | #lovely-things-list li:nth-child(n) { 468 | 
background-color: #101010 ; 469 | border: solid 1px rgba(255, 255, 255, 0.15); 470 | border-radius: 10px; 471 | } 472 | #lovely-things-list li:nth-child(2n+1) .thumb { 473 | border: solid 1px #666; 474 | } 475 | #lovely-things-list h4 { 476 | color: #fafafa; 477 | font-weight: bold; 478 | margin-bottom: 6px; 479 | } 480 | #lovely-things-list p { 481 | /** font: normal 14px 'helvetica neue', helvetica, arial; **/ 482 | font-size: 13px; 483 | color: #dddddd; 484 | text-align: left; 485 | } 486 | #lovely-things-list .sort-by { 487 | margin-bottom: 10px; 488 | width: 100%; 489 | float: left; 490 | clear: both; 491 | } 492 | #lovely-things-list .filter { 493 | margin-bottom: 10px; 494 | width: 100%; 495 | float: left; 496 | clear: both; 497 | } 498 | #contacts input { 499 | font-size: 13px; 500 | margin: 0; 501 | border-radius: 5px; 502 | width: 200px; 503 | } 504 | #contacts { 505 | margin-bottom: 100px; 506 | } 507 | #contacts td.name, 508 | #contacts td.age, 509 | #contacts td.city { 510 | width: 130px; 511 | } 512 | #contacts td.edit, 513 | #contacts td.remove { 514 | width: 130px; 515 | } 516 | #contacts td.add { 517 | width: 300px; 518 | } 519 | #contacts input { 520 | width: 100px; 521 | } 522 | .huge { 523 | max-height: 500px; 524 | overflow: scroll; 525 | } 526 | /* * * * * * * * * * * * * * * * * * * * * 527 | * Performance 528 | * * * * * * * * * * * * * * * * * * * * */ 529 | #size { 530 | margin-left: 100px; 531 | width: 590px; 532 | } 533 | #time-table { 534 | margin-bottom: 40px; 535 | } 536 | #performance-list .scroll-container { 537 | height: 300px; 538 | overflow: scroll; 539 | margin-bottom: 10px; 540 | } 541 | #performance-share { 542 | padding: 13px 20px 5px; 543 | clear: both; 544 | border-radius: 10px; 545 | margin: 0 0 50px 80px; 546 | width: 520px; 547 | border-bottom: solid 1px #666; 548 | background-color: rgba(255, 255, 255, 0.1); 549 | } 550 | /* * * * * * * * * * * * * * * * * * * * * 551 | * Paging 552 | * * * * * * * * * * * * * * * * * * 
* * */ 553 | .paging { 554 | padding-bottom: 10px; 555 | clear: both; 556 | float: left; 557 | } 558 | .paging li { 559 | display: block; 560 | float: left; 561 | padding: 10px 15px; 562 | border-radius: 50%; 563 | } 564 | .paging li a { 565 | color: #999; 566 | text-decoration: none; 567 | display: inline-block; 568 | line-height: 14px; 569 | } 570 | .paging li.active a { 571 | font-weight: bold; 572 | color: #eee; 573 | font-size: 18px; 574 | margin: -1px; 575 | } 576 | .paging li a:hover { 577 | color: #eee; 578 | } 579 | /* * * * * * * * * * * * * * * * * * * * * 580 | * Changelog 581 | * * * * * * * * * * * * * * * * * * * * */ 582 | .changelog h3 { 583 | color: #fff; 584 | font-weight: bold; 585 | } 586 | .changelog ul { 587 | line-height: 1.5; 588 | margin: 0 0 30px 20px; 589 | } 590 | .changelog li { 591 | list-style-type: circle; 592 | margin-bottom: 5px; 593 | } 594 | .changelog li b { 595 | color: #eee; 596 | } 597 | .changelog code { 598 | background-color: rgba(0, 0, 0, 0.1); 599 | border: solid 1px rgba(0, 0, 0, 0.15); 600 | padding: 2px 5px; 601 | color: #fff; 602 | } 603 | /* * * * * * * * * * * * * * * * * * * * * 604 | * Syntax coloring 605 | * * * * * * * * * * * * * * * * * * * * */ 606 | pre { 607 | border-top: solid 1px rgba(255, 255, 255, 0.15); 608 | border-bottom: solid 1px rgba(255, 255, 255, 0.15); 609 | background-color: rgba(0, 0, 0, 0.2); 610 | padding: 0 20px 10px; 611 | margin: 0 -20px 20px; 612 | line-height: 1.5; 613 | } 614 | pre, 615 | code { 616 | font-family: 'monaco', sans-serif; 617 | font-size: 12px; 618 | } 619 | pre .keyword { 620 | color: #FF0C4B; 621 | } 622 | pre .string { 623 | color: #01DBFF; 624 | } 625 | pre .regexp { 626 | color: #15E80B; 627 | } 628 | pre .class { 629 | color: #15E80B; 630 | font-weight: bold; 631 | } 632 | pre .special { 633 | color: #FFD901; 634 | } 635 | pre .number { 636 | color: #01DBFF; 637 | } 638 | pre .comment { 639 | color: grey; 640 | font-style: italic; 641 | } 642 | .pages li { 643 | 
display: inline-block; 644 | line-height: 60px; 645 | margin-left: 15px; 646 | } 647 | .pages { 648 | text-align: center; 649 | margin: 0 auto; 650 | } 651 | a:link .pages { 652 | color: white; 653 | } 654 | a:visited .pages { 655 | color: purple; 656 | } 657 | 658 | a { 659 | text-decoration: none; 660 | } 661 | 662 | .glowing-border { 663 | outline: none; 664 | border: solid 2px #334c66; 665 | border-radius: 3px 0 0 3px; 666 | } 667 | .glowing-border:focus { 668 | outline: none; 669 | border: solid 2px #3973ac; 670 | box-shadow: 0 0 10px #3973ac; 671 | border-radius: 3px 0 0 3px; 672 | 673 | } 674 | -------------------------------------------------------------------------------- /darksearch/static/listjs/images/darksearch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vlall/Darksearch/706b1bfeaaac5ac35672d9836cc65e3444751dab/darksearch/static/listjs/images/darksearch.png -------------------------------------------------------------------------------- /darksearch/static/listjs/js/index.js: -------------------------------------------------------------------------------- 1 | /* 2 | * LOVELY THINGS 3 | */ 4 | 5 | var options = { 6 | valueNames: [ 'name', 'description', 'category' ] 7 | }; 8 | 9 | var featureList = new List('lovely-things-list', options); 10 | 11 | $('#filter-social-media').click(function() { 12 | featureList.filter(function(item) { 13 | if (item.values().category == "Social Media") { 14 | return true; 15 | } else { 16 | return false; 17 | } 18 | }); 19 | return false; 20 | }); 21 | 22 | $('#filter-dark-web').click(function() { 23 | featureList.filter(function(item) { 24 | if (item.values().category == "Dark Web") { 25 | return true; 26 | } else { 27 | return false; 28 | } 29 | }); 30 | return false; 31 | }); 32 | $('#filter-none').click(function() { 33 | featureList.filter(); 34 | return false; 35 | }); 
-------------------------------------------------------------------------------- /darksearch/templates/400.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | darksearch 6 | 7 | 8 | 9 | 17 | 31 | 32 | 33 | 34 | 35 | 36 | 37 |

    400 ERROR

    38 |

    The link you are trying to visit wasn't found. Try redoing your search.
    If these errors continue, please email support@darksearch.io

    39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /darksearch/templates/404.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | darksearch 6 | 7 | 8 | 9 | 17 | 31 | 32 | 33 | 34 | 35 | 36 | 37 |

    404 ERROR

    38 |

    There was an error in processing your request.

    39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /darksearch/templates/429.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | darksearch 6 | 7 | 8 | 9 | 17 | 31 | 32 | 33 | 34 | 35 | 36 | 37 |

    429 ERROR

    38 |

    Slow down! Our server's requests are limited to {{notice}}

    39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /darksearch/templates/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | Darksearch 9 | 10 | 11 | 12 | 64 | 65 | 66 | 74 | 75 | 76 | 77 | 78 | 82 |
    83 |

    Feb 24, 2016: API being developed on GitHub. More data to come...

    84 |
    85 | 86 | 87 | 88 | -------------------------------------------------------------------------------- /darksearch/templates/search.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Darksearch 6 | 7 | 8 | 9 | 17 | 18 | 19 |
    20 |
    21 |
    22 |

    23 | 27 | 28 | 29 |

    30 |
    31 |

    32 |
    33 |
    34 | {% if results == '0' %} 35 |

    36 |

    Hmmm... Darksearch couldn't find anything. Try being less specific.

    37 | {% else %} 38 |

    Darksearch found the following {{results}} results across {{pageTotal}} pages in {{dur}} seconds...

    39 | {% endif %} 40 |
    41 |
    42 |
      43 | 54 | {{engineList}} 55 |
    56 | 76 |
    77 |
      78 | {{pageBar}} 79 |
    80 |
    81 |
    82 | 83 | 84 | 85 |
    86 |

    Note: We do not host any media on Darksearch and take no liability for the content you may find.
    We cannot guarantee the safety of any .onion link. Contribute to this project on GitHub

    87 | 88 | 89 | -------------------------------------------------------------------------------- /darksearch/tools/__init__.py: -------------------------------------------------------------------------------- 1 | from elas import DarkElastic -------------------------------------------------------------------------------- /darksearch/tools/elas.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import os 4 | import pandas as pd 5 | import json 6 | from elasticsearch import Elasticsearch 7 | import requests 8 | import re 9 | es = Elasticsearch() 10 | 11 | class DarkElastic(object): 12 | 13 | def __init__(self): 14 | self.size = 0 15 | 16 | def pandas_to_json(self, jsonPath): 17 | """ 18 | Take logFile, open as Dataframe, covert to JSON, Save JSON. 19 | """ 20 | self.jsonPath = jsonPath 21 | self.logPath = os.getcwd()+'/../logs/process2.csv' 22 | with open(self.logPath) as logs: 23 | searchIndex = pd.read_csv( 24 | logs, 25 | header=None, 26 | sep='\t', 27 | names=[ 28 | "DATES", 29 | "URLS", 30 | "NAMES", 31 | "SIZE", 32 | "LANG", 33 | "TITLE", 34 | "CONTENT" 35 | ] 36 | ) 37 | self.size = len(searchIndex.index) 38 | searchIndex = searchIndex.to_json(orient='index') 39 | # If you want to use a JSON file rather than converting 40 | # with open(self.jsonPath) as searchIndex: 41 | searchIndex = json.loads(searchIndex) 42 | self.searchIndex = searchIndex 43 | self.save_json(searchIndex) 44 | 45 | def save_json(self, dataframe): 46 | with open(self.jsonPath, "w") as outfile: 47 | json.dump(dataframe, outfile, indent=4) 48 | print('Dataframe converted to JSON.') 49 | 50 | def ingest_items(self): 51 | for i in range(0, self.size): 52 | doc = self.searchIndex[str(i)] 53 | res = es.index( 54 | index="dark", 55 | doc_type='html', 56 | id=i, 57 | body=doc 58 | ) 59 | print('Ingested document %d...' 
% i) 60 | return (res['created']) 61 | 62 | def get_items(self, i): 63 | res = es.get( 64 | index="dark", 65 | doc_type='html', 66 | id=i 67 | ) 68 | return (res['_source']) 69 | 70 | def search_index(self, myIndex, myQuery, start=0, end=10): 71 | res = es.search( 72 | index=myIndex, 73 | body={ 74 | "from": start, 75 | "size": end, 76 | 'query': { 77 | "query_string": { 78 | "default_field": "CONTENT", 79 | "query": myQuery 80 | } 81 | }, 82 | "sort": { 83 | "_score": { 84 | "order": "desc" 85 | } 86 | } 87 | } 88 | ) 89 | self.briefList = [] 90 | self.namesList = [] 91 | self.datesList = [] 92 | self.titleList = [] 93 | hitList = ("Got %d Hits:" % res['hits']['total']) 94 | for hit in res['hits']['hits']: 95 | print("%(DATES)s: %(URLS)s" % hit['_source']) 96 | content = hit['_source']['CONTENT'] 97 | names = hit['_source']['NAMES'] 98 | dates = hit['_source']['DATES'] 99 | title = hit['_source']['TITLE'] 100 | brief = self.get_brief(myQuery, content, 20) 101 | self.briefList.append(brief) 102 | self.namesList.append(names) 103 | self.datesList.append(dates) 104 | self.titleList.append(title) 105 | self.size = res['hits']['total'] 106 | return hitList 107 | 108 | def delete_deuplicates(self, i): 109 | pass 110 | 111 | def delete_all(self, index='dark'): 112 | """ 113 | Runs $ curl -XDELETE 'http://localhost:9200/your_index/' 114 | """ 115 | r = requests.delete('http://localhost:9200/%s' % (index)) 116 | print('Index %s deleted.' 
% index) 117 | 118 | def get_brief(self, query, content, n): 119 | """ 120 | Obtain the brief description that shows up in search 121 | """ 122 | query = query.lower() 123 | # Strips quotes 124 | query = query.replace('\"', "") 125 | queryList = query.split() 126 | queryList.sort(key=len) 127 | content = content.lower().split() 128 | try: 129 | pos = content.index(query) 130 | except ValueError: 131 | pos = 0 132 | if ((pos - n) < 0): 133 | start = 0 134 | end = pos + n + abs((pos - n)) 135 | else: 136 | start = pos - n 137 | end = pos + n 138 | # Find Nearest period to end sentence... 139 | # try: 140 | # endSentence = content.index(".") 141 | # if endSentence < (start+40): 142 | # end = endSentence 143 | # except: 144 | # pass 145 | content = content[start:end] 146 | if len(content) >= 500: 147 | content = content[0:400] 148 | for query in queryList: 149 | wrap = ''+query+'' 150 | try: 151 | content[content.index(query)] = wrap 152 | except: 153 | pass 154 | brief = " ".join(content) 155 | return brief 156 | 157 | def runSetup(self, jsonPath): 158 | self.pandas_to_json(jsonPath) 159 | self.save_json(self.searchIndex) 160 | 161 | def check_cat(self, description): 162 | return 'tor' 163 | 164 | def free_mem(self): 165 | del self.briefList 166 | del self.namesList 167 | del self.datesList 168 | del self.titleList 169 | 170 | if __name__ == '__main__': 171 | test = DarkElastic() 172 | test.runSetup("../logs/process2.json") 173 | # Build your index. 
import sys
import tika
from tika import parser
import re
from tika import language, translate
import os
import csv
import pandas as pd


class Tikify(object):

    '''
    DataBase Ingestion Script
    - You have Data in html files in '/data'
    - You have 'logs/scrape.log' with the time scraped, size.
    - Create table in Postgres with
      Date, .onion, name(.html), tikify(text), size, language, type,
      title, sentiment, etc
    '''

    def __init__(self, fileName):
        """Parse *fileName* with Tika and keep cleaned content/title/lang."""
        parsed = parser.from_file(fileName)
        metadata = parsed["metadata"]
        rx = re.compile(r'\W+')
        # Flatten the extracted text: drop newlines/tabs/quotes, then
        # collapse every remaining run of non-word characters to a space.
        content = parsed["content"]
        content = content.replace('\n', '')
        content = content.replace('\t', '')
        content = content.replace('\'', '')
        content = content.replace('\"', '')
        content = rx.sub(' ', content).strip()
        self.content = content
        # Title: fall back when Tika reports no title metadata.
        try:
            title = metadata['title']
        except Exception:
            title = 'Untitled'
        # Bug fix: the original replaced '\t' twice; mirror the content
        # cleanup and strip newlines as well.
        title = title.replace('\n', '')
        title = title.replace('\t', '')
        title = title.replace('\'', '')
        title = title.replace('\"', '')
        title = rx.sub(' ', title).strip()
        self.title = title
        self.lang = language.from_file(fileName)

    def toEnglish(self, language='en'):
        # NOTE(review): translate.from_file() is handed the document TEXT,
        # not a file name -- confirm this is what the tika client expects.
        self.eng = translate.from_file(
            self.content.encode('UTF-8'), self.lang, language)

    def analyze(self, translate):
        # Placeholder for sentiment/other analysis of the translated text.
        pass


if __name__ == "__main__":
    dataPath = os.getcwd() + '/../data/'
    logPath = os.getcwd() + '/../logs/scrape.log'
    print('Started...')
    with open(logPath) as logs:
        print('Reading csv...')
        logs = pd.read_csv(
            logs,
            header=None,
            sep=',',
            skipinitialspace=True,
            names=[
                "DATES",
                "URLS",
                "NAMES",
                "SIZE",
            ]
        )
        for i in range(0, len(logs)):
            # Bug fix: the columns are declared DATES/URLS/NAMES/SIZE above
            # but were accessed as DATE/URL/NAME, which raises KeyError.
            date = str(logs['DATES'][i].strip())
            url = str(logs['URLS'][i].strip())
            name = str(logs['NAMES'][i].strip())
            size = str(logs['SIZE'][i])
            try:
                output = Tikify(dataPath + name)
                # NOTE: unicode() keeps this script Python-2-only, matching
                # the repo's Travis config (python 2.7).
                content = unicode(output.content)
                title = str(output.title)
                lang = str(output.lang)
                with open("../logs/process2.csv", "a") as log:
                    log.write('%s\t%s\t%s\t%s\t%s\t%s\t%s\n'
                              % (date, url, name, size, lang, title, content))
                print('Appended line %d...' % i)
            except Exception:
                # Best effort: skip files Tika cannot parse.
                continue
Flask==0.10.1 3 | Flask-API==0.6.5 4 | Flask-Limiter==0.9.1 5 | itsdangerous==0.24 6 | Jinja2==2.8 7 | limits==1.1 8 | MarkupSafe==0.23 9 | numpy==1.10.4 10 | pandas==0.17.1 11 | Pympler==0.4.2 12 | python-dateutil==2.4.2 13 | pytz==2015.7 14 | requests==2.9.1 15 | six==1.10.0 16 | urllib3==1.14 17 | Werkzeug==0.11.4 18 | wheel==0.24.0 19 | --------------------------------------------------------------------------------