├── .gitignore
├── .idea
│   ├── codeStyles
│   │   └── codeStyleConfig.xml
│   ├── deployment.xml
│   ├── dictionaries
│   │   └── devrus.xml
│   ├── inspectionProfiles
│   │   └── Project_Default.xml
│   ├── jsLibraryMappings.xml
│   ├── misc.xml
│   ├── modules.xml
│   ├── vcs.xml
│   ├── webServers.xml
│   └── workspace.xml
├── Backend
│   └── Backend.go
├── Config
│   ├── App.go
│   └── Configuration.go
├── Errors
│   ├── AuthErrors.go
│   ├── ErrorsText.go
│   └── MarshalError.go
├── Middleware
│   └── Auth.go
├── Models
│   ├── CountEstimateHolder.go
│   ├── Counters.go
│   ├── Credentials.go
│   ├── Error.go
│   ├── File.go
│   ├── FileTreeItem.go
│   ├── JwtRefreshToken.go
│   ├── PaginatedTorrentsResponse.go
│   ├── ScrapeTorrentResult.go
│   ├── SearchSuggestResponse.go
│   ├── Tag.go
│   ├── Title.go
│   ├── TokenClaims.go
│   ├── Torrent.go
│   ├── TorrentStatsPart.go
│   └── User.go
├── Readme.md
├── Routers
│   ├── AuthCurrentUserInfo.go
│   ├── AuthLoginHandler.go
│   ├── AuthRegistrationHandler.go
│   ├── Router.go
│   ├── SearchSuggestHandler.go
│   ├── TorrentCountHandler.go
│   ├── TorrentInfoHandler.go
│   ├── TorrentSearchHandler.go
│   ├── TorrentsListHandler.go
│   └── TorrentsStatsHandler.go
├── RpcMaster
│   └── RpcMaster.go
├── Services
│   ├── DhtCrawler.go
│   ├── RpcCommon.go
│   ├── ScrapeWork.go
│   ├── TagProducer
│   │   ├── TorrentTagsProducer.go
│   │   ├── VideoInfoExtractor.go
│   │   ├── specialTagsProducer.go
│   │   ├── specialVideoTagProducer.go
│   │   └── tokenExistenceTagProducer.go
│   ├── TorrentCountStatsUpdater.go
│   ├── WorkQueue.go
│   ├── jwt
│   │   ├── Init.go
│   │   ├── cookies.go
│   │   ├── dao.go
│   │   └── jwt.go
│   ├── rpc
│   │   ├── RpClient.go
│   │   ├── RpcServer.go
│   │   ├── ScrapeRcpService.go
│   │   └── TorrentRpcService.go
│   ├── scraper.go
│   └── tracker
│       ├── Hash.go
│       ├── http.go
│       ├── tracker.go
│       └── udp.go
├── Spider
│   └── Spider.go
├── Utils
│   ├── HostParser.go
│   ├── Random.go
│   ├── SliceUtils.go
│   └── ZombodbQueryBuilder.go
├── bin
│   ├── config.json
│   └── setup.sh
├── iconv-linux-amd64
└── static
    ├── .babelrc
    ├── .editorconfig
    ├── .eslintignore
    ├── .eslintrc.js
    ├── .gitignore
    ├── .postcssrc.js
    ├── API.md
    ├── README.md
    ├── build
    │   ├── build.js
    │   ├── check-versions.js
    │   ├── utils.js
    │   ├── vue-loader.conf.js
    │   ├── webpack.base.conf.js
    │   ├── webpack.dev.conf.js
    │   └── webpack.prod.conf.js
    ├── config
    │   ├── dev.env.js
    │   ├── index.js
    │   ├── prod.env.js
    │   └── test.env.js
    ├── index.html
    ├── index.json
    ├── package-lock.json
    ├── package.json
    ├── src
    │   ├── App.vue
    │   ├── Formatters.js
    │   ├── Interceptors.js
    │   ├── components
    │   │   ├── Chart.vue
    │   │   ├── Footer.vue
    │   │   ├── LoginDialog.vue
    │   │   ├── RegisterDialog.vue
    │   │   └── SearchField.vue
    │   ├── main.js
    │   ├── pages
    │   │   ├── HomePage.vue
    │   │   ├── MaintenancePage.vue
    │   │   ├── NotFoundPage.vue
    │   │   ├── StatisticsPage.vue
    │   │   ├── TorrentDetails.vue
    │   │   └── TorrentList.vue
    │   ├── router
    │   │   └── index.js
    │   └── store
    │       ├── auth.module.js
    │       ├── index.js
    │       └── search.module.js
    ├── static
    │   ├── .gitkeep
    │   ├── api-docs
    │   │   └── index.json
    │   ├── logo.png
    │   └── textfieldfix.css
    └── test
        └── unit
            ├── .eslintrc
            ├── jest.conf.js
            ├── setup.js
            └── specs
                └── HelloWorld.spec.js
/.gitignore:
--------------------------------------------------------------------------------
1 | # Compiled Object files, Static and Dynamic libs (Shared Objects)
2 | *.o
3 | *.a
4 | *.so
5 |
6 | # Folders
7 | _obj
8 | _test
9 |
10 | # Architecture specific extensions/prefixes
11 | *.[568vq]
12 | [568vq].out
13 |
14 | *.cgo1.go
15 | *.cgo2.c
16 | _cgo_defun.c
17 | _cgo_gotypes.go
18 | _cgo_export.*
19 |
20 | _testmain.go
21 |
22 | *.exe
23 | *.test
24 | *.prof
25 |
--------------------------------------------------------------------------------
/Backend/Backend.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import (
4 | "github.com/ruslanfedoseenko/dhtcrawler/Config"
5 | "github.com/ruslanfedoseenko/dhtcrawler/Routers"
6 | "github.com/ruslanfedoseenko/dhtcrawler/Services"
7 | "github.com/ruslanfedoseenko/dhtcrawler/Services/jwt"
8 | )
9 |
10 | // @APIVersion 1.0.0
11 | // @APITitle Btoogle Torrent Search API
12 | // @APIDescription API to search for Torrents
13 | // @Contact ruslan.fedoseenko.91@gmail.com
14 | // @TermsOfServiceUrl http://google.com/
15 | // @License BSD
16 | // @LicenseUrl http://opensource.org/licenses/BSD-2-Clause
17 | // @BasePath http://btoogle.com/api/
18 |
19 |
20 | func main() {
21 |
22 | app := Config.NewApp()
23 | jwt.InitJWT(app)
24 | Routers.Setup(app)
25 |
26 | Services.SetupTorrentCountStatsUpdater(app)
27 | app.Run()
28 | }
29 |
30 |
31 |
--------------------------------------------------------------------------------
/Config/App.go:
--------------------------------------------------------------------------------
1 | package Config
2 |
3 | import (
4 | "errors"
5 | "fmt"
6 | "github.com/getsentry/raven-go"
7 | "github.com/jasonlvhit/gocron"
8 | "github.com/jinzhu/gorm"
9 | _ "github.com/jinzhu/gorm/dialects/postgres"
10 | "log"
11 | "os"
12 | "os/signal"
13 | "runtime/debug"
14 | "syscall"
15 | "github.com/op/go-logging"
16 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
17 | )
18 |
19 | var clog = logging.MustGetLogger("Config")
20 |
21 | type Service interface {
22 | Start()
23 | }
24 |
25 | type App struct {
26 | Config *Configuration
27 | Db *gorm.DB
28 | Scheduler *gocron.Scheduler
29 | Services []Service
30 | }
31 |
32 | func NewApp() *App {
33 | raven.SetDSN("http://b80ba7ce05cb42e7983d962d95a1a6e5:37cd46ed05b54ce69fca375afd606bff@sentry.btoogle.com/4")
34 | setupLog()
35 | clog.Info("Test output")
36 | config := SetupConfiguration()
37 | app := App{
38 |
39 | Config: config,
40 | Scheduler: gocron.NewScheduler(),
41 | }
42 | if config.DbConfig.DbDriver != "" {
43 | var connectionString string = fmt.Sprintf("host=%s port=%d user=%s dbname=%s sslmode=disable password=%s", config.DbConfig.Host, config.DbConfig.Port, config.DbConfig.UserName, config.DbConfig.TableName, config.DbConfig.Password)
44 | log.Println("Using Connection string", connectionString)
45 | db, err := gorm.Open(config.DbConfig.DbDriver, connectionString)
46 | if err != nil {
47 | log.Println("Db open error:", err.Error())
48 | }
49 | err = db.DB().Ping()
50 | if err != nil {
51 | log.Println("Db open error:", err.Error())
52 | }
53 | applySentryPlugin(db)
54 | db.AutoMigrate(&Models.JwtRefreshToken{}, &Models.User{})
55 | app.Db = db
56 | }
57 |
58 | return &app
59 | }
60 |
61 | var format = logging.MustStringFormatter(
62 | `%{color}%{time:15:04:05.000} %{module} %{shortfunc} - %{level:.4s} %{color:reset}: %{message}`,
63 | )
64 |
65 | type SentryLogBackend struct {
66 | }
67 |
68 | func (b SentryLogBackend) Log(logLvl logging.Level, i int, record *logging.Record) error {
69 |
70 |
71 | switch logLvl {
72 | case logging.ERROR,
73 | logging.CRITICAL,
74 | logging.NOTICE:
75 | {
76 | e := errors.New(record.Formatted(i))
77 |
78 | evntId := raven.CaptureError(e, nil)
79 | log.Println("captureing error with raven:", e, "event id:", evntId)
80 | break
81 | }
82 | }
83 | return nil
84 | }
85 |
86 |
87 | func setupLog() {
88 | stdOutBackend := logging.NewLogBackend(os.Stdout, "", 0)
89 | leveledSentryBackend := logging.AddModuleLevel(SentryLogBackend{})
90 | leveledSentryBackend.SetLevel(logging.NOTICE, "")
91 | leveledStdOutBackend := logging.AddModuleLevel(stdOutBackend)
92 | leveledStdOutBackend.SetLevel(logging.DEBUG, "")
93 | logging.SetFormatter(format)
94 | logging.SetBackend(leveledStdOutBackend, leveledSentryBackend)
95 | }
96 |
97 | func reportErrToSentryio(scope *gorm.Scope) {
98 | if scope.HasError() {
99 | err := scope.DB().Error
100 | sql := scope.CombinedConditionSql()
101 | sentryAdditionalData := map[string]string{
102 | "sql": sql,
103 | }
104 | raven.CaptureError(err, sentryAdditionalData)
105 | }
106 | }
107 | func applySentryPlugin(db *gorm.DB) {
108 | db.Callback().Create().After("gorm:create").Register("sentryio_plugin:after_create", reportErrToSentryio)
109 | db.Callback().Query().After("gorm:query").Register("sentryio_plugin:after_query", reportErrToSentryio)
110 | db.Callback().Delete().After("gorm:delete").Register("sentryio_plugin:after_delete", reportErrToSentryio)
111 | db.Callback().Update().After("gorm:update").Register("sentryio_plugin:after_update", reportErrToSentryio)
112 | }
113 |
114 | func (app *App) AddService(svc Service) {
115 | app.Services = append(app.Services, svc)
116 | }
117 |
118 | func (app *App) Run() {
119 |
120 | for _, service := range app.Services {
121 | service.Start()
122 | }
123 |
124 | go func() {
125 | log.Println("Starting Scheduler")
126 | <-app.Scheduler.Start()
127 | }()
128 | if app.Db != nil {
129 | defer app.Db.Close()
130 | }
131 |
132 | sigs := make(chan os.Signal, 1)
133 | done := make(chan bool, 1)
134 | signal.Notify(sigs, os.Interrupt)
135 | signal.Notify(sigs, syscall.SIGTERM)
136 | signal.Notify(sigs, os.Kill)
137 | log.Println("pushed global error handler")
138 | defer func() {
139 | if r := recover(); r != nil {
140 | log.Println("Recovered in f", r)
141 | debug.PrintStack()
142 | // find out exactly what the error was and set err
143 | var err error
144 | switch x := r.(type) {
145 | case string:
146 | err = errors.New(x)
147 | case error:
148 | err = x
149 | default:
150 | err = errors.New("Unknown panic")
151 | }
152 | if err != nil {
153 | // sendMeMail(err)
154 | log.Println("Panic occured", err.Error())
155 | }
156 | }
157 | }()
158 | go func() {
159 | log.Println("Waiting for SIGTERM to exit cleanly")
160 | sig := <-sigs
161 | log.Println("recived signal", sig)
162 | done <- true
163 | }()
164 |
165 | <-done
166 |
167 | }
168 |
--------------------------------------------------------------------------------
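App ties the pieces together: anything that implements the Service interface above (a single Start method) can be registered with AddService, and Run starts every registered service, runs the gocron scheduler, and then blocks until an interrupt or SIGTERM arrives. A minimal sketch of a custom service follows; the PingService name and its log message are illustrative and not part of the repo.

package Services

import (
	"github.com/op/go-logging"

	"github.com/ruslanfedoseenko/dhtcrawler/Config"
)

var pingLog = logging.MustGetLogger("PingService")

// PingService is illustrative; it only needs to satisfy Config.Service.
type PingService struct{}

func (PingService) Start() {
	pingLog.Info("ping service started")
}

func SetupPing(app *Config.App) {
	// app.Run() later calls Start() on every registered service
	// and then waits for SIGINT/SIGTERM before exiting.
	app.AddService(PingService{})
}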
/Config/Configuration.go:
--------------------------------------------------------------------------------
1 | package Config
2 |
3 | import (
4 | "encoding/json"
5 | "errors"
6 | "flag"
7 | "log"
8 | "os"
9 | "strings"
10 | "github.com/op/go-logging"
11 | )
12 |
13 | type DbConfig struct {
14 | DbDriver string
15 | Host string
16 | Port uint32
17 | TableName string // despite the name, this is the Postgres database name used as dbname= in the connection string
18 | UserName string
19 | Password string
20 | }
21 | type DhtConfig struct {
22 | StartPort int
23 | Workers int
24 | }
25 | type HttpConfig struct {
26 | StaticDataFolder string
27 | JwtAuthPrivateKeyPath string
28 | JwtAuthPublicKeyPath string
29 | }
30 | type TagProducersConfig struct {
31 | TagProducers map[string][]string
32 | }
33 | type ScrapeConfig struct {
34 | Trackers []string
35 | WorkerThreads int
36 | ScrapeTimeout int
37 | }
38 | type RpcMode string
39 |
40 | const (
41 | SERVER RpcMode = "SERVER"
42 | CLIENT RpcMode = "CLIENT"
43 | )
44 |
45 | func (o *RpcMode) UnmarshalText(b []byte) (e error) {
46 | str := strings.Trim(string(b), `"`)
47 |
48 | switch str {
49 | case string(SERVER), string(CLIENT):
50 | *o = RpcMode(str)
51 |
52 | default:
53 | e = errors.New("Unknown RpcMode specified")
54 | }
55 |
56 | return e
57 | }
58 |
59 | type RpcConfig struct {
60 | Mode RpcMode
61 | Host string
62 | Port int
63 | }
64 | type TmdbApi struct {
65 | ApiKey string
66 | }
67 | type Configuration struct {
68 | DbConfig DbConfig
69 | DhtConfig DhtConfig
70 | HttpConfig HttpConfig
71 | ScrapeConfig ScrapeConfig
72 | RpcConfig RpcConfig
73 | TagProducersConfig TagProducersConfig
74 | TmdbApi TmdbApi
75 | ItemsPerPage uint64
76 | }
77 |
78 | func SetupConfiguration() *Configuration {
79 | configFileName := flag.String("config", "config.json", "Path to config file")
80 | flag.Parse()
81 | file, err := os.Open(*configFileName)
82 | if err != nil {
83 | log.Panicln("Error opening Config:", err.Error())
84 | }
85 | defer file.Close()
86 | decoder := json.NewDecoder(file)
87 | configuration := Configuration{}
88 | err = decoder.Decode(&configuration)
89 | if err != nil {
90 | log.Panicln("Error reading Config:", err)
91 | }
92 | log.Println("Level values", logging.CRITICAL, logging.ERROR, logging.WARNING, logging.NOTICE, logging.INFO, logging.DEBUG)
93 | log.Println("Configuration ", configuration)
94 | return &configuration
95 | }
96 |
--------------------------------------------------------------------------------
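SetupConfiguration decodes the file passed via the -config flag (config.json by default; the repo ships bin/config.json) straight into the Configuration struct above, so the JSON keys mirror those field names. The sketch below is hypothetical: every host, path, key, and tracker is a placeholder rather than a value taken from the project.

{
  "DbConfig": {
    "DbDriver": "postgres",
    "Host": "localhost",
    "Port": 5432,
    "TableName": "dhtcrawler",
    "UserName": "dht",
    "Password": "secret"
  },
  "DhtConfig": { "StartPort": 6881, "Workers": 2 },
  "HttpConfig": {
    "StaticDataFolder": "/var/www/btoogle",
    "JwtAuthPrivateKeyPath": "keys/app.rsa",
    "JwtAuthPublicKeyPath": "keys/app.rsa.pub"
  },
  "ScrapeConfig": {
    "Trackers": ["udp://tracker.example.org:1337/announce"],
    "WorkerThreads": 4,
    "ScrapeTimeout": 30
  },
  "RpcConfig": { "Mode": "SERVER", "Host": "0.0.0.0", "Port": 9000 },
  "TagProducersConfig": {
    "TagProducers": { "music": ["mp3", "flac"] }
  },
  "TmdbApi": { "ApiKey": "<tmdb api key>" },
  "ItemsPerPage": 25
}

Go's encoding/json matches keys case-insensitively, so only the names themselves have to line up; RpcConfig.Mode must be "SERVER" or "CLIENT", otherwise UnmarshalText rejects it.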
/Errors/AuthErrors.go:
--------------------------------------------------------------------------------
1 | package Errors
2 |
3 | const (
4 | UserNameAlreadyExists = 1
5 | UserEmailIsUsed = 2
6 | InvalidUsername = 3
7 | InvalidPassword = 4
8 | InvalidMail = 5
9 | InvalidToken = 6
10 | )
11 |
--------------------------------------------------------------------------------
/Errors/ErrorsText.go:
--------------------------------------------------------------------------------
1 | package Errors
2 |
3 | var errText = map[int]string{
4 | UserNameAlreadyExists: "A user with the same name already exists. Please choose another one",
5 | UserEmailIsUsed: "Email is used by another user",
6 | InvalidUsername: "User is not found",
7 | InvalidPassword: "Incorrect password",
8 | FailedToMarshalStruct: "Struct Marshaling failed",
9 | InvalidMail: "Mail is invalid.",
10 | InvalidToken: "Specified token is invalid",
11 | }
12 |
13 | func ErrorText(code int) string {
14 | return errText[code]
15 | }
16 |
--------------------------------------------------------------------------------
/Errors/MarshalError.go:
--------------------------------------------------------------------------------
1 | package Errors
2 |
3 |
4 | const (
5 | FailedToMarshalStruct = 100
6 | )
7 |
--------------------------------------------------------------------------------
/Middleware/Auth.go:
--------------------------------------------------------------------------------
1 | package Middleware
2 |
3 | import (
4 | "github.com/julienschmidt/httprouter"
5 | "net/http"
6 | "log"
7 | "github.com/ruslanfedoseenko/dhtcrawler/Services/jwt"
8 | "github.com/op/go-logging"
9 | )
10 |
11 | var AuthMiddlewareLog = logging.MustGetLogger("AuthMiddleware")
12 |
13 | func AuthRequest(handleFunc httprouter.Handle) httprouter.Handle {
14 | return func(w http.ResponseWriter, r *http.Request, params httprouter.Params) {
15 | AuthMiddlewareLog.Info("In auth restricted section")
16 |
17 | // read cookies
18 | AuthCookie, authErr := r.Cookie("AuthToken")
19 | if authErr == http.ErrNoCookie {
20 | AuthMiddlewareLog.Error("Unauthorized attempt! No auth cookie")
21 | jwt.NullifyTokenCookies(&w, r)
22 | // http.Redirect(w, r, "/login", 302)
23 | http.Error(w, http.StatusText(401), 401)
24 | return
25 | } else if authErr != nil {
26 | log.Panic("panic: %+v", authErr)
27 | jwt.NullifyTokenCookies(&w, r)
28 | http.Error(w, http.StatusText(500), 500)
29 | return
30 | }
31 |
32 | RefreshCookie, refreshErr := r.Cookie("RefreshToken")
33 | if refreshErr == http.ErrNoCookie {
34 | AuthMiddlewareLog.Error("Unauthorized attempt! No refresh cookie")
35 | jwt.NullifyTokenCookies(&w, r)
36 | http.Redirect(w, r, "/login", 302)
37 | return
38 | } else if refreshErr != nil {
39 | log.Panic("panic: %+v", refreshErr)
40 | jwt.NullifyTokenCookies(&w, r)
41 | http.Error(w, http.StatusText(500), 500)
42 | return
43 | }
44 |
45 | // grab the csrf token
46 | requestCsrfToken := grabCsrfFromReq(r)
47 | AuthMiddlewareLog.Infof("Request Refresh Token: %s",requestCsrfToken)
48 |
49 |
50 | // check the jwt's for validity
51 | authTokenString, refreshTokenString, csrfSecret, err := jwt.CheckAndRefreshTokens(AuthCookie.Value, RefreshCookie.Value, requestCsrfToken)
52 | if err != nil {
53 | if err.Error() == "Unauthorized" {
54 | AuthMiddlewareLog.Error("Unauthorized attempt! JWT's not valid!")
55 | // Utils.NullifyTokenCookies(&w, r)
56 | // http.Redirect(w, r, "/login", 302)
57 | http.Error(w, http.StatusText(401), 401)
58 | return
59 | } else {
60 | // @adam-hanna: do we 401 or 500, here?
61 | // it could be 401 bc the token they provided was messed up
62 | // or it could be 500 bc there was some error on our end
63 | AuthMiddlewareLog.Error("err not nil")
64 | log.Panic("panic: %+v", err)
65 | // Utils.NullifyTokenCookies(&w, r)
66 | http.Error(w, http.StatusText(500), 500)
67 | return
68 | }
69 | }
70 | claims, err := jwt.GetClaims(AuthCookie.Value)
71 |
72 | params = append(params, httprouter.Param{
73 | Key: "AuthUserId",
74 | Value: claims.Subject,
75 | })
76 |
77 | AuthMiddlewareLog.Error("Successfully recreated jwts")
78 |
79 | // @adam-hanna: Change this. Only allow whitelisted origins! Also check referer header
80 | w.Header().Set("Access-Control-Allow-Origin", "*")
81 |
82 | // if we've made it this far, everything is valid!
83 | // And tokens have been refreshed if need-be
84 | jwt.SetAuthAndRefreshCookies(&w, r, authTokenString, refreshTokenString)
85 | w.Header().Set("X-CSRF-Token", csrfSecret)
86 |
87 | handleFunc(w, r, params)
88 | }
89 | }
90 |
91 | func grabCsrfFromReq(r *http.Request) string {
92 | csrfFromFrom := r.FormValue("X-CSRF-Token")
93 | csrfFromCookie, err := r.Cookie("csrf")
94 | if csrfFromFrom != "" {
95 | return csrfFromFrom
96 | } else if err == nil && csrfFromCookie.Value != "" {
97 | return csrfFromCookie.Value
98 | } else {
99 | return r.Header.Get("X-CSRF-Token")
100 | }
101 | }
102 |
--------------------------------------------------------------------------------
/Models/CountEstimateHolder.go:
--------------------------------------------------------------------------------
1 | package Models
2 |
3 | type ZdbEstimateCountHolder struct {
4 | Count uint64
5 | }
6 |
--------------------------------------------------------------------------------
/Models/Counters.go:
--------------------------------------------------------------------------------
1 | package Models
2 |
3 | type Counters struct {
4 | TorrentCount int32 `gorm:"column:torrent_count"`
5 | FileCount int32 `gorm:"column:files_count"`
6 | TotalFilesSize int64 `gorm:"column:total_file_size"`
7 | LastScrapedId int32 `gorm:"column:last_scraped_id" json:"-"`
8 | LastTaggedTorrentId int32 `gorm:"column:last_taged_torrent_id" json:"-"`
9 | }
10 |
11 | func (Counters) TableName() string {
12 | return "realtime_counters"
13 | }
14 |
--------------------------------------------------------------------------------
/Models/Credentials.go:
--------------------------------------------------------------------------------
1 | package Models
2 |
3 | type Credentials struct {
4 | Username string `json:"username"`
5 | Password string `json:"password"`
6 | }
7 |
--------------------------------------------------------------------------------
/Models/Error.go:
--------------------------------------------------------------------------------
1 | package Models
2 |
3 | import "github.com/ruslanfedoseenko/dhtcrawler/Errors"
4 |
5 | type Error struct {
6 | Message string `json:"message"`
7 | Code int `json:"code"`
8 | }
9 |
10 | func NewError(code int) Error {
11 | return Error{
12 | Code: code,
13 | Message: Errors.ErrorText(code),
14 | }
15 | }
16 |
17 | func NewErrorAddText(code int, e error) Error {
18 | return Error{
19 | Code: code,
20 | Message: Errors.ErrorText(code) + " " + e.Error(),
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/Models/File.go:
--------------------------------------------------------------------------------
1 | package Models
2 |
3 | type File struct {
4 | Id int32 `gorm:"column:id;primary_key;AUTO_INCREMENT" json:"-"`
5 | Path string `gorm:"column:path;size:250"`
6 | Size int `gorm:"column:size"`
7 | TorrentId int32 `gorm:"index;column:torrent_id" json:"-"`
8 | }
9 |
10 | func (File) TableName() string {
11 | return "files"
12 | }
13 |
--------------------------------------------------------------------------------
/Models/FileTreeItem.go:
--------------------------------------------------------------------------------
1 | package Models
2 |
3 | import (
4 | "strings"
5 | )
6 |
7 | type FileTreeItem struct {
8 | Size int
9 | Name string
10 | Icon string `json:"icon"`
11 | Children []FileTreeItem `json:"Children,omitempty"`
12 | }
13 |
14 | func (fti *FileTreeItem) AddPath(path string, size int, icon string, folderIcon string) bool {
15 | pathParts := strings.Split(path, "/")
16 | pathPartsLen := len(pathParts)
17 | fti.Icon = folderIcon
18 | if fti.Name != pathParts[0] {
19 | return false
20 | }
21 | if len(fti.Children) == 0 {
22 | var currentItem = fti
23 | for i := 1; i < pathPartsLen; i++ {
24 | item := new(FileTreeItem)
25 | item.Name = pathParts[i]
26 | item.Size = size
27 |
28 | currentItem.Children = append(currentItem.Children, *item)
29 | currentItem = item
30 | if i == pathPartsLen-1 {
31 | currentItem.Icon = icon
32 | } else {
33 | currentItem.Icon = folderIcon
34 | }
35 | }
36 | } else {
37 | currentItem := fti
38 | for i := 1; i < pathPartsLen; i++ {
39 | childIndex := FindChild(&currentItem.Children, pathParts[i])
40 | if childIndex > -1 {
41 | tmpItem := &currentItem.Children[childIndex]
42 | tmpItem.Size += size
43 | currentItem = tmpItem
44 | continue
45 | }
46 | item := new(FileTreeItem)
47 | item.Name = pathParts[i]
48 | item.Size = size
49 |
50 | currentItem.Children = append(currentItem.Children, *item)
51 |
52 | currentItem = item
53 | if i == pathPartsLen-1 {
54 | currentItem.Icon = icon
55 | } else {
56 | currentItem.Icon = folderIcon
57 | }
58 | }
59 | }
60 | return true
61 | }
62 |
63 | func initFileTreeItem(path string, size int) FileTreeItem {
64 | pathParts := strings.Split(path, "/")
65 | var result = FileTreeItem{
66 | Name: pathParts[0],
67 | Size: size,
68 | }
69 | if len(pathParts) > 1 {
70 | result.AddPath(path, size, "insert_drive_file", "folder")
71 | } else {
72 |
73 | result.Icon = "insert_drive_file"
74 |
75 | }
76 |
77 | return result
78 | }
79 |
80 | func FindChild(items *[]FileTreeItem, name string) int {
81 | for index := 0; index < len(*items); index++ {
82 | if (*items)[index].Name == name {
83 | return index
84 | }
85 |
86 | }
87 | return -1
88 | }
89 |
90 | func BuildTree(fileList []File) []FileTreeItem {
91 | var result []FileTreeItem
92 |
93 | fileListLen := len(fileList)
94 | for i := 0; i < fileListLen; i++ {
95 | file := fileList[i]
96 | pathParts := strings.Split(file.Path, "/")
97 |
98 | childIndex := FindChild(&result, pathParts[0])
99 | if childIndex == -1 {
100 | result = append(result, initFileTreeItem(file.Path, file.Size))
101 | } else {
102 | result[childIndex].Size += file.Size
103 | result[childIndex].AddPath(file.Path, file.Size, "insert_drive_file", "folder")
104 | }
105 | }
106 |
107 | return result
108 | }
109 |
--------------------------------------------------------------------------------
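BuildTree is what converts a torrent's flat Files slice into the nested FilesTree that the torrent handlers return. A small self-contained sketch with made-up paths (the names and sizes are illustrative only):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/ruslanfedoseenko/dhtcrawler/Models"
)

func main() {
	files := []Models.File{
		{Path: "Show/Season 1/e01.mkv", Size: 700},
		{Path: "Show/Season 1/e02.mkv", Size: 710},
		{Path: "Show/readme.txt", Size: 1},
	}
	tree := Models.BuildTree(files)

	// The root "Show" node accumulates the sizes of everything below it
	// (700 + 710 + 1); leaf files get the "insert_drive_file" icon and
	// directories get "folder".
	out, _ := json.MarshalIndent(tree, "", "  ")
	fmt.Println(string(out))
}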
/Models/JwtRefreshToken.go:
--------------------------------------------------------------------------------
1 | package Models
2 |
3 | type JwtRefreshToken struct {
4 | Id int `gorm:"column:id;primary_key;AUTO_INCREMENT" json:"-"`
5 | Token string `gorm:"column:token;size:32"`
6 | }
7 |
--------------------------------------------------------------------------------
/Models/PaginatedTorrentsResponse.go:
--------------------------------------------------------------------------------
1 | package Models
2 |
3 | type PaginatedTorrentsResponse struct {
4 | Page uint64
5 | PageCount uint64
6 | ItemsCount uint64
7 | ItemsPerPage uint64
8 | Torrents []Torrent
9 | }
10 |
--------------------------------------------------------------------------------
/Models/ScrapeTorrentResult.go:
--------------------------------------------------------------------------------
1 | package Models
2 |
3 | import "github.com/lib/pq"
4 |
5 | type ScrapeTorrentResult struct {
6 | Id int64 `gorm:"column:id;primary_key;AUTO_INCREMENT" json:"-"`
7 | TrackerUrl string `gorm:"column:tracker_url"`
8 | Seeds int32 `gorm:"column:seeds"`
9 | Leaches int32 `gorm:"column:leechers"`
10 | TorrentId int32 `gorm:"column:torrent_id" json:"-"`
11 | LastUpdate pq.NullTime `gorm:"column:last_update"`
12 | }
13 |
14 | func (s *ScrapeTorrentResult) TableName() string {
15 | return "torrent_scrape_results"
16 | }
17 |
--------------------------------------------------------------------------------
/Models/SearchSuggestResponse.go:
--------------------------------------------------------------------------------
1 | package Models
2 |
3 | type SearchSuggestResponse struct {
4 | Suggestions []string `json:"data"`
5 | }
6 |
--------------------------------------------------------------------------------
/Models/Tag.go:
--------------------------------------------------------------------------------
1 | package Models
2 |
3 | type Tag struct {
4 | Id uint32
5 | Tag string
6 | }
7 |
8 | func (Tag) TableName() string {
9 | return "tags"
10 | }
11 |
--------------------------------------------------------------------------------
/Models/Title.go:
--------------------------------------------------------------------------------
1 | package Models
2 |
3 | import "database/sql/driver"
4 |
5 | type TitleType string
6 |
7 | const (
8 | TV TitleType = "tv"
9 | Movie TitleType = "movie"
10 | Game TitleType = "game"
11 | )
12 |
13 | func (u *TitleType) Scan(value interface{}) error { *u = TitleType(value.([]byte)); return nil }
14 | func (u TitleType) Value() (driver.Value, error) { return string(u), nil }
15 |
16 | type Title struct {
17 | Id uint32 `gorm:"column:id"`
18 | Year uint32 `gorm:"column:year"`
19 | Title string `gorm:"column:title"`
20 | TitleType TitleType `gorm:"column:title_type"`
21 | Description string `gorm:"column:description"`
22 | Ganres []string `gorm:"-"`
23 | PosterUrl string `gorm:"column:poster"`
24 | }
25 |
26 | func (Title) TableName() string {
27 | return "titles"
28 | }
29 |
--------------------------------------------------------------------------------
/Models/TokenClaims.go:
--------------------------------------------------------------------------------
1 | package Models
2 |
3 | import (
4 | "time"
5 | "github.com/dgrijalva/jwt-go"
6 | )
7 |
8 | type TokenClaims struct {
9 | jwt.StandardClaims
10 | Role string `json:"role"`
11 | Csrf string `json:"csrf"`
12 | }
13 |
14 | const (
15 | RefreshTokenValidTime = time.Hour * 72
16 | AuthTokenValidTime = time.Minute * 15
17 | )
18 |
19 |
--------------------------------------------------------------------------------
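The signing and verification logic lives in Services/jwt; because TokenClaims embeds jwt-go's StandardClaims it can be used with that library directly. The snippet below is only a hedged sketch of how claims like these are typically issued and checked with RSA keys; the function names are illustrative, and key loading, refresh tokens, and most error handling are omitted.

package main

import (
	"crypto/rsa"
	"time"

	"github.com/dgrijalva/jwt-go"
	"github.com/ruslanfedoseenko/dhtcrawler/Models"
)

// issueAuthToken signs a short-lived auth token for a user id.
func issueAuthToken(userId, role, csrf string, key *rsa.PrivateKey) (string, error) {
	claims := Models.TokenClaims{
		StandardClaims: jwt.StandardClaims{
			Subject:   userId,
			ExpiresAt: time.Now().Add(Models.AuthTokenValidTime).Unix(),
		},
		Role: role,
		Csrf: csrf,
	}
	return jwt.NewWithClaims(jwt.SigningMethodRS256, claims).SignedString(key)
}

// parseAuthToken validates the signature and expiry and returns the claims.
func parseAuthToken(tokenString string, pub *rsa.PublicKey) (*Models.TokenClaims, error) {
	claims := &Models.TokenClaims{}
	_, err := jwt.ParseWithClaims(tokenString, claims, func(t *jwt.Token) (interface{}, error) {
		return pub, nil
	})
	return claims, err
}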
/Models/Torrent.go:
--------------------------------------------------------------------------------
1 | package Models
2 |
3 | type Torrent struct {
4 | Id int32 `gorm:"primary_key;AUTO_INCREMENT" json:"-"`
5 | Infohash string `gorm:"size:40"`
6 | Name string `gorm:"column:name;size:250"`
7 | HasTag bool `gorm:"column:hastag;" json:"-"`
8 | ScraperResults []ScrapeTorrentResult `gorm:"ForeignKey:TorrentId" json:"TrackersInfo,omitempty"`
9 | Files []File `gorm:"ForeignKey:TorrentId" json:"Files"`
10 | FilesTree []FileTreeItem `gorm:"-" json:"FilesTree"`
11 | Titles []Title `gorm:"many2many:torrent_to_title" json:"Titles,omitempty"`
12 | Tags []Tag `gorm:"many2many:torrent_tags" json:"Tags,omitempty"`
13 | }
14 |
15 | func (Torrent) TableName() string {
16 | return "torrents"
17 | }
18 |
--------------------------------------------------------------------------------
/Models/TorrentStatsPart.go:
--------------------------------------------------------------------------------
1 | package Models
2 |
3 | import (
4 | "time"
5 | )
6 |
7 | type TorrentStatsPart struct {
8 | Id int `gorm:"column:id;primary_key;AUTO_INCREMENT" json:"-"`
9 | TorrentsCount int `gorm:"column:torrents_count"`
10 | FilesCount int `gorm:"column:files_count"`
11 | TotalFilesSize int64 `gorm:"column:files_size"`
12 | Date time.Time `gorm:"column:date"`
13 | }
14 |
15 | func (TorrentStatsPart) TableName() string {
16 | return "torrent_stats"
17 | }
18 |
--------------------------------------------------------------------------------
/Models/User.go:
--------------------------------------------------------------------------------
1 | package Models
2 |
3 | type User struct {
4 | Id int `gorm:"column:id;primary_key;AUTO_INCREMENT" json:"-"`
5 | Username string `json:"username"`
6 | PasswordHash string `json:"-"`
7 | Mail string `json:"mail"`
8 | Role string `json:"-"`
9 | }
10 |
--------------------------------------------------------------------------------
/Readme.md:
--------------------------------------------------------------------------------
1 | # BitTorrent Dht Crawler
2 | ## What is it?
3 | This is a distributed DHT crawling engine with search capabilities.
4 | Search is implemented on top of PostgreSQL and an Elasticsearch cluster.
5 |
6 |
7 |
--------------------------------------------------------------------------------
/Routers/AuthCurrentUserInfo.go:
--------------------------------------------------------------------------------
1 | package Routers
2 |
3 | import (
4 | "github.com/julienschmidt/httprouter"
5 | "net/http"
6 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
7 | "strconv"
8 | "encoding/json"
9 | "github.com/ruslanfedoseenko/dhtcrawler/Errors"
10 | )
11 |
12 | func AuthCurrentUserInfo(w http.ResponseWriter, r *http.Request, params httprouter.Params) {
13 | authUserIdStr := params.ByName("AuthUserId")
14 |
15 | authUserId, err := strconv.Atoi(authUserIdStr)
16 | if err != nil {
17 | w.WriteHeader(http.StatusBadRequest)
18 | var data,err = json.Marshal(Models.NewError(Errors.InvalidUsername))
19 |
20 | if err != nil {
21 | w.Write([]byte(err.Error()))
22 | return
23 | }
24 | w.Header().Set("Content-Type", "application/json")
25 | w.Write(data)
26 | return
27 | }
28 | var user Models.User
29 |
30 | err = App.Db.Where(&Models.User{
31 | Id: authUserId,
32 | }).First(&user).Error
33 |
34 | data, err := json.Marshal(&user)
35 |
36 | if err != nil {
37 | httpLog.Errorf(err.Error())
38 | }
39 |
40 | w.Header().Set("Content-Type", "application/json")
41 | w.Write(data)
42 | }
43 |
--------------------------------------------------------------------------------
/Routers/AuthLoginHandler.go:
--------------------------------------------------------------------------------
1 | package Routers
2 |
3 | import (
4 | "github.com/julienschmidt/httprouter"
5 | "net/http"
6 | "log"
7 | "github.com/ruslanfedoseenko/dhtcrawler/Services/jwt"
8 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
9 | "errors"
10 | "golang.org/x/crypto/bcrypt"
11 | "strconv"
12 | "encoding/json"
13 | "io/ioutil"
14 | "github.com/ruslanfedoseenko/dhtcrawler/Errors"
15 | )
16 |
17 | var UserNotFound = errors.New("User not found")
18 |
19 | func AuthLogoutHandler(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
20 | jwt.NullifyTokenCookies(&w, r)
21 | // use 302 to force browser to do GET request
22 | http.Redirect(w, r, "/#/login", 302)
23 | }
24 |
25 | func AuthLoginHandler(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
26 |
27 | var credentials Models.Credentials
28 | bytes, e := ioutil.ReadAll(r.Body)
29 | if e != nil {
30 | w.WriteHeader(http.StatusBadRequest)
31 | w.Write([]byte(e.Error()))
32 | return
33 | }
34 | json.Unmarshal(bytes, &credentials)
35 |
36 | user, uuid, loginErr := LogUserIn(credentials.Username, credentials.Password)
37 | log.Println(user, uuid, loginErr)
38 | if loginErr != nil {
39 | w.WriteHeader(http.StatusUnauthorized)
40 | if loginErr == UserNotFound {
41 | var data,err = json.Marshal(Models.NewError(Errors.InvalidUsername))
42 |
43 | if err != nil {
44 | w.Write([]byte(err.Error()))
45 | return
46 | }
47 | w.Header().Set("Content-Type", "application/json")
48 | w.Write(data)
49 | }
50 | } else {
51 | // no login err
52 | // now generate cookies for this user
53 | authTokenString, refreshTokenString, csrfSecret, err := jwt.CreateNewTokens(uuid, user.Role)
54 | if err != nil {
55 | http.Error(w, http.StatusText(500), 500)
56 | return
57 | }
58 | // set the cookies to these newly created jwt's
59 | jwt.SetAuthAndRefreshCookies(&w, r, authTokenString, refreshTokenString)
60 | w.Header().Set("X-CSRF-Token", csrfSecret)
61 |
62 | w.WriteHeader(http.StatusOK)
63 |
64 | var data, err2 = json.Marshal(user)
65 | if err2 != nil {
66 | w.Write([]byte(err2.Error()))
67 | return
68 | }
69 | w.Write(data)
70 | }
71 | }
72 | func LogUserIn(username string, password string) (user *Models.User,uuid string,err error) {
73 | userCount := 0
74 | user = &Models.User{}
75 |
76 | err = App.Db.Debug().Model(&Models.User{}).Where(&Models.User{
77 | Username: username,
78 | }).First(user).Count(&userCount).Error
79 | if err != nil {
80 | return
81 | }
82 |
83 | if userCount == 0 {
84 | err = UserNotFound
85 | return nil, "", err
86 | }
87 | log.Println(user)
88 |
89 | return user, strconv.Itoa(user.Id), checkPasswordAgainstHash(user.PasswordHash, password)
90 | }
91 |
92 | func checkPasswordAgainstHash(hash string, password string) error {
93 | return bcrypt.CompareHashAndPassword([]byte(hash), []byte(password))
94 | }
95 |
--------------------------------------------------------------------------------
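For reference, the login endpoint is wired up as POST /auth/login in Routers/Router.go; it expects the Credentials JSON from Models/Credentials.go and, on success, returns the user object together with the AuthToken/RefreshToken cookies and an X-CSRF-Token header. A hypothetical client-side sketch (the host and credentials are placeholders):

package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	body := bytes.NewBufferString(`{"username":"alice","password":"secret"}`)
	resp, err := http.Post("http://localhost:6060/auth/login", "application/json", body)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// The handler sets the auth/refresh cookies and returns the CSRF
	// secret in this header; both are needed for protected routes.
	fmt.Println("status:", resp.Status)
	fmt.Println("csrf:", resp.Header.Get("X-CSRF-Token"))
	for _, c := range resp.Cookies() {
		fmt.Println("cookie:", c.Name)
	}
}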
/Routers/AuthRegistrationHandler.go:
--------------------------------------------------------------------------------
1 | package Routers
2 |
3 | import (
4 | "github.com/julienschmidt/httprouter"
5 | "net/http"
6 | "encoding/json"
7 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
8 | "github.com/ruslanfedoseenko/dhtcrawler/Errors"
9 | "net/mail"
10 | "golang.org/x/crypto/bcrypt"
11 | "github.com/ruslanfedoseenko/dhtcrawler/Services/jwt"
12 | "strconv"
13 | )
14 |
15 | func AuthRegistrationHandler(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
16 | decoder := json.NewDecoder(r.Body)
17 | var user Models.User
18 | decoder.Decode(&user)
19 | if !validateUser(w, user) { return }
20 | user.Role = "user"
21 | e := saveUser(&user)
22 | if e != nil {
23 | err := Models.NewErrorAddText(Errors.InvalidMail, e)
24 | bytes, e := json.Marshal(err)
25 | if e != nil {
26 | w.Write([]byte(e.Error()))
27 | return
28 | }
29 | w.Header().Set("Content-Type", "application/json")
30 | w.Write(bytes)
31 | return
32 | }
33 |
34 | authTokenString, refreshTokenString, csrfSecret, err := jwt.CreateNewTokens(strconv.Itoa(user.Id), user.Role)
35 | if err != nil {
36 | http.Error(w, http.StatusText(500), 500)
37 | return
38 | }
39 | // set the cookies to these newly created jwt's
40 | jwt.SetAuthAndRefreshCookies(&w, r, authTokenString, refreshTokenString)
41 | w.Header().Set("X-CSRF-Token", csrfSecret)
42 |
43 | w.WriteHeader(http.StatusOK)
44 | }
45 | func saveUser(user *Models.User) (err error) {
46 |
47 | user.PasswordHash, err = generateBcryptHash(user.PasswordHash)
48 | if err != nil {
49 | return
50 | }
51 | err = App.Db.Create(user).Error
52 | return
53 | }
54 |
55 | func generateBcryptHash(password string) (string, error) {
56 | hash, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)
57 | return string(hash[:]), err
58 | }
59 |
60 | // validateUser writes an error response and returns false when the supplied user is not acceptable.
61 | func validateUser(w http.ResponseWriter, user Models.User) bool {
62 | userCount := 0
63 | App.Db.Model(&Models.User{}).Where(&Models.User{
64 | Username: user.Username,
65 | }).Count(&userCount)
66 | if userCount > 0 {
67 | w.WriteHeader(http.StatusBadRequest)
68 | bytes, e := json.Marshal(Models.NewError(Errors.UserNameAlreadyExists))
69 | if e != nil {
70 | w.Write([]byte(e.Error()))
71 | return false
72 | }
73 | w.Header().Set("Content-Type", "application/json")
74 | w.Write(bytes)
75 | return false
76 | }
77 |
78 | App.Db.Model(&Models.User{}).Where(&Models.User{
79 | Mail: user.Mail,
80 | }).Count(&userCount)
81 |
82 | if userCount > 0 {
83 | w.WriteHeader(http.StatusBadRequest)
84 | bytes, e := json.Marshal(Models.NewError(Errors.UserEmailIsUsed))
85 | if e != nil {
86 | w.Write([]byte(e.Error()))
87 | return false
88 | }
89 | w.Header().Set("Content-Type", "application/json")
90 | w.Write(bytes)
91 | return false
92 | }
93 | _, e := mail.ParseAddress(user.Mail)
94 | if e != nil {
95 | err := Models.NewErrorAddText(Errors.InvalidMail, e)
96 | bytes, e := json.Marshal(err)
97 | if e != nil {
98 | w.Write([]byte(e.Error()))
99 | return false
100 | }
101 | w.Header().Set("Content-Type", "application/json")
102 | w.Write(bytes)
103 | return false
104 | }
105 | return true
106 | }
--------------------------------------------------------------------------------
/Routers/Router.go:
--------------------------------------------------------------------------------
1 | package Routers
2 |
3 | import (
4 | "github.com/abbot/go-http-auth"
5 | "github.com/julienschmidt/httprouter"
6 | "github.com/ruslanfedoseenko/dhtcrawler/Config"
7 | "net/http"
8 | "path"
9 | "github.com/ruslanfedoseenko/dhtcrawler/Middleware"
10 | "github.com/op/go-logging"
11 | )
12 |
13 | var App *Config.App
14 |
15 | type HostSwitch map[string]http.Handler
16 | var RouterLog = logging.MustGetLogger("Router")
17 | // Implement the ServerHTTP method on our new type
18 | func (hs HostSwitch) ServeHTTP(w http.ResponseWriter, r *http.Request) {
19 | // Check if a http.Handler is registered for the given host.
20 | // If yes, use it to handle the request.
21 | //log.Println("Trying to serve host", r.Host)
22 | if handler := hs[r.Host]; handler != nil {
23 | handler.ServeHTTP(w, r)
24 | } else {
25 | // Handle host names for which no handler is registered
26 | http.Error(w, "Forbidden", 403) // Or Redirect?
27 | }
28 | }
29 |
30 | func Secret(user, realm string) string {
31 | users := map[string]string{
32 | "admin": "$apr1$upxZKqIX$nHMIFp/bAjG8VuZpIzKEP1",
33 | }
34 |
35 | if a, ok := users[user]; ok {
36 | return a
37 | }
38 | return ""
39 | }
40 | func setupAdminHandlers(router *httprouter.Router) {
41 | authenticator := auth.NewBasicAuthenticator("Search Admin", Secret)
42 | adminFolderPath := path.Join(App.Config.HttpConfig.StaticDataFolder, "admin")
43 | strippedHandler := http.StripPrefix(
44 | "/admin",
45 | http.FileServer(http.Dir(adminFolderPath)),
46 | )
47 |
48 | router.HandlerFunc("GET", "/admin/*filepath",
49 | auth.JustCheck(
50 | authenticator,
51 | strippedHandler.ServeHTTP),
52 | )
53 |
54 | }
55 | func Setup(app *Config.App) {
56 | App = app
57 | go func() {
58 | router := httprouter.New()
59 | router.GET("/search/suggest/:term", SearchSuggestHandler)
60 | router.GET("/torrents/count/", TorrentCountHandler)
61 | router.GET("/torrent/info/:infohash", TorrentInfoHandler)
62 | router.GET("/torrents/", TorrentsListHandler)
63 | router.GET("/torrents/stats/", TorrentStatsHandler)
64 | router.GET("/torrents/page/:pageNumber", TorrentsListHandler)
65 | router.GET("/torrents/search/:term", TorrentSearchHandler)
66 | router.GET("/torrents/search/:term/page/:pageNumber", TorrentSearchHandler)
67 | RouterLog.Info("Starting Http Backend")
68 | setupAdminHandlers(router)
69 | setupAuthHandlers(router)
70 | router.ServeFiles("/index/*filepath", http.Dir(App.Config.HttpConfig.StaticDataFolder))
71 |
72 | hs := make(HostSwitch)
73 | hs["btoogle.com"] = router
74 | hs["www.btoogle.com"] = router
75 | hs["localhost:6060"] = router
76 |
77 | http.ListenAndServe(":6060", hs)
78 |
79 | }()
80 | }
81 | func setupAuthHandlers(router *httprouter.Router) {
82 | router.POST("/auth/login", AuthLoginHandler)
83 | router.POST("/auth/register", AuthRegistrationHandler)
84 | router.GET("/auth/logout", Middleware.AuthRequest(AuthLogoutHandler))
85 | router.GET("/auth/currentUser", Middleware.AuthRequest(AuthCurrentUserInfo))
86 | }
87 |
--------------------------------------------------------------------------------
/Routers/SearchSuggestHandler.go:
--------------------------------------------------------------------------------
1 | // @SubApi Search Helper API [/search]
2 | package Routers
3 |
4 | import (
5 | "encoding/json"
6 | "github.com/julienschmidt/httprouter"
7 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
8 | "net/http"
9 | "strings"
10 | )
11 |
12 | // @Title SearchSuggestHandler
13 | // @Description retrieves suggestion for input search string
14 | // @Accept json
15 | // @Produce json
16 | // @Param term path string true "Value of the search input"
17 | // @Success 200 {object} dhtcrawler.Models.SearchSuggestResponse
18 | // @Failure 400 {object} error "Search term must be specified"
19 | // @Router /search/suggest/{term} [get]
20 |
21 | func SearchSuggestHandler(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
22 | searchTerm := ps.ByName("term")
23 |
24 | var torrents []Models.Torrent
25 |
26 | words := strings.Split(strings.Trim(searchTerm, " "), " ")
27 | lastWord := words[len(words)-1]
28 | // Bind the search term as a query parameter so it cannot inject SQL
29 | // into the zdb_termlist call.
30 | App.Db.Debug().
31 | Raw("SELECT term AS name FROM zdb_termlist('torrents', 'name_autocomplete', ?, NULL, 5000)"+
32 | " ORDER BY totalfreq DESC LIMIT 25", lastWord).
33 | Scan(&torrents)
34 | var response Models.SearchSuggestResponse
35 | response.Suggestions = make([]string, len(torrents))
36 | for i, torrent := range torrents {
37 | response.Suggestions[i] = torrent.Name
38 | }
39 | data, err := json.Marshal(response)
40 |
41 | if err != nil {
42 | httpLog.Errorf(err.Error())
43 | }
44 |
45 | w.Header().Set("Content-Type", "application/json")
46 | w.Write(data)
47 | }
48 |
--------------------------------------------------------------------------------
/Routers/TorrentCountHandler.go:
--------------------------------------------------------------------------------
1 | package Routers
2 |
3 | import (
4 | "encoding/json"
5 | "github.com/julienschmidt/httprouter"
6 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
7 | "net/http"
8 | )
9 |
10 | func TorrentCountHandler(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
11 |
12 | var counters Models.Counters
13 |
14 | err := App.Db.Model(&Models.Counters{}).First(&counters).Error
15 |
16 | if err != nil {
17 | httpLog.Error(err.Error())
18 | }
19 | data, err := json.Marshal(counters)
20 | if err != nil {
21 | httpLog.Error(err.Error())
22 | }
23 | w.Header().Set("Content-Type", "application/json")
24 | w.Write(data)
25 |
26 | }
27 |
--------------------------------------------------------------------------------
/Routers/TorrentInfoHandler.go:
--------------------------------------------------------------------------------
1 | package Routers
2 |
3 | import (
4 | "encoding/json"
5 | "github.com/julienschmidt/httprouter"
6 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
7 | "net/http"
8 | )
9 |
10 | func TorrentInfoHandler(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
11 |
12 | var infohash string = ps.ByName("infohash")
13 | if len(infohash) != 40 {
14 | httpLog.Errorf("Invalid infohash specified: %s", infohash)
15 | w.WriteHeader(http.StatusBadRequest)
16 | return
17 | }
18 | var torrent Models.Torrent
19 | App.Db.
20 | Preload("Files").
21 | Preload("Titles").
22 | Preload("Tags").
23 | Preload("ScraperResults").
24 | First(&torrent, map[string]interface{}{"infohash": infohash})
25 |
26 | torrent.FilesTree = Models.BuildTree(torrent.Files)
27 |
28 | data, err := json.Marshal(torrent)
29 |
30 | if err != nil {
31 | httpLog.Error(err.Error())
32 | }
33 |
34 | w.Header().Set("Content-Type", "application/json")
35 | w.Write(data)
36 |
37 | }
38 |
--------------------------------------------------------------------------------
/Routers/TorrentSearchHandler.go:
--------------------------------------------------------------------------------
1 | package Routers
2 |
3 | import (
4 | "encoding/json"
5 | "github.com/julienschmidt/httprouter"
6 | "github.com/op/go-logging"
7 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
8 | "github.com/ruslanfedoseenko/dhtcrawler/Utils"
9 | "net/http"
10 | "strconv"
11 | )
12 |
13 | var searchLog = logging.MustGetLogger("SearchHandler")
14 |
15 | func TorrentSearchHandler(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
16 | searchTerm := ps.ByName("term")
17 | searchLog.Info("Search Term:", searchTerm)
18 | pageNumberStr := ps.ByName("pageNumber")
19 | var page uint64 = 1
20 | var err error
21 | if len(pageNumberStr) > 0 {
22 | page, err = strconv.ParseUint(pageNumberStr, 10, 64)
23 | if err != nil {
24 | httpLog.Errorf("Parsing Error %s", err.Error())
25 | w.WriteHeader(http.StatusBadRequest)
26 | return
27 | }
28 | if page < 1 {
29 | w.WriteHeader(http.StatusBadRequest)
30 | return
31 | }
32 | }
33 | if len(searchTerm) == 0 {
34 | w.WriteHeader(http.StatusBadRequest)
35 | return
36 | }
37 | var torrents []Models.Torrent
38 | var itemsCount uint64 = 0
39 | itemsPerPage := App.Config.ItemsPerPage
40 |
41 | nameQuery := Utils.ZdbBuildQuery("name", searchTerm, (page-1)*itemsPerPage, itemsPerPage)
42 |
43 | var countHolder Models.ZdbEstimateCountHolder
44 | App.Db.Debug().Raw("select zdb_estimate_count as count from zdb_estimate_count('torrents'," + nameQuery + ")").Scan(&countHolder)
45 | itemsCount = countHolder.Count
46 | App.Db.Debug().
47 | Preload("ScraperResults").
48 | Preload("Files").
49 | Preload("Titles").
50 | Preload("Tags").
51 | Model(Models.Torrent{}).
52 | Where("zdb('torrents', ctid) ==> " + nameQuery).
53 | Order("zdb_score('torrents', torrents.ctid) desc").
54 | Find(&torrents)
55 |
56 | for i := range torrents {
57 |
58 | torrents[i].FilesTree = Models.BuildTree(torrents[i].Files)
59 |
60 | }
61 |
62 | var pageCountFix uint64 = 0
63 | if itemsCount%itemsPerPage != 0 {
64 | pageCountFix = 1
65 | }
66 | paginatedResponse := Models.PaginatedTorrentsResponse{
67 | Torrents: torrents,
68 | PageCount: itemsCount/itemsPerPage + pageCountFix,
69 | ItemsCount: itemsCount,
70 | Page: page,
71 | ItemsPerPage: itemsPerPage,
72 | }
73 | data, err := json.Marshal(paginatedResponse)
74 |
75 | if err != nil {
76 | httpLog.Error(err.Error())
77 | }
78 |
79 | w.Header().Set("Content-Type", "application/json")
80 | w.Write(data)
81 | }
82 |
--------------------------------------------------------------------------------
/Routers/TorrentsListHandler.go:
--------------------------------------------------------------------------------
1 | package Routers
2 |
3 | import (
4 | "encoding/json"
5 | "github.com/julienschmidt/httprouter"
6 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
7 | "log"
8 | "net/http"
9 | "strconv"
10 | "github.com/op/go-logging"
11 | )
12 |
13 | var httpLog = logging.MustGetLogger("httpLog")
14 |
15 | func TorrentsListHandler(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
16 | var page uint64 = 1
17 | var err error
18 | pageNumberStr := ps.ByName("pageNumber")
19 | if len(pageNumberStr) > 0 {
20 |
21 | page, err = strconv.ParseUint(pageNumberStr, 10, 64)
22 | if err != nil {
23 | log.Println("Parsing Error", err.Error())
24 | w.WriteHeader(http.StatusBadRequest)
25 | return
26 | }
27 | if page < 1 {
28 | w.WriteHeader(http.StatusBadRequest)
29 | return
30 | }
31 | }
32 |
33 | var itemsPerPage uint64 = App.Config.ItemsPerPage
34 | var torrents = make([]Models.Torrent, 0, itemsPerPage)
35 | var itemsCount uint64
36 |
37 | row := App.Db.Debug().Table("realtime_counters").Select("torrent_count").Row()
38 | row.Scan(&itemsCount)
39 | err = App.Db.Debug().
40 | Preload("Files").Preload("Titles").Preload("Tags").
41 | Model(&Models.Torrent{}).
42 | Limit(itemsPerPage).
43 | Offset((page - uint64(1)) * itemsPerPage).Order("id desc").
44 | Find(&torrents).Error
45 |
46 | for i := range torrents {
47 | torrents[i].FilesTree = Models.BuildTree(torrents[i].Files)
48 | }
49 |
50 | if err != nil {
51 | httpLog.Errorf("Error: %q", err.Error())
52 | }
53 | httpLog.Debugf("Total items count: %d", itemsCount)
54 |
55 | var pageCountFix uint64 = 0
56 | if itemsCount%itemsPerPage != 0 {
57 | pageCountFix = 1
58 | }
59 | paginatedResponse := Models.PaginatedTorrentsResponse{
60 | Torrents: torrents,
61 | PageCount: itemsCount/itemsPerPage + pageCountFix,
62 | ItemsCount: itemsCount,
63 | Page: page,
64 | ItemsPerPage: itemsPerPage,
65 | }
66 | data, err := json.Marshal(paginatedResponse)
67 |
68 | if err != nil {
69 | log.Println(err.Error())
70 | }
71 |
72 | w.Header().Set("Content-Type", "application/json")
73 | w.Write(data)
74 |
75 | }
76 |
--------------------------------------------------------------------------------
/Routers/TorrentsStatsHandler.go:
--------------------------------------------------------------------------------
1 | package Routers
2 |
3 | import (
4 | "encoding/json"
5 | "github.com/julienschmidt/httprouter"
6 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
7 | "log"
8 | "net/http"
9 | )
10 |
11 | func TorrentStatsHandler(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
12 | var stats []Models.TorrentStatsPart
13 |
14 | App.Db.Model(&Models.TorrentStatsPart{}).Order("id desc").Limit(24).Find(&stats)
15 |
16 | statsLen := len(stats)
17 | // the query returned newest first; reverse into chronological order
18 | for i, j := 0, statsLen-1; i < j; i, j = i+1, j-1 {
19 | stats[i], stats[j] = stats[j], stats[i]
20 | }
21 |
22 | data, err := json.Marshal(stats)
23 |
24 | if err != nil {
25 | log.Println(err.Error())
26 | }
27 |
28 | w.Header().Set("Content-Type", "application/json")
29 | w.Write(data)
30 |
31 | }
32 |
--------------------------------------------------------------------------------
/RpcMaster/RpcMaster.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import (
4 | "github.com/ruslanfedoseenko/dhtcrawler/Config"
5 | "github.com/ruslanfedoseenko/dhtcrawler/Services"
6 | )
7 |
8 |
9 |
10 | func main() {
11 |
12 | app := Config.NewApp()
13 |
14 | Services.SetupRpc(app)
15 | app.Run()
16 | }
17 |
18 |
--------------------------------------------------------------------------------
/Services/DhtCrawler.go:
--------------------------------------------------------------------------------
1 | package Services
2 |
3 | import (
4 | "encoding/hex"
5 | "fmt"
6 | "github.com/op/go-logging"
7 | "github.com/ruslanfedoseenko/dhtcrawler/Config"
8 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
9 | "github.com/ruslanfedoseenko/dhtcrawler/Services/rpc"
10 | "github.com/ruslanfedoseenko/dhtcrawler/Utils"
11 | "github.com/ruslanfedoseenko/dht"
12 | "unicode/utf8"
13 | "strings"
14 | "github.com/saintfish/chardet"
15 | "gopkg.in/iconv.v1"
16 | "strconv"
17 | )
18 |
19 | type DhtCrawlingService struct {
20 | dht []*dht.DHT
21 | //wire *dht.Wire
22 | config *dht.Config
23 | rpcClient *Rpc.RpcClient
24 | shouldStop bool
25 | }
26 |
27 | var dhtLog = logging.MustGetLogger("DhtCrawler")
28 | var App *Config.App
29 | var dhtCrawlingSvc DhtCrawlingService
30 |
31 | func SetupDhtCrawling(app *Config.App) {
32 | App = app
33 | dhtCrawlingSvc = DhtCrawlingService{
34 | rpcClient: Rpc.GetRpcCLientInstance(app),
35 | }
36 | App.AddService(dhtCrawlingSvc)
37 |
38 | }
39 | var charsetAliases = map[string]string{
40 | "GB-18030": "GB18030",
41 | "ISO-8859-8-I": "ISO-8859-8",
42 | "IBM420_ltr" : "IBM-420",
43 | "IBM420_rtl" : "IBM-420",
44 | "IBM424_ltr" : "IBM-424",
45 | "IBM424_rtl" : "IBM-424",
46 |
47 | }
48 | var detector = chardet.NewTextDetector()
49 |
50 | func (svc DhtCrawlingService) Start() {
51 | svc.rpcClient.Start()
52 |
53 | svc.dht = make([]*dht.DHT, App.Config.DhtConfig.Workers)
54 | for i := 0; i < App.Config.DhtConfig.Workers; i++ {
55 | wire := dht.NewWire(65536, 6144, 6144)
56 | var config = dht.NewCrawlConfig()
57 | config.MaxNodes = 70000
58 | config.Address = fmt.Sprintf(":%d", App.Config.DhtConfig.StartPort+i)
59 | dhtLog.Info("Starting Dht Crawling On ", App.Config.DhtConfig.StartPort+i)
60 |
61 | config.OnAnnouncePeer = func(infoHash, ip string, port int) {
62 | go func(infoHash, ip string, port int) {
63 | var infoHashStr = hex.EncodeToString([]byte(infoHash))
64 |
65 | if ok, _ := svc.rpcClient.HasTorrent(infoHashStr); !ok {
66 | wire.Request([]byte(infoHash), ip, port)
67 | }
68 | }(infoHash, ip, port)
69 |
70 | }
71 | svc.dht[i] = dht.New(config)
72 |
73 | go func() {
74 | for resp := range wire.Response() {
75 |
76 | metadata, err := dht.Decode(resp.MetadataInfo)
77 | if err != nil {
78 | continue
79 | }
80 | var info map[string]interface{}
81 | var ok bool
82 | if info, ok = metadata.(map[string]interface{}); !ok {
83 | continue
84 | }
85 |
86 | if _, ok = info["name"]; !ok {
87 | continue
88 | }
89 | var name string
90 |
91 | if name, ok = info["name"].(string); !ok {
92 | dhtLog.Error("Info section has name but it is not a string", info)
93 | continue
94 | }
95 | r, err := detector.DetectBest([]byte(name))
96 | if err == nil {
97 | if r.Charset != "UTF-8" {
98 | name = convertStringToUtf8(r, name)
99 | }
100 | } else {
101 | dhtLog.Info("Error Detecting charset", err)
102 | }
103 | if !utf8.ValidString(name) {
104 | dhtLog.Error("Name string is not valid utf8 string:", name)
105 | }
106 | bt := Models.Torrent{
107 | Infohash: hex.EncodeToString(resp.InfoHash),
108 | Name: name,
109 | }
110 | var v interface{}
111 | if v, ok = info["files"]; ok {
112 |
113 | var files []interface{}
114 | if files, ok = v.([]interface{}); !ok {
115 | continue
116 | }
117 |
118 | bt.Files = make([]Models.File, len(files))
119 |
120 | for i, item := range files {
121 | var f map[string]interface{}
122 | if f, ok = item.(map[string]interface{}); !ok {
123 | continue
124 | }
125 | if f == nil || f["path"] == nil {
126 | continue
127 | }
128 | pathString := Utils.SliceToPathString(f["path"].([]interface{}))
129 | if strings.Contains(pathString, "___padding") {
130 | continue
131 | }
132 | r, err := detector.DetectBest([]byte(pathString))
133 | if err == nil {
134 | if r.Charset != "UTF-8" {
135 | pathString = convertStringToUtf8(r, pathString)
136 | }
137 | } else {
138 | dhtLog.Info("Error Detecting charset", err)
139 | }
140 | if !utf8.ValidString(pathString) {
141 | dhtLog.Error("Path string is not valid utf8 string:", pathString)
142 | }
143 | var size int
144 | if size, ok = f["length"].(int); !ok {
145 | if sizeStr, ok := f["length"].(string); ok {
146 | size,err = strconv.Atoi(sizeStr)
147 | if err != nil {
148 | dhtLog.Error("Failed to convert", sizeStr, "to int")
149 | size = -1
150 | }
151 | } else {
152 | size = -1
153 | }
154 | }
155 | bt.Files[i] = Models.File{
156 | Path: pathString,
157 | Size: size,
158 | }
159 |
160 | }
161 |
162 | } else if _, ok := info["length"]; ok {
163 | bt.Files = make([]Models.File, 1)
164 | bt.Files[0] = Models.File{
165 | Path: bt.Name,
166 | Size: info["length"].(int),
167 | }
168 |
169 | }
170 |
171 | svc.rpcClient.AddTorrent(&bt)
172 | /*var count int
173 |
174 | App.Db.Model(&Models.Torrent{}).Where(Models.Torrent{Infohash: bt.Infohash}).Count(&count)
175 |
176 | if count > 0 {
177 | continue
178 | } else {
179 | //dhtLog.Println("Found new torrent", bt.Infohash, "group", bt.Group.Name, "group_id", bt.GroupId)
180 | for _, title := range bt.Titles {
181 | dhtLog.Println("Inserting title", title)
182 | App.Db.Debug().Exec("INSERT INTO `titles`(`id`, `title`, `poster`, `title_type`, `description`) " +
183 | "VALUES (?,?,?,?,?) ON DUPLICATE KEY " +
184 | "UPDATE `title`=VALUES(`title`),`poster`=VALUES(`poster`),`title_type`=VALUES(`title_type`),`description`=VALUES(`description`)", title.Id, title.Title,title.PosterUrl, title.TitleType, title.Description)
185 | }
186 | err = App.Db.Create(&bt).Error
187 | if err != nil {
188 | dhtLog.Error("Torrent Add to DB err:", err.Error())
189 | }
190 | }*/
191 |
192 | }
193 | }()
194 | go wire.Run()
195 | go svc.dht[i].Run()
196 | }
197 | }
198 | func convertStringToUtf8(r *chardet.Result, name string) string {
199 | charset := r.Charset
200 | if alias, ok := charsetAliases[charset]; ok {
201 | charset = alias
202 | }
203 | converter, err := iconv.Open("UTF-8", charset)
204 | if err != nil {
205 | dhtLog.Error("Unable to convert string", name, " from", charset, "to UTF-8")
206 | } else {
207 | defer converter.Close()
208 | name = converter.ConvString(name)
209 | }
210 | return name
211 | }
212 |
--------------------------------------------------------------------------------
/Services/RpcCommon.go:
--------------------------------------------------------------------------------
1 | package Services
2 |
3 | import (
4 | "github.com/op/go-logging"
5 | "github.com/ruslanfedoseenko/dhtcrawler/Config"
6 | "github.com/ruslanfedoseenko/dhtcrawler/Services/rpc"
7 | )
8 |
9 | var rpcLog = logging.MustGetLogger("RpcCommon")
10 |
11 | func SetupRpc(app *Config.App) {
12 | if app.Config.RpcConfig.Mode == "" {
13 | rpcLog.Error("RpcConfig section missing")
14 | return
15 | }
16 |
17 | switch app.Config.RpcConfig.Mode {
18 | case Config.CLIENT:
19 | {
20 | rpcLog.Error("Invalid RpcConfig.Mode value")
21 | }
22 | case Config.SERVER:
23 | {
24 | Rpc.SetupRpcServer(app)
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/Services/ScrapeWork.go:
--------------------------------------------------------------------------------
1 | package Services
2 |
3 | type work struct {
4 | startId int32
5 | count int32
6 | }
7 |
--------------------------------------------------------------------------------
/Services/TagProducer/TorrentTagsProducer.go:
--------------------------------------------------------------------------------
1 | package TagProducer
2 |
3 | import (
4 | "container/list"
5 | "github.com/jinzhu/gorm"
6 | "github.com/op/go-logging"
7 | "github.com/ruslanfedoseenko/dhtcrawler/Config"
8 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
9 | "github.com/ruslanfedoseenko/dhtcrawler/Utils"
10 | "gopkg.in/fatih/set.v0"
11 | "hash/crc32"
12 | "strings"
13 | "time"
14 | )
15 |
16 | var TagProducerLog = logging.MustGetLogger("TagProducer")
17 |
18 | type TorrentTagsProducer struct {
19 | tagProducers *list.List
20 | specialTagProducers *list.List
21 | db *gorm.DB
22 | }
23 |
24 | var tagLog = logging.MustGetLogger("TagProducer")
25 |
26 | func NewTorrentTagsProducer(app *Config.App) (p *TorrentTagsProducer) {
27 | p = &TorrentTagsProducer{
28 | tagProducers: list.New(),
29 | specialTagProducers: list.New(),
30 | db: app.Db,
31 | }
32 | p.initTokenExistenceProducers(app)
33 | p.initSpecialProducers(app)
34 | go p.processQueue()
35 | return
36 | }
37 |
38 | func (t *TorrentTagsProducer) initSpecialProducers(app *Config.App) {
39 | t.specialTagProducers.PushBack(NewVideoTagProducer(app))
40 | }
41 | func (t *TorrentTagsProducer) initTokenExistenceProducers(app *Config.App) {
42 | for key, value := range app.Config.TagProducersConfig.TagProducers {
43 | producer := tokenExistenceTagProducer{
44 | tag: key,
45 | tokens: convertValues(value),
46 | }
47 | t.tagProducers.PushBack(producer)
48 | }
49 | }
50 | func convertValues(tokens []string) (res map[string]uint32) {
51 | res = make(map[string]uint32)
52 | tokenLen := len(tokens)
53 | for i := 0; i < tokenLen; i++ {
54 | res[tokens[i]] = 0
55 | }
56 | return
57 | }
58 | func tokenizer(c rune) bool {
59 | return strings.ContainsRune(";., -_|[]()/\\", c)
60 | }
61 |
62 | func (t *TorrentTagsProducer) processQueue() {
63 | TagProducerLog.Info("Starting processing queue")
64 | for {
65 | var torrents []Models.Torrent
66 | TagProducerLog.Info("Loading torrents")
67 | err := t.db.Table("torrents t").Preload("Files").
68 | Select("t.*").
69 | Where("t.hastag = false").
70 | Order("t.id asc").
71 | Limit(25).
72 | Find(&torrents).Error
73 | if err != nil {
74 | TagProducerLog.Error("Error while loading torrents:", err)
75 | }
76 | torrentsLen := len(torrents)
77 | if torrentsLen == 0 {
78 | 			TagProducerLog.Info("No torrents found, waiting 5 seconds...")
79 | <-time.After(time.Second * 5)
80 | continue
81 | }
82 | 		TagProducerLog.Info("Found", torrentsLen, "torrents with no tags. First id:", torrents[0].Id, "Last Id:", torrents[torrentsLen-1].Id)
83 | for i := 0; i < torrentsLen; i++ {
84 | TagProducerLog.Info("Processing torrent:", torrents[i].Name)
85 | t.FillTorrentTags(&torrents[i])
86 |
87 | TagProducerLog.Info("Appending Tags:", torrents[i].Tags)
88 | torrents[i].HasTag = len(torrents[i].Tags) > 0
89 | t.db.Set("gorm:save_associations", false).Save(&torrents[i])
90 | err = t.db.Model(&torrents[i]).Association("Tags").Append(torrents[i].Tags).Error
91 | if err != nil {
92 | TagProducerLog.Error("Error while appending torrent tags:", err)
93 | }
94 | err = t.db.Model(&torrents[i]).Association("Titles").Append(torrents[i].Titles).Error
95 | if err != nil {
96 | TagProducerLog.Error("Error while appending torrent Titles:", err)
97 | }
98 | }
99 | }
100 |
101 | }
102 |
103 | func (t *TorrentTagsProducer) getTorrentTokens(torrent *Models.Torrent) []string {
104 | tokens := set.New(set.NonThreadSafe)
105 | for _, file := range torrent.Files {
106 | tokens.Add(Utils.ToInterfaceSlice(strings.FieldsFunc(strings.ToLower(file.Path), tokenizer))...)
107 | }
108 | tokens.Add(Utils.ToInterfaceSlice(strings.FieldsFunc(strings.ToLower(torrent.Name), tokenizer))...)
109 | return set.StringSlice(tokens)
110 | }
111 |
112 | func (t *TorrentTagsProducer) FillTorrentTags(torrent *Models.Torrent) {
113 | torrentTokens := t.getTorrentTokens(torrent)
114 |
115 | tagsBulkInsertArgs := make([]interface{}, 0, 20)
116 | tagsBulkInsertSql := "INSERT INTO tags (id, tag) VALUES "
117 | for e := t.tagProducers.Front(); e != nil; e = e.Next() {
118 | tagProducer := e.Value.(tokenExistenceTagProducer)
119 | if tagProducer.SatisfyTag(torrentTokens) {
120 | tag := tagProducer.GetTag()
121 | torrentTokens = append(torrentTokens, tag)
122 | tagModel := Models.Tag{
123 | Id: crc32.ChecksumIEEE(([]byte)(tag)),
124 | Tag: tag,
125 | }
126 | var tagCount int
127 | t.db.Model(&Models.Tag{}).Where(&Models.Tag{Id: tagModel.Id}).Count(&tagCount)
128 | if tagCount == 0 {
129 | tagsBulkInsertArgs = append(tagsBulkInsertArgs, tagModel.Id, tagModel.Tag)
130 | tagsBulkInsertSql += " (?, ?),"
131 | }
132 | torrent.Tags = append(torrent.Tags, tagModel)
133 |
134 | }
135 | }
136 | for e := t.specialTagProducers.Front(); e != nil; e = e.Next() {
137 | tagProducer := e.Value.(specialTagProducer)
138 |
139 | if tagProducer.SatisfyTag(torrentTokens) {
140 | tags := tagProducer.GetTags(torrent)
141 | videoLog.Debug("Titles after exit GetTags", torrent.Titles)
142 | torrentTokens = append(torrentTokens, tags...)
143 | for i := 0; i < len(tags); i++ {
144 | tagModel := Models.Tag{
145 | Id: crc32.ChecksumIEEE(([]byte)(tags[i])),
146 | Tag: tags[i],
147 | }
148 | torrent.Tags = append(torrent.Tags, tagModel)
149 | var tagCount int
150 | t.db.Model(&Models.Tag{}).Where(&Models.Tag{Id: tagModel.Id}).Count(&tagCount)
151 | if tagCount == 0 {
152 | tagsBulkInsertArgs = append(tagsBulkInsertArgs, tagModel.Id, tagModel.Tag)
153 | tagsBulkInsertSql += " (?, ?),"
154 | }
155 | }
156 |
157 | }
158 |
159 | }
160 | if len(torrent.Tags) == 0 {
161 | var tag = "Other"
162 | tagModel := Models.Tag{
163 | Id: crc32.ChecksumIEEE(([]byte)(tag)),
164 | Tag: tag,
165 | }
166 | torrent.Tags = append(torrent.Tags, tagModel)
167 |
168 | }
169 | 	if len(tagsBulkInsertArgs) == 0 { return } // nothing new to insert
169 | 	tagsBulkInsertSql = strings.TrimRight(tagsBulkInsertSql, ",")
170 | 	tagLog.Info("Inserting tags:", tagsBulkInsertArgs)
171 | 	err := t.db.Exec(tagsBulkInsertSql, tagsBulkInsertArgs...).Error
172 | if err != nil {
173 | tagLog.Error("Failed to perform bulk insert", err)
174 | return
175 | }
176 | }
177 |
--------------------------------------------------------------------------------
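
FillTorrentTags builds its multi-row insert by appending a "(?, ?)," placeholder plus two arguments per new tag and trimming the trailing comma. The same idea, extracted into a self-contained sketch with an explicit guard for the nothing-to-insert case; buildBulkInsert is a hypothetical helper and not part of the project.

package main

import (
	"fmt"
	"strings"
)

// buildBulkInsert returns a multi-row INSERT statement and its flattened
// arguments; ok is false when there are no rows to insert.
func buildBulkInsert(table string, columns []string, rows [][]interface{}) (sql string, args []interface{}, ok bool) {
	if len(rows) == 0 {
		return "", nil, false
	}
	placeholder := "(" + strings.TrimRight(strings.Repeat("?,", len(columns)), ",") + ")"
	var b strings.Builder
	b.WriteString("INSERT INTO " + table + " (" + strings.Join(columns, ", ") + ") VALUES ")
	for i, row := range rows {
		if i > 0 {
			b.WriteString(", ")
		}
		b.WriteString(placeholder)
		args = append(args, row...)
	}
	return b.String(), args, true
}

func main() {
	sql, args, ok := buildBulkInsert("tags", []string{"id", "tag"}, [][]interface{}{
		{uint32(1), "video"},
		{uint32(2), "hd"},
	})
	fmt.Println(ok, sql, args)
}
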
/Services/TagProducer/VideoInfoExtractor.go:
--------------------------------------------------------------------------------
1 | package TagProducer
2 |
3 | import (
4 | "github.com/ruslanfedoseenko/go-tmdb"
5 | "hash/crc32"
6 |
7 | "github.com/jinzhu/gorm"
8 | "github.com/op/go-logging"
9 | "github.com/ruslanfedoseenko/dhtcrawler/Config"
10 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
11 | "regexp"
12 | "strconv"
13 | "strings"
14 | "reflect"
15 | )
16 |
17 | var videoExtractorLog = logging.MustGetLogger("VideoInfoExtractor")
18 |
19 | type VideoInfoExtractWork struct {
20 | Id int32
21 | Name string
22 | }
23 |
24 | type VideoInfoExtractor struct {
25 | tmDb *tmdb.TMDb
26 | tmdbConfig *tmdb.Configuration
27 | ganres map[uint32]string
28 | extraTitleContent *regexp.Regexp
29 | titleDelimiters *regexp.Regexp
30 | db *gorm.DB
31 | }
32 |
33 | var instance *VideoInfoExtractor = nil
34 |
35 | func NewVideoInfoExtractor(app *Config.App) *VideoInfoExtractor {
36 | if instance == nil {
37 | videoExtractorLog.Info("Creating new VideoInfoExtractor")
38 | tmDb := tmdb.Init(app.Config.TmdbApi.ApiKey)
39 | config, err := tmDb.GetConfiguration()
40 | if err != nil {
41 | videoExtractorLog.Error("Failed to get TMDB config", err.Error())
42 | return nil
43 | }
44 | instance = &VideoInfoExtractor{
45 | tmDb: tmDb,
46 | tmdbConfig: config,
47 | extraTitleContent: regexp.MustCompile("(?i)(\\d{4}|\\[([^]]+)]|\\(([^)]+)\\)|\\d(\\d)?x\\d(\\d)?(-\\d(\\d)?)?|web-dl|webdl|complete|temporada|season|episode|ep \\d+|tc|xxx|hdrip|dvdrip|bdrip|hdtv|1080p|1080|720|720p|480|480p|576|576p|xvid|divx|mkv|mp4|avi|brrip|ac3|mp3|x264|aac|s\\d(\\d)?(e\\d(\\d)?)?|bluray|rip|avc)"),
48 | titleDelimiters: regexp.MustCompile("(\\.|-|;|,|-|_|\\||\\(|\\)|\\[|]|/|\\\\)"),
49 | db: app.Db,
50 | }
51 | var genres *tmdb.Genre
52 | genres, err = tmDb.GetMovieGenres(map[string]string{})
53 | 		if err != nil || genres == nil { videoExtractorLog.Error("Failed to get TMDB movie genres", err); return instance }
53 | 		genresCount := len(genres.Genres)
54 | instance.ganres = make(map[uint32]string)
55 | for i := 0; i < genresCount; i++ {
56 | genre := genres.Genres[i]
57 | instance.ganres[uint32(genre.ID)] = genre.Name
58 | }
59 |
60 | }
61 | return instance
62 | }
63 |
64 | func minInt32(x, y int32) int32 {
65 | if x < y {
66 | return x
67 | }
68 | return y
69 | }
70 |
71 | func (ve *VideoInfoExtractor) GetAssociatedVideos(work VideoInfoExtractWork) (titles []Models.Title) {
72 | //videoExtractorLog.Println("Cleaning", name)
73 | cleanedName := ve.cleanupName(work.Name)
74 |
75 | 	videoExtractorLog.Info("Searching for", cleanedName)
76 | res, err := ve.tmDb.SearchMulti(cleanedName, nil)
77 | if err != nil {
78 | videoExtractorLog.Error("SearchMulti Failed:", err.Error())
79 | //videoExtractorLog.Println("Re adding task")
80 | //ve.workQueue <-work
81 | return
82 | }
83 |
84 | videoExtractorLog.Debug("SearchMulti Result:", res)
85 | titlesCount := minInt32(int32(res.TotalResults), 5)
86 | var i int32
87 |
88 | for i = 0; i < titlesCount; i++ {
89 | var name string
90 |
91 | var posterUrl string
92 | var description string
93 | var titleType Models.TitleType
94 | var genres []string
95 | var year int64
96 | var ok = false
97 | var tvSeriesInfo *tmdb.MultiSearchTvInfo
98 | var movieInfo *tmdb.MultiSearchMovieInfo
99 | var base = &res.Results[i]
100 | videoExtractorLog.Debug("SearchMulti Result:", i, reflect.TypeOf(*base), *base)
101 | if tvSeriesInfo, ok = (*base).(*tmdb.MultiSearchTvInfo); ok {
102 | name = tvSeriesInfo.OriginalName
103 | var yearStr string
104 | if len(tvSeriesInfo.FirstAirDate) > 4 {
105 | yearStr = tvSeriesInfo.FirstAirDate[0:4]
106 | }
107 | genres = ve.getGenres(tvSeriesInfo.GenreIDs)
108 |
109 | if len(tvSeriesInfo.PosterPath) > 0 {
110 | posterUrl = ve.tmdbConfig.Images.BaseURL + "original" + tvSeriesInfo.PosterPath
111 | }
112 | year, err = strconv.ParseInt(yearStr, 10, 32)
113 | if err != nil {
114 | year = 0
115 | }
116 |
117 | description = tvSeriesInfo.Overview
118 | titleType = Models.TitleType(tvSeriesInfo.MediaType)
119 | } else if movieInfo, ok = (*base).(*tmdb.MultiSearchMovieInfo); ok {
120 | videoExtractorLog.Debug("Casted to tmdb.MultiSearchMovieInfo")
121 | name = movieInfo.OriginalTitle
122 | var yearStr string
123 | if len(movieInfo.ReleaseDate) > 4 {
124 | yearStr = movieInfo.ReleaseDate[0:4]
125 | }
126 |
127 | if len(movieInfo.PosterPath) > 0 {
128 | posterUrl = ve.tmdbConfig.Images.BaseURL + "original" + movieInfo.PosterPath
129 | }
130 | year, err = strconv.ParseInt(yearStr, 10, 32)
131 | if err != nil {
132 | year = 0
133 | }
134 |
135 | description = movieInfo.Overview
136 | titleType = Models.TitleType(movieInfo.MediaType)
137 |
138 | }
139 | if ok {
140 | titles = append(titles, Models.Title{
141 | Title: name,
142 | Year: uint32(year),
143 | Description: description,
144 | Ganres: genres,
145 | TitleType: titleType,
146 | Id: crc32.ChecksumIEEE([]byte(name)),
147 | PosterUrl: posterUrl,
148 | })
149 | }
150 |
151 |
152 | }
153 | return
154 | }
155 | func (ve *VideoInfoExtractor) getGenres(ganre_ids []uint32) (genreNames []string) {
156 | ganresLen := len(ganre_ids)
157 | genreNames = make([]string, ganresLen, ganresLen)
158 | for i := 0; i < ganresLen; i++ {
159 | genreNames[i] = ve.ganres[ganre_ids[i]]
160 | }
161 | return
162 | }
163 |
164 | func (ve *VideoInfoExtractor) cleanupName(text string) string {
165 | videoExtractorLog.Info("data", text, ve)
166 | index := ve.extraTitleContent.FindStringIndex(text)
167 | if len(index) > 0 {
168 | if index[0] == 0 {
169 | text = text[index[1]:]
170 | index = ve.extraTitleContent.FindStringIndex(text)
171 |
172 | }
173 | if len(index) > 0 {
174 | text = text[:index[0]]
175 | }
176 | }
177 | cleaned := ve.extraTitleContent.ReplaceAllString(text, "")
178 | cleaned = ve.titleDelimiters.ReplaceAllString(cleaned, " ")
179 | cleaned = strings.Trim(cleaned, " ")
180 | return cleaned
181 | }
182 |
--------------------------------------------------------------------------------
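
cleanupName strips release-name noise (years, resolutions, codecs, container extensions) and delimiter characters before the TMDB search. The sketch below uses a reduced, hypothetical variant of the two regular expressions purely to illustrate the effect; it is not the project's pattern.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

var extra = regexp.MustCompile(`(?i)(1080p|720p|x264|bluray|web-dl|hdtv|s\d\d?(e\d\d?)?|\d{4})`)
var delims = regexp.MustCompile(`[.\-_\[\]()]+`)

// cleanup removes release metadata, turns delimiters into spaces and
// collapses the remaining whitespace.
func cleanup(name string) string {
	name = extra.ReplaceAllString(name, "")
	name = delims.ReplaceAllString(name, " ")
	return strings.Join(strings.Fields(name), " ")
}

func main() {
	fmt.Println(cleanup("Some.Movie.2016.1080p.BluRay.x264-GROUP")) // "Some Movie GROUP"
}
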
/Services/TagProducer/specialTagsProducer.go:
--------------------------------------------------------------------------------
1 | package TagProducer
2 |
3 | import "github.com/ruslanfedoseenko/dhtcrawler/Models"
4 |
5 | type specialTagProducer interface {
6 | SatisfyTag(torrentTokens []string) bool
7 | GetTags(torrent *Models.Torrent) []string
8 | }
9 |
--------------------------------------------------------------------------------
/Services/TagProducer/specialVideoTagProducer.go:
--------------------------------------------------------------------------------
1 | package TagProducer
2 |
3 | import (
4 | "github.com/ruslanfedoseenko/dhtcrawler/Config"
5 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
6 | "github.com/ruslanfedoseenko/dhtcrawler/Utils"
7 | "regexp"
8 | "github.com/op/go-logging"
9 | )
10 |
11 | type videoTagProducer struct {
12 | videoInfoExtractor *VideoInfoExtractor
13 | qualityInfo *regexp.Regexp
14 | }
15 |
16 | var videoLog = logging.MustGetLogger("DhtCrawler")
17 |
18 | func NewVideoTagProducer(app *Config.App) *videoTagProducer {
19 | return &videoTagProducer{
20 | videoInfoExtractor: NewVideoInfoExtractor(app),
21 | qualityInfo: regexp.MustCompile("web-dl|webdl|season|episode|ep(\\s+)?\\d+|tc|xxx|hdrip|bdrip|dvdrip|bdrip|hdtv|1080p|1080|720p|720|480|480p|576|576p|xvid|divx|mkv|mp4|avi|brrip|ac3|mp3|x264|h264|x265|h265|aac|s\\d(\\d)?(e\\d(\\d)?)?|bluray|rip|avc"),
22 | }
23 | }
24 |
25 | func (vtp *videoTagProducer) SatisfyTag(torrentTokens []string) bool {
26 | if Utils.IndexOf(torrentTokens, "video") > -1 {
27 | return true
28 | }
29 | return false
30 | }
31 |
32 | func (vtp *videoTagProducer) GetTags(torrent *Models.Torrent) []string {
33 | TagProducerLog.Info("GetTags", vtp, vtp.videoInfoExtractor)
34 | titles := vtp.videoInfoExtractor.GetAssociatedVideos(VideoInfoExtractWork{Name: torrent.Name})
35 | torrent.Titles = append(torrent.Titles , titles...)
36 |
37 | titlesLen := len(titles)
38 | tags := make([]string, 0, titlesLen*5)
39 | for i := 0; i < titlesLen; i++ {
40 | tags = append(tags, titles[i].Ganres...)
41 | }
42 | tags = append(tags, vtp.lookForQuality(torrent.Name)...)
43 | videoLog.Debug("Titles before exit GetTags", torrent.Titles)
44 | return tags
45 | }
46 | func (vtp *videoTagProducer) lookForQuality(i string) []string {
47 | return vtp.qualityInfo.FindAllString(i, -1)
48 | }
49 |
--------------------------------------------------------------------------------
/Services/TagProducer/tokenExistenceTagProducer.go:
--------------------------------------------------------------------------------
1 | package TagProducer
2 |
3 | type tokenExistenceTagProducer struct {
4 | tokens map[string]uint32
5 | tag string
6 | }
7 |
8 | func (p tokenExistenceTagProducer) SatisfyTag(torrentTokens []string) bool {
9 | tokensLen := len(torrentTokens)
10 | for i := 0; i < tokensLen; i++ {
11 | if _, ok := p.tokens[torrentTokens[i]]; ok {
12 | return true
13 | }
14 | }
15 | return false
16 | }
17 |
18 | func (p tokenExistenceTagProducer) GetTag() string {
19 | return p.tag
20 | }
21 |
--------------------------------------------------------------------------------
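
tokenExistenceTagProducer only answers membership questions; the tokens themselves come from splitting the torrent name and file paths with the tokenizer delimiters defined in TorrentTagsProducer.go. A self-contained sketch of that flow follows; the token set and torrent name are made up for illustration.

package main

import (
	"fmt"
	"strings"
)

func main() {
	// configured tokens for one hypothetical tag
	tokens := map[string]struct{}{"mkv": {}, "avi": {}, "mp4": {}}
	name := strings.ToLower("Holiday_Video_2017 [720p].mkv")
	// same delimiter set as tokenizer() in TorrentTagsProducer.go
	for _, tok := range strings.FieldsFunc(name, func(c rune) bool {
		return strings.ContainsRune(";., -_|[]()/\\", c)
	}) {
		if _, ok := tokens[tok]; ok {
			fmt.Println("tag satisfied by token:", tok)
		}
	}
}
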
/Services/TorrentCountStatsUpdater.go:
--------------------------------------------------------------------------------
1 | package Services
2 |
3 | import (
4 | "github.com/jasonlvhit/gocron"
5 | "github.com/op/go-logging"
6 | "github.com/ruslanfedoseenko/dhtcrawler/Config"
7 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
8 | "time"
9 | )
10 |
11 | var statsUpdLog = logging.MustGetLogger("StatsUpdater")
12 |
13 | type StatsUpdater struct {
14 | }
15 |
16 | func (updater *StatsUpdater) updateStatistics() {
17 |
18 | statsUpdLog.Debug("Starting Update Torrent Statistic")
19 | var lastInserted Models.TorrentStatsPart
20 |
21 | err := App.Db.Model(&Models.TorrentStatsPart{}).Where("id = ( SELECT MAX( id ) FROM torrent_stats )").Scan(&lastInserted).Error
22 |
23 | if err != nil {
24 | 		statsUpdLog.Error("Select last id: Error occurred", err)
25 | }
26 |
27 | if lastInserted.Date.Hour() == time.Now().Hour() {
28 | _, time := gocron.NextRun()
29 | statsUpdLog.Info("Next Update Statistics task run in:", time)
30 | return
31 | }
32 |
33 | err = App.Db.Exec("INSERT INTO torrent_stats ( torrents_count , files_count , files_size ) SELECT torrent_count , files_count , total_file_size FROM realtime_counters").Error
34 |
35 | if err != nil {
36 | 		statsUpdLog.Error("App.Db.Create: Error occurred", err)
37 | }
38 | _, time := gocron.NextRun()
39 | statsUpdLog.Info("Next Update Statistics task run in:", time)
40 | }
41 |
42 | func (updater StatsUpdater) Start() {
43 | go updater.updateStatistics()
44 | }
45 |
46 | func SetupTorrentCountStatsUpdater(app *Config.App) {
47 | App = app
48 | statsUpdater := StatsUpdater{}
49 | App.AddService(statsUpdater)
50 | App.Scheduler.Every(1).Hour().Do(statsUpdater.Start)
51 |
52 | }
53 |
--------------------------------------------------------------------------------
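
SetupTorrentCountStatsUpdater registers the hourly job on the shared App.Scheduler. For orientation only, a standalone sketch of the same cadence, assuming the jasonlvhit/gocron v1 Scheduler API used elsewhere in this repository (NewScheduler, Every(...).Hour().Do(...), Start); it is not part of the project.

package main

import (
	"fmt"
	"time"

	"github.com/jasonlvhit/gocron"
)

func updateStats() {
	fmt.Println("updating torrent statistics at", time.Now().Format(time.RFC3339))
}

func main() {
	s := gocron.NewScheduler()
	s.Every(1).Hour().Do(updateStats) // same cadence as SetupTorrentCountStatsUpdater
	<-s.Start()                       // Start returns a channel; receiving blocks forever
}
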
/Services/WorkQueue.go:
--------------------------------------------------------------------------------
1 | package Services
2 |
3 | import (
4 | "container/heap"
5 | "sync"
6 | )
7 |
8 | type Priority int32
9 |
10 | const (
11 | High Priority = 2
12 | Normal Priority = 1
13 | Low Priority = 0
14 | )
15 |
16 | // An Item is something we manage in a priority queue.
17 | type Item struct {
18 | value work // The value of the item; arbitrary.
19 | priority Priority // The priority of the item in the queue.
20 | // The index is needed by update and is maintained by the heap.Interface methods.
21 | index int // The index of the item in the heap.
22 | }
23 |
24 | // A PriorityQueue implements heap.Interface and holds Items.
25 | type PriorityQueue struct {
26 | sync.RWMutex
27 | items []*Item
28 | HasMoreItems chan bool
29 | }
30 |
31 | func newPriorityQueue(initialCapacity int) *PriorityQueue {
32 | return &PriorityQueue{
33 | items: make([]*Item, initialCapacity),
34 | HasMoreItems: make(chan bool),
35 | }
36 | }
37 |
38 | func (pq *PriorityQueue) SetItem(index int, item *Item) {
39 | pq.Lock()
40 | defer pq.Unlock()
41 | pq.items[index] = item
42 | }
43 | func (pq PriorityQueue) Len() int {
44 | pq.RLock()
45 | defer pq.RUnlock()
46 | return len(pq.items)
47 | }
48 |
49 | func (pq PriorityQueue) Less(i, j int) bool {
50 | // We want Pop to give us the highest, not lowest, priority so we use greater than here.
51 | pq.RLock()
52 | defer pq.RUnlock()
53 | return pq.items[i].priority > pq.items[j].priority
54 | }
55 |
56 | func (pq PriorityQueue) Swap(i, j int) {
57 | pq.Lock()
58 | defer pq.Unlock()
59 | pq.items[i], pq.items[j] = pq.items[j], pq.items[i]
60 | pq.items[i].index = i
61 | pq.items[j].index = j
62 | }
63 |
64 | func (pq *PriorityQueue) Push(x interface{}) {
65 | pq.Lock()
66 | defer pq.Unlock()
67 | n := len(pq.items)
68 | item := x.(*Item)
69 | item.index = n
70 | pq.items = append(pq.items, item)
71 | if n == 0 {
72 | select {
73 | case pq.HasMoreItems <- true:
74 | {
75 |
76 | }
77 | default:
78 | {
79 |
80 | }
81 | }
82 | }
83 | }
84 |
85 | func (pq *PriorityQueue) Pop() interface{} {
86 | pq.Lock()
87 | defer pq.Unlock()
88 | old := *pq
89 | n := len(old.items)
90 | item := old.items[n-1]
91 | item.index = -1 // for safety
92 | pq.items = old.items[0 : n-1]
93 | return item
94 | }
95 |
96 | // update modifies the priority and value of an Item in the queue.
97 | func (pq *PriorityQueue) update(item *Item, value work, priority Priority) {
98 | pq.Lock()
99 | defer pq.Unlock()
100 | item.value = value
101 | item.priority = priority
102 | heap.Fix(pq, int(item.index))
103 | }
104 |
--------------------------------------------------------------------------------
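
PriorityQueue is meant to be driven through container/heap rather than called directly. Because Item and work are unexported, the sketch below would have to live inside package Services; it is illustrative only and not part of the repository.

package Services

import "container/heap"

// examplePriorityQueueUsage shows the intended push/pop pattern: the
// highest-priority work item is returned first.
func examplePriorityQueueUsage() work {
	pq := newPriorityQueue(0)
	heap.Push(pq, &Item{value: work{startId: 1, count: 74}, priority: Low})
	heap.Push(pq, &Item{value: work{startId: 75, count: 74}, priority: High})
	top := heap.Pop(pq).(*Item) // the High-priority item comes out first
	return top.value
}
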
/Services/jwt/Init.go:
--------------------------------------------------------------------------------
1 | package jwt
2 |
3 | import (
4 | "crypto/rsa"
5 | "io/ioutil"
6 | "github.com/dgrijalva/jwt-go"
7 | "github.com/ruslanfedoseenko/dhtcrawler/Config"
8 | "github.com/op/go-logging"
9 | )
10 |
11 | // location of the files used for signing and verification
12 | const (
13 | privKeyPath = "keys/app.rsa" // `$ openssl genrsa -out app.rsa 2048`
14 | pubKeyPath = "keys/app.rsa.pub" // `$ openssl rsa -in app.rsa -pubout > app.rsa.pub`
15 | )
16 |
17 | // keys are held in global variables
18 | // I haven't seen memory corruption or info leakage in Go yet,
19 | // but it may be a better idea to keep only the public key in RAM
20 | // and load the signKey on every signing request; it depends on your usage.
21 | var (
22 | verifyKey *rsa.PublicKey
23 | signKey *rsa.PrivateKey
24 | )
25 |
26 |
27 | var jwtLog = logging.MustGetLogger("jwtLog")
28 | var App *Config.App
29 |
30 | // read the key files before starting http handlers
31 | func InitJWT(app *Config.App) error {
32 | App = app
33 | signBytes, err := ioutil.ReadFile(App.Config.HttpConfig.JwtAuthPrivateKeyPath)
34 | if err != nil {
35 | return err
36 | }
37 |
38 | signKey, err = jwt.ParseRSAPrivateKeyFromPEM(signBytes)
39 | if err != nil {
40 | return err
41 | }
42 |
43 | verifyBytes, err := ioutil.ReadFile(App.Config.HttpConfig.JwtAuthPublicKeyPath)
44 | if err != nil {
45 | return err
46 | }
47 |
48 | verifyKey, err = jwt.ParseRSAPublicKeyFromPEM(verifyBytes)
49 | if err != nil {
50 | return err
51 | }
52 |
53 | return nil
54 | }
--------------------------------------------------------------------------------
/Services/jwt/cookies.go:
--------------------------------------------------------------------------------
1 | package jwt
2 |
3 | import (
4 | "log"
5 | "time"
6 | "net/http"
7 | )
8 |
9 | func NullifyTokenCookies(w *http.ResponseWriter, r *http.Request) {
10 |
11 | var isHttps = r.Header.Get("X-Forwarded-Proto") == "https"
12 |
13 | authCookie := http.Cookie{
14 | Name: "AuthToken",
15 | Value: "",
16 | Expires: time.Now().Add(-1000 * time.Hour),
17 | HttpOnly: true,
18 | Domain: r.Host,
19 | Path: "/api",
20 | Secure: isHttps,
21 | }
22 |
23 | http.SetCookie(*w, &authCookie)
24 |
25 | refreshCookie := http.Cookie{
26 | Name: "RefreshToken",
27 | Value: "",
28 | Expires: time.Now().Add(-1000 * time.Hour),
29 | HttpOnly: true,
30 | Domain: r.Host,
31 | Path: "/api",
32 | Secure: isHttps,
33 | }
34 |
35 | http.SetCookie(*w, &refreshCookie)
36 |
37 | // if present, revoke the refresh cookie from our db
38 | RefreshCookie, refreshErr := r.Cookie("RefreshToken")
39 | if refreshErr == http.ErrNoCookie {
40 | // do nothing, there is no refresh cookie present
41 | return
42 | } else if refreshErr != nil {
43 | 		log.Panicf("panic: %+v", refreshErr)
44 | http.Error(*w, http.StatusText(500), 500)
45 | }
46 |
47 | RevokeRefreshToken(RefreshCookie.Value)
48 | }
49 |
50 | func SetAuthAndRefreshCookies(w *http.ResponseWriter, r *http.Request, authTokenString string, refreshTokenString string) {
51 |
52 | var isHttps = r.Header.Get("X-Forwarded-Proto") == "https"
53 | authCookie := http.Cookie{
54 | Name: "AuthToken",
55 | Value: authTokenString,
56 | HttpOnly: true,
57 | Domain: r.Host,
58 | Path: "/api",
59 | Secure: isHttps,
60 | }
61 |
62 | http.SetCookie(*w, &authCookie)
63 |
64 | refreshCookie := http.Cookie{
65 | Name: "RefreshToken",
66 | Value: refreshTokenString,
67 | HttpOnly: true,
68 | Domain: r.Host,
69 | Path: "/api",
70 | Secure: isHttps,
71 | }
72 |
73 | http.SetCookie(*w, &refreshCookie)
74 | }
--------------------------------------------------------------------------------
/Services/jwt/dao.go:
--------------------------------------------------------------------------------
1 | package jwt
2 |
3 | import (
4 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
5 | "github.com/ruslanfedoseenko/dhtcrawler/Utils"
6 | )
7 |
8 | func StoreNewRefreshToken() (token string ,e error) {
9 | token, e = Utils.GenerateRandomString(32)
10 | App.Db.Create(&Models.JwtRefreshToken{
11 | Token: token,
12 | })
13 | return
14 | }
15 |
16 |
17 | func CheckRefreshToken(token string) bool {
18 | var tokensCount = 0
19 | App.Db.Model(&Models.JwtRefreshToken{}).Where(&Models.JwtRefreshToken{
20 | Token: token,
21 | }).Count(&tokensCount)
22 | return tokensCount > 0
23 | }
24 |
25 |
26 | func DeleteRefreshToken(token string) {
27 | App.Db.Where(&Models.JwtRefreshToken{
28 | Token: token,
29 | }).Delete(Models.JwtRefreshToken{})
30 | }
31 |
32 |
--------------------------------------------------------------------------------
/Services/jwt/jwt.go:
--------------------------------------------------------------------------------
1 | package jwt
2 |
3 | import (
4 | "time"
5 | "errors"
6 | "github.com/dgrijalva/jwt-go"
7 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
8 | "github.com/ruslanfedoseenko/dhtcrawler/Utils"
9 | )
10 |
11 | func GetClaims(token string) (claims *Models.TokenClaims,err error) {
12 | authToken, err := jwt.ParseWithClaims(token, &Models.TokenClaims{}, func(token *jwt.Token) (interface{}, error) {
13 | return verifyKey, nil
14 | })
15 | claims = authToken.Claims.(*Models.TokenClaims)
16 |
17 | return
18 | }
19 |
20 | func CreateNewTokens(uuid string, role string) (authTokenString, refreshTokenString, csrfSecret string, err error) {
21 | // generate the csrf secret
22 | csrfSecret, err = Utils.GenerateCSRFSecret()
23 | if err != nil {
24 | return
25 | }
26 |
27 |
28 | // generate the refresh token
29 | refreshTokenString, err = createRefreshTokenString(uuid, role, csrfSecret)
30 |
31 |
32 | // generate the auth token
33 | authTokenString, err = createAuthTokenString(uuid, role, csrfSecret)
34 | if err != nil {
35 | return
36 | }
37 | // don't need to check for err bc we're returning everything anyway
38 | return
39 | }
40 |
41 | // @adam-hanna: check if refreshToken["sub"] == authToken["sub"]?
42 | // I don't think this is necessary bc a valid refresh token will always generate
43 | // a valid auth token of the same "sub"
44 | func CheckAndRefreshTokens(oldAuthTokenString string, oldRefreshTokenString string, oldCsrfSecret string) (newAuthTokenString, newRefreshTokenString, newCsrfSecret string, err error) {
45 | // first, check that a csrf token was provided
46 | if oldCsrfSecret == "" {
47 | jwtLog.Error("No CSRF token!")
48 | err = errors.New("Unauthorized")
49 | return
50 | }
51 | // now, check that it matches what's in the auth token claims
52 | authToken, err := jwt.ParseWithClaims(oldAuthTokenString, &Models.TokenClaims{}, func(token *jwt.Token) (interface{}, error) {
53 | return verifyKey, nil
54 | })
55 | authTokenClaims, ok := authToken.Claims.(*Models.TokenClaims)
56 | if !ok {
57 | return
58 | }
59 | if oldCsrfSecret != authTokenClaims.Csrf {
60 | jwtLog.Error("CSRF token doesn't match jwt!")
61 | err = errors.New("Unauthorized")
62 | return
63 | }
64 |
65 |
66 | // next, check the auth token in a stateless manner
67 | if authToken.Valid {
68 | 		jwtLog.Info("Auth token is valid")
69 | // auth token has not expired
70 | // we need to return the csrf secret bc that's what the function calls for
71 | newCsrfSecret = authTokenClaims.Csrf
72 |
73 | // update the exp of refresh token string, but don't save to the db
74 | // we don't need to check if our refresh token is valid here
75 | // because we aren't renewing the auth token, the auth token is already valid
76 | newRefreshTokenString, err = updateRefreshTokenExp(oldRefreshTokenString)
77 | newAuthTokenString = oldAuthTokenString
78 | return
79 | } else if ve, ok := err.(*jwt.ValidationError); ok {
80 | jwtLog.Error("Auth token is not valid")
81 | if ve.Errors&(jwt.ValidationErrorExpired) != 0 {
82 | jwtLog.Error("Auth token is expired")
83 | // auth token is expired
84 | newAuthTokenString, newCsrfSecret, err = updateAuthTokenString(oldRefreshTokenString, oldAuthTokenString)
85 | if err != nil {
86 | return
87 | }
88 |
89 | // update the exp of refresh token string
90 | newRefreshTokenString, err = updateRefreshTokenExp(oldRefreshTokenString)
91 | if err != nil {
92 | return
93 | }
94 |
95 | // update the csrf string of the refresh token
96 | newRefreshTokenString, err = updateRefreshTokenCsrf(newRefreshTokenString, newCsrfSecret)
97 | return
98 | } else {
99 | jwtLog.Error("Error in auth token")
100 | err = errors.New("Error in auth token")
101 | return
102 | }
103 | } else {
104 | jwtLog.Error("Error in auth token")
105 | err = errors.New("Error in auth token")
106 | return
107 | }
108 |
109 | // if we get here, there was some error validating the token
110 | err = errors.New("Unauthorized")
111 | return
112 | }
113 |
114 | func createAuthTokenString(uuid string, role string, csrfSecret string) (authTokenString string, err error) {
115 | authTokenExp := time.Now().Add(Models.AuthTokenValidTime).Unix()
116 | authClaims := Models.TokenClaims{
117 | StandardClaims: jwt.StandardClaims{
118 | Subject: uuid,
119 | ExpiresAt: authTokenExp,
120 | },
121 | Role: role,
122 | Csrf: csrfSecret,
123 | }
124 |
125 | // create a signer for rsa 256
126 | authJwt := jwt.NewWithClaims(jwt.GetSigningMethod("RS256"), authClaims)
127 |
128 | // generate the auth token string
129 | authTokenString, err = authJwt.SignedString(signKey)
130 | return
131 | }
132 |
133 | func createRefreshTokenString(uuid string, role string, csrfString string) (refreshTokenString string, err error) {
134 | refreshTokenExp := time.Now().Add(Models.RefreshTokenValidTime).Unix()
135 | refreshJti, err := StoreNewRefreshToken()
136 | if err != nil {
137 | return
138 | }
139 |
140 | refreshClaims := Models.TokenClaims{
141 | StandardClaims: jwt.StandardClaims{
142 | Id: refreshJti, // jti
143 | Subject: uuid,
144 | ExpiresAt: refreshTokenExp,
145 | },
146 | Role: role,
147 | Csrf: csrfString,
148 | }
149 |
150 | // create a signer for rsa 256
151 | refreshJwt := jwt.NewWithClaims(jwt.GetSigningMethod("RS256"), refreshClaims)
152 |
153 | // generate the refresh token string
154 | refreshTokenString, err = refreshJwt.SignedString(signKey)
155 | return
156 | }
157 |
158 |
159 | func updateRefreshTokenExp(oldRefreshTokenString string) (newRefreshTokenString string, err error) {
160 | refreshToken, err := jwt.ParseWithClaims(oldRefreshTokenString, &Models.TokenClaims{}, func(token *jwt.Token) (interface{}, error) {
161 | return verifyKey, nil
162 | })
163 |
164 | oldRefreshTokenClaims, ok := refreshToken.Claims.(*Models.TokenClaims)
165 | if !ok {
166 | return
167 | }
168 |
169 | refreshTokenExp := time.Now().Add(Models.RefreshTokenValidTime).Unix()
170 |
171 | refreshClaims := Models.TokenClaims{
172 | StandardClaims: jwt.StandardClaims{
173 | Id: oldRefreshTokenClaims.StandardClaims.Id, // jti
174 | Subject: oldRefreshTokenClaims.StandardClaims.Subject,
175 | ExpiresAt: refreshTokenExp,
176 | },
177 | Role: oldRefreshTokenClaims.Role,
178 | Csrf: oldRefreshTokenClaims.Csrf,
179 | }
180 |
181 | // create a signer for rsa 256
182 | refreshJwt := jwt.NewWithClaims(jwt.GetSigningMethod("RS256"), refreshClaims)
183 |
184 | // generate the refresh token string
185 | newRefreshTokenString, err = refreshJwt.SignedString(signKey)
186 | return
187 | }
188 |
189 | func updateAuthTokenString(refreshTokenString string, oldAuthTokenString string) (newAuthTokenString, csrfSecret string, err error) {
190 | refreshToken, err := jwt.ParseWithClaims(refreshTokenString, &Models.TokenClaims{}, func(token *jwt.Token) (interface{}, error) {
191 | return verifyKey, nil
192 | })
193 | refreshTokenClaims, ok := refreshToken.Claims.(*Models.TokenClaims)
194 | if !ok {
195 | err = errors.New("Error reading jwt claims")
196 | return
197 | }
198 |
199 | // check if the refresh token has been revoked
200 | if CheckRefreshToken(refreshTokenClaims.StandardClaims.Id) {
201 | // the refresh token has not been revoked
202 | // has it expired?
203 | if refreshToken.Valid {
204 | // nope, the refresh token has not expired
205 | // issue a new auth token
206 | authToken, _ := jwt.ParseWithClaims(oldAuthTokenString, &Models.TokenClaims{}, func(token *jwt.Token) (interface{}, error) {
207 | return verifyKey, nil
208 | })
209 |
210 | oldAuthTokenClaims, ok := authToken.Claims.(*Models.TokenClaims)
211 | if !ok {
212 | err = errors.New("Error reading jwt claims")
213 | return
214 | }
215 |
216 | // our policy is to regenerate the csrf secret for each new auth token
217 | csrfSecret, err = Utils.GenerateCSRFSecret()
218 | if err != nil {
219 | return
220 | }
221 |
222 | newAuthTokenString, err = createAuthTokenString(oldAuthTokenClaims.StandardClaims.Subject, oldAuthTokenClaims.Role, csrfSecret)
223 |
224 | return
225 | } else {
226 | jwtLog.Error("Refresh token has expired!")
227 | // the refresh token has expired!
228 | // Revoke the token in our db and require the user to login again
229 | DeleteRefreshToken(refreshTokenClaims.StandardClaims.Id)
230 |
231 | err = errors.New("Unauthorized")
232 | return
233 | }
234 | } else {
235 | jwtLog.Error("Refresh token has been revoked!")
236 | // the refresh token has been revoked!
237 | err = errors.New("Unauthorized")
238 | return
239 | }
240 | }
241 |
242 |
243 | func RevokeRefreshToken(refreshTokenString string) error {
244 | refreshToken, err := jwt.ParseWithClaims(refreshTokenString, &Models.TokenClaims{}, func(token *jwt.Token) (interface{}, error) {
245 | return verifyKey, nil
246 | })
247 | if err != nil {
248 | return errors.New("Could not parse refresh token with claims")
249 | }
250 |
251 | refreshTokenClaims, ok := refreshToken.Claims.(*Models.TokenClaims)
252 | if !ok {
253 | return errors.New("Could not read refresh token claims")
254 | }
255 |
256 | DeleteRefreshToken(refreshTokenClaims.StandardClaims.Id)
257 |
258 | return nil
259 | }
260 |
261 | func updateRefreshTokenCsrf(oldRefreshTokenString string, newCsrfString string) (newRefreshTokenString string, err error) {
262 | refreshToken, err := jwt.ParseWithClaims(oldRefreshTokenString, &Models.TokenClaims{}, func(token *jwt.Token) (interface{}, error) {
263 | return verifyKey, nil
264 | })
265 |
266 | oldRefreshTokenClaims, ok := refreshToken.Claims.(*Models.TokenClaims)
267 | if !ok {
268 | return
269 | }
270 |
271 | refreshClaims := Models.TokenClaims{
272 | StandardClaims: jwt.StandardClaims{
273 | Id: oldRefreshTokenClaims.StandardClaims.Id, // jti
274 | Subject: oldRefreshTokenClaims.StandardClaims.Subject,
275 | ExpiresAt: oldRefreshTokenClaims.StandardClaims.ExpiresAt,
276 | },
277 | Role: oldRefreshTokenClaims.Role,
278 | Csrf: newCsrfString,
279 | }
280 |
281 | // create a signer for rsa 256
282 | refreshJwt := jwt.NewWithClaims(jwt.GetSigningMethod("RS256"), refreshClaims)
283 |
284 | // generate the refresh token string
285 | newRefreshTokenString, err = refreshJwt.SignedString(signKey)
286 | return
287 | }
288 |
289 | func GrabUUID(authTokenString string) (string, error) {
290 | authToken, _ := jwt.ParseWithClaims(authTokenString, &Models.TokenClaims{}, func(token *jwt.Token) (interface{}, error) {
291 | return "", errors.New("Error fetching claims")
292 | })
293 | authTokenClaims, ok := authToken.Claims.(*Models.TokenClaims)
294 | if !ok {
295 | return "", errors.New("Error fetching claims")
296 | }
297 |
298 | return authTokenClaims.StandardClaims.Subject, nil
299 | }
--------------------------------------------------------------------------------
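
The intended call order is: InitJWT once at startup, CreateNewTokens at login, and CheckAndRefreshTokens on later authenticated requests, with the CSRF secret travelling outside the cookies. A hypothetical sketch of that flow; loginFlow is not part of the project and expects an App whose config points at the RSA key pair and whose Db is connected.

package main

import (
	"fmt"

	"github.com/ruslanfedoseenko/dhtcrawler/Config"
	"github.com/ruslanfedoseenko/dhtcrawler/Services/jwt"
)

// loginFlow illustrates the call order only; error handling is reduced to returns.
func loginFlow(app *Config.App) error {
	if err := jwt.InitJWT(app); err != nil {
		return err
	}
	auth, refresh, csrf, err := jwt.CreateNewTokens("user-uuid-123", "user")
	if err != nil {
		return err
	}
	// Cookies carry auth/refresh; the CSRF secret is returned to the client separately.
	newAuth, newRefresh, newCsrf, err := jwt.CheckAndRefreshTokens(auth, refresh, csrf)
	fmt.Println(newAuth, newRefresh, newCsrf)
	return err
}

func main() {
	_ = loginFlow // wiring a real *Config.App is outside the scope of this sketch
}
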
/Services/rpc/RpClient.go:
--------------------------------------------------------------------------------
1 | package Rpc
2 |
3 | import (
4 | "fmt"
5 | "github.com/op/go-logging"
6 | "github.com/ruslanfedoseenko/dhtcrawler/Config"
7 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
8 | "github.com/valyala/gorpc"
9 | )
10 |
11 | var rpcClsLog = logging.MustGetLogger("RpcClient")
12 |
13 | type RpcClient struct {
14 | dispatcher *gorpc.Dispatcher
15 | tcpClient *gorpc.Client
16 | torrentServiceClient *gorpc.DispatcherClient
17 | scrapeServiceClient *gorpc.DispatcherClient
18 | }
19 |
20 | var rpcClient *RpcClient = nil
21 |
22 | func GetRpcCLientInstance(app *Config.App) *RpcClient {
23 | if rpcClient == nil {
24 | rpcClient = newRpcClient(app)
25 | }
26 | return rpcClient
27 |
28 | }
29 |
30 | func newRpcClient(app *Config.App) *RpcClient {
31 | var addressStr string = fmt.Sprintf("%s:%d", app.Config.RpcConfig.Host, app.Config.RpcConfig.Port)
32 | gorpc.RegisterType(&Models.Torrent{})
33 | rpcClient := RpcClient{
34 | dispatcher: gorpc.NewDispatcher(),
35 | tcpClient: gorpc.NewTCPClient(addressStr),
36 | }
37 | rpcClient.dispatcher.AddService(TorrentRpcServerName, &TorrentRpcService{})
38 | rpcClient.dispatcher.AddService(ScrapeRpcServerNAme, &ScrapeRpcService{})
39 | rpcClient.torrentServiceClient = rpcClient.dispatcher.NewServiceClient(TorrentRpcServerName, rpcClient.tcpClient)
40 | rpcClient.scrapeServiceClient = rpcClient.dispatcher.NewServiceClient(ScrapeRpcServerNAme, rpcClient.tcpClient)
41 | return &rpcClient
42 | }
43 |
44 | func (c *RpcClient) Start() {
45 | c.tcpClient.Start()
46 | }
47 |
48 | func (c *RpcClient) HasTorrent(infoHash string) (b bool, err error) {
49 | response, err := c.torrentServiceClient.Call("HasTorrent", infoHash)
50 | rpcClsLog.Debug("HasTorrent response:", response, "err:", err)
51 | b, ok := response.(bool)
52 | if !ok {
53 | rpcClsLog.Error("Failed to convert response", response, "to bool")
54 | }
55 | return
56 | }
57 |
58 | func (c *RpcClient) AddTorrent(torrent *Models.Torrent) (err error) {
59 | response, err := c.torrentServiceClient.CallAsync("AddTorrent", torrent)
60 | rpcClsLog.Debug("AddTorrent response:", response, "err:", err)
61 | 	return err
62 | }
63 |
64 | func (c *RpcClient) GetNextScrapeTask() *ScrapeTask {
65 | response, err := c.scrapeServiceClient.Call("GetNextScrapeTask", nil)
66 | rpcClsLog.Debug("GetNextScrapeTask response:", response, "err:", err)
67 | task, ok := response.(*ScrapeTask)
68 | if !ok {
69 | rpcClsLog.Error("Failed to convert response", response, "to ScrapeTask")
70 | }
71 | return task
72 | }
73 |
74 | func (c *RpcClient) ReportScrapeResults(result *ScrapeResult) {
75 | 	response, err := c.scrapeServiceClient.Call("ReportScrapeResults", result)
76 | rpcClsLog.Debug("ReportScrapeResults response:", response, "err:", err)
77 | }
78 | func (c *RpcClient) HasAvailableTasks() bool {
79 | response, err := c.scrapeServiceClient.Call("HasAvailableTasks", nil)
80 | rpcClsLog.Debug("HasAvailableTasks response:", response, "err:", err)
81 | b, ok := response.(bool)
82 | if !ok {
83 | rpcClsLog.Error("Failed to convert response", response, "to bool")
84 | }
85 | return b
86 | }
87 |
--------------------------------------------------------------------------------
/Services/rpc/RpcServer.go:
--------------------------------------------------------------------------------
1 | package Rpc
2 |
3 | import (
4 | "fmt"
5 | "github.com/op/go-logging"
6 | "github.com/ruslanfedoseenko/dhtcrawler/Config"
7 | "github.com/valyala/gorpc"
8 | )
9 |
10 | var rpcSrvLog = logging.MustGetLogger("RpcServer")
11 |
12 | type RpcServer struct {
13 | dispatcher *gorpc.Dispatcher
14 | torrentService *TorrentRpcService
15 | scrapeRpcService *ScrapeRpcService
16 | server *gorpc.Server
17 | }
18 |
19 | const (
20 | TorrentRpcServerName string = "TorrentsRpc"
21 | ScrapeRpcServerNAme = "ScrapeRpc"
22 | )
23 |
24 | func SetupRpcServer(app *Config.App) {
25 | var addressStr string = fmt.Sprintf("%s:%d", app.Config.RpcConfig.Host, app.Config.RpcConfig.Port)
26 | rpcSrvLog.Debug("Address string:", addressStr)
27 | rpcServer := RpcServer{
28 | dispatcher: gorpc.NewDispatcher(),
29 | torrentService: NewTorrentRpcService(app),
30 | scrapeRpcService: NewScrapeRpcService(app),
31 | }
32 | rpcServer.dispatcher.AddService(TorrentRpcServerName, rpcServer.torrentService)
33 | rpcServer.dispatcher.AddService(ScrapeRpcServerNAme, rpcServer.scrapeRpcService)
34 | rpcServer.server = gorpc.NewTCPServer(addressStr, rpcServer.dispatcher.NewHandlerFunc())
35 | app.AddService(rpcServer)
36 | }
37 |
38 | func (s RpcServer) Start() {
39 | 	rpcSrvLog.Info("Starting RpcServer", s.server.Addr)
40 | s.server.Start()
41 | }
42 |
--------------------------------------------------------------------------------
/Services/rpc/ScrapeRcpService.go:
--------------------------------------------------------------------------------
1 | package Rpc
2 |
3 | import (
4 | "github.com/jasonlvhit/gocron"
5 | "github.com/jinzhu/gorm"
6 | "github.com/lib/pq"
7 | "github.com/op/go-logging"
8 | "github.com/ruslanfedoseenko/dhtcrawler/Config"
9 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
10 | "github.com/ruslanfedoseenko/dhtcrawler/Services/tracker"
11 | "sync"
12 | "sync/atomic"
13 | "time"
14 | )
15 |
16 | var scrapeRpcServiceLog = logging.MustGetLogger("ScrapeRpcService")
17 |
18 | type ScrapeRpcService struct {
19 | lastScrapedId int32
20 | lastTorrentId int32
21 | hasAvailableTasks bool
22 | scrapeTimeOut int
23 | db *gorm.DB
24 | scheduler *gocron.Scheduler
25 | results chan *ScrapeResult
26 | lastScrapeMutex sync.Mutex
27 | }
28 |
29 | func NewScrapeRpcService(app *Config.App) *ScrapeRpcService {
30 | service := ScrapeRpcService{
31 | db: app.Db,
32 | scheduler: app.Scheduler,
33 | lastScrapedId: -1,
34 | lastTorrentId: -1,
35 | hasAvailableTasks: true,
36 | scrapeTimeOut: app.Config.ScrapeConfig.ScrapeTimeout,
37 | results: make(chan *ScrapeResult, 65536),
38 | }
39 | for i := 0; i < 8; i++ {
40 | go service.resultsWriterThread()
41 | }
42 |
43 | return &service
44 | }
45 |
46 | const DefaultWorkSize int = 74
47 |
48 | type ScrapeTask struct {
49 | InfoHashes []string
50 | LastID uint32
51 | }
52 |
53 | type ScrapeResult struct {
54 | TrackerUrl string
55 | Response *tracker.ScrapeResponse
56 | LastID uint32
57 | }
58 |
59 | func (s *ScrapeRpcService) getLastScrapeID() int32 {
60 | return atomic.LoadInt32(&s.lastScrapedId)
61 | }
62 | func (s *ScrapeRpcService) setLastScrapeId(value int32) {
63 | atomic.StoreInt32(&s.lastScrapedId, value)
64 | s.db.Exec("UPDATE realtime_counters SET last_scraped_id = ?", value)
65 | }
66 |
67 | func (s *ScrapeRpcService) HasAvailableTasks() bool {
68 | return s.hasAvailableTasks
69 | }
70 |
71 | func (s *ScrapeRpcService) GetNextScrapeTask() *ScrapeTask {
72 | s.lastScrapeMutex.Lock()
73 | defer s.lastScrapeMutex.Unlock()
74 | lastScrapeID := s.getLastScrapeID()
75 |
76 | var torrent Models.Torrent
77 | s.db.Model(&Models.Torrent{}).Order("id desc").First(&torrent)
78 | s.lastTorrentId = torrent.Id
79 |
80 | var counters Models.Counters
81 | err := s.db.First(&counters).Error
82 | if err != nil {
83 | scrapeRpcServiceLog.Error("Error reading counters from db:", err)
84 | }
85 | if lastScrapeID == -1 {
86 | scrapeRpcServiceLog.Info("Last scraped", counters.LastScrapedId, "Torrent Count", counters.TorrentCount)
87 | s.setLastScrapeId(counters.LastScrapedId)
88 | lastScrapeID = counters.LastScrapedId
89 | } else {
90 | scrapeRpcServiceLog.Info("Db Last scraped", counters.LastScrapedId, "Torrent Count", counters.TorrentCount, "Local LastScraped ID", lastScrapeID)
91 | if s.lastTorrentId-lastScrapeID < 100 {
92 | lastScrapeID = 1
93 | }
94 | }
95 |
96 | var torrents []Models.Torrent
97 | err = s.db.Model(&Models.Torrent{}).
98 | Where("((extract( epoch from (now() - last_scrape)))/3600 > ? OR last_scrape IS NULL)"+
99 | " AND id >= ?", s.scrapeTimeOut, lastScrapeID).Limit(DefaultWorkSize).Scan(&torrents).Error
100 | if err != nil {
101 | scrapeRpcServiceLog.Error("Failed to get torrents", err)
102 | }
103 | 	if len(torrents) == 0 {
104 | 		scrapeRpcServiceLog.Error("Failed to get more torrents from ", lastScrapeID)
104 | 		return nil
105 | 	}
106 | var task ScrapeTask
107 | task.InfoHashes = make([]string, len(torrents))
108 | task.LastID = uint32(torrents[len(torrents)-1].Id)
109 | s.setLastScrapeId(int32(task.LastID))
110 | for i, torrent := range torrents {
111 | task.InfoHashes[i] = torrent.Infohash
112 | }
113 | return &task
114 | }
115 |
116 | func (s *ScrapeRpcService) resultsWriterThread() {
117 | for {
118 | result := <-s.results
119 |
120 | var infohases []string
121 | for key, value := range result.Response.ScrapeDatas {
122 | if value.Completed+value.Leechers+value.Seeders != 0 {
123 | infohases = append(infohases, key)
124 | }
125 |
126 | }
127 | if len(infohases) > 0 {
128 | var torrentsToHash = make(map[string]Models.Torrent, len(infohases))
129 | var torrents []Models.Torrent
130 | s.db.Preload("ScraperResults").Where("infohash in (?)", infohases).Find(&torrents)
131 | for i := 0; i < len(torrents); i++ {
132 | torrentsToHash[torrents[i].Infohash] = torrents[i]
133 | }
134 |
135 | for i := 0; i < len(torrents); i++ {
136 | torrent := torrents[i]
137 | info := result.Response.ScrapeDatas[torrent.Infohash]
138 |
139 | if info.Completed+info.Leechers+info.Seeders != 0 {
140 | var found = false
141 | for j := 0; j < len(torrent.ScraperResults); j++ {
142 | if torrent.ScraperResults[j].TrackerUrl == result.TrackerUrl {
143 | found = true
144 | torrent.ScraperResults[j].LastUpdate = pq.NullTime{
145 | Time: time.Now(),
146 | Valid: true,
147 | }
148 | torrent.ScraperResults[j].Leaches = info.Leechers
149 | torrent.ScraperResults[j].Seeds = info.Seeders
150 | s.db.Model(&torrent.ScraperResults[j]).Update(torrent.ScraperResults[j])
151 | }
152 | }
153 | if !found {
154 | s.db.Model(&torrent).
155 | Association("ScraperResults").
156 | Append(&Models.ScrapeTorrentResult{
157 | Leaches: info.Leechers,
158 | Seeds: info.Seeders,
159 | TrackerUrl: result.TrackerUrl,
160 | LastUpdate: pq.NullTime{
161 | Time: time.Now(),
162 | Valid: true,
163 | },
164 | })
165 | }
166 | }
167 | }
168 | }
169 |
170 | }
171 | }
172 |
173 | func (s *ScrapeRpcService) ReportScrapeResults(result *ScrapeResult) {
174 |
175 | if len(result.Response.ScrapeDatas) != DefaultWorkSize {
176 | var torrent Models.Torrent
177 | err := s.db.Model(&Models.Torrent{}).
178 | Where("extract( epoch from (now() - last_scrape))/3600 > ? OR last_scrape IS NULL",
179 | s.scrapeTimeOut).First(&torrent).Error
180 | if err != nil {
181 | scrapeRpcServiceLog.Error("Failed to find lastscrapeId", err)
182 | }
183 | if torrent.Id < s.lastTorrentId {
184 | s.setLastScrapeId(torrent.Id)
185 | }
186 | } else {
187 | if result.LastID > uint32(s.getLastScrapeID()) {
188 | s.setLastScrapeId(int32(result.LastID))
189 |
190 | }
191 | }
192 | scrapeRpcServiceLog.Debug("Results Queue Len", len(s.results))
193 | s.results <- result
194 |
195 | }
196 |
--------------------------------------------------------------------------------
/Services/rpc/TorrentRpcService.go:
--------------------------------------------------------------------------------
1 | package Rpc
2 |
3 | import (
4 | "errors"
5 | "github.com/jinzhu/gorm"
6 | "github.com/op/go-logging"
7 | "github.com/ruslanfedoseenko/dhtcrawler/Config"
8 | "github.com/ruslanfedoseenko/dhtcrawler/Models"
9 | "github.com/ruslanfedoseenko/dhtcrawler/Services/TagProducer"
10 | )
11 |
12 | var torrentRpcService = logging.MustGetLogger("TorrentRpcService")
13 |
14 | type TorrentRpcService struct {
15 | db *gorm.DB
16 | tagProducer *TagProducer.TorrentTagsProducer
17 | }
18 |
19 | func NewTorrentRpcService(app *Config.App) *TorrentRpcService {
20 | return &TorrentRpcService{
21 | db: app.Db,
22 | tagProducer: TagProducer.NewTorrentTagsProducer(app),
23 | }
24 | }
25 |
26 | func (s *TorrentRpcService) HasTorrent(infoHash string) (b bool, err error) {
27 | var count uint32 = 0
28 | err = s.db.Model(&Models.Torrent{}).Where(Models.Torrent{Infohash: infoHash}).Count(&count).Error
29 |
30 | b = count != 0
31 | torrentRpcService.Info("Handling TorrentRpcService.HasTorrent(", infoHash, ")=", b, err)
32 | return
33 | }
34 |
35 | func (s *TorrentRpcService) AddTorrent(torrent *Models.Torrent) (err error) {
36 | torrentRpcService.Info("Handling TorrentRpcService.AddTorrent(", torrent, ")")
37 | if ok, _ := s.HasTorrent(torrent.Infohash); ok {
38 | 		err = errors.New("Torrent with InfoHash " + torrent.Infohash + " already exists")
39 | return
40 | }
41 |
42 | //s.tagProducer.FillTorrentTags(torrent)
43 |
44 | tx := s.db.Begin()
45 | tx.Exec("SET zombodb.batch_mode = true;")
46 | err = tx.Create(torrent).Error
47 | 	if err != nil {
48 | 		tx.Rollback()
49 | 		torrentRpcService.Error("Failed while adding torrent", torrent, err)
49 | 		return
50 | 	}
50 | 	tx.Commit()
51 | return
52 | }
53 |
--------------------------------------------------------------------------------
/Services/scraper.go:
--------------------------------------------------------------------------------
1 | package Services
2 |
3 | import (
4 | "sync"
5 | "sync/atomic"
6 |
7 | "github.com/ruslanfedoseenko/dhtcrawler/Config"
8 |
9 | "github.com/op/go-logging"
10 | "github.com/ruslanfedoseenko/dhtcrawler/Services/rpc"
11 | "github.com/ruslanfedoseenko/dhtcrawler/Services/tracker"
12 | "time"
13 | )
14 |
15 | var scrapeerLog = logging.MustGetLogger("Scraper")
16 |
17 | type Scraper struct {
18 | workersDone sync.WaitGroup
19 | isAlreadyWaiting bool
20 | quit chan bool
21 | numThreads int
22 | trackerUrls []string
23 | workerId int32
24 | rpcClient *Rpc.RpcClient
25 | }
26 |
27 | func NewScraper() (s *Scraper) {
28 | s = new(Scraper)
29 |
30 | s.trackerUrls = App.Config.ScrapeConfig.Trackers
31 | s.numThreads = App.Config.ScrapeConfig.WorkerThreads
32 | s.workerId = 0
33 | return
34 | }
35 |
36 | var scraper *Scraper
37 |
38 | func SetupScrape(app *Config.App) {
39 | App = app
40 |
41 | scraper = NewScraper()
42 | scraper.rpcClient = Rpc.GetRpcCLientInstance(app)
43 | scrapeerLog.Info("Scrape will repeat every", uint64(App.Config.ScrapeConfig.ScrapeTimeout), "hours")
44 | App.Scheduler.Every(uint64(App.Config.ScrapeConfig.ScrapeTimeout)).Hours().Do(scraper.Start)
45 | App.AddService(scraper)
46 |
47 | }
48 | func (s *Scraper) Start() {
49 | go s.startInternal()
50 | }
51 | func (s *Scraper) startInternal() {
52 | 	scrapeerLog.Info("Waiting for previous task to finish")
53 | if s.isAlreadyWaiting {
54 | return
55 | }
56 | s.isAlreadyWaiting = true
57 | s.workersDone.Wait()
58 | s.isAlreadyWaiting = false
59 | 	scrapeerLog.Info("Finished waiting for previous task")
60 | s.initChannels()
61 | s.workersDone.Add(s.numThreads)
62 | for i := 0; i < s.numThreads; i++ {
63 | go s.scrapeThreadWorker()
64 | }
65 | scrapeerLog.Info("Scraping started")
66 | }
67 |
68 | func (s *Scraper) initChannels() {
69 | s.quit = make(chan bool, s.numThreads+1)
70 | }
71 |
72 | func (s *Scraper) scrapeThreadWorker() {
73 | workerId := atomic.LoadInt32(&s.workerId)
74 | atomic.AddInt32(&s.workerId, 1)
75 |
76 | loop:
76 | 	for {
77 |
78 | if !s.rpcClient.HasAvailableTasks() {
79 | break
80 | }
81 | task := s.rpcClient.GetNextScrapeTask()
82 | if task == nil {
83 | <-time.After(5 * time.Second)
84 | continue
85 | }
86 | 		scrapeerLog.Info("ScrapeWorker", workerId, "received work", task)
87 |
88 | if len(task.InfoHashes) > 0 {
89 |
90 | for _, trackerUrl := range s.trackerUrls {
91 | var result Rpc.ScrapeResult
92 | result.TrackerUrl = trackerUrl
93 | var err error
94 | var response tracker.ScrapeResponse
95 | response, err = tracker.Scrape(trackerUrl, task.InfoHashes)
96 | result.Response = &response
97 | scrapeerLog.Info("ScrapeResult from", trackerUrl, "Len", len(result.Response.ScrapeDatas), result.Response.ScrapeDatas)
98 | if err != nil {
99 | scrapeerLog.Error("failed scraping tracker", trackerUrl, err.Error())
100 | continue
101 | }
102 | if len(result.Response.ScrapeDatas) == 0 {
103 | scrapeerLog.Info("Empty scrape response from tracker", trackerUrl)
104 | continue
105 | }
106 | scrapeerLog.Info("reporting scrape result for", trackerUrl, "result.TrackerUrl = ", result.TrackerUrl, "whole result", result)
107 | s.rpcClient.ReportScrapeResults(&result)
108 | }
109 |
110 | }
111 |
112 | select {
113 | case <-s.quit:
114 | {
115 | 				scrapeerLog.Info("Received Quit Signal")
116 | 				break loop
117 | }
118 | default:
119 | {
120 | 				scrapeerLog.Info("No Quit Signal received")
121 | }
122 | }
123 |
124 | }
125 | s.workersDone.Done()
126 | scrapeerLog.Info("Exit scrapeThreadWorker", workerId)
127 | }
128 |
--------------------------------------------------------------------------------
/Services/tracker/Hash.go:
--------------------------------------------------------------------------------
1 | package tracker
2 |
3 | import (
4 | "encoding/hex"
5 | "fmt"
6 | )
7 |
8 | type Hash [20]byte
9 |
10 | func (h Hash) Bytes() []byte {
11 | return h[:]
12 | }
13 |
14 | func (h Hash) AsString() string {
15 | return string(h[:])
16 | }
17 |
18 | func (h Hash) HexString() string {
19 | return fmt.Sprintf("%x", h[:])
20 | }
21 |
22 | func (h *Hash) FromString(s string) (err error) {
23 | if len(s) != 20 {
24 | err = fmt.Errorf("string has bad length: %d", len(s))
25 | return
26 | }
27 | for i := 0; i < len(s); i++ {
28 | h[i] = s[i]
29 | }
30 | return
31 | }
32 |
33 | func (h *Hash) FromHexString(s string) (err error) {
34 | if len(s) != 40 {
35 | err = fmt.Errorf("hash hex string has bad length: %d", len(s))
36 | return
37 | }
38 | n, err := hex.Decode(h[:], []byte(s))
39 | if err != nil {
40 | return
41 | }
42 | if n != 20 {
43 | panic(n)
44 | }
45 | return
46 | }
47 |
--------------------------------------------------------------------------------
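
Hash converts between the 40-character hex infohash used in the database and the raw 20-byte form sent to trackers. A quick round-trip sketch; the infohash below is a made-up example.

package main

import (
	"fmt"

	"github.com/ruslanfedoseenko/dhtcrawler/Services/tracker"
)

func main() {
	var h tracker.Hash
	if err := h.FromHexString("0123456789abcdef0123456789abcdef01234567"); err != nil {
		panic(err)
	}
	// prints the same 40 hex characters and the raw length 20
	fmt.Println(h.HexString(), len(h.Bytes()))
}
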
/Services/tracker/http.go:
--------------------------------------------------------------------------------
1 | package tracker
2 |
3 | import (
4 | "fmt"
5 | "github.com/anacrolix/missinggo/httptoo"
6 | "log"
7 | "net/http"
8 | "net/url"
9 |
10 | "github.com/zeebo/bencode"
11 | )
12 |
13 | type HttpScrapeResponse struct {
14 | Files map[string]ScrapeTorrentInfo `bencode:"files"`
15 | }
16 |
17 | func setAnnounceParams(uri *url.URL, sr *ScrapeRequest) {
18 |
19 | q := uri.Query()
20 | for _, infoHash := range sr.InfoHashes {
21 | q.Add("info_hash", string(infoHash[:]))
22 | }
23 | uri.RawQuery = q.Encode()
24 | log.Println("Final tracker Url", uri.RequestURI())
25 |
26 | }
27 |
28 | func scrapeHTTP(sr *ScrapeRequest, _url *url.URL) (ret ScrapeResponse, err error) {
29 | uri := httptoo.CopyURL(_url)
30 | setAnnounceParams(uri, sr)
31 | log.Println("Creating Http Req with URI:", uri.String())
32 | req, err := http.NewRequest("GET", uri.String(), nil)
33 | log.Println("Request:", req)
34 | resp, err := http.DefaultClient.Do(req)
35 | if err != nil {
36 | return
37 | }
38 | defer resp.Body.Close()
39 |
40 | if resp.StatusCode != 200 {
41 | err = fmt.Errorf("response from tracker: %s:", resp.Status)
42 | return
43 | }
44 | var httpResponse HttpScrapeResponse
45 |
46 | decoder := bencode.NewDecoder(resp.Body)
47 | err = decoder.Decode(&httpResponse)
48 |
49 | ret.ScrapeDatas = make(map[string]ScrapeTorrentInfo)
50 | //log.Println("Bytes:", string(bytesArr), "decoded as:")
51 | for key, value := range httpResponse.Files {
52 | var infoHash Hash
53 | infoHash.FromString(key)
54 | ret.ScrapeDatas[infoHash.HexString()] = value
55 | }
56 | if err != nil {
57 | err = fmt.Errorf("error decoding %s", err)
58 | return
59 | }
60 | return
61 | }
62 |
--------------------------------------------------------------------------------
/Services/tracker/tracker.go:
--------------------------------------------------------------------------------
1 | package tracker
2 |
3 | import (
4 | "errors"
5 | "log"
6 | "net"
7 | "net/url"
8 | "strings"
9 | )
10 |
11 | type Peer struct {
12 | IP net.IP
13 | Port int
14 | }
15 |
16 | type ScrapeRequest struct {
17 | InfoHashes []Hash
18 | }
19 | type ScrapeTorrentInfo struct {
20 | Seeders int32 `bencode:"downloaded"`
21 | Completed int32 `bencode:"complete"`
22 | Leechers int32 `bencode:"incomplete"`
23 | }
24 | type ScrapeResponse struct {
25 | ScrapeDatas map[string]ScrapeTorrentInfo `bencode:"files"`
26 | }
27 |
28 | var (
29 | ErrBadScheme = errors.New("unknown scheme")
30 | )
31 |
32 | func Scrape(tracker string, infoHashes []string) (r ScrapeResponse, err error) {
33 | _url, err := url.Parse(strings.Replace(tracker, "announce", "scrape", -1))
34 | if err != nil {
35 | return
36 | }
37 | var req ScrapeRequest
38 | hashesLen := len(infoHashes)
39 | for i := 0; i < hashesLen; i++ {
40 | var hash Hash
41 | err := hash.FromHexString(infoHashes[i])
42 | if err != nil {
43 | log.Println("Error: ", err.Error())
44 | }
45 | req.InfoHashes = append(req.InfoHashes, hash)
46 | }
47 | switch _url.Scheme {
48 | case "http", "https":
49 | return scrapeHTTP(&req, _url)
50 | //break
51 | case "udp":
52 | return scrapeUDP(&req, _url)
53 | default:
54 | err = ErrBadScheme
55 | return
56 | }
57 | return
58 | }
59 |
--------------------------------------------------------------------------------
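A sketch of how a caller might drive `Scrape` end to end. The announce URL is one of the trackers listed in bin/config.json, the info-hash is made up, and the import path is inferred from the module layout.

```go
package main

import (
	"fmt"

	"github.com/ruslanfedoseenko/dhtcrawler/Services/tracker"
)

func main() {
	// Announce URL taken from bin/config.json; Scrape rewrites it to the
	// scrape endpoint internally. The info-hash is a made-up example.
	announce := "udp://tracker.opentrackr.org:1337/announce"
	hashes := []string{"0123456789abcdef0123456789abcdef01234567"}

	resp, err := tracker.Scrape(announce, hashes)
	if err != nil {
		fmt.Println("scrape failed:", err)
		return
	}
	for hash, info := range resp.ScrapeDatas {
		fmt.Printf("%s: %d seeders, %d leechers, %d snatches\n",
			hash, info.Seeders, info.Leechers, info.Completed)
	}
}
```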
/Services/tracker/udp.go:
--------------------------------------------------------------------------------
1 | package tracker
2 |
3 | import (
4 | "bytes"
5 | "encoding/binary"
6 | "errors"
7 | "fmt"
8 | "github.com/ruslanfedoseenko/dhtcrawler/Utils"
9 | "io"
10 | "log"
11 | "math/rand"
12 | "net"
13 | "net/url"
14 | "time"
15 | )
16 |
17 | type Action int32
18 |
19 | const (
20 | ActionConnect Action = iota
21 | ActionAnnounce
22 | ActionScrape
23 | ActionError
24 |
25 | connectRequestConnectionId = 0x41727101980
26 |
27 | // BEP 41
28 | optionTypeEndOfOptions = 0
29 | optionTypeNOP = 1
30 | optionTypeURLData = 2
31 | )
32 |
33 | type ConnectionRequest struct {
34 | ConnectionId int64
35 | Action int32
36 | 	TransactionId int32
37 | }
38 |
39 | type ConnectionResponse struct {
40 | ConnectionId int64
41 | }
42 |
43 | type ResponseHeader struct {
44 | Action Action
45 | TransactionId int32
46 | }
47 |
48 | type RequestHeader struct {
49 | ConnectionId int64
50 | Action Action
51 | TransactionId int32
52 | } // 16 bytes
53 |
54 | type udpScrapeResponse struct {
55 | torrentDatas []ScrapeTorrentInfo
56 | }
57 |
58 | type udpScrapeRequest struct {
59 | RequestHeader
60 | InfoHashes []Hash
61 | }
62 |
63 | func newTransactionId() int32 {
64 | return int32(rand.Uint32())
65 | }
66 |
67 | func timeout(contiguousTimeouts int) (d time.Duration) {
68 | if contiguousTimeouts > 8 {
69 | contiguousTimeouts = 8
70 | }
71 | d = 15 * time.Second
72 | for ; contiguousTimeouts > 0; contiguousTimeouts-- {
73 | d *= 2
74 | }
75 | return
76 | }
77 |
78 | type udpAnnounce struct {
79 | contiguousTimeouts int
80 | connectionIdReceived time.Time
81 | connectionId int64
82 | socket net.Conn
83 | url url.URL
84 | }
85 |
86 | func (c *udpAnnounce) Close() error {
87 | if c.socket != nil {
88 | 		log.Println("Closing UDP scraper connection")
89 | return c.socket.Close()
90 | }
91 | return nil
92 | }
93 |
94 | func (c *udpAnnounce) Do(req *ScrapeRequest) (res ScrapeResponse, err error) {
95 | err = c.connect()
96 | if err != nil {
97 | return
98 | }
99 | 	// The scrape request body is just the list of raw info-hashes, written
100 | 	// big-endian after the 16-byte request header by c.write below.
101 | 	// (BEP 41 URLData options are not used for scrape requests.)
102 | b, err := c.request(ActionScrape, req.InfoHashes)
103 | if err != nil {
104 | return
105 | }
106 | 	scrapeResponse := make([]int32, 3*len(req.InfoHashes))
107 | 
108 | 	err = readBody(b, &scrapeResponse)
109 | if err != nil {
110 | if err == io.EOF {
111 | err = io.ErrUnexpectedEOF
112 | }
113 | err = fmt.Errorf("error parsing scrape response: %s", err)
114 | return
115 | }
116 | if len(scrapeResponse)/3 != len(req.InfoHashes) {
117 | 		err = errors.New("request and response info-hash counts do not match")
118 | return
119 | }
120 |
121 | res.ScrapeDatas = make(map[string]ScrapeTorrentInfo)
122 | for i, infohash := range req.InfoHashes {
123 | res.ScrapeDatas[infohash.HexString()] = ScrapeTorrentInfo{
124 | Seeders: scrapeResponse[i*3],
125 | Completed: scrapeResponse[i*3+1],
126 | Leechers: scrapeResponse[i*3+2],
127 | }
128 | }
129 | return
130 | }
131 |
132 | // body is the binary serializable request body. trailer is optional data
133 | // following it, such as for BEP 41.
134 | func (c *udpAnnounce) write(h *RequestHeader, body interface{}) (err error) {
135 | var buf bytes.Buffer
136 | err = binary.Write(&buf, binary.BigEndian, h)
137 | if err != nil {
138 | panic(err)
139 | }
140 | if body != nil {
141 | err = binary.Write(&buf, binary.BigEndian, body)
142 | if err != nil {
143 | panic(err)
144 | }
145 | }
146 | n, err := c.socket.Write(buf.Bytes())
147 | if err != nil {
148 | return
149 | }
150 | if n != buf.Len() {
151 | panic("write should send all or error")
152 | }
153 | return
154 | }
155 |
156 | func read(r io.Reader, data interface{}) error {
157 | return binary.Read(r, binary.BigEndian, data)
158 | }
159 |
160 | func write(w io.Writer, data interface{}) error {
161 | return binary.Write(w, binary.BigEndian, data)
162 | }
163 |
164 | // args is the binary serializable request body. trailer is optional data
165 | // following it, such as for BEP 41.
166 | func (c *udpAnnounce) request(action Action, args interface{}) (responseBody *bytes.Buffer, err error) {
167 | tid := newTransactionId()
168 | err = c.write(&RequestHeader{
169 | ConnectionId: c.connectionId,
170 | Action: action,
171 | TransactionId: tid,
172 | }, args)
173 | if err != nil {
174 | return
175 | }
176 | c.socket.SetReadDeadline(time.Now().Add(timeout(c.contiguousTimeouts)))
177 | 	b := make([]byte, 65536) // 64 KiB read buffer
178 | for {
179 | var n int
180 | n, err = c.socket.Read(b)
181 | if opE, ok := err.(*net.OpError); ok {
182 | if opE.Timeout() {
183 | c.contiguousTimeouts++
184 | return
185 | }
186 | }
187 | if err != nil {
188 | return
189 | }
190 | buf := bytes.NewBuffer(b[:n])
191 | var h ResponseHeader
192 | err = binary.Read(buf, binary.BigEndian, &h)
193 | switch err {
194 | case io.ErrUnexpectedEOF:
195 | continue
196 | case nil:
197 | default:
198 | return
199 | }
200 | if h.TransactionId != tid {
201 | continue
202 | }
203 | c.contiguousTimeouts = 0
204 | if h.Action == ActionError {
205 | err = errors.New(buf.String())
206 | }
207 | responseBody = buf
208 | return
209 | }
210 | }
211 |
212 | func readBody(r io.Reader, data ...interface{}) (err error) {
213 | for _, datum := range data {
214 | err = binary.Read(r, binary.BigEndian, datum)
215 | if err != nil {
216 | break
217 | }
218 | }
219 | return
220 | }
221 |
222 | func (c *udpAnnounce) connected() bool {
223 | return !c.connectionIdReceived.IsZero() && time.Now().Before(c.connectionIdReceived.Add(time.Minute))
224 | }
225 |
226 | func (c *udpAnnounce) connect() (err error) {
227 | if c.connected() {
228 | return nil
229 | }
230 | c.connectionId = connectRequestConnectionId
231 | if c.socket == nil {
232 | hmp := Utils.SplitHostMaybePort(c.url.Host)
233 | if hmp.NoPort {
234 | hmp.NoPort = false
235 | hmp.Port = 80
236 | }
237 | c.socket, err = net.Dial("udp", hmp.String())
238 | if err != nil {
239 | return
240 | }
241 | }
242 | b, err := c.request(ActionConnect, nil)
243 | if err != nil {
244 | return
245 | }
246 | var res ConnectionResponse
247 | err = readBody(b, &res)
248 | if err != nil {
249 | return
250 | }
251 | c.connectionId = res.ConnectionId
252 | c.connectionIdReceived = time.Now()
253 | return
254 | }
255 |
256 | func scrapeUDP(ar *ScrapeRequest, _url *url.URL) (ScrapeResponse, error) {
257 | ua := udpAnnounce{
258 | url: *_url,
259 | }
260 | defer ua.Close()
261 | 	log.Println("New UDP scraper connection")
262 | return ua.Do(ar)
263 | }
264 |
--------------------------------------------------------------------------------
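Two details of the UDP protocol handling above are easy to miss: the request header serializes to exactly 16 bytes in big-endian order, and the read deadline doubles per consecutive timeout, capped at eight doublings. A self-contained sketch (with a local copy of the header struct) that demonstrates both:

```go
package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
	"time"
)

// Local copy of udp.go's RequestHeader layout: int64 + int32 + int32.
type requestHeader struct {
	ConnectionId  int64
	Action        int32
	TransactionId int32
}

func main() {
	var buf bytes.Buffer
	h := requestHeader{
		ConnectionId:  0x41727101980, // the BEP 15 connect magic from udp.go
		Action:        0,             // ActionConnect
		TransactionId: 12345,
	}
	if err := binary.Write(&buf, binary.BigEndian, &h); err != nil {
		panic(err)
	}
	fmt.Println("serialized header size:", buf.Len(), "bytes") // 16

	// timeout(n) backoff: 15s doubled per consecutive timeout, capped at n=8.
	d := 15 * time.Second
	for n := 0; n <= 8; n++ {
		fmt.Printf("contiguousTimeouts=%d -> deadline %v\n", n, d)
		d *= 2
	}
}
```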
/Spider/Spider.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import (
4 | "github.com/ruslanfedoseenko/dhtcrawler/Config"
5 | "github.com/ruslanfedoseenko/dhtcrawler/Services"
6 | )
7 |
8 | func main() {
9 |
10 | app := Config.NewApp()
11 |
12 | Services.SetupScrape(app)
13 | Services.SetupDhtCrawling(app)
14 | app.Run()
15 | }
16 |
--------------------------------------------------------------------------------
/Utils/HostParser.go:
--------------------------------------------------------------------------------
1 | package Utils
2 |
3 | import (
4 | "net"
5 | "strconv"
6 | "strings"
7 | )
8 |
9 | // HostMaybePort represents a host with an optional port, as returned by SplitHostMaybePort.
10 | type HostMaybePort struct {
11 | Host string // Just the host, with no port.
12 | Port int // The port if NoPort is false.
13 | NoPort bool // Whether a port is specified.
14 | Err error // The error returned from net.SplitHostPort.
15 | }
16 |
17 | func (me *HostMaybePort) String() string {
18 | if me.NoPort {
19 | return me.Host
20 | }
21 | return net.JoinHostPort(me.Host, strconv.FormatInt(int64(me.Port), 10))
22 | }
23 |
24 | // SplitHostMaybePort parses a "host:port" string of the kind used throughout
25 | // the standard library. If no port is present, the whole input is treated as
26 | // the host and NoPort is set.
27 | func SplitHostMaybePort(hostport string) HostMaybePort {
28 | host, portStr, err := net.SplitHostPort(hostport)
29 | if err != nil {
30 | if strings.Contains(err.Error(), "missing port") {
31 | return HostMaybePort{
32 | Host: hostport,
33 | NoPort: true,
34 | }
35 | }
36 | return HostMaybePort{
37 | Err: err,
38 | }
39 | }
40 | 	portI64, err := strconv.ParseInt(portStr, 10, 0)
41 | if err != nil {
42 | return HostMaybePort{
43 | Host: host,
44 | Port: -1,
45 | Err: err,
46 | }
47 | }
48 | return HostMaybePort{
49 | Host: host,
50 | Port: int(portI64),
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
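A short usage sketch for `SplitHostMaybePort`; the hostnames are placeholders. This mirrors how udp.go uses it to fall back to a default port when the tracker URL has none.

```go
package main

import (
	"fmt"

	"github.com/ruslanfedoseenko/dhtcrawler/Utils"
)

func main() {
	// Placeholder host strings, with and without an explicit port.
	for _, s := range []string{"tracker.example.org:6969", "tracker.example.org"} {
		hmp := Utils.SplitHostMaybePort(s)
		switch {
		case hmp.Err != nil:
			fmt.Printf("%q: parse error: %v\n", s, hmp.Err)
		case hmp.NoPort:
			fmt.Printf("%q: host %q, no port given\n", s, hmp.Host)
		default:
			fmt.Printf("%q: host %q, port %d\n", s, hmp.Host, hmp.Port)
		}
	}
}
```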
/Utils/Random.go:
--------------------------------------------------------------------------------
1 | package Utils
2 |
3 | import (
4 | "crypto/rand"
5 | "encoding/base64"
6 | )
7 |
8 | func GenerateCSRFSecret() (string, error) {
9 | return GenerateRandomString(32)
10 | }
11 |
12 |
13 | // GenerateRandomBytes returns securely generated random bytes.
14 | // It will return an error if the system's secure random
15 | // number generator fails to function correctly, in which
16 | // case the caller should not continue.
17 | func GenerateRandomBytes(n int) ([]byte, error) {
18 | b := make([]byte, n)
19 | _, err := rand.Read(b)
20 | // Note that err == nil only if we read len(b) bytes.
21 | if err != nil {
22 | return nil, err
23 | }
24 |
25 | return b, nil
26 | }
27 |
28 | // GenerateRandomString returns a URL-safe, base64 encoded
29 | // securely generated random string.
30 | // It will return an error if the system's secure random
31 | // number generator fails to function correctly, in which
32 | // case the caller should not continue.
33 | func GenerateRandomString(s int) (string, error) {
34 | b, err := GenerateRandomBytes(s)
35 | return base64.URLEncoding.EncodeToString(b), err
36 | }
--------------------------------------------------------------------------------
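A sketch of generating a CSRF secret with the helpers above: 32 random bytes become a 44-character URL-safe base64 string.

```go
package main

import (
	"fmt"

	"github.com/ruslanfedoseenko/dhtcrawler/Utils"
)

func main() {
	secret, err := Utils.GenerateCSRFSecret()
	if err != nil {
		// The system CSPRNG failed; as the doc comments note, the caller
		// should not continue.
		panic(err)
	}
	// 32 random bytes -> 44 characters of URL-safe base64 (with padding).
	fmt.Println(len(secret), secret)
}
```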
/Utils/SliceUtils.go:
--------------------------------------------------------------------------------
1 | package Utils
2 |
3 | import (
4 | "fmt"
5 | "strings"
6 | )
7 |
8 | func SliceToPathString(values []interface{}) string {
9 | s := make([]string, len(values)) // Pre-allocate the right size
10 | for index := range values {
11 | s[index] = fmt.Sprintf("%v", values[index])
12 | }
13 | return strings.Join(s, "/")
14 | }
15 | func ToInterfaceSlice(slice []string) (res []interface{}) {
16 | sliceLen := len(slice)
17 | res = make([]interface{}, sliceLen)
18 | for index, item := range slice {
19 | res[index] = item
20 | }
21 | return
22 | }
23 | func IndexOf(slice []string, item string) int {
24 |
25 | for index, s := range slice {
26 | if strings.Trim(s, "\n\r ") == strings.Trim(item, "\n\r ") {
27 | return index
28 | }
29 | }
30 |
31 | return -1
32 | }
33 |
--------------------------------------------------------------------------------
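Usage sketch for the three slice helpers above; the values are placeholders.

```go
package main

import (
	"fmt"

	"github.com/ruslanfedoseenko/dhtcrawler/Utils"
)

func main() {
	// Placeholder path segments.
	parts := Utils.ToInterfaceSlice([]string{"movies", "2018", "sample.mkv"})
	fmt.Println(Utils.SliceToPathString(parts)) // movies/2018/sample.mkv

	trackers := []string{"udp://a/announce", "udp://b/announce"}
	fmt.Println(Utils.IndexOf(trackers, "udp://b/announce")) // 1
	fmt.Println(Utils.IndexOf(trackers, "udp://c/announce")) // -1
}
```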
/Utils/ZombodbQueryBuilder.go:
--------------------------------------------------------------------------------
1 | package Utils
2 |
3 | import (
4 | "strconv"
5 | "strings"
6 | )
7 |
8 | func tokenizer(c rune) bool {
9 | return strings.ContainsRune(" #''\":*~?!%(),<=>[]^{}\r\n\t\f.", c)
10 | }
11 |
12 | func ZdbBuildQuery(field, searchRequest string, offset, limit uint64) string {
13 | words := strings.FieldsFunc(searchRequest, tokenizer)
14 | wordsLen := len(words)
15 |
16 | for i := 0; i < wordsLen; i++ {
17 | if len(words[i]) == 0 {
18 | 			words = append(words[:i], words[i+1:]...)
19 | 			wordsLen, i = wordsLen-1, i-1
20 | } else {
21 | if zdbShouldEscape(words[i]) {
22 | words[i] = strconv.Quote(words[i])
23 | }
24 | }
25 | }
26 |
27 | var nameQuery string
28 | if wordsLen == 1 {
29 | nameQuery = " '#limit(_score desc," + strconv.FormatUint(offset, 10) + "," + strconv.FormatUint(limit, 10) + ") " + field + ":(" + words[0] + ")'"
30 | } else {
31 | nameQuery = " '#limit(_score desc," + strconv.FormatUint(offset, 10) + "," + strconv.FormatUint(limit, 10) + ") " + field + ":(" + strings.Join(words, ",") + ")'"
32 | }
33 |
34 | return nameQuery
35 | }
36 |
37 | var zdbKeyWords []string = []string{
38 | "with",
39 | "and",
40 | "or",
41 | "not",
42 | }
43 |
44 | func zdbShouldEscape(s string) bool {
45 | return IndexOf(zdbKeyWords, strings.ToLower(s)) > -1
46 | }
47 |
--------------------------------------------------------------------------------
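To illustrate the query shape produced by `ZdbBuildQuery` (with the element-removal fix above): a multi-word search is tokenized, ZomboDB keywords such as "and" are quoted, and the result is wrapped in a `#limit(...)` expression. The search phrase is made up.

```go
package main

import (
	"fmt"

	"github.com/ruslanfedoseenko/dhtcrawler/Utils"
)

func main() {
	// Made-up search phrase; "and" is a ZomboDB keyword, so it gets quoted.
	q := Utils.ZdbBuildQuery("name", "tom and jerry", 0, 20)
	fmt.Println(q)
	// Expected shape:
	//  '#limit(_score desc,0,20) name:(tom,"and",jerry)'
}
```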
/bin/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "DbConfig" : {
3 | "DbDriver": "postgres",
4 | "TableName": "table",
5 | "UserName" : "user",
6 | "Password" : "password"
7 | },
8 | "DhtConfig" : {
9 | "StartPort" : 6991,
10 | "Workers" : 6
11 | },
12 | "HttpConfig" : {
13 | "StaticDataFolder" : "/home/devrus/IdeaProjects/DhtCrawler/bin/static"
14 | },
15 | "RpcConfig" : {
16 | "Mode" : "CLIENT",
17 | "Host": "79.143.188.25",
18 | "Port": 13699
19 | },
20 | "TagProducersConfig": {
21 | "TagProducers": {
22 | "video":["vob",
23 | "3gp",
24 | "webdlrip",
25 | "vts",
26 | "xvid",
27 | "subs",
28 | "brrip",
29 | "bluray",
30 | "m2ts",
31 | "avi",
32 | "mpeg",
33 | "divx",
34 | "x265aac",
35 | "480",
36 | "480p",
37 | "720",
38 | "720p",
39 | "1080",
40 | "1080p",
41 | "xxx",
42 | "srt",
43 | "ogm",
44 | "x264",
45 | "dvdrip",
46 | "bdmv",
47 | "smi",
48 | "satrip",
49 | "mp4",
50 | "dvd9",
51 | "flv",
52 | "screener",
53 | "bd-mux",
54 | "wmv",
55 | "mpg",
56 | "bdmux",
57 | "ts",
58 | "web-dlrip",
59 | "str",
60 | "dvd",
61 | "mov",
62 | "rmvb",
63 | "webdl",
64 | "hdrip",
65 | "dvd5",
66 | "ass",
67 | "qt",
68 | "mkv",
69 | "bdrip",
70 | "m4v"],
71 | "audio":[
72 | "ogg",
73 | "ram",
74 | "wawpack",
75 | "voc",
76 | "ras",
77 | "wma",
78 | "aiff",
79 | "ac3",
80 | "ape",
81 | "asf",
82 | "mpc",
83 | "m4a",
84 | "aac",
85 | "mp3",
86 | "wav",
87 | "shn",
88 | "cue",
89 | "flac",
90 | "m3u",
91 | "oga"
92 | ],
93 | "app":[
94 | "apk",
95 | "tgz",
96 | "gzip",
97 | "linux",
98 | "dmg",
99 | "ace",
100 | "deb",
101 | "android",
102 | "keymaker",
103 | "mdx",
104 | "portable",
105 | "lzma",
106 | "jar",
107 | "nosteam",
108 | "b5t",
109 | "au",
110 | "7z",
111 | "gz",
112 | "nrg",
113 | "rpm",
114 | "crack",
115 | "exe",
116 | "activator",
117 | "dll",
118 | "rar",
119 | "cb7",
120 | "bz2",
121 | "mdf",
122 | "msi",
123 | "keygen",
124 | "zip",
125 | "tar",
126 | "b6t",
127 | "mds",
128 | "iso",
129 | "bin"
130 | ],
131 | "book" :[
132 | "pdf",
133 | "m4b",
134 | "txt",
135 | "cba",
136 | "cbr",
137 | "cbz",
138 | "ppt",
139 | "djvu",
140 | "mobi",
141 | "fb2",
142 | "cbt",
143 | "rtf",
144 | "magazine",
145 | "epub",
146 | "doc"
147 | ],
148 | "image":[
149 | "gif",
150 | "psd",
151 | "bmp",
152 | "tiff",
153 | "tga",
154 | "pcx",
155 | "jpg",
156 | "jpeg",
157 | "png"
158 | ]
159 |
160 | }
161 |
162 |
163 | },
164 | "ScrapeConfig" :{
165 | "Trackers" : [
166 | "udp://tracker.coppersurfer.tk:6969/announce",
167 | "udp://tracker.opentrackr.org:1337/announce",
168 | "udp://tracker.zer0day.to:1337/announce",
169 | "udp://tracker.leechers-paradise.org:6969/announce",
170 | "udp://9.rarbg.com:2710/announce",
171 | "udp://explodie.org:6969/announce",
172 | "udp://p4p.arenabg.com:1337/announce",
173 | "udp://tracker.aletorrenty.pl:2710/announce",
174 | "udp://torrent.gresille.org:80/announce",
175 | "udp://tracker.internetwarriors.net:1337/announce",
176 | "udp://tracker.yoshi210.com:6969/announce",
177 | "udp://tracker.tiny-vps.com:6969/announce",
178 | "udp://tracker.mg64.net:6969/announce",
179 | "udp://tracker.kicks-ass.net:80/announce",
180 | "udp://tracker.grepler.com:6969/announce",
181 | "udp://tracker.filetracker.pl:8089/announce",
182 | "udp://tracker.ex.ua:80/announce",
183 | "udp://open.stealth.si:80/announce",
184 | "udp://mgtracker.org:2710/announce",
185 | "udp://ipv4.tracker.harry.lu:80/announce",
186 | "udp://182.176.139.129:6969/announce",
187 | "udp://zephir.monocul.us:6969/announce",
188 | "udp://tracker.vanitycore.co:6969/announce",
189 | "udp://tracker.tordb.ml:6881/announce",
190 | "udp://tracker.edoardocolombo.eu:6969/announce",
191 | "udp://poweredcore.com:6969/announce",
192 | "udp://bt.xxx-tracker.com:2710/announce",
193 | "udp://91.218.230.81:6969/announce",
194 | "udp://208.67.16.113:8000/announce",
195 | "udp://open.acgtracker.com:1096/announce",
196 | "udp://tracker.aletorrenty.pl:2710/announce",
197 | "udp://anisaishuu.de:2710/announce",
198 | "udp://p4p.arenabg.ch:1337/announce",
199 | "udp://tracker.dutchtracking.com:80/announce",
200 | "udp://tracker.dutchtracking.nl:80/announce",
201 | "udp://tracker.ex.ua:80/announce",
202 | "udp://explodie.org:6969/announce",
203 | "udp://torrent.gresille.org:80/announce",
204 | "udp://tracker.kicks-ass.net:80/announce",
205 | "udp://retracker.krs-ix.ru:80/announce",
206 | "udp://megapeer.org:6969/announce",
207 | "udp://mgtracker.org:2710/announce",
208 | "udp://tracker.ohys.net:80/announce",
209 | "udp://tracker.opentrackr.org:1337/announce",
210 | "udp://tracker4.piratux.com:6969/announce",
211 | "udp://tracker.pubt.net:2710/announce",
212 | "udp://pubt.net:2710/announce",
213 | "udp://9.rarbg.com:80/announce",
214 | "udp://9.rarbg.com:2710/announce",
215 | "udp://tracker.skyts.net:6969/announce",
216 | "udp://retracker.telecom.kz:80/announce",
217 | "udp://tracker.tiny-vps.com:6969/announce",
218 | "udp://tracker2.torrentino.com:80/announce",
219 | "udp://torrentsmd.com:6969/announce",
220 | "udp://torrentsmd.com:8080/announce",
221 | "udp://tracker.tricitytorrents.com:2710/announce",
222 | "udp://www.wareztorrent.com:80/announce",
223 | "udp://www.wareztorrent.com:6969/announce",
224 | "udp://tracker4.infohash.org:80/announce",
225 | "udp://tracker4.infohash.org:6969/announce",
226 | "udp://91.218.230.81:80/announce",
227 | "udp://91.218.230.81:6969/announce"
228 | ],
229 | "WorkerThreads": 6,
230 | "ScrapeTimeout": 12
231 | },
232 | "ItemsPerPage" : 20
233 | }
--------------------------------------------------------------------------------
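As a rough illustration of how this file maps onto Go, here is a self-contained sketch that unmarshals a subset of it with encoding/json. The struct definitions are hypothetical stand-ins; the real ones live in the Config package and may differ.

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// Hypothetical stand-ins for a subset of bin/config.json.
type scrapeConfig struct {
	Trackers      []string
	WorkerThreads int
	ScrapeTimeout int
}

type appConfig struct {
	ScrapeConfig scrapeConfig
	ItemsPerPage int
}

func main() {
	data, err := os.ReadFile("bin/config.json")
	if err != nil {
		panic(err)
	}
	var cfg appConfig
	if err := json.Unmarshal(data, &cfg); err != nil {
		panic(err)
	}
	fmt.Printf("%d trackers, %d scrape workers, %d items per page\n",
		len(cfg.ScrapeConfig.Trackers), cfg.ScrapeConfig.WorkerThreads, cfg.ItemsPerPage)
}
```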
/bin/setup.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | iptables -t raw -I PREROUTING -p tcp --dport 9999 -j NOTRACK
3 | iptables -t raw -I PREROUTING -p tcp --sport 9999 -j NOTRACK
4 | iptables -t raw -I PREROUTING -p tcp --sport 5432 -j NOTRACK
5 | iptables -t raw -I PREROUTING -p tcp --dport 5432 -j NOTRACK
6 | iptables -t raw -I PREROUTING -p tcp --dport 9200 -j NOTRACK
7 | iptables -t raw -I PREROUTING -p tcp --sport 9200 -j NOTRACK
8 | iptables -t raw -I PREROUTING -p udp --dport 9999 -j NOTRACK
9 | iptables -t raw -I PREROUTING -p udp --sport 9999 -j NOTRACK
10 | iptables -t raw -I PREROUTING -p udp --sport 5432 -j NOTRACK
11 | iptables -t raw -I PREROUTING -p udp --dport 5432 -j NOTRACK
12 | iptables -F
--------------------------------------------------------------------------------
/iconv-linux-amd64:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/devrus91/dhtcrawler/5f9bb9ccef4c3c8a7b8b7d69f163d746b4914e92/iconv-linux-amd64
--------------------------------------------------------------------------------
/static/.babelrc:
--------------------------------------------------------------------------------
1 | {
2 | "presets": [
3 | ["env", {
4 | "modules": false,
5 | "targets": {
6 | "browsers": ["> 1%", "last 2 versions", "not ie <= 8"]
7 | }
8 | }],
9 | "stage-2"
10 | ],
11 | "plugins": ["transform-vue-jsx", "transform-runtime"],
12 | "env": {
13 | "test": {
14 | "presets": ["env", "stage-2"],
15 | "plugins": ["transform-vue-jsx", "transform-es2015-modules-commonjs", "dynamic-import-node"]
16 | }
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/static/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | charset = utf-8
5 | indent_style = space
6 | indent_size = 2
7 | end_of_line = lf
8 | insert_final_newline = true
9 | trim_trailing_whitespace = true
10 |
--------------------------------------------------------------------------------
/static/.eslintignore:
--------------------------------------------------------------------------------
1 | /build/
2 | /config/
3 | /dist/
4 | /*.js
5 | /test/unit/coverage/
6 |
--------------------------------------------------------------------------------
/static/.eslintrc.js:
--------------------------------------------------------------------------------
1 | // https://eslint.org/docs/user-guide/configuring
2 |
3 | module.exports = {
4 | root: true,
5 | parser: 'babel-eslint',
6 | parserOptions: {
7 | sourceType: 'module'
8 | },
9 | env: {
10 | browser: true,
11 | },
12 | // https://github.com/standard/standard/blob/master/docs/RULES-en.md
13 | extends: 'standard',
14 | // required to lint *.vue files
15 | plugins: [
16 | 'html'
17 | ],
18 | // add your custom rules here
19 | rules: {
20 | // allow async-await
21 | 'space-before-function-paren': [2, 'never'],
22 | 'generator-star-spacing': 'off',
23 | // allow debugger during development
24 | 'no-debugger': process.env.NODE_ENV === 'production' ? 'error' : 'off'
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/static/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | node_modules/
3 | /dist/
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | /test/unit/coverage/
8 |
9 | # Editor directories and files
10 | .idea
11 | .vscode
12 | *.suo
13 | *.ntvs*
14 | *.njsproj
15 | *.sln
16 |
--------------------------------------------------------------------------------
/static/.postcssrc.js:
--------------------------------------------------------------------------------
1 | // https://github.com/michael-ciniawsky/postcss-load-config
2 |
3 | module.exports = {
4 | "plugins": {
5 | "postcss-import": {},
6 | "postcss-url": {},
7 | // to edit target browsers: use "browserslist" field in package.json
8 | "autoprefixer": {}
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/static/API.md:
--------------------------------------------------------------------------------
1 |
2 | #
3 |
4 |
5 | Table of Contents
6 |
7 |
8 |
--------------------------------------------------------------------------------
/static/README.md:
--------------------------------------------------------------------------------
1 | # btoogle-frontend
2 |
3 | > A Vue.js project
4 |
5 | ## Build Setup
6 |
7 | ``` bash
8 | # install dependencies
9 | npm install
10 |
11 | # serve with hot reload at localhost:8080
12 | npm run dev
13 |
14 | # build for production with minification
15 | npm run build
16 |
17 | # build for production and view the bundle analyzer report
18 | npm run build --report
19 |
20 | # run unit tests
21 | npm run unit
22 |
23 | # run all tests
24 | npm test
25 | ```
26 |
27 | For a detailed explanation on how things work, check out the [guide](http://vuejs-templates.github.io/webpack/) and [docs for vue-loader](http://vuejs.github.io/vue-loader).
28 |
--------------------------------------------------------------------------------
/static/build/build.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 | require('./check-versions')()
3 |
4 | process.env.NODE_ENV = 'production'
5 |
6 | const ora = require('ora')
7 | const rm = require('rimraf')
8 | const path = require('path')
9 | const chalk = require('chalk')
10 | const webpack = require('webpack')
11 | const config = require('../config')
12 | const webpackConfig = require('./webpack.prod.conf')
13 |
14 | const spinner = ora('building for production...')
15 | spinner.start()
16 |
17 | rm(path.join(config.build.assetsRoot, config.build.assetsSubDirectory), err => {
18 | if (err) throw err
19 | webpack(webpackConfig, (err, stats) => {
20 | spinner.stop()
21 | if (err) throw err
22 | process.stdout.write(stats.toString({
23 | colors: true,
24 | modules: false,
25 | children: false, // If you are using ts-loader, setting this to true will make TypeScript errors show up during build.
26 | chunks: false,
27 | chunkModules: false
28 | }) + '\n\n')
29 |
30 | if (stats.hasErrors()) {
31 | console.log(chalk.red(' Build failed with errors.\n'))
32 | process.exit(1)
33 | }
34 |
35 | console.log(chalk.cyan(' Build complete.\n'))
36 | console.log(chalk.yellow(
37 | ' Tip: built files are meant to be served over an HTTP server.\n' +
38 | ' Opening index.html over file:// won\'t work.\n'
39 | ))
40 | })
41 | })
42 |
--------------------------------------------------------------------------------
/static/build/check-versions.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 | const chalk = require('chalk')
3 | const semver = require('semver')
4 | const packageConfig = require('../package.json')
5 | const shell = require('shelljs')
6 |
7 | function exec (cmd) {
8 | return require('child_process').execSync(cmd).toString().trim()
9 | }
10 |
11 | const versionRequirements = [
12 | {
13 | name: 'node',
14 | currentVersion: semver.clean(process.version),
15 | versionRequirement: packageConfig.engines.node
16 | }
17 | ]
18 |
19 | if (shell.which('npm')) {
20 | versionRequirements.push({
21 | name: 'npm',
22 | currentVersion: exec('npm --version'),
23 | versionRequirement: packageConfig.engines.npm
24 | })
25 | }
26 |
27 | module.exports = function () {
28 | const warnings = []
29 |
30 | for (let i = 0; i < versionRequirements.length; i++) {
31 | const mod = versionRequirements[i]
32 |
33 | if (!semver.satisfies(mod.currentVersion, mod.versionRequirement)) {
34 | warnings.push(mod.name + ': ' +
35 | chalk.red(mod.currentVersion) + ' should be ' +
36 | chalk.green(mod.versionRequirement)
37 | )
38 | }
39 | }
40 |
41 | if (warnings.length) {
42 | console.log('')
43 |     console.log(chalk.yellow('To use this template, you must update the following modules:'))
44 | console.log()
45 |
46 | for (let i = 0; i < warnings.length; i++) {
47 | const warning = warnings[i]
48 | console.log(' ' + warning)
49 | }
50 |
51 | console.log()
52 | process.exit(1)
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/static/build/utils.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 | const path = require('path')
3 | const config = require('../config')
4 | const ExtractTextPlugin = require('extract-text-webpack-plugin')
5 | const packageConfig = require('../package.json')
6 |
7 | exports.assetsPath = function (_path) {
8 | const assetsSubDirectory = process.env.NODE_ENV === 'production'
9 | ? config.build.assetsSubDirectory
10 | : config.dev.assetsSubDirectory
11 |
12 | return path.posix.join(assetsSubDirectory, _path)
13 | }
14 |
15 | exports.cssLoaders = function (options) {
16 | options = options || {}
17 |
18 | const cssLoader = {
19 | loader: 'css-loader',
20 | options: {
21 | sourceMap: options.sourceMap
22 | }
23 | }
24 |
25 | const postcssLoader = {
26 | loader: 'postcss-loader',
27 | options: {
28 | sourceMap: options.sourceMap
29 | }
30 | }
31 |
32 | // generate loader string to be used with extract text plugin
33 | function generateLoaders (loader, loaderOptions) {
34 | const loaders = options.usePostCSS ? [cssLoader, postcssLoader] : [cssLoader]
35 |
36 | if (loader) {
37 | loaders.push({
38 | loader: loader + '-loader',
39 | options: Object.assign({}, loaderOptions, {
40 | sourceMap: options.sourceMap
41 | })
42 | })
43 | }
44 |
45 | // Extract CSS when that option is specified
46 | // (which is the case during production build)
47 | if (options.extract) {
48 | return ExtractTextPlugin.extract({
49 | use: loaders,
50 | fallback: 'vue-style-loader'
51 | })
52 | } else {
53 | return ['vue-style-loader'].concat(loaders)
54 | }
55 | }
56 |
57 | // https://vue-loader.vuejs.org/en/configurations/extract-css.html
58 | return {
59 | css: generateLoaders(),
60 | postcss: generateLoaders(),
61 | less: generateLoaders('less'),
62 | sass: generateLoaders('sass', { indentedSyntax: true }),
63 | scss: generateLoaders('sass'),
64 | stylus: generateLoaders('stylus'),
65 | styl: generateLoaders('stylus')
66 | }
67 | }
68 |
69 | // Generate loaders for standalone style files (outside of .vue)
70 | exports.styleLoaders = function (options) {
71 | const output = []
72 | const loaders = exports.cssLoaders(options)
73 |
74 | for (const extension in loaders) {
75 | const loader = loaders[extension]
76 | output.push({
77 | test: new RegExp('\\.' + extension + '$'),
78 | use: loader
79 | })
80 | }
81 |
82 | return output
83 | }
84 |
85 | exports.createNotifierCallback = () => {
86 | const notifier = require('node-notifier')
87 |
88 | return (severity, errors) => {
89 | if (severity !== 'error') return
90 |
91 | const error = errors[0]
92 | const filename = error.file && error.file.split('!').pop()
93 |
94 | notifier.notify({
95 | title: packageConfig.name,
96 | message: severity + ': ' + error.name,
97 | subtitle: filename || '',
98 | icon: path.join(__dirname, 'logo.png')
99 | })
100 | }
101 | }
102 |
--------------------------------------------------------------------------------
/static/build/vue-loader.conf.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 | const utils = require('./utils')
3 | const config = require('../config')
4 | const isProduction = process.env.NODE_ENV === 'production'
5 | const sourceMapEnabled = isProduction
6 | ? config.build.productionSourceMap
7 | : config.dev.cssSourceMap
8 |
9 | module.exports = {
10 | loaders: utils.cssLoaders({
11 | sourceMap: sourceMapEnabled,
12 | extract: isProduction
13 | }),
14 | cssSourceMap: sourceMapEnabled,
15 | cacheBusting: config.dev.cacheBusting,
16 | transformToRequire: {
17 | video: ['src', 'poster'],
18 | source: 'src',
19 | img: 'src',
20 | image: 'xlink:href'
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/static/build/webpack.base.conf.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 | const path = require('path')
3 | const utils = require('./utils')
4 | const config = require('../config')
5 | const vueLoaderConfig = require('./vue-loader.conf')
6 |
7 | function resolve (dir) {
8 | return path.join(__dirname, '..', dir)
9 | }
10 |
11 | const createLintingRule = () => ({
12 | test: /\.(js|vue)$/,
13 | loader: 'eslint-loader',
14 | enforce: 'pre',
15 | include: [resolve('src'), resolve('test')],
16 | options: {
17 | formatter: require('eslint-friendly-formatter'),
18 | emitWarning: !config.dev.showEslintErrorsInOverlay
19 | }
20 | })
21 |
22 | module.exports = {
23 | context: path.resolve(__dirname, '../'),
24 | entry: {
25 | app: ['babel-polyfill', './src/main.js']
26 | },
27 | output: {
28 | path: config.build.assetsRoot,
29 | filename: '[name].js',
30 | publicPath: process.env.NODE_ENV === 'production'
31 | ? config.build.assetsPublicPath
32 | : config.dev.assetsPublicPath
33 | },
34 | resolve: {
35 | extensions: ['.js', '.vue', '.json'],
36 | alias: {
37 | 'vue$': 'vue/dist/vue.esm.js',
38 | '@': resolve('src'),
39 | }
40 | },
41 | module: {
42 | rules: [
43 | ...(config.dev.useEslint ? [createLintingRule()] : []),
44 | {
45 | test: /\.vue$/,
46 | loader: 'vue-loader',
47 | options: vueLoaderConfig
48 | },
49 | {
50 | test: /\.js$/,
51 | loader: 'babel-loader',
52 | include: [resolve('src'), resolve('test'), resolve('node_modules/webpack-dev-server/client')]
53 | },
54 | {
55 | test: /\.(png|jpe?g|gif|svg)(\?.*)?$/,
56 | loader: 'url-loader',
57 | options: {
58 | limit: 10000,
59 | name: utils.assetsPath('img/[name].[hash:7].[ext]')
60 | }
61 | },
62 | {
63 | test: /\.(mp4|webm|ogg|mp3|wav|flac|aac)(\?.*)?$/,
64 | loader: 'url-loader',
65 | options: {
66 | limit: 10000,
67 | name: utils.assetsPath('media/[name].[hash:7].[ext]')
68 | }
69 | },
70 | {
71 | test: /\.(woff2?|eot|ttf|otf)(\?.*)?$/,
72 | loader: 'url-loader',
73 | options: {
74 | limit: 10000,
75 | name: utils.assetsPath('fonts/[name].[hash:7].[ext]')
76 | }
77 | }
78 | ]
79 | },
80 | node: {
81 | // prevent webpack from injecting useless setImmediate polyfill because Vue
82 | // source contains it (although only uses it if it's native).
83 | setImmediate: false,
84 | // prevent webpack from injecting mocks to Node native modules
85 | // that does not make sense for the client
86 | dgram: 'empty',
87 | fs: 'empty',
88 | net: 'empty',
89 | tls: 'empty',
90 | child_process: 'empty'
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/static/build/webpack.dev.conf.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 | const utils = require('./utils')
3 | const webpack = require('webpack')
4 | const config = require('../config')
5 | const merge = require('webpack-merge')
6 | const path = require('path')
7 | const baseWebpackConfig = require('./webpack.base.conf')
8 | const CopyWebpackPlugin = require('copy-webpack-plugin')
9 | const HtmlWebpackPlugin = require('html-webpack-plugin')
10 | const FriendlyErrorsPlugin = require('friendly-errors-webpack-plugin')
11 | const portfinder = require('portfinder')
12 |
13 | const HOST = process.env.HOST
14 | const PORT = process.env.PORT && Number(process.env.PORT)
15 |
16 | const devWebpackConfig = merge(baseWebpackConfig, {
17 | module: {
18 | rules: utils.styleLoaders({ sourceMap: config.dev.cssSourceMap, usePostCSS: true })
19 | },
20 | // cheap-module-eval-source-map is faster for development
21 | devtool: config.dev.devtool,
22 |
23 | // these devServer options should be customized in /config/index.js
24 | devServer: {
25 | clientLogLevel: 'warning',
26 | historyApiFallback: {
27 | rewrites: [
28 | { from: /.*/, to: path.join(config.dev.assetsPublicPath, 'index.html') },
29 | ],
30 | },
31 | hot: true,
32 | contentBase: false, // since we use CopyWebpackPlugin.
33 | compress: true,
34 | host: HOST || config.dev.host,
35 | port: PORT || config.dev.port,
36 | open: config.dev.autoOpenBrowser,
37 | overlay: config.dev.errorOverlay
38 | ? { warnings: false, errors: true }
39 | : false,
40 | publicPath: config.dev.assetsPublicPath,
41 | proxy: config.dev.proxyTable,
42 | quiet: true, // necessary for FriendlyErrorsPlugin
43 | watchOptions: {
44 | poll: config.dev.poll,
45 | }
46 | },
47 | plugins: [
48 | new webpack.DefinePlugin({
49 | 'process.env': require('../config/dev.env')
50 | }),
51 | new webpack.HotModuleReplacementPlugin(),
52 | new webpack.NamedModulesPlugin(), // HMR shows correct file names in console on update.
53 | new webpack.NoEmitOnErrorsPlugin(),
54 | // https://github.com/ampedandwired/html-webpack-plugin
55 | new HtmlWebpackPlugin({
56 | filename: 'index.html',
57 | template: 'index.html',
58 | inject: true
59 | }),
60 | // copy custom static assets
61 | new CopyWebpackPlugin([
62 | {
63 | from: path.resolve(__dirname, '../static'),
64 | to: config.dev.assetsSubDirectory,
65 | ignore: ['.*']
66 | }
67 | ])
68 | ]
69 | })
70 |
71 | module.exports = new Promise((resolve, reject) => {
72 | portfinder.basePort = process.env.PORT || config.dev.port
73 | portfinder.getPort((err, port) => {
74 | if (err) {
75 | reject(err)
76 | } else {
77 | // publish the new Port, necessary for e2e tests
78 | process.env.PORT = port
79 | // add port to devServer config
80 | devWebpackConfig.devServer.port = port
81 |
82 | // Add FriendlyErrorsPlugin
83 | devWebpackConfig.plugins.push(new FriendlyErrorsPlugin({
84 | compilationSuccessInfo: {
85 | messages: [`Your application is running here: http://${devWebpackConfig.devServer.host}:${port}`],
86 | },
87 | onErrors: config.dev.notifyOnErrors
88 | ? utils.createNotifierCallback()
89 | : undefined
90 | }))
91 |
92 | resolve(devWebpackConfig)
93 | }
94 | })
95 | })
96 |
--------------------------------------------------------------------------------
/static/build/webpack.prod.conf.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 | const path = require('path')
3 | const utils = require('./utils')
4 | const webpack = require('webpack')
5 | const config = require('../config')
6 | const merge = require('webpack-merge')
7 | const baseWebpackConfig = require('./webpack.base.conf')
8 | const CopyWebpackPlugin = require('copy-webpack-plugin')
9 | const HtmlWebpackPlugin = require('html-webpack-plugin')
10 | const ExtractTextPlugin = require('extract-text-webpack-plugin')
11 | const OptimizeCSSPlugin = require('optimize-css-assets-webpack-plugin')
12 | const UglifyJsPlugin = require('uglifyjs-webpack-plugin')
13 |
14 | const env = process.env.NODE_ENV === 'testing'
15 | ? require('../config/test.env')
16 | : require('../config/prod.env')
17 |
18 | const webpackConfig = merge(baseWebpackConfig, {
19 | module: {
20 | rules: utils.styleLoaders({
21 | sourceMap: config.build.productionSourceMap,
22 | extract: true,
23 | usePostCSS: true
24 | })
25 | },
26 | devtool: config.build.productionSourceMap ? config.build.devtool : false,
27 | output: {
28 | path: config.build.assetsRoot,
29 | filename: utils.assetsPath('js/[name].[chunkhash].js'),
30 | chunkFilename: utils.assetsPath('js/[id].[chunkhash].js')
31 | },
32 | plugins: [
33 | // http://vuejs.github.io/vue-loader/en/workflow/production.html
34 | new webpack.DefinePlugin({
35 | 'process.env': env
36 | }),
37 | new UglifyJsPlugin({
38 | uglifyOptions: {
39 | compress: {
40 | warnings: false
41 | }
42 | },
43 | sourceMap: config.build.productionSourceMap,
44 | parallel: true
45 | }),
46 | // extract css into its own file
47 | new ExtractTextPlugin({
48 | filename: utils.assetsPath('css/[name].[contenthash].css'),
49 | // Setting the following option to `false` will not extract CSS from codesplit chunks.
50 | // Their CSS will instead be inserted dynamically with style-loader when the codesplit chunk has been loaded by webpack.
51 | // It's currently set to `true` because we are seeing that sourcemaps are included in the codesplit bundle as well when it's `false`,
52 | // increasing file size: https://github.com/vuejs-templates/webpack/issues/1110
53 | allChunks: true,
54 | }),
55 | // Compress extracted CSS. We are using this plugin so that possible
56 | // duplicated CSS from different components can be deduped.
57 | new OptimizeCSSPlugin({
58 | cssProcessorOptions: config.build.productionSourceMap
59 | ? { safe: true, map: { inline: false } }
60 | : { safe: true }
61 | }),
62 | // generate dist index.html with correct asset hash for caching.
63 | // you can customize output by editing /index.html
64 | // see https://github.com/ampedandwired/html-webpack-plugin
65 | new HtmlWebpackPlugin({
66 | filename: process.env.NODE_ENV === 'testing'
67 | ? 'index.html'
68 | : config.build.index,
69 | template: 'index.html',
70 | inject: true,
71 | minify: {
72 | removeComments: true,
73 | collapseWhitespace: true,
74 | removeAttributeQuotes: true
75 | // more options:
76 | // https://github.com/kangax/html-minifier#options-quick-reference
77 | },
78 | // necessary to consistently work with multiple chunks via CommonsChunkPlugin
79 | chunksSortMode: 'dependency'
80 | }),
81 | // keep module.id stable when vendor modules does not change
82 | new webpack.HashedModuleIdsPlugin(),
83 | // enable scope hoisting
84 | new webpack.optimize.ModuleConcatenationPlugin(),
85 | // split vendor js into its own file
86 | new webpack.optimize.CommonsChunkPlugin({
87 | name: 'vendor',
88 | minChunks (module) {
89 | // any required modules inside node_modules are extracted to vendor
90 | return (
91 | module.resource &&
92 | /\.js$/.test(module.resource) &&
93 | module.resource.indexOf(
94 | path.join(__dirname, '../node_modules')
95 | ) === 0
96 | )
97 | }
98 | }),
99 | // extract webpack runtime and module manifest to its own file in order to
100 | // prevent vendor hash from being updated whenever app bundle is updated
101 | new webpack.optimize.CommonsChunkPlugin({
102 | name: 'manifest',
103 | minChunks: Infinity
104 | }),
105 | // This instance extracts shared chunks from code splitted chunks and bundles them
106 | // in a separate chunk, similar to the vendor chunk
107 | // see: https://webpack.js.org/plugins/commons-chunk-plugin/#extra-async-commons-chunk
108 | new webpack.optimize.CommonsChunkPlugin({
109 | name: 'app',
110 | async: 'vendor-async',
111 | children: true,
112 | minChunks: 3
113 | }),
114 |
115 | // copy custom static assets
116 | new CopyWebpackPlugin([
117 | {
118 | from: path.resolve(__dirname, '../static'),
119 | to: config.build.assetsSubDirectory,
120 | ignore: ['.*']
121 | }
122 | ])
123 | ]
124 | })
125 |
126 | if (config.build.productionGzip) {
127 | const CompressionWebpackPlugin = require('compression-webpack-plugin')
128 |
129 | webpackConfig.plugins.push(
130 | new CompressionWebpackPlugin({
131 | asset: '[path].gz[query]',
132 | algorithm: 'gzip',
133 | test: new RegExp(
134 | '\\.(' +
135 | config.build.productionGzipExtensions.join('|') +
136 | ')$'
137 | ),
138 | threshold: 10240,
139 | minRatio: 0.8
140 | })
141 | )
142 | }
143 |
144 | if (config.build.bundleAnalyzerReport) {
145 | const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin
146 | webpackConfig.plugins.push(new BundleAnalyzerPlugin())
147 | }
148 |
149 | module.exports = webpackConfig
150 |
--------------------------------------------------------------------------------
/static/config/dev.env.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 | const merge = require('webpack-merge')
3 | const prodEnv = require('./prod.env')
4 |
5 | module.exports = merge(prodEnv, {
6 | NODE_ENV: '"development"'
7 | })
8 |
--------------------------------------------------------------------------------
/static/config/index.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 | // Template version: 1.2.8
3 | // see http://vuejs-templates.github.io/webpack for documentation.
4 |
5 | const path = require('path')
6 |
7 | module.exports = {
8 | dev: {
9 |
10 | // Paths
11 | assetsSubDirectory: 'static',
12 | assetsPublicPath: '/',
13 | proxyTable: {
14 | // proxy all requests starting with /api to jsonplaceholder
15 | '/api': {
16 | target: 'http://btoogle.com',
17 | changeOrigin: true,
18 | }
19 | },
20 |
21 | // Various Dev Server settings
22 | host: 'localhost', // can be overwritten by process.env.HOST
23 | port: 8080, // can be overwritten by process.env.PORT, if port is in use, a free one will be determined
24 | autoOpenBrowser: true,
25 | errorOverlay: true,
26 | notifyOnErrors: true,
27 | poll: false, // https://webpack.js.org/configuration/dev-server/#devserver-watchoptions-
28 |
29 | // Use Eslint Loader?
30 | // If true, your code will be linted during bundling and
31 | // linting errors and warnings will be shown in the console.
32 | useEslint: true,
33 | // If true, eslint errors and warnings will also be shown in the error overlay
34 | // in the browser.
35 | showEslintErrorsInOverlay: false,
36 |
37 | /**
38 | * Source Maps
39 | */
40 |
41 | // https://webpack.js.org/configuration/devtool/#development
42 | devtool: 'cheap-module-eval-source-map',
43 |
44 | // If you have problems debugging vue-files in devtools,
45 | // set this to false - it *may* help
46 | // https://vue-loader.vuejs.org/en/options.html#cachebusting
47 | cacheBusting: true,
48 |
49 | cssSourceMap: true,
50 | },
51 |
52 | build: {
53 | // Template for index.html
54 | index: path.resolve(__dirname, '../dist/index.html'),
55 |
56 | // Paths
57 | assetsRoot: path.resolve(__dirname, '../dist'),
58 | assetsSubDirectory: 'static',
59 | assetsPublicPath: '/',
60 |
61 | /**
62 | * Source Maps
63 | */
64 |
65 | productionSourceMap: true,
66 | // https://webpack.js.org/configuration/devtool/#production
67 | devtool: '#source-map',
68 |
69 | // Gzip off by default as many popular static hosts such as
70 | // Surge or Netlify already gzip all static assets for you.
71 | // Before setting to `true`, make sure to:
72 | // npm install --save-dev compression-webpack-plugin
73 | productionGzip: false,
74 | productionGzipExtensions: ['js', 'css'],
75 |
76 | // Run the build command with an extra argument to
77 | // View the bundle analyzer report after build finishes:
78 | // `npm run build --report`
79 | // Set to `true` or `false` to always turn it on or off
80 | bundleAnalyzerReport: process.env.npm_config_report
81 | }
82 | }
83 |
--------------------------------------------------------------------------------
/static/config/prod.env.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 | module.exports = {
3 | NODE_ENV: '"production"'
4 | }
5 |
--------------------------------------------------------------------------------
/static/config/test.env.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 | const merge = require('webpack-merge')
3 | const devEnv = require('./dev.env')
4 |
5 | module.exports = merge(devEnv, {
6 | NODE_ENV: '"testing"'
7 | })
8 |
--------------------------------------------------------------------------------
/static/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | Btoogle - Torrent Search Engine
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/static/index.json:
--------------------------------------------------------------------------------
1 | {
2 | "apiVersion": "",
3 | "swaggerVersion": "1.2",
4 | "basePath": "{{.}}",
5 | "apis": [],
6 | "info": {}
7 | }
--------------------------------------------------------------------------------
/static/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "btoogle-frontend",
3 | "version": "1.0.0",
4 | "description": "A Vue.js project",
5 | "author": "Ruslan Fedoseenko ",
6 | "private": true,
7 | "scripts": {
8 | "dev": "webpack-dev-server --inline --progress --config build/webpack.dev.conf.js",
9 | "start": "npm run dev",
10 | "unit": "jest --config test/unit/jest.conf.js --coverage",
11 | "test": "npm run unit",
12 | "lint": "eslint --ext .js,.vue src test/unit/specs",
13 | "build": "node build/build.js"
14 | },
15 | "dependencies": {
16 | "babel-polyfill": "^6.26.0",
17 | "chart.js": "^2.7.2",
18 | "raven-js": "^3.21.0",
19 | "sha1": "^1.1.1",
20 | "vee-validate": "^2.1.0-beta.9",
21 | "vue": "^2.5.2",
22 | "vue-chartjs": "^2.8.7",
23 | "vue-resource": "^1.3.5",
24 | "vue-router": "^3.0.1",
25 | "vuetify": "^1.2.4",
26 | "vuex": "^3.0.1"
27 | },
28 | "devDependencies": {
29 | "autoprefixer": "^7.1.2",
30 | "babel-core": "^6.26.3",
31 | "babel-eslint": "^7.1.1",
32 | "babel-helper-vue-jsx-merge-props": "^2.0.3",
33 | "babel-jest": "^21.0.2",
34 | "babel-loader": "^7.1.1",
35 | "babel-plugin-dynamic-import-node": "^1.2.0",
36 | "babel-plugin-syntax-jsx": "^6.18.0",
37 | "babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
38 | "babel-plugin-transform-runtime": "^6.22.0",
39 | "babel-plugin-transform-vue-jsx": "^3.5.0",
40 | "babel-preset-env": "^1.7.0",
41 | "babel-preset-stage-2": "^6.22.0",
42 | "chalk": "^2.0.1",
43 | "compression-webpack-plugin": "^1.1.3",
44 | "copy-webpack-plugin": "^4.5.2",
45 | "css-loader": "^0.28.0",
46 | "eslint": "^3.19.0",
47 | "eslint-config-standard": "^10.2.1",
48 | "eslint-friendly-formatter": "^3.0.0",
49 | "eslint-loader": "^1.7.1",
50 | "eslint-plugin-html": "^3.0.0",
51 | "eslint-plugin-import": "^2.7.0",
52 | "eslint-plugin-node": "^5.2.0",
53 | "eslint-plugin-promise": "^3.4.0",
54 | "eslint-plugin-standard": "^3.0.1",
55 | "extract-text-webpack-plugin": "^3.0.0",
56 | "file-loader": "^1.1.4",
57 | "friendly-errors-webpack-plugin": "^1.6.1",
58 | "html-webpack-plugin": "^2.30.1",
59 | "jest": "^21.2.0",
60 | "jest-serializer-vue": "^0.3.0",
61 | "node-notifier": "^5.1.2",
62 | "optimize-css-assets-webpack-plugin": "^3.2.0",
63 | "ora": "^1.2.0",
64 | "portfinder": "^1.0.13",
65 | "postcss-import": "^11.0.0",
66 | "postcss-loader": "^2.0.8",
67 | "postcss-url": "^7.2.1",
68 | "rimraf": "^2.6.0",
69 | "semver": "^5.3.0",
70 | "shelljs": "^0.7.6",
71 | "uglifyjs-webpack-plugin": "^1.1.1",
72 | "url-loader": "^0.5.8",
73 | "vue-jest": "^1.0.2",
74 | "vue-loader": "^13.3.0",
75 | "vue-style-loader": "^3.0.1",
76 | "vue-template-compiler": "^2.5.2",
77 | "webpack": "^3.6.0",
78 | "webpack-bundle-analyzer": "^2.9.0",
79 | "webpack-dev-server": "^2.9.1",
80 | "webpack-merge": "^4.1.0"
81 | },
82 | "engines": {
83 | "node": ">= 6.0.0",
84 | "npm": ">= 3.0.0"
85 | },
86 | "browserslist": [
87 | "> 1%",
88 | "last 2 versions",
89 | "not ie <= 8"
90 | ]
91 | }
92 |
--------------------------------------------------------------------------------
/static/src/App.vue:
--------------------------------------------------------------------------------
1 |
2 |
6 |
13 |
14 |
15 |
16 | {{ item.icon }}
17 |
18 |
19 |
20 | {{ item.text }}
21 |
22 |
23 |
24 | SUBSCRIPTIONS
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 | settings
33 |
34 | Manage Subscriptions
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
61 |
62 |
63 | account_circle
64 |
65 |
66 | fas fa-sort-down
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 | {{userInfo.username}}
78 | {{userInfo.mail}}
79 |
80 |
81 |
82 |
87 | favorite
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 | Cancel
101 | Log Out
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
174 |
175 |
205 |
--------------------------------------------------------------------------------
/static/src/Formatters.js:
--------------------------------------------------------------------------------
1 |
2 | export default {
3 | formatBytes(bytes, decimals) {
4 |     if (bytes === 0) return '0 Bytes'
5 |     const k = 1024 // 1024 for binary units (1000 would give decimal units)
6 | let dm = decimals || 3
7 | const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
8 | let i = Math.floor(Math.log(bytes) / Math.log(k))
9 | return parseFloat((bytes / Math.pow(k, i)).toFixed(dm)) + ' ' + sizes[i]
10 | },
11 | nFormatter(num, digits) {
12 | let si = [
13 | {value: 1, symbol: ''},
14 | {value: 1E3, symbol: 'k'},
15 | {value: 1E6, symbol: 'M'},
16 | {value: 1E9, symbol: 'G'},
17 | {value: 1E12, symbol: 'T'},
18 | {value: 1E15, symbol: 'P'},
19 | {value: 1E18, symbol: 'E'}
20 | ]
21 | let rx = /\.0+$|(\.[0-9]*[1-9])0+$/
22 | let i
23 | for (i = si.length - 1; i > 0; i--) {
24 | if (num >= si[i].value) {
25 | break
26 | }
27 | }
28 | return (num / si[i].value).toFixed(digits).replace(rx, '$1') + ' ' + si[i].symbol
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/static/src/Interceptors.js:
--------------------------------------------------------------------------------
1 | export default {
2 | csrfTokenApplier(request, next) {
3 | request.headers.map['X-CSRF-Token'] = [this.state.auth.csrfToken]
4 | next()
5 | },
6 | saveCsrf(csrfToken) {
7 | document.cookie = 'csrf=' + csrfToken + '; Path=/api'
8 | },
9 | clearCsrf() {
10 | document.cookie = 'csrf=;expires=Thu, 01 Jan 1970 00:00:01 GMT; Path=/api'
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/static/src/components/Chart.vue:
--------------------------------------------------------------------------------
1 |
14 |
15 |
18 |
--------------------------------------------------------------------------------
/static/src/components/Footer.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | © {{ new Date().getFullYear() }} btoogle.com
8 |
9 |
10 |
11 |
12 |
13 |
14 |
19 |
20 |
23 |
--------------------------------------------------------------------------------
/static/src/components/LoginDialog.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | LOGIN
5 |
6 |
7 |
8 |
9 |
10 | Login
11 |
12 |
13 |
14 |
15 |
16 |
18 |
19 |
20 |
22 |
23 |
24 |
25 | *indicates required field
26 |
27 |
28 |
29 | Close
30 |
32 | Login
33 |
34 | cached
35 |
36 |
37 |
38 |
39 |
40 |
47 | {{ validationText }}
48 | Close
49 |
50 |
51 |
52 |
112 |
154 |
--------------------------------------------------------------------------------
/static/src/components/RegisterDialog.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | REGISTER
5 |
6 |
7 |
8 |
9 |
10 | Registration
11 |
12 |
13 |
14 |
15 |
16 |
18 |
19 |
20 |
22 |
23 |
24 |
26 |
27 |
28 |
29 | *indicates required field
30 |
31 |
32 |
33 | Close
34 |
36 | Register
37 |
38 | cached
39 |
40 |
41 |
42 |
43 |
44 |
51 | {{ validationText }}
52 | Close
53 |
54 |
55 |
56 |
57 |
108 |
109 |
112 |
--------------------------------------------------------------------------------
/static/src/components/SearchField.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
9 | {{ validationText }}
10 | Close
11 |
12 |
13 |
14 |
26 |
27 |
28 | {{ item }}
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
151 |
152 |
155 |
--------------------------------------------------------------------------------
/static/src/main.js:
--------------------------------------------------------------------------------
1 | // The Vue build version to load with the `import` command
2 | // (runtime-only or standalone) has been set in webpack.base.conf with an alias.
3 | import Vue from 'vue'
4 | import App from './App'
5 | import router from './router'
6 | import vuetify from 'vuetify'
7 | import vueresource from 'vue-resource'
8 | import store from './store/index'
9 | import VeeValidate from 'vee-validate'
10 | import 'vuetify/dist/vuetify.css'
11 | import Raven from 'raven-js'
12 | import RavenVue from 'raven-js/plugins/vue'
13 | import commonFooter from './components/Footer'
14 | import chart from './components/Chart'
15 | import SearchField from './components/SearchField'
16 | import LoginDlg from './components/LoginDialog'
17 | import RegDlg from './components/RegisterDialog'
18 | import 'babel-polyfill'
19 | const reduce = Function.bind.call(Function.call, Array.prototype.reduce)
20 | const isEnumerable = Function.bind.call(Function.call, Object.prototype.propertyIsEnumerable)
21 | const concat = Function.bind.call(Function.call, Array.prototype.concat)
22 | const keys = Reflect.ownKeys
23 | if (!Object.values) {
24 | Object.values = function values(O) {
25 | return reduce(keys(O), (v, k) => concat(v, typeof k === 'string' && isEnumerable(O, k) ? [O[k]] : []), [])
26 | }
27 | }
28 | if (!Object.entries) {
29 | Object.entries = function entries(O) {
30 | return reduce(keys(O), (e, k) => concat(e, typeof k === 'string' && isEnumerable(O, k) ? [[k, O[k]]] : []), [])
31 | }
32 | }
33 | Raven
34 | .config('http://067a88a545394790801f0086c15c8d15@sentry.btoogle.com/3')
35 | .addPlugin(RavenVue, Vue)
36 | .install()
37 | Vue.use(vueresource)
38 | Vue.use(vuetify)
39 | Vue.use(VeeValidate)
40 |
41 | Vue.component('line-chart', chart)
42 | Vue.component('btoogle-login-dialog', LoginDlg)
43 | Vue.component('btoogle-register-dialog', RegDlg)
44 | Vue.component('btoogle-footer', commonFooter)
45 | Vue.component('btoogle-search-field', SearchField)
46 | /* eslint-disable no-new */
47 | new Vue({
48 | el: '#app',
49 | router,
50 | store,
51 | template: '',
52 | components: {App}
53 | })
54 |
--------------------------------------------------------------------------------
/static/src/pages/HomePage.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
54 |
55 |
58 |
--------------------------------------------------------------------------------
/static/src/pages/MaintenancePage.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | fitness_center
7 |
8 |
9 |           Btoogle.com is currently down for maintenance
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
22 |
23 |
26 |
--------------------------------------------------------------------------------
/static/src/pages/NotFoundPage.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | explore
7 |
8 |
9 | Page not found
10 |
11 |
12 | The link you followed may be broken, or the page may have been removed.
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
26 |
27 |
30 |
--------------------------------------------------------------------------------
/static/src/pages/StatisticsPage.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
168 |
169 |
172 |
--------------------------------------------------------------------------------
/static/src/pages/TorrentDetails.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | {{torrent.Name}}
7 | fas fa-cloud-download-alt
8 |
9 |
10 |
11 |
12 | Tags:
13 |
14 |
15 |
16 |
17 | {{tag.Tag}}
18 |
19 |
20 |
21 | Descriptions:
22 |
23 |
24 |
25 | No descriptions found
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
40 |
41 |
42 |
43 |
44 |
{{title.Title}}
45 |
({{title.Year}})
46 |
47 | {{title.Description}}
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 | Rate this description as most suitable
56 |
57 |
60 |
61 |
62 |
63 |
64 |
65 |
66 | Files
67 |
68 |
69 | Trackers
70 |
71 |
72 |
73 |
74 |
75 | insert_drive_file
76 | {{ props.item.Path }}
77 | |
78 | {{ formatBytes(props.item.Size,2) }} |
79 |
80 |
81 |
82 |
83 |
85 |
86 | {{props.item.TrackerUrl}} |
87 | {{props.item.Seeds}} |
88 | {{props.item.Leaches}} |
89 | {{toDateTimeStr(props.item.LastUpdate)}} |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
181 |
182 |
185 |
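TorrentDetails.vue formats file sizes with `formatBytes` and tracker timestamps with `toDateTimeStr`; those helpers most likely live in the component's script or in src/Formatters.js, neither of which is reproduced in this section. A rough sketch of what such formatters can look like, purely as an assumption about their behaviour:

// Hypothetical stand-ins for the helpers used by the template above.
export function formatBytes(bytes, decimals) {
  if (!bytes) return '0 B'
  const units = ['B', 'KB', 'MB', 'GB', 'TB']
  const i = Math.min(Math.floor(Math.log(bytes) / Math.log(1024)), units.length - 1)
  return (bytes / Math.pow(1024, i)).toFixed(decimals) + ' ' + units[i]
}

export function toDateTimeStr(value) {
  // assumes LastUpdate is something Date can parse (ISO string or epoch milliseconds)
  return new Date(value).toLocaleString()
}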
--------------------------------------------------------------------------------
/static/src/pages/TorrentList.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
12 |
13 |
14 |
15 |
16 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
178 |
179 |
182 |
--------------------------------------------------------------------------------
/static/src/router/index.js:
--------------------------------------------------------------------------------
1 | import Vue from 'vue'
2 | import Router from 'vue-router'
3 | import torrentList from '@/pages/TorrentList'
4 | import homePage from '@/pages/HomePage'
5 | import torrentDetails from '@/pages/TorrentDetails'
6 | import page404 from '@/pages/NotFoundPage'
7 | import stats from '@/pages/StatisticsPage'
8 | import maintanance from '@/pages/MaintenancePage'
9 | Vue.use(Router)
10 | let isMaintenance = false
11 | let router
12 | if (isMaintenance) {
13 | router = new Router({
14 | routes: [
15 | {
16 | path: '*',
17 | name: 'MaintenancePage',
18 | component: maintanance
19 | }
20 | ]
21 | })
22 | } else {
23 | router = new Router({
24 | routes: [
25 | {
26 | path: '*',
27 | component: page404
28 | },
29 | {
30 | path: '/',
31 | name: 'HomePage',
32 | component: homePage
33 | },
34 | {
35 | path: '/page/:page',
36 | name: 'pagedTorrentList',
37 | component: torrentList
38 | },
39 | {
40 | path: '/search/:search',
41 | name: 'SearchTorrentList',
42 | component: torrentList
43 | },
44 | {
45 | path: '/search/:search/page/:page',
46 | name: 'SearchPagedTorrentList',
47 | component: torrentList
48 | },
49 | {
50 | path: '/details/:infohash',
51 | name: 'TorrentDetails',
52 | component: torrentDetails
53 | },
54 | {
55 | path: '/stats',
56 | name: 'Stats',
57 | component: stats
58 | },
59 | {
60 | path: '/login',
61 | name: 'Login',
62 | component: homePage,
63 | props: {
64 | showLogin: true
65 | }
66 | }
67 | ]
68 | })
69 | }
70 | export default router
71 |
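All list and search state is carried in the URL (`:search`, `:page`, `:infohash`), so navigation goes through the named routes defined above. A short sketch of pushing to a named route and reading the params back in the routed component; the values are illustrative:

// Jump to page 3 of a search, using the route names from router/index.js.
this.$router.push({
  name: 'SearchPagedTorrentList',
  params: { search: 'ubuntu', page: 3 }
})

// Params parsed from the URL are strings, so page numbers need an explicit conversion:
const search = this.$route.params.search
const page = Number(this.$route.params.page) || 1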
--------------------------------------------------------------------------------
/static/src/store/auth.module.js:
--------------------------------------------------------------------------------
1 | import Vue from 'vue'
2 | import Interceptors from '@/Interceptors'
3 |
4 | export default {
5 | state: {
6 | isLoggedIn: false,
7 | csrfToken: '',
8 | user: null
9 | },
10 |
11 | mutations: {
12 | LoginSuccess(state, loginInfo) {
13 | state.isLoggedIn = true
14 | state.csrfToken = loginInfo.csrf
15 | Interceptors.saveCsrf(loginInfo.csrf)
16 | state.user = loginInfo.user
17 | Vue.http.interceptors.push(Interceptors.csrfTokenApplier.bind(this))
18 | },
19 | LogOutSuccess(state) {
20 | state.isLoggedIn = false
21 | state.csrfToken = ''
22 | state.user = null
23 | Interceptors.clearCsrf()
24 | let interceptorIndex = Vue.http.interceptors.indexOf(Interceptors.csrfTokenApplier)
25 | if (interceptorIndex > -1) {
26 | Vue.http.interceptors.splice(interceptorIndex, 1)
27 | }
28 | }
29 | },
30 | actions: {
31 | performLogin(ctx, credentials) {
32 | return Vue.http.post('/api/auth/login', credentials).then(
33 | resp => {
34 | if (resp.status === 200) {
35 | if (resp.headers.map['x-csrf-token']) {
36 | ctx.commit('LoginSuccess', {
37 | csrf: resp.headers.map['x-csrf-token'],
38 | user: resp.body
39 | })
40 | }
41 | }
42 | }
43 | )
44 | },
45 | performRegistration(ctx, regData) {
46 | return Vue.http.post('/api/auth/register', regData)
47 | },
48 | performLogOut(ctx) {
49 | Vue.http.get('/api/auth/logout').then(
50 | resp => {
51 | if (resp.status === 200) {
52 | ctx.commit('LogOutSuccess')
53 | }
54 | }
55 | )
56 | },
57 | tryLoadUserInfo(ctx) {
58 | Vue.http.interceptors.push(Interceptors.csrfTokenApplier.bind(this))
59 |
60 | Vue.http.get('/api/auth/currentUser').then(
61 | resp => {
62 | if (resp.status === 200) {
63 | ctx.commit('LoginSuccess', {
64 | user: resp.body,
65 | csrf: resp.headers.map['x-csrf-token']
66 | })
67 | }
68 | },
69 | _ => {
70 | ctx.commit('LogOutSuccess')
71 | }
72 | )
73 | }
74 |
75 | }
76 | }
77 |
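performLogin commits LoginSuccess only when the backend answers 200 and includes an X-CSRF-Token header, which the interceptor then replays on later requests. A sketch of how a dialog component might drive this action; the credential field names are assumptions, since neither LoginDialog.vue nor Models/Credentials.go is reproduced in this section:

// Illustrative only: field names on the credentials object are assumed.
export default {
  data() {
    return { loginName: '', password: '', error: '' }
  },
  methods: {
    submitLogin() {
      this.$store.dispatch('performLogin', { Login: this.loginName, Password: this.password })
        .then(() => {
          // LoginSuccess has been committed by the store if the response carried the CSRF header
          if (this.$store.state.auth.isLoggedIn) {
            this.$emit('close')
          } else {
            this.error = 'Login failed'
          }
        })
        .catch(() => { this.error = 'Login failed' })
    }
  }
}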
--------------------------------------------------------------------------------
/static/src/store/index.js:
--------------------------------------------------------------------------------
1 | import Vue from 'vue'
2 | import vuex from 'vuex'
3 | import search from '@/store/search.module'
4 | import auth from '@/store/auth.module'
5 | Vue.use(vuex)
6 |
7 | export default new vuex.Store({
8 | modules: {
9 | search: search,
10 | auth: auth
11 | }
12 | })
13 |
--------------------------------------------------------------------------------
/static/src/store/search.module.js:
--------------------------------------------------------------------------------
1 | import Vue from 'vue'
2 |
3 | export default {
4 | state: {
5 | torrents: [],
6 | page: 0,
7 | searchTerm: '',
8 | pageCount: 0,
9 | suggestions: [],
10 | torrentDetails: {},
11 | torrentStats: {}
12 | },
13 | getters: {
14 | getPage: state => {
15 | return state.page
16 | },
17 | getPageCount: state => {
18 | return state.pageCount
19 | }
20 | },
21 | mutations: {
22 | ChangeTorrentStats(state, stats) {
23 | state.torrentStats = stats
24 | },
25 | ChangeSuggestions(state, suggestions) {
26 | state.suggestions = suggestions
27 | },
28 | ChangeSearch(state, term) {
29 | state.searchTerm = term
30 | },
31 | ChangePage(state, page) {
32 | state.page = page
33 | },
34 | ChangePageCount(state, pageCount) {
35 | state.pageCount = pageCount
36 | },
37 | ChangeTorrents(state, torrents) {
38 | state.torrents = torrents
39 | },
40 | ChangeTorrentDetails(state, torrent) {
41 | state.torrentDetails = torrent
42 | }
43 | },
44 | actions: {
45 | fetchStats(ctx) {
46 | return Vue.http.get('/api/torrents/stats/').then(resp => {
47 | ctx.commit('ChangeTorrentStats', resp.body || {})
48 | })
49 | },
50 | fetchSuggestions(ctx, input) {
51 | return Vue.http.get('/api/search/suggest/' + encodeURIComponent(input)).then(resp => {
52 | ctx.commit('ChangeSuggestions', resp.body.data || [])
53 | })
54 | },
55 | fetchTorrents(ctx) {
56 | return Vue.http.get('/api/torrents/').then(resp => {
57 | ctx.commit('ChangeTorrents', resp.body.Torrents || [])
58 | ctx.commit('ChangePageCount', resp.body.PageCount || 0)
59 | ctx.commit('ChangePage', resp.body.Page || 0)
60 | })
61 | },
62 | fetchTorrent(ctx, infoHash) {
63 | return Vue.http.get('/api/torrent/info/' + encodeURIComponent(infoHash)).then(res => {
64 | ctx.commit('ChangeTorrentDetails', res.body)
65 | })
66 | },
67 | fetchTorrentsPaged(ctx, page) {
68 | let url = '/api/torrents/'
69 | if (page && page > 1) {
70 | url += 'page/' + encodeURIComponent(page)
71 | }
72 | return Vue.http.get(url).then(resp => {
73 | ctx.commit('ChangeTorrents', resp.body.Torrents || [])
74 | ctx.commit('ChangePageCount', resp.body.PageCount || 0)
75 | ctx.commit('ChangePage', resp.body.Page || 0)
76 | })
77 | },
78 | searchTorrents(ctx, searchArgs) {
79 | let searchText = searchArgs.search || ''
80 | return Vue.http.get('/api/torrents/search/' + encodeURIComponent(searchText)).then(resp => {
81 | if (ctx.state.page !== 1) {
82 | ctx.commit('ChangePage', 1)
83 | }
84 | ctx.commit('ChangeTorrents', resp.body.Torrents || [])
85 | ctx.commit('ChangePageCount', resp.body.PageCount || 0)
86 | })
87 | },
88 | searchTorrentsPaged(ctx, searchArgs) {
89 | let searchText = searchArgs.search || ''
90 | let page = searchArgs.page || 1
91 | return Vue.http.get('/api/torrents/search/' + encodeURIComponent(searchText) + '/page/' + page).then(resp => {
92 | ctx.commit('ChangeTorrents', resp.body.Torrents || [])
93 | ctx.commit('ChangePageCount', resp.body.PageCount || 0)
94 | ctx.commit('ChangePage', resp.body.Page || 0)
95 | })
96 | }
97 | }
98 | }
99 |
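The actions above map one-to-one onto the backend's torrent routes, and every list view keeps its page and search term in the URL, so a routed component typically re-dispatches whenever the route changes. An illustrative wiring of route params to these actions (not the actual TorrentList.vue):

// Sketch of a routed list component reacting to URL changes.
export default {
  created() {
    this.load()
  },
  watch: {
    // re-fetch whenever the search term or page in the URL changes
    '$route': 'load'
  },
  methods: {
    load() {
      const { search, page } = this.$route.params
      if (search) {
        this.$store.dispatch('searchTorrentsPaged', { search: search, page: Number(page) || 1 })
      } else {
        this.$store.dispatch('fetchTorrentsPaged', Number(page) || 1)
      }
    }
  }
}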
--------------------------------------------------------------------------------
/static/static/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/devrus91/dhtcrawler/5f9bb9ccef4c3c8a7b8b7d69f163d746b4914e92/static/static/.gitkeep
--------------------------------------------------------------------------------
/static/static/api-docs/index.json:
--------------------------------------------------------------------------------
1 | {
2 | "apiVersion": "1.0.0",
3 | "swaggerVersion": "1.2",
4 | "basePath": "http://btoogle.com/api/",
5 | "apis": [
6 | {
7 | "path": "/search",
8 | "description": "Search Helper API"
9 | }
10 | ],
11 | "info": {
12 | "title": "Btoogle Torrent Search API",
13 | "description": "API to search for Torrents",
14 | "contact": "ruslan.fedoseenko.91@gmail.com",
15 | "termsOfServiceUrl": "http://google.com/",
16 | "license": "BSD",
17 | "licenseUrl": "http://opensource.org/licenses/BSD-2-Clause"
18 | }
19 | }
--------------------------------------------------------------------------------
/static/static/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/devrus91/dhtcrawler/5f9bb9ccef4c3c8a7b8b7d69f163d746b4914e92/static/static/logo.png
--------------------------------------------------------------------------------
/static/static/textfieldfix.css:
--------------------------------------------------------------------------------
1 | .menu-content-bg-fix{
2 | background: #424242;
3 | }
4 | .torrent-link{
5 | text-decoration: none !important;
6 | color: #fff;
7 | }
8 | .torrent-list-link{
9 | text-decoration: none !important;
10 | }
11 | footer{
12 | height: auto !important;
13 | }
14 | .footer-aware {
15 | margin-bottom: 83px;
16 | }
17 | .menu-content-bg-fix .list {
18 | padding-top: 0;
19 | padding-bottom: 0;
20 | }
21 |
--------------------------------------------------------------------------------
/static/test/unit/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "env": {
3 | "jest": true
4 | },
5 | "globals": {
6 | }
7 | }
8 |
--------------------------------------------------------------------------------
/static/test/unit/jest.conf.js:
--------------------------------------------------------------------------------
1 | const path = require('path')
2 |
3 | module.exports = {
4 | rootDir: path.resolve(__dirname, '../../'),
5 | moduleFileExtensions: [
6 | 'js',
7 | 'json',
8 | 'vue'
9 | ],
10 | moduleNameMapper: {
11 | '^@/(.*)$': '<rootDir>/src/$1'
12 | },
13 | transform: {
14 | '^.+\\.js$': '<rootDir>/node_modules/babel-jest',
15 | '.*\\.(vue)$': '<rootDir>/node_modules/vue-jest'
16 | },
17 | snapshotSerializers: ['<rootDir>/node_modules/jest-serializer-vue'],
18 | setupFiles: ['<rootDir>/test/unit/setup'],
19 | mapCoverage: true,
20 | coverageDirectory: '<rootDir>/test/unit/coverage',
21 | collectCoverageFrom: [
22 | 'src/**/*.{js,vue}',
23 | '!src/main.js',
24 | '!src/router/index.js',
25 | '!**/node_modules/**'
26 | ]
27 | }
28 |
--------------------------------------------------------------------------------
/static/test/unit/setup.js:
--------------------------------------------------------------------------------
1 | import Vue from 'vue'
2 |
3 | Vue.config.productionTip = false
4 |
--------------------------------------------------------------------------------
/static/test/unit/specs/HelloWorld.spec.js:
--------------------------------------------------------------------------------
1 | import Vue from 'vue'
2 | import HelloWorld from '@/components/HelloWorld'
3 |
4 | describe('HelloWorld.vue', () => {
5 | it('should render correct contents', () => {
6 | const Constructor = Vue.extend(HelloWorld)
7 | const vm = new Constructor().$mount()
8 | expect(vm.$el.querySelector('.hello h1').textContent)
9 | .toEqual('Welcome to Your Vue.js App')
10 | })
11 | })
12 |
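This spec is the stock vue-cli scaffold; if the HelloWorld component has since been replaced by the project's real components, the import above will not resolve under the jest configuration shown earlier. Store mutations can be covered without mounting anything; a small example spec along those lines, assuming it sits next to this file under test/unit/specs/:

// Hypothetical additional spec exercising the Vuex search module directly.
import search from '@/store/search.module'

describe('search.module mutations', () => {
  it('ChangeSearch stores the search term', () => {
    const state = { searchTerm: '' }
    search.mutations.ChangeSearch(state, 'ubuntu')
    expect(state.searchTerm).toEqual('ubuntu')
  })

  it('ChangePageCount stores the page count', () => {
    const state = { pageCount: 0 }
    search.mutations.ChangePageCount(state, 42)
    expect(state.pageCount).toEqual(42)
  })
})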
--------------------------------------------------------------------------------