├── .gitignore ├── test ├── index.js ├── addon.js └── storage.js ├── bin └── addon.js ├── lib ├── manifestNoCatalogs.js ├── findFiles │ ├── index.js │ ├── darwin.js │ ├── unix.js │ └── win.js ├── consts.js ├── manifest.js ├── catalogHandler.js ├── mapEntryToMeta.js ├── streamHandler.js ├── metaHandler.js ├── indexer.js └── storage.js ├── package.json ├── LICENSE.md ├── README.md └── index.js /.gitignore: -------------------------------------------------------------------------------- 1 | localFiles 2 | localFilesMeta 3 | node_modules 4 | -------------------------------------------------------------------------------- /test/index.js: -------------------------------------------------------------------------------- 1 | require('./storage') 2 | require('./addon') 3 | -------------------------------------------------------------------------------- /bin/addon.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const localAddon = require('..') 4 | 5 | localAddon.addon().runHTTPWithOptions({ port: process.env.PORT || 1222 }) 6 | 7 | localAddon.startIndexing('./localFiles') 8 | -------------------------------------------------------------------------------- /lib/manifestNoCatalogs.js: -------------------------------------------------------------------------------- 1 | const manifest = require('./manifest') 2 | const manifestNoCatalogs = Object.assign({}, manifest) 3 | manifestNoCatalogs.name += ' (without catalog support)' 4 | manifestNoCatalogs.catalogs = [] 5 | manifestNoCatalogs.resources = manifest.resources.filter(resource => (resource.name != 'catalog' && resource != 'catalog')) 6 | module.exports = manifestNoCatalogs -------------------------------------------------------------------------------- /lib/findFiles/index.js: -------------------------------------------------------------------------------- 1 | const os = require('os') 2 | 3 | const findFilesWin = require('./win') 4 | const findFilesDarwin = require('./darwin') 5 | const findFilesUnix = require('./unix') 6 | 7 | // default is unix 8 | let findFiles = findFilesUnix 9 | 10 | if (os.platform() === 'win32') findFiles = findFilesWin 11 | if (os.platform() === 'darwin') findFiles = findFilesDarwin 12 | 13 | module.exports = findFiles 14 | -------------------------------------------------------------------------------- /lib/consts.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | PREFIX_BT: 'bt:', 3 | PREFIX_LOCAL: 'local:', 4 | PREFIX_IMDB: 'tt', 5 | STREAM_LOCALFILE_SUBTITLE: 'ADDON_STREAM_LOCALFILE', 6 | CINEMETA_URL: 'https://v3-cinemeta.strem.io', 7 | METAHUB_URL: 'https://images.metahub.space', 8 | METAHUB_EPISODES_URL: 'https://episodes.metahub.space', 9 | INTERESTING_FILE: /.mkv$|.avi$|.mp4$|.wmv$|.vp8$|.mov$|.mpg$|.mp3$|.flac$/i, 10 | INTERESTING_TYPE: ['movie', 'series'], 11 | MAX_INDEXED: 10000, 12 | } -------------------------------------------------------------------------------- /lib/manifest.js: -------------------------------------------------------------------------------- 1 | const consts = require('./consts') 2 | 3 | const pkg = require('../package') 4 | 5 | module.exports = { 6 | id: 'org.stremio.local', 7 | version: pkg.version, 8 | description: pkg.description, 9 | 10 | name: 'Local Files', 11 | 12 | // Properties that determine when Stremio picks this add-on 13 | resources: [ 14 | 'catalog', 15 | { name: 'meta', types: ['other'], idPrefixes: [consts.PREFIX_LOCAL, consts.PREFIX_BT] }, 16 | { name: 'stream', types: ['movie', 'series'], idPrefixes: [consts.PREFIX_IMDB] }, 17 | ], 18 | types: ['movie', 'series', 'other'], 19 | 20 | // @TODO: search? 21 | catalogs: [ 22 | { type: 'other', id: 'local' }, 23 | ] 24 | } -------------------------------------------------------------------------------- /lib/findFiles/darwin.js: -------------------------------------------------------------------------------- 1 | const child = require('child_process') 2 | const byline = require('byline') 3 | const events = require('events') 4 | 5 | const cmd = `mdfind '(kMDItemFSName=*.avi || kMDItemFSName=*.mp4 || kMDItemFSName=*.mkv || kMDItemFSName=*.torrent)'` 6 | 7 | // "&& kMDItemFSContentChangeDate >= $time.today(-1)'" 8 | 9 | function findFilesDarwin() { 10 | const ev = new events.EventEmitter() 11 | 12 | var p = child.exec(cmd) 13 | 14 | p.on('error', function(err) { 15 | ev.emit('err', err) 16 | }) 17 | 18 | p.stdout.pipe(byline()).on('data', function(line) { 19 | ev.emit('file', line.toString().trim()) 20 | }) 21 | 22 | return ev 23 | } 24 | 25 | module.exports = findFilesDarwin -------------------------------------------------------------------------------- /lib/catalogHandler.js: -------------------------------------------------------------------------------- 1 | const consts = require('./consts') 2 | 3 | function catalogHandler(storage, metaStorage, args, cb) { 4 | const metas = [] 5 | 6 | storage.indexes.itemId.forEach(function(items, itemId) { 7 | const entry = storage.getAggrEntry('itemId', itemId, ['files']) 8 | if (!(entry.itemId && entry.files && entry.files.length)) 9 | return 10 | 11 | const firstFile = entry.files[0] 12 | 13 | // @TODO: should we assert that itemId begins with the supported prefixes? 14 | const meta = metaStorage.indexes.primaryKey.get(entry.itemId) 15 | metas.push(meta || { 16 | id: entry.itemId, 17 | type: 'other', 18 | name: firstFile.parsedName || entry.name, 19 | poster: firstFile.imdb_id ? consts.METAHUB_URL+'/poster/medium/'+firstFile.imdb_id+'/img' : null, 20 | }) 21 | }) 22 | 23 | cb(null, { metas: metas }) 24 | } 25 | 26 | module.exports = catalogHandler -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "stremio-local-addon", 3 | "version": "1.10.0", 4 | "description": "Local add-on to find playable files: .torrent, .mp4, .mkv and .avi", 5 | "main": "index.js", 6 | "dependencies": { 7 | "byline": "^5.0.0", 8 | "name-to-imdb": "^3.0.4", 9 | "node-fetch": "^2.3.0", 10 | "parse-torrent": "^6.1.2", 11 | "stremio-addon-sdk": "^0.6.4", 12 | "video-name-parser": "^1.4.7", 13 | "which": "^1.3.1" 14 | }, 15 | "devDependencies": { 16 | "stremio-addon-client": "^1.12.1", 17 | "tape": "^4.10.1" 18 | }, 19 | "scripts": { 20 | "test": "node test/index", 21 | "start": "node bin/addon" 22 | }, 23 | "repository": { 24 | "type": "git", 25 | "url": "git+https://github.com/Stremio/stremio-local-addon.git" 26 | }, 27 | "keywords": [ 28 | "stremio", 29 | "local", 30 | "bittorrent" 31 | ], 32 | "author": "Smart Code OOD", 33 | "license": "MIT", 34 | "bugs": { 35 | "url": "https://github.com/Stremio/stremio-local-addon/issues" 36 | }, 37 | "homepage": "https://github.com/Stremio/stremio-local-addon#readme" 38 | } 39 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | ===================== 3 | 4 | Copyright © 2019 SmartCode OOD 5 | 6 | Permission is hereby granted, free of charge, to any person 7 | obtaining a copy of this software and associated documentation 8 | files (the “Software”), to deal in the Software without 9 | restriction, including without limitation the rights to use, 10 | copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | copies of the Software, and to permit persons to whom the 12 | Software is furnished to do so, subject to the following 13 | conditions: 14 | 15 | The above copyright notice and this permission notice shall be 16 | included in all copies or substantial portions of the Software. 17 | 18 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, 19 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 20 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 21 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 22 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 23 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 24 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 25 | OTHER DEALINGS IN THE SOFTWARE. 26 | 27 | -------------------------------------------------------------------------------- /lib/findFiles/unix.js: -------------------------------------------------------------------------------- 1 | const child = require('child_process') 2 | const events = require('events') 3 | const byline = require('byline') 4 | const which = require('which') 5 | 6 | const cmdLine = [ 7 | '-L', 8 | process.env.HOME, 9 | '-maxdepth', '7', 10 | '-not', '-path', '*/\\.*', 11 | '-not', '-path', '*/node_modules/*', 12 | '-not', '-path', '*/bin/*', 13 | '-not', '-path', '*/src/*', 14 | '-not', '-path', '*/build/*', 15 | '-not', '-path', '*/dist/*', 16 | '-type', 'f', 17 | '(', 18 | // @WARNING: when a new file that we look for is added, we have to update all platform-specific finders individually 19 | '-iname', '*.torrent', 20 | '-o','-iname', '*.mp4', 21 | '-o', '-iname', '*.mkv', 22 | '-o', '-iname', '*.avi', 23 | ')', 24 | ] 25 | 26 | function findFilesUnix() { 27 | const ev = new events.EventEmitter() 28 | setImmediate(startIndexing.bind(ev)) 29 | return ev 30 | } 31 | 32 | function startIndexing() { 33 | var ev = this 34 | 35 | const findPath = which.sync('find') 36 | 37 | if (!findPath) { 38 | ev.emit('err', 'find executable not found in PATH') 39 | return 40 | } 41 | 42 | // @TODO: pipe stderr 43 | // @TODO: re-index every 30 mins or so? 44 | // @TODO: consider inotify/dir watching to react to new files 45 | 46 | var p = child.spawn(findPath, cmdLine) 47 | 48 | p.on('error', function(err) { 49 | ev.emit('err', err) 50 | }) 51 | 52 | p.stdout.pipe(byline()).on('data', function(line) { 53 | ev.emit('file', line.toString().trim()) 54 | }).on('close', function() { 55 | ev.emit('finished') 56 | }) 57 | } 58 | 59 | module.exports = findFilesUnix 60 | -------------------------------------------------------------------------------- /lib/findFiles/win.js: -------------------------------------------------------------------------------- 1 | const child = require('child_process') 2 | const byline = require('byline') 3 | const events = require('events') 4 | 5 | const psScript = ` 6 | [console]::InputEncoding = [console]::OutputEncoding = New-Object System.Text.UTF8Encoding 7 | $sql = "SELECT System.ItemUrl FROM SystemIndex WHERE scope='file:' AND (System.Kind IS Null OR System.Kind = 'Video') AND System.FileAttributes <> ALL BITWISE 0x2 AND NOT System.ItemUrl LIKE '%/Program Files%' AND NOT System.ItemUrl LIKE '%/SteamLibrary/%' AND NOT System.ItemUrl LIKE '%/node_modules/%' AND (System.fileExtension = '.torrent' OR System.FileExtension = '.mp4' OR System.FileExtension = '.mkv' OR System.FileExtension = '.avi')" 8 | $connector = New-Object -ComObject ADODB.Connection 9 | $rs = New-Object -ComObject ADODB.Recordset 10 | $connector.Open("Provider=Search.CollatorDSO;Extended Properties='Application=Windows';DateTimeFormat=Ticks;") 11 | $rs.Open($sql, $connector) 12 | While (-Not $rs.EOF) { 13 | $pos = $rs.Fields.Item("System.ItemUrl").Value.IndexOf(":") 14 | $rs.Fields.Item("System.ItemUrl").Value.Substring($pos + 1) 15 | $rs.MoveNext() 16 | } 17 | ` 18 | 19 | function findFilesWin() { 20 | const ev = new events.EventEmitter() 21 | 22 | var propsProc = child.spawn('powershell', [ '-command', psScript ]) 23 | 24 | propsProc.on('error', function(err) { 25 | ev.emit('err', err) 26 | }) 27 | 28 | propsProc.stdout.pipe(byline()).on('data', function(line) { 29 | ev.emit('file', line.toString().trim()) 30 | }) 31 | 32 | propsProc.stderr.on('data', function(chunk) { 33 | console.log('powershell search: '+chunk.toString()) 34 | }) 35 | 36 | propsProc.on('close', function() { 37 | ev.emit('finished') 38 | }) 39 | 40 | return ev 41 | } 42 | 43 | module.exports = findFilesWin 44 | -------------------------------------------------------------------------------- /lib/mapEntryToMeta.js: -------------------------------------------------------------------------------- 1 | const fetch = require('node-fetch') 2 | const consts = require('./consts') 3 | 4 | function mapEntryToMeta(entry) { 5 | 6 | // We assume that one torrent may have only one IMDB ID for now: this is the only way to a decent UX now 7 | const imdbIdFile = entry.files.find(function(f) { return f.imdb_id }) 8 | const biggestFileWithName = entry.files.sort((a, b) => b.length - a.length).find(f => f.parsedName); 9 | const genericMeta = { 10 | id: entry.itemId, 11 | type: 'other', 12 | name: (biggestFileWithName && biggestFileWithName.parsedName) || entry.name, 13 | showAsVideos: true, 14 | } 15 | 16 | if (!imdbIdFile) { 17 | return Promise.resolve(genericMeta) 18 | } 19 | 20 | // If we have IMDB ID, first we can fill in those, then try to get the actual object from cinemeta 21 | genericMeta.poster = consts.METAHUB_URL+'/poster/medium/'+imdbIdFile.imdb_id+'/img' 22 | genericMeta.background = consts.METAHUB_URL+'/background/medium/'+imdbIdFile.imdb_id+'/img' 23 | genericMeta.logo = consts.METAHUB_URL+'/logo/medium/'+imdbIdFile.imdb_id+'/img' 24 | 25 | return fetch(consts.CINEMETA_URL+'/meta/'+imdbIdFile.type+'/'+imdbIdFile.imdb_id+'.json') 26 | .then(function(resp) { return resp.json() }) 27 | .then(function(resp) { 28 | if (!(resp && resp.meta)) throw 'no meta found' 29 | const interestingFields = [ 30 | 'imdb_id', 'name', 'genre', 'director', 'cast', 'poster', 'description', 'trailers', 'background', 'logo', 'imdbRating', 'runtime', 'genres', 'releaseInfo' 31 | ]; 32 | Object.keys(resp.meta).forEach(key => interestingFields.includes(key) || delete resp.meta[key]) 33 | Object.assign(resp.meta, { 34 | id: genericMeta.id, 35 | type: genericMeta.type, 36 | }) 37 | return resp.meta 38 | }) 39 | .catch(function(err) { 40 | // NOTE: not fatal, we can just fallback to genericMeta 41 | console.log('local-addon', imdbIdFile, err) 42 | 43 | return genericMeta 44 | }) 45 | } 46 | 47 | module.exports = mapEntryToMeta 48 | -------------------------------------------------------------------------------- /lib/streamHandler.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | 3 | const consts = require('./consts') 4 | 5 | const SUPPORTED_TYPES = ['movie', 'series'] 6 | 7 | // @TODO: this currently doesn't support finding files in bt:* entries 8 | // it's something we can do additionally by iterating over the bt: items and checking their first file 9 | 10 | function streamHandler(storage, args, cb) { 11 | if (!args.id.startsWith(consts.PREFIX_IMDB) || !SUPPORTED_TYPES.includes(args.type)) { 12 | return cb(null, { streams: [] }) 13 | } 14 | 15 | const idSplit = args.id.split(':') 16 | const itemIdLocal = consts.PREFIX_LOCAL + idSplit[0] 17 | 18 | const streams = [] 19 | 20 | if (storage.indexes.itemId.has(itemIdLocal)) { 21 | const entries = storage.indexes.itemId.get(itemIdLocal) 22 | for (var entry of entries.values()) { 23 | const f = entry.files[0] 24 | if (args.type === f.type && args.id === getFileVideoId(f)) streams.push({ 25 | id: 'file://'+f.path, 26 | url: 'file://'+f.path, 27 | subtitle: consts.STREAM_LOCALFILE_SUBTITLE, 28 | title: path.basename(f.path), 29 | }) 30 | } 31 | } 32 | 33 | // WARNING: this here is expensive iteration; if the user has thousands of torrents, it may become a problem 34 | // and we might have to switch to using an index 35 | // quick benchmarks show that iterating over half a million items takes ~80ms on an 2017 i5 36 | for (let k of storage.indexes.itemId.keys()) { 37 | if (k.startsWith(consts.PREFIX_BT)) { 38 | // for PREFIX_BT, we only care for the first, since they're all equivalent 39 | const entry = storage.indexes.itemId.get(k).values().next().value 40 | 41 | entry.files.forEach(function(f, i) { 42 | if (args.type === f.type && args.id === getFileVideoId(f)) streams.push({ 43 | title: path.basename(f.path), 44 | infoHash: entry.ih, 45 | fileIdx: i, 46 | id: entry.ih+'/'+i, 47 | sources: entry.sources 48 | }) 49 | }) 50 | } 51 | } 52 | 53 | cb(null, { streams: streams }) 54 | } 55 | 56 | function getFileVideoId(f) { 57 | const segments = (f.season && f.episode) ? 58 | [f.imdb_id, f.season, f.episode] 59 | : [f.imdb_id] 60 | return segments.join(':') 61 | } 62 | 63 | module.exports = streamHandler -------------------------------------------------------------------------------- /test/addon.js: -------------------------------------------------------------------------------- 1 | const tape = require('tape') 2 | const AddonClient = require('stremio-addon-client') 3 | 4 | const addonUrl = 'http://127.0.0.1:1222/manifest.json' 5 | const testIh = '7782ab24188091eae3f61fd218b2dffb4bf9cf9c' 6 | const testIhRecoginzed = '07a9de9750158471c3302e4e95edb1107f980fa6' 7 | 8 | let addon 9 | 10 | tape('initialize add-on', function(t) { 11 | return AddonClient.detectFromURL(addonUrl) 12 | .then(function(resp) { 13 | t.ok(resp, 'has response') 14 | t.ok(resp.addon, 'has addon') 15 | t.ok(resp.addon.manifest.catalogs, 'has catalogs') 16 | t.ok(resp.addon.manifest.catalogs.length, 'has catalogs length') 17 | const resource = resp.addon.manifest.resources.find(r => r.name === 'meta') 18 | t.ok(resource.idPrefixes.includes('bt:'), 'idPrefixes has bt:') 19 | t.ok(resource.idPrefixes.includes('local:'), 'idPrefixes has local:') 20 | 21 | addon = resp.addon 22 | 23 | t.end() 24 | }) 25 | .catch(function(e) { 26 | t.error(e) 27 | t.end() 28 | }) 29 | 30 | }) 31 | 32 | tape('catalog', function(t) { 33 | addon.get('catalog', addon.manifest.catalogs[0].type, addon.manifest.catalogs[0].id) 34 | .then(function(resp) { 35 | t.ok(Array.isArray(resp.metas), 'resp has metas') 36 | t.end() 37 | }) 38 | .catch(function(e) { 39 | t.error(e) 40 | t.end() 41 | }) 42 | }) 43 | 44 | tape('meta - bittorrent', function(t) { 45 | addon.get('meta', 'other', 'bt:'+testIh) 46 | .then(function(resp) { 47 | t.ok(resp.meta, 'has meta') 48 | t.equals(resp.meta.id, 'bt:'+testIh, 'id is correct') 49 | t.ok(Array.isArray(resp.meta.videos), 'has videos') 50 | 51 | resp.meta.videos.forEach(function(vid) { 52 | t.ok(vid.stream, 'video has stream') 53 | }) 54 | t.end() 55 | }) 56 | .catch(function(e) { 57 | t.error(e) 58 | t.end() 59 | }) 60 | }) 61 | 62 | 63 | tape('meta - bittorrent - recognized item', function(t) { 64 | addon.get('meta', 'other', 'bt:'+testIhRecoginzed) 65 | .then(function(resp) { 66 | t.ok(resp.meta, 'has meta') 67 | t.equals(resp.meta.type, 'other', 'recognized as other') 68 | t.equals(resp.meta.imdb_id, 'tt1748166', 'recognized as pioneer one') 69 | t.equals(resp.meta.name, 'Pioneer One') 70 | t.ok(Array.isArray(resp.meta.videos), 'has videos') 71 | 72 | t.end() 73 | }) 74 | .catch(function(e) { 75 | t.error(e) 76 | t.end() 77 | }) 78 | }) 79 | 80 | 81 | // @TODO: stream resource test 82 | -------------------------------------------------------------------------------- /test/storage.js: -------------------------------------------------------------------------------- 1 | const os = require('os') 2 | const crypto = require('crypto') 3 | const path = require('path') 4 | 5 | const Storage = require('../lib/storage') 6 | 7 | const tmpPath = path.join(os.tmpdir(), 'storage'+crypto.randomBytes(4).readUInt32LE(0)) 8 | 9 | const tape = require('tape') 10 | 11 | let storage1 12 | let storage2 13 | 14 | tape('storage: can construct', function(t) { 15 | storage1 = new Storage({entryIndexes: ['itemId']}) 16 | t.ok(storage1, 'object returned') 17 | t.ok(storage1.indexes.primaryKey, 'indexes.primaryKey exists') 18 | t.end() 19 | }) 20 | 21 | tape('storage: can load an empty storage', function(t) { 22 | storage1.load(tmpPath) 23 | .catch(function(err) { 24 | t.error(err) 25 | }) 26 | .then(function(err) { 27 | t.end() 28 | }) 29 | }) 30 | 31 | 32 | function checkAllData(t, storage) { 33 | t.equals(storage.indexes.primaryKey.size, 3) 34 | t.equals(storage.indexes.primaryKey.get('/file/test1').itemId, 'test1') 35 | t.equals(storage.indexes.primaryKey.get('/file/test2').itemId, 'test2') 36 | t.equals(storage.indexes.primaryKey.get('/file/test2-2').itemId, 'test2') 37 | 38 | const f2 = { path: '/file/test2', name: 'test\nt' } 39 | const f22 = { path: '/file/test2-2', name: 'test\nt\nt' } 40 | t.deepEqual(storage.indexes.itemId.get('test2').get('/file/test2'), 41 | { itemId: 'test2', files: [f2] } 42 | ) 43 | t.deepEqual(storage.indexes.itemId.get('test2').get('/file/test2-2'), 44 | { itemId: 'test2', files: [f22] } 45 | ) 46 | t.deepEqual(storage.getAggrEntry('itemId', 'test2', ['files']), { 47 | itemId: 'test2', 48 | files: [f2, f22] 49 | }) 50 | } 51 | 52 | tape('storage: can persist', function(t) { 53 | storage1.saveEntry('/file/test1', { itemId: 'test1', files: [{ path: '/file/test1' }] }, function(err) { 54 | t.error(err) 55 | 56 | let pending = 2 57 | storage1.saveEntry('/file/test2', { itemId: 'test2', files: [{ path: '/file/test2', name: 'test\nt' }] }, function(err) { 58 | t.error(err) 59 | if (--pending === 0) { 60 | checkAllData(t, storage1) 61 | t.end() 62 | } 63 | }) 64 | storage1.saveEntry('/file/test2-2', { itemId: 'test2', files: [{ path: '/file/test2-2', name: 'test\nt\nt' }] }, function(err) { 65 | t.error(err) 66 | if (--pending === 0) { 67 | checkAllData(t, storage1) 68 | t.end() 69 | } 70 | }) 71 | }) 72 | }) 73 | 74 | 75 | 76 | tape('storage: can load', function(t) { 77 | storage2 = new Storage({entryIndexes: ['itemId']}) 78 | storage2.load(tmpPath) 79 | .catch(function(err) { 80 | t.error(err) 81 | }) 82 | .then(function(err) { 83 | checkAllData(t, storage2) 84 | t.end() 85 | }) 86 | }) -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # stremio-local-addon 2 | 3 | An add-on for stremio meant to be ran locally which indexes locally found torrent and video files 4 | 5 | It does a few things: 6 | 7 | * Scans the filesystem for video files (currently `mp4`, `mkv`, `avi` and others - depends on the implementation in `lib/findFiles`) and `torrent` files containing videos 8 | * Tries to recognize video files as entertainment content and associate them with an IMDB ID 9 | * Presents a `catalog` to Stremio containing all the found items, where IMDB-recognized video files are grouped by IMDB ID and torrents are grouped by BitTorrent infohash; non-recognized video files are omitted 10 | * Allows Stremio to open any BitTorrent infoHash using `/meta/bt:` request to this add-on 11 | 12 | ## Testing 13 | 14 | ``npm start`` 15 | 16 | ``npm test`` 17 | 18 | ## Data structure 19 | 20 | The data is kept in an set (dictionary) of entries (`filePath=>entry`). Each entry represents one file on the filesystem. 21 | 22 | Each entry is defined by `{ itemId, name, files }`. When the entry represents a `filePath` of no interest (not an indexable video), we just save `{ files: [] }`. Entries may contain other extra properties such as the Bittorrent-specific `ih` and `sources`. 23 | 24 | The reason `files` is an array is that one file on the filesystem may contain multiple indexable videos (e.g. a `.torrent` file). 25 | 26 | ## itemId 27 | 28 | `itemId` is the stremio metadata ID this entry corresponds to. 29 | 30 | It may be formed in two ways: 31 | 32 | `bt:` 33 | 34 | `local:` 35 | 36 | In other words, to make a single Stremio `MetaItem` object, files are grouped either by the torrent they belong in, or by the IMDB ID they are indexed by. So this add-on will only display videos either indexed by IMDB ID or belonging in a torrent. 37 | 38 | ## Storage 39 | 40 | The persistence layer is defined in `lib/storage`, and it keeps each entry as one line in the storage file. Entries may only be added to the storage file, and no entries may be removed. 41 | 42 | It allows referencing entries by file path (`.byFilePath`) or by item ID (`.byItemId`). There may be more than one entry per item ID. 43 | 44 | The in-memory structure is as follows 45 | 46 | `byFilePath`: `filePath=>entry` 47 | 48 | `byItemId`: `itemId=>(filePath=>entry)` 49 | 50 | Finally, we have the `storage.getAggrEntry` function, which gives us an aggregate entry for an `itemId`, by taking all entries for the given `itemId` and merging them by concatting `files` and taking the leftmost values of the other properties (`name`, `ih`, `sources`). Taking the leftmost values is OK for torrents since `itemId` implies grouping by torrent anyway (in other words all `ih` and `sources` values will be the same). 51 | -------------------------------------------------------------------------------- /lib/metaHandler.js: -------------------------------------------------------------------------------- 1 | const fetch = require('node-fetch') 2 | 3 | const indexer = require('./indexer') 4 | const mapEntryToMeta = require('./mapEntryToMeta') 5 | const consts = require('./consts') 6 | 7 | function metaHandler(storage, metaStorage, engineUrl, args, cb) { 8 | let entry = storage.getAggrEntry('itemId', args.id, ['files']) 9 | if(!entry && args.id.startsWith(consts.PREFIX_BT)) { 10 | entry = getNonIndexedTorrent(engineUrl, args.id.slice(consts.PREFIX_BT.length)) 11 | } 12 | if(!entry) { 13 | return cb(null, null); 14 | } 15 | Promise.resolve(entry) 16 | .then(function(entry) { 17 | const videos = entry.files.sort(function(a, b) { 18 | // If we have season and episode, sort videos; otherwise retain the order 19 | try { 20 | return a.season - b.season || a.episode - b.episode; 21 | } catch(e) {} 22 | return 0; 23 | }).map(mapFile.bind(null, entry, new Date().getTime())) 24 | 25 | return Promise.resolve(metaStorage.indexes.primaryKey.get(entry.itemId)) 26 | .then(function(meta) { 27 | return meta || mapEntryToMeta(entry) 28 | }) 29 | .then(function(meta) { 30 | meta.videos = videos 31 | cb(null, { meta: meta }) 32 | }); 33 | }) 34 | .catch(function(err) { 35 | console.log(err) 36 | cb(null, null) 37 | }) 38 | } 39 | 40 | function getNonIndexedTorrent(engineUrl, ih) { 41 | return fetch(engineUrl+'/'+ih+'/create', { method: 'POST' }) 42 | .then(function(resp) { return resp.json() }) 43 | .then(function(torrent) { 44 | return new Promise(function(resolve, reject) { 45 | // torrent.announce = (torrent.sources || []).map(function(source) { 46 | // return source.url.startsWith('tracker:') ? source.url.substr(8) : source.url 47 | // }) 48 | indexer.indexParsedTorrent(torrent, function(err, entry) { 49 | if (err) return reject(err) 50 | if (!entry) return reject(new Error('internal err: no entry from indexParsedTorrent')) 51 | resolve(entry); 52 | }) 53 | }) 54 | }) 55 | } 56 | 57 | function mapFile(entry, uxTime, file, index) { 58 | const stream = entry.ih ? { 59 | infoHash: entry.ih, 60 | fileIdx: file.idx, 61 | title: entry.ih + '/' + file.idx, 62 | sources: entry.sources 63 | } : { 64 | title: file.path, 65 | url: 'file://'+file.path, 66 | subtitle: consts.STREAM_LOCALFILE_SUBTITLE, 67 | } 68 | const videoId = [file.imdb_id, file.season, file.episode].filter(x => x).join(':') 69 | const thumbnail = file.season && file.episode 70 | ? `${consts.METAHUB_EPISODES_URL}/${file.imdb_id}/${file.season}/${file.episode}/w780.jpg` 71 | : `${consts.METAHUB_URL}/background/medium/${file.imdb_id}/img` 72 | return { 73 | id: videoId || stream.title, 74 | // We used to have a thumbnail here. 75 | // This caused downloading of all episodes in order to be generated a preview. 76 | title: file.name, 77 | publishedAt: entry.dateModified || new Date(), 78 | // The videos in the UI are sorted by release date. Newest at top. 79 | // For local files we want oldest at top 80 | released: new Date(uxTime - index * 60000), 81 | stream: stream, 82 | season: file.season, 83 | episode: file.episode, 84 | thumbnail: file.imdb_id ? thumbnail : null 85 | } 86 | } 87 | 88 | module.exports = metaHandler -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | const addonSDK = require('stremio-addon-sdk') 2 | const fs = require('fs') 3 | 4 | // Variables 5 | let engineUrl = 'http://127.0.0.1:11470' 6 | 7 | // Internal modules 8 | const manifest = require('./lib/manifest') 9 | const manifestNoCatalogs = require('./lib/manifestNoCatalogs') 10 | const catalogHandler = require('./lib/catalogHandler') 11 | const metaHandler = require('./lib/metaHandler') 12 | const streamHandler = require('./lib/streamHandler') 13 | const Storage = require('./lib/storage') 14 | const findFiles = require('./lib/findFiles') 15 | const indexer = require('./lib/indexer') 16 | const mapEntryToMeta = require('./lib/mapEntryToMeta') 17 | 18 | const MAX_INDEXED = 10000 19 | 20 | // Initiate the storage 21 | const storage = new Storage({ 22 | validateRecord: function(index, entry) { 23 | fs.accessSync(index, fs.constants.R_OK) 24 | }, 25 | entryIndexes: ['itemId'], 26 | }) 27 | const metaStorage = new Storage() 28 | 29 | // Define the addon 30 | function addon(options) { 31 | options = options || {} 32 | const addonBuilder = new addonSDK(options.disableCatalogSupport ? manifestNoCatalogs : manifest) 33 | 34 | addonBuilder.defineCatalogHandler(function(args, cb) { 35 | catalogHandler(storage, metaStorage, args, cb) 36 | }) 37 | 38 | addonBuilder.defineMetaHandler(function(args, cb) { 39 | metaHandler(storage, metaStorage, engineUrl, args, cb) 40 | }) 41 | 42 | addonBuilder.defineStreamHandler(function(args, cb) { 43 | streamHandler(storage, args, cb) 44 | }) 45 | return addonBuilder; 46 | } 47 | 48 | // Exported methods 49 | function setEngineUrl(url) { 50 | engineUrl = url 51 | } 52 | 53 | function logError(err) { 54 | console.log('Error:', err); 55 | } 56 | 57 | function startIndexing(fPath) { 58 | // NOTE: storage.load just loads existing records from the fs 59 | // we don't need to wait for it in order to use the storage, so we don't wait for it 60 | // to start the add-on and we don't consider it fatal if it fails 61 | Promise.all([ 62 | metaStorage.load(fPath+'Meta').catch(logError), 63 | storage.load(fPath).catch(logError) 64 | ]) 65 | .then(function(err) { 66 | // Start indexing 67 | findFiles().on('file', onDiscoveredFile) 68 | }) 69 | } 70 | 71 | // Internal methods 72 | function onDiscoveredFile(fPath) { 73 | // Storage: contains a hash map by filePath and another one by itemId; both point to entry objects 74 | // Indexing: turns a filePath into an entry { id, filePath, itemId, files, ih } 75 | 76 | if (storage.indexes.primaryKey.has(fPath)) { 77 | return 78 | } 79 | 80 | if (storage.indexes.primaryKey.size >= MAX_INDEXED) { 81 | return 82 | } 83 | 84 | indexer.indexFile(fPath, function(err, entry) { 85 | if (err) { 86 | indexLog(fPath, 'indexing error: '+(err.message || err)) 87 | return 88 | } 89 | 90 | if (entry) { 91 | storage.saveEntry(fPath, entry, function(err) { 92 | if (err) console.log(err) 93 | else if(entry.itemId) indexLog(fPath, 'is now indexed: '+entry.itemId) 94 | }) 95 | if(entry.files && entry.files.length > 0 && entry.itemId) { 96 | mapEntryToMeta(entry) 97 | .then(function(meta) { 98 | metaStorage.saveEntry(meta.id, meta, function() {}); 99 | }) 100 | .catch(()=>{}) 101 | } 102 | } 103 | }) 104 | } 105 | 106 | function indexLog(fPath, status) { 107 | console.log('-> '+fPath+': '+status) 108 | } 109 | 110 | module.exports = { addon, setEngineUrl, startIndexing } 111 | -------------------------------------------------------------------------------- /lib/indexer.js: -------------------------------------------------------------------------------- 1 | const parseTorrent = require('parse-torrent') 2 | const fs = require('fs') 3 | const path = require('path') 4 | const nameToImdb = require('name-to-imdb') 5 | const videoNameParser = require('video-name-parser') 6 | const promisify = require('util').promisify 7 | const consts = require('./consts') 8 | 9 | function indexFile(fPath, cb) { 10 | if (fPath.match('\.torrent$')) { 11 | indexTorrent(fPath, cb) 12 | return 13 | } 14 | 15 | if (!fPath.match(consts.INTERESTING_FILE)) { 16 | cb(null, { files: [] }) 17 | return 18 | } 19 | 20 | fs.stat(fPath, function(err, stat) { 21 | if (err) return cb(err) 22 | 23 | let file = { 24 | path: fPath, 25 | name: path.basename(fPath), 26 | length: stat.size, 27 | } 28 | 29 | processFile(file, function(err, f) { 30 | if (err) return cb(err) 31 | 32 | // Those files are only interesting if they map to an IMDB ID 33 | if (!f.imdb_id) return cb(null, { files: [] }) 34 | else return cb(null, { 35 | dateModified: stat.mtime, 36 | itemId: consts.PREFIX_LOCAL+f.imdb_id, 37 | name: f.name, 38 | files: [f], 39 | }) 40 | }) 41 | }) 42 | } 43 | 44 | function indexTorrent(fPath, cb) { 45 | fs.readFile(fPath, function(err, buf) { 46 | if (err) return cb(err) 47 | 48 | let torrent 49 | try { 50 | torrent = parseTorrent(buf) 51 | } catch(e) { 52 | return cb(e) 53 | } 54 | 55 | indexParsedTorrent(torrent, cb) 56 | }) 57 | } 58 | 59 | function indexParsedTorrent(torrent, cb) { 60 | // NOTE: torrent here may be retrieved via parse-torrent or via enginefs /create 61 | // enginefs /create uses parse-torrent-file, but the format between the two is almost the same (.files/.name/.infoHash are the same) 62 | 63 | torrent = torrent || {} 64 | 65 | const ih = (torrent.infoHash || "").toLowerCase() 66 | const name = torrent.name 67 | 68 | const files = (torrent.files || []).map(function(f, i) { 69 | f.idx = i 70 | return f 71 | }).filter(function(x) { 72 | return x.path.match(consts.INTERESTING_FILE) 73 | }) 74 | 75 | if (!files.length) { 76 | cb(null, { files: [] }) 77 | return 78 | } 79 | 80 | const procFile = promisify(processFile) 81 | 82 | Promise.all(files.map(f => procFile(f))) 83 | .then(function(processedFiles) { 84 | cb(null, { 85 | itemId: 'bt:'+ih, 86 | ih: ih, 87 | name: name, 88 | files: processedFiles, 89 | sources: torrent.announce ? getSources(torrent) : null, 90 | }) 91 | }) 92 | .catch(cb) 93 | } 94 | 95 | function processFile(f, cb) { 96 | var parsed = videoNameParser(f.path, { 97 | strict: true, 98 | fromInside: true, 99 | fileLength: f.length 100 | }) 101 | 102 | if (!consts.INTERESTING_TYPE.includes(parsed.type)) { 103 | return cb(null, f) 104 | } 105 | 106 | // NOTE: nameToImdb has a built-in queue, we don't need to worry about concurrency 107 | nameToImdb({ 108 | name: parsed.name, 109 | year: parsed.year, 110 | type: parsed.type, 111 | }, function(err, imdbId) { 112 | // NOTE: the error here is totally ignored, as this is not fatal 113 | if (imdbId) { 114 | f.parsedName = parsed.name 115 | f.type = parsed.type 116 | f.imdb_id = imdbId 117 | if (parsed.season) { 118 | f.season = parsed.season 119 | f.episode = [].concat(parsed.episode).shift() 120 | } 121 | } 122 | 123 | cb(null, f) 124 | }) 125 | } 126 | 127 | function getSources(t) { 128 | return [ 'dht:'+t.infoHash ] 129 | .concat(t.announce.map(function(x) { return 'tracker:'+x })) 130 | } 131 | 132 | 133 | module.exports = { 134 | indexFile, 135 | indexParsedTorrent 136 | } 137 | -------------------------------------------------------------------------------- /lib/storage.js: -------------------------------------------------------------------------------- 1 | const pkg = require('../package') 2 | var fs = require('fs') 3 | var byline = require('byline') 4 | var promisify = require('util').promisify 5 | 6 | // The database structure is like: 7 | // { id: PrimaryKey, entry: RecordedData, v: storageVersion } 8 | 9 | // The opts may be some of: 10 | // entryIndexes - Array of entry properties to be indexed 11 | // validateRecord - function used on load to validate a record. If record is considered invalid the storage is rebuilt 12 | function Storage(opts) { 13 | var self = this; 14 | this.opts = Object.assign({ 15 | entryIndexes: [], 16 | validateRecord: null, 17 | }, opts) 18 | 19 | this.indexes = { 20 | primaryKey: new Map() 21 | } 22 | 23 | this.opts.entryIndexes.forEach(function(key) { 24 | self.indexes[key] = new Map(); 25 | }) 26 | 27 | var writeStream 28 | 29 | this.load = function(dbPath) { 30 | var truncate = false 31 | var open = promisify(fs.open); 32 | var close = promisify(fs.close); 33 | return open(dbPath, 'a+') 34 | .then(function(fd) { 35 | return new Promise(function(resolve) { 36 | fs.createReadStream(null, { fd: fd, autoClose: false }) 37 | .on('error', onInternalErr) 38 | .pipe(byline()) 39 | .on('error', onInternalErr) 40 | .on('data', function(line) { 41 | var record 42 | try { 43 | record = JSON.parse(line.toString()) 44 | if(record.v !== pkg.version) throw "Version missmatch"; 45 | if(self.opts.validateRecord) { 46 | self.opts.validateRecord(record.id, record.entry); 47 | } 48 | commitEntry(record.id, record.entry) 49 | } catch (e) { 50 | // If we have corrupred data or deleted/moved file 51 | // We will rewrite the database with only the healthy records 52 | truncate = true 53 | } 54 | }) 55 | .on('finish', function() { 56 | Promise.resolve() 57 | .then(function() { 58 | if(truncate) { 59 | return close(fd) 60 | .then(function() { 61 | return open(dbPath, 'w') 62 | }) 63 | } 64 | return fd 65 | }) 66 | .then(function(fd) { 67 | writeStream = fs.createWriteStream(null, { fd: fd, autoClose: false }) 68 | writeStream.on('error', onInternalErr) 69 | if(truncate) { 70 | self.indexes.primaryKey.forEach(function(entry, key) { 71 | persistEntry(key, entry) 72 | }) 73 | } 74 | }) 75 | .catch(onInternalErr) 76 | .then(resolve) 77 | }) 78 | }) 79 | }) 80 | } 81 | 82 | this.saveEntry = function(primaryKey, entry, cb) { 83 | if (self.indexes.primaryKey.has(primaryKey)) return cb() 84 | commitEntry(primaryKey, entry) 85 | persistEntry(primaryKey, entry, cb) 86 | } 87 | 88 | this.getAggrEntry = function(index, key, groups) { 89 | const items = this.indexes[index].get(key) 90 | if (!items) return null 91 | 92 | let entry 93 | items.forEach(function(item) { 94 | // copy the first entry, therefore maintaining stuff like {name, ih, sources} 95 | if (!entry) { 96 | entry = Object.assign({ }, item) 97 | return; 98 | } 99 | for(let group of groups) { 100 | if(typeof entry[group] === 'undefined') return; 101 | if(!Array.isArray(entry[group])) entry[group] = [entry[group]]; 102 | entry[group] = entry[group].concat(item[group]); 103 | } 104 | }) 105 | 106 | return entry 107 | } 108 | 109 | // This function creates the storage indexes 110 | function commitEntry(key, entry) { 111 | self.indexes.primaryKey.set(key, entry) 112 | 113 | self.opts.entryIndexes.forEach(function(property) { 114 | if(!entry[property]) return; 115 | if(!self.indexes[property].has(entry[property])) { 116 | self.indexes[property].set(entry[property], new Map()) 117 | } 118 | self.indexes[property].get(entry[property]).set(key, entry) 119 | }) 120 | } 121 | 122 | function persistEntry(key, entry, cb) { 123 | if (!writeStream) return cb(new Error('unable to persist, no fd')) 124 | writeStream.write(JSON.stringify({ id: key, entry: entry, v: pkg.version })+'\n', cb) 125 | } 126 | 127 | function onInternalErr(err) { 128 | console.error('storage', err) 129 | } 130 | } 131 | 132 | module.exports = Storage --------------------------------------------------------------------------------