├── .gitignore
├── .gitmodules
├── .vscode
│   ├── launch.json
│   └── settings.json
├── LICENSE
├── README.md
├── animeFormat.groovy.j2
├── excludes.txt
├── movieFormat.groovy.j2
├── notify.txt
├── partials
│   ├── audioPart.groovy
│   ├── extraSource.groovy
│   ├── extraTags.groovy
│   ├── groupPart.groovy
│   ├── hdrPart.groovy
│   ├── repackPart.groovy
│   └── stdPart.groovy
├── playbook.yml
├── post-script.sh
├── qbittorrent-postprocess
├── requirements.txt
├── seriesFormat.groovy.j2
├── subs
├── test.yml
├── trakt.groovy
├── transmission-postprocess.sh
├── websources.txt
├── websources_legend.txt
└── xem.groovy
/.gitignore: -------------------------------------------------------------------------------- 1 | clear_* 2 | dist/ 3 | 4 | # Logs 5 | logs 6 | *.log 7 | npm-debug.log* 8 | yarn-debug.log* 9 | yarn-error.log* 10 | lerna-debug.log* 11 | 12 | # Diagnostic reports (https://nodejs.org/api/report.html) 13 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 14 | 15 | # Runtime data 16 | pids 17 | *.pid 18 | *.seed 19 | *.pid.lock 20 | 21 | # Directory for instrumented libs generated by jscoverage/JSCover 22 | lib-cov 23 | 24 | # Coverage directory used by tools like istanbul 25 | coverage 26 | 27 | # nyc test coverage 28 | .nyc_output 29 | 30 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 31 | .grunt 32 | 33 | # Bower dependency directory (https://bower.io/) 34 | bower_components 35 | 36 | # node-waf configuration 37 | .lock-wscript 38 | 39 | # Compiled binary addons (https://nodejs.org/api/addons.html) 40 | build/Release 41 | 42 | # Dependency directories 43 | node_modules/ 44 | jspm_packages/ 45 | 46 | # TypeScript v1 declaration files 47 | typings/ 48 | 49 | # Optional npm cache directory 50 | .npm 51 | 52 | # Optional eslint cache 53 | .eslintcache 54 | 55 | # Optional REPL history 56 | .node_repl_history 57 | 58 | # Output of 'npm pack' 59 | *.tgz 60 | 61 | # Yarn Integrity file 62 | .yarn-integrity 63 | 64 | # dotenv environment variables file 65 | .env 66 | .env.test 67 | 68 | # parcel-bundler cache (https://parceljs.org/) 69 | .cache 70 | 71 | # next.js build output 72 | .next 73 | 74 | # nuxt.js build output 75 | .nuxt 76 | 77 | # vuepress build output 78 | .vuepress/dist 79 | 80 | # Serverless directories 81 | .serverless/ 82 | 83 | # FuseBox cache 84 | .fusebox/ 85 | 86 | # DynamoDB Local files 87 | .dynamodb/ -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "rednoah"] 2 | path = rednoah 3 | url = https://github.com/filebot/scripts.git 4 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes.
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "node", 9 | "request": "launch", 10 | "name": "Launch Program", 11 | "program": "${workspaceFolder}/index.js" 12 | } 13 | ] 14 | } -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.pythonPath": "/usr/local/bin/python3", 3 | "python.linting.pylintEnabled": false, 4 | "python.linting.flake8Enabled": true, 5 | "python.linting.enabled": true, 6 | "python.formatting.provider": "black" 7 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Daniele Riccucci 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # FileBot format templating 2 | To generate files run: 3 | ``` 4 | yarn 5 | yarn run build 6 | ``` 7 | If you want to see the log output, use `yarn run build-dbg`, 8 | `env DEBUG='*' yarn run build` (fish shell), or the equivalent in other shells. 9 | The build script expects a directory tree like the following: 10 | ``` 11 | . 12 | ├── README.md 13 | ├── index.js 14 | ├── package.json 15 | ├── templates 16 | │   ├── clear_secrets.json 17 | │   ├── movieFormat.mustache 18 | │   ├── partials 19 | │   │   └── audio.mustache 20 | │   ├── secrets.json 21 | │   ├── seriesFormat.mustache 22 | │   └── vars.json 23 | └── yarn.lock 24 | ``` 25 | with template files (including the desired output extension) ending in `.mustache` 26 | and contained in `templates` (e.g. `movieFormat.groovy.mustache`), and 27 | partials ending in `.mustache` contained in `templates/partials`. 28 | The script also requires a `vars.json` file and optionally a `clear_secrets.json`, 29 | both contained in `templates`. These get merged at runtime. 30 | The script outputs generated files into `dist`.
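For reference, the fish-specific part above is only the `env` prefix; in bash or zsh the same debug build is a plain environment assignment. A minimal session might look like the following (this assumes the build script uses the usual `debug`-package logging convention implied by `DEBUG='*'`):
```
# install dependencies and run the build with debug logging enabled (bash/zsh)
yarn
DEBUG='*' yarn run build

# rendered format files end up in dist/
ls dist/
```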
-------------------------------------------------------------------------------- /animeFormat.groovy.j2: -------------------------------------------------------------------------------- 1 | { 2 | // \u201C = “ 3 | // \u201D = ” 4 | // \u201E = „ 5 | // \u201F = “ 6 | def normTV = { 7 | it.replaceAll(/[`´‘’ʻ""“”“„‟]/, "'") 8 | .replaceAll(/[|]/, ' - ') 9 | .replaceAll(/[?]/, '\uFE56') // '﹖' Small Question Mark 10 | .replaceAll(/[\*]/, '\u204E') // '⁎' low asterisk 11 | .replaceAll(/[*\p{Zs}]+/, ' ') 12 | .replaceAll(/\b[IiVvXx]+\b/, { it.upper() }) 13 | .replaceAll(/\b[0-9](?i:th|nd|rd)\b/, { it.lower() }) 14 | } 15 | 16 | Boolean isEng = any{ audio.language.any{ it ==~ /en/ } }{ audio.language ==~ /en/ }{true} 17 | Boolean isJpn = any{ languages.first().ISO2 ==~ /ja/ }{ audio.language.first() ==~ /ja/ }{ false } 18 | 19 | // WARNING: any db.{AniDB,TMDb,TheTVDB} binding requires FileBot 4.8.6 or above 20 | String mainTitle = any{ db.TMDb.n }{ db.TheTVDB.n }{ normTV(n).colon(' - ').replaceTrailingBrackets() } 21 | String primTitle = normTV(primaryTitle).colon(' - ').replaceTrailingBrackets() 22 | 23 | String.metaClass.surround { l = '(', r = ')' -> 24 | l + delegate + r 25 | } 26 | 27 | allOf 28 | { 'Anime' } 29 | { 30 | allOf 31 | { mainTitle } 32 | { db.TMDb.y.toString().surround() } 33 | .join(' ') 34 | } 35 | { 36 | // TODO: possibly replace with db.TMDb.special 37 | if (episode.special) { // else { if (sc > 0) "Season $s" } 38 | 'Specials' 39 | } else { 40 | allOf 41 | { ['Season', db.TMDb.s].join(' ') } 42 | { if (mainTitle.getSimilarity(primTitle) < 0.95) primTitle.surround('[', ']') } 43 | { db.TMDb.sy.bounds().join('-').surround() } 44 | .join(' ') 45 | } 46 | } 47 | { 48 | allOf 49 | { 50 | allOf 51 | { include '{{ scriptDir }}/partials/groupPart.groovy' } 52 | { mainTitle } 53 | .join(' ') 54 | } 55 | { // EPISODE NUMBERING 56 | // String _absolute = 'EP' + db.TMDb.absolute.pad(2) 57 | if (episode.special) { 58 | "S$special" 59 | } else { 60 | any 61 | { 62 | allOf 63 | /* 64 | { if (db.TMDb.sc > 1) db.TMDb.s00e00 } 65 | { db.TMDb.sc > 1 ? 
_absolute.surround('(', ')') : _absolute } 66 | */ 67 | { db.TMDb.sxe } 68 | { db.TMDb.absolute.pad(2).surround() } 69 | .join(' ') 70 | } 71 | { absolute.pad(2) } 72 | } 73 | } 74 | { 75 | allOf 76 | { // EPISODE NAME 77 | def trLang = any{ if (isJpn) 'x-jat' }{ if (isEng) 'eng' }{ audio.language.first() }{'eng'} 78 | def epName = any{ db.TMDb.t }{t} 79 | // ╱ is the replacement for slash 80 | switch (trLang) { 81 | case { it == 'x-jat' }: 82 | allOf 83 | { normTV(localize."$trLang".t).colon('\u2236').slash('\u2571') } 84 | { normTV(epName).colon('\u2236').slash('\u2571').surround('[', ']') } 85 | .join(' ') 86 | break 87 | case { it == 'eng' }: 88 | normTV(epName).colon('\u2236').slash('\u2571') 89 | break 90 | default: 91 | normTV(localize."$trLang".t).colon('\u2236').slash('\u2571') 92 | } 93 | } 94 | { tags.join(', ').replaceAll(/^/, ' - ') } 95 | { "PT $pi" } 96 | { 97 | allOf 98 | { allOf 99 | { // Video 100 | // net.filebot.media.VideoFormat.DEFAULT_GROUPS.guessFormat(dim[0], dim[1]) 101 | allOf 102 | { vf } 103 | { vc } 104 | { include '{{ scriptDir }}/partials/hdrPart.groovy' } 105 | .join(' ') 106 | } 107 | { include '{{ scriptDir }}/partials/audioPart.groovy' } 108 | { include '{{ scriptDir }}/partials/extraSource.groovy' } 109 | .join(' - ').surround('[', ']') 110 | } 111 | { "[$crc32]" } 112 | { include '{{ scriptDir }}/partials/repackPart.groovy' } 113 | {subt} 114 | .join('') 115 | } 116 | .join(' ') 117 | } 118 | .join(' - ') 119 | } 120 | .join('/') 121 | } 122 | -------------------------------------------------------------------------------- /excludes.txt: -------------------------------------------------------------------------------- 1 | The Stray Cat 2 | Boris (2013) -------------------------------------------------------------------------------- /movieFormat.groovy.j2: -------------------------------------------------------------------------------- 1 | { 2 | import groovy.json.JsonSlurper 3 | import groovy.json.JsonOutput 4 | 5 | def sub = include('{{ scriptDir }}//partials/stdPart.groovy') 6 | 7 | def normMovie = { 8 | it.replaceTrailingBrackets() 9 | // .upperInitial().lowerTrail() 10 | .replaceAll(/[`´‘’ʻ""“”]/, "'") 11 | .replaceAll(/[:|]/, " - ") 12 | // .replaceAll(/[:]/, "\u2236") // "∶" Ratio symbol 13 | // .replaceAll(/[:]/, "\uFF1A") // ":" Fullwidth Colon 14 | // .replaceAll(/[:]/, "\uFE55") // "﹕" Small Colon 15 | // .replaceAll("/", "\u29F8") // "⧸" Big Solidus 16 | // .replaceAll("/", "\u2215") // "∕" Division Slash 17 | // .replaceAll("/", "\u2044") // "⁄" Fraction Slash 18 | // .replaceAll(/[?]/, "\uFF1F") // "?" 
Fullwidth Question Mark 19 | .replaceAll(/[?]/, "\uFE56") // "﹖" Small Question Mark 20 | .replaceAll(/[\*]/, "\u204E") // "⁎" low asterisk 21 | .replaceAll(/[*\p{Zs}]+/, " ") 22 | .replaceAll(/\b[IiVvXx]+\b/, { it.upper() }) 23 | .replaceAll(/\b[0-9](?i:th|nd|rd)\b/, { it.lower() }) 24 | } 25 | 26 | Closure isLatin = { 27 | java.text.Normalizer.normalize(it, java.text.Normalizer.Form.NFD) 28 | .replaceAll(/\p{InCombiningDiacriticalMarks}+/, '') ==~ /^\p{InBasicLatin}+$/ 29 | } 30 | 31 | def translJap = { 32 | /* rate limited to 100 per day I believe, please be careful */ 33 | Object url = new URL('https://api.kuroshiro.org/convert') 34 | Map requestHeaders = [:] 35 | Map postBody = [:] 36 | postBody.str = it 37 | postBody.to = 'romaji' 38 | postBody.mode = 'spaced' 39 | postBody.romajiSystem = 'hepburn' 40 | def postResponse = url.post(JsonOutput.toJson(postBody).getBytes('UTF-8'), 'application/json', requestHeaders) 41 | Object json = new JsonSlurper().parseText(postResponse.text) 42 | return json.result 43 | } 44 | 45 | def transl = { 46 | (languages.first().iso_639_2B == 'jpn') ? translJap(it) : it.transliterate('Any-Latin; NFD; NFC; Title') 47 | } 48 | 49 | allOf 50 | { 51 | if ((media.OverallBitRate.toInteger() / 1000 < 3000 && vf.minus("p").toInteger() >= 720) 52 | || vf.minus("p").toInteger() < 720) { 53 | return "LQ_Movies" 54 | } else { 55 | return "Movies" 56 | } 57 | } 58 | // Movies directory 59 | { def film_directors = info.directors.sort().join(", ") 60 | n.colon("\u2236 ") + " ($y) [$film_directors]" } 61 | // File name 62 | { 63 | allOf 64 | { isLatin(primaryTitle) ? primaryTitle.colon("\u2236 ") : transl(primaryTitle).colon("\u2236 ") } 65 | {" ($y)"} 66 | // tags + a few more variants 67 | { include '{{ scriptDir }}/partials/extraTags.groovy' } 68 | {" PT $pi"} 69 | {" ["} 70 | { 71 | allOf 72 | { // Video 73 | // net.filebot.media.VideoFormat.DEFAULT_GROUPS.guessFormat(dim[0], dim[1]) 74 | allOf 75 | { vf } 76 | { vc } 77 | { include '{{ scriptDir }}/partials/hdrPart.groovy' } 78 | .join(" ") 79 | } 80 | { include '{{ scriptDir }}/partials/audioPart.groovy' } 81 | { include '{{ scriptDir }}/partials/extraSource.groovy' } 82 | .join(" - ") 83 | } 84 | {"]"} 85 | { include '{{ scriptDir }}/partials/repackPart.groovy' } 86 | { include '{{ scriptDir }}/partials/groupPart.groovy' } 87 | { subt } 88 | .join("") 89 | } 90 | .join("/") 91 | } 92 | -------------------------------------------------------------------------------- /notify.txt: -------------------------------------------------------------------------------- 1 | pushover={{ pushover.user }}:{{ pushover.api }} 2 | mail={{ mailgun.server }}:{{ mailgun.port }}:{{ mailgun.from }}:{{ mailgun.user }}:{{ mailgun.pass }} 3 | mailto=devster31@gmail.com 4 | reportError=y 5 | -------------------------------------------------------------------------------- /partials/audioPart.groovy: -------------------------------------------------------------------------------- 1 | /* def audioClean = { if (it != null) it.replaceAll(/[\p{Pd}\p{Space}]/, " ").replaceAll(/\p{Space}{2,}/, " ") } 2 | def mCFP = [ 3 | "AC3" : "AC3", 4 | "AC3+" : "E-AC3", 5 | "TrueHD" : "TrueHD", 6 | "TrueHD TrueHD+Atmos / TrueHD" : "TrueHD ATMOS", 7 | "DTS" : "DTS", 8 | "DTS HD HRA / Core" : "DTS-HD HRA", 9 | "DTS HD MA / Core" : "DTS-HD MA", 10 | "DTS HD X / MA / Core" : "DTS-X", 11 | "FLAC" : "FLAC", 12 | "PCM" : "PCM", 13 | "AC3+ E AC 3+Atmos / E AC 3": "E-AC3+Atmos", 14 | "AAC LC LC" : "AAC-LC", 15 | "AAC LC SBR HE AAC LC": "HE-AAC" 16 | ] */ 17 | 18 | // audio 
map, some of these are probably not needed anymore 19 | Map codecMap = [ 20 | "FLAC": "FLAC", 21 | "PCM": "PCM", 22 | "MPEG Audio Layer 3": "MP3", 23 | "AAC LC": "AAC LC", 24 | "AAC LC SBR": "HE-AAC", // HE-AACv1 25 | "AAC LC SBR PS": "HE-AACv2", 26 | "AC-3 Dep": "E-AC-3+Dep", 27 | "AC-3 Blu-ray Disc Dep": "E-AC-3+Dep", 28 | "E-AC-3 Blu-ray Disc Dep": "E-AC-3+Dep", 29 | "E-AC-3 Dep": "E-AC-3+Dep", 30 | "E-AC-3 JOC": "E-AC-3 JOC", 31 | "DTS XBR": "DTS-HD HRA", // needs review 32 | "DTS ES": "DTS-ES Matrix", 33 | "DTS ES XBR": "DTS-HD HRA", 34 | "DTS ES XXCH XBR": "DTS-HD HRA", // needs review 35 | "DTS ES XXCH": "DTS-ES Discrete", 36 | "DTS ES XXCH XLL": "DTS-HD MA", // needs review 37 | "DTS XLL": "DTS-HD MA", 38 | /* "DTS XLL X": "DTS\u02D0X", // IPA triangular colon */ 39 | "DTS XLL X": "DTS-X", 40 | "MLP FBA": "TrueHD", 41 | "MLP FBA 16-ch": "TrueHD", 42 | "DTS 96/24": "DTS 96-24", // needs review 43 | ] 44 | 45 | audio.collect { au -> 46 | /* Format seems to be consistently defined and identical to Format/String 47 | Format_Profile and Format_AdditionalFeatures instead 48 | seem to be usually mutually exclusive 49 | Format_Commercial (and _If_Any variant) seem to be defined 50 | mainly for Dolby/DTS formats */ 51 | String _ac = any 52 | { allOf 53 | { any { au["Format/String"] } { au["Format"] } } 54 | { au["Format_Profile"] } 55 | { au["Format_AdditionalFeatures"] } 56 | .collectMany{ it.tokenize() }.unique().join(" ") } 57 | { au["Format_Commercial"] } 58 | /* original _aco_ binding uses "Codec_Profile", "Format_Profile", "Format_Commercial" */ 59 | String _aco = any { au["Codec_Profile"] } { au["Format_Profile"] } { au["Format_Commercial"] } 60 | /* def atmos = (_aco =~ /(?i:atmos)/) ? "Atmos" : null */ 61 | Boolean fAtmos = any { au.FormatCommercial =~ /(?i)atmos/ } { false } 62 | Boolean oAtmos = any { au.NumberOfDynamicObjects } { false } 63 | String isAtmos = (fAtmos || oAtmos) ? "Atmos" : null 64 | /* _channels_ uses "ChannelPositions/String2", "Channel(s)_Original", "Channel(s)" 65 | compared to _af_ which uses "Channel(s)_Original", "Channel(s)" 66 | local _channels uses the same variables as {channels} but calculates 67 | the result for each audio stream */ 68 | String _channels = any 69 | { au["ChannelPositions/String2"] } 70 | { au["Channel(s)_Original"] } 71 | { au["Channel(s)"] } 72 | String _ch 73 | /* _channels can contain no numbers */ 74 | Object splitCh = _channels =~ /^(?i)object.based$/ ? "Object Based" : 75 | _channels.tokenize("\\/\\.") 76 | /* the below may be needed for 3/2/0.2.1/3/2/0.1 files */ 77 | // _channels.tokenize("\\/").take(3)*.tokenize("\\.") 78 | // .flatten()*.toInteger() 79 | 80 | 81 | String chSimple = any { au["Channel(s)"] } { au["Channel(s)/String"].replaceAll("channels", "") } 82 | 83 | switch (splitCh) { 84 | case { it instanceof String }: 85 | _ch = allOf { splitCh } { chSimple + "ch" }.join(" ") 86 | break 87 | 88 | case { it.size > 4 }: 89 | def wide = splitCh.takeRight(1) 90 | Double main = splitCh.take(4)*.toDouble().inject(0, { a, b -> a + b }) 91 | Double sub = Double.parseDouble("0." 
+ wide.last()) 92 | _ch = (main + sub).toBigDecimal().setScale(1, java.math.RoundingMode.HALF_UP).toString() 93 | break 94 | 95 | case { it.size > 1 }: 96 | /* original logic is _mostly_ unchanged if format is like 3/2/0.1 */ 97 | Double sub = Double.parseDouble(splitCh.takeRight(2).join(".")) 98 | _ch = splitCh.take(2)*.toDouble().plus(sub).inject(0) { a, b -> a + b } 99 | .toBigDecimal().setScale(1, java.math.RoundingMode.HALF_UP).toString() 100 | break 101 | 102 | default: 103 | _ch = splitCh.first().toDouble() 104 | } 105 | 106 | String _channelPositions = any{au["ChannelPositions"]}{null} 107 | String channelParse 108 | if ( _channelPositions != null && chSimple.toInteger() != _ch.tokenize(".")*.toInteger().sum() ) { 109 | List channelsPos = _channelPositions.tokenize(",") 110 | String mainFix = channelsPos.take(3).inject(0) { acc, p -> 111 | Integer parsedCh = p.tokenize(":").takeRight(1).first().trim().tokenize(" ").size() 112 | acc + parsedCh 113 | } 114 | String subFix = channelsPos.takeRight(1).first().trim().tokenize(" ").size() 115 | channelParse = "${mainFix}.${subFix}" 116 | } 117 | 118 | String _chFix 119 | if (channelParse != null && Float.parseFloat(_ch) <=> Float.parseFloat(channelParse)) { 120 | _chFix = channelParse 121 | } else { 122 | _chFix = _ch.replaceAll(/(?i)Object Based/, '') 123 | } 124 | 125 | String _lang = any { au["Language"] } { video.first()["Language"] } 126 | List stream = allOf 127 | { allOf { _chFix } { au["NumberOfDynamicObjects"].concat("obj") }.join("+") } 128 | { allOf { codecMap.get(_ac, _ac) } {isAtmos/* atmos */}.join("+") } 129 | /* { allOf{ codecMap.get(combined, _aco) }{atmos}.join("+") } /* bit risky keeping _aco as default */ 130 | { net.filebot.Language.findLanguage(_lang).ISO3.upperInitial() } 131 | /* _cf_ not being used > "Codec/Extensions", "Format" */ 132 | Map ret = [:] 133 | /* this is done to retain stream order */ 134 | ret.id = any{ au["StreamKindId"] }{ au["StreamKindPos"] }{ au["ID"] } 135 | ret.data = stream 136 | return ret 137 | }.toSorted{ it.id }.collect{ it.data }*.join(" ").join(", ") 138 | /* .sort{ a, b -> a.first() <=> b.first() }.reverse() */ 139 | -------------------------------------------------------------------------------- /partials/extraSource.groovy: -------------------------------------------------------------------------------- 1 | /* logo-free release source finder + source */ 2 | Object fileURL = new URL('file:///scripts/websources.txt') 3 | Object file = new File(fileURL.toURI()) 4 | def websources = file.exists() ? lines(file).join("|") : null 5 | Boolean isWeb = (source ==~ /WEB.*/) 6 | // def isWeb = source.matches(/WEB.*/) don't know which one is preferrable 7 | String lfr 8 | if (isWeb) { 9 | lfr = any 10 | { source.match(/($websources)\.(?i)WEB/) } 11 | { fn.match(/($websources)\.(?i)WEB/) } 12 | { if (fn.matches(/(?<=\d{3}[p].)WEB|WEB(?=.[hx]\d{3})/)) 'WEB-DL' } 13 | { null } 14 | } 15 | def replacements = [ 16 | 'dvdrip': 'DVDRip', 17 | 'bluray': 'Blu-ray', 18 | 'Blu-Ray': 'Blu-ray', 19 | 'BluRay': 'Blu-ray', 20 | 'BD': 'Blu-ray', 21 | ] 22 | 23 | allOf 24 | { 25 | def yrange = (y-1)..(y+1) 26 | fn.find(/([0-9]{4}).([A-Z]{3}).1080p/) { match, year, country -> 27 | if (match && 28 | yrange.contains(year.toInteger()) && 29 | Locale.getISOCountries(Locale.IsoCountryCode.PART1_ALPHA3).contains(country)) 30 | country 31 | } 32 | } 33 | { fn.match(/(?i)(UHD).$source/).upper() } 34 | { lfr } 35 | { 36 | !(vs ==~ /(?i)BluRay|DVDRip|WEB-DL/) ? 
vs : source.replace(replacements) 37 | } 38 | .join(".") 39 | -------------------------------------------------------------------------------- /partials/extraTags.groovy: -------------------------------------------------------------------------------- 1 | final String space = ' ' 2 | final String commaSpace = ', ' 3 | final String reIMAX = /(?i:imax)/ 4 | 5 | String last = n.tokenize(space).last() 6 | List inTags = any { tags } { null } 7 | if (inTags) { 8 | inTags.removeAll { a -> 9 | a ==~ reIMAX 10 | } 11 | } 12 | String pattern = /(?i)(?:Special.?|Extended.?|Ultimate.?)?(?:(?:Director.?s|/ + 13 | /Collector.?s|Theatrical|Ultimate|Final|Extended|Rogue|Special|/ + 14 | /Diamond|Despecialized|R.?Rated|Super.?Duper|Alternate|First|/ + 15 | /IMAX|(?:1st|2nd|3rd|[4-9]th).?Anniversary).(?:Cut|Edition|Version))/ + 16 | /|DC|(?:Extended|Theatrical|Remaster(?:ed)?|Recut|Uncut|/ + 17 | /Uncensored|Unrated|IMAX|Alternate.?Ending|Limited|Hybrid)/ 18 | 19 | specials = allOf 20 | { inTags } 21 | { fn.after(/(?i:$last)/).findAll(/$pattern/) } 22 | .flatten() 23 | .sort() 24 | *.upperInitial() 25 | *.lowerTrail() 26 | *.replaceAll(/[._-]/, space) 27 | *.replaceAll('Dc', "Director's Cut") 28 | *.replaceAll(reIMAX, 'IMAX') 29 | 30 | if (specials.size() > 0) { 31 | specials.removeIf { a -> 32 | inTags.any { b -> 33 | a != b && (b.startsWith(a) || b.endsWith(a)) 34 | } 35 | } 36 | specials.unique() 37 | } 38 | 39 | " - ${specials.join(commaSpace)}" 40 | -------------------------------------------------------------------------------- /partials/groupPart.groovy: -------------------------------------------------------------------------------- 1 | def grp = net.filebot.media.MediaDetection.releaseInfo.getReleaseGroup(fn.replaceAll(/\[.*\]$/, "")) 2 | 3 | String.metaClass.surround { l = "[", r = "]" -> 4 | l + delegate + r 5 | } 6 | 7 | String grpOut = grp ? 
"$grp" : "$group" 8 | 9 | if (anime) { 10 | "$grpOut".surround() 11 | } else { 12 | "$grpOut".surround('-', '') 13 | } 14 | 15 | /* { any{"-$group"}{"-" + fn.match(/(?:(?<=[-])\w+$)|(?:^\w+(?=[-]))/)} } */ 16 | /* { def grp = fn.match(/(?<=[-])\w+$/) 17 | any{"-$group"}{"-$grp"} } */ 18 | -------------------------------------------------------------------------------- /partials/hdrPart.groovy: -------------------------------------------------------------------------------- 1 | Map HDRMap = [ 2 | 'HDR10': 'HDR10', 3 | 'SMPTE ST 2086': 'HDR10', 4 | 'SMPTE ST 2094 App 3': 'Advanced HDR', 5 | 'SMPTE ST 2094 App 4': 'HDR10+', 6 | 'Dolby Vision / SMPTE ST 2086': 'Dolby Vision', 7 | 'Dolby Vision / HDR10': 'Dolby Vision', 8 | 'ETSI TS 103 433': 'SL-HDR1', 9 | 'SL-HDR1': 'SL-HDR1', // , Version 1.0, Parameter-based, constant 10 | // , Version 1.0, Parameter-based, non-constant 11 | 'SL-HDR2': 'SL-HDR2', // , Version 0.0, Parameter-based 12 | 'SL-HDR3': 'SL-HDR3', 13 | 'Technicolor Advanced HDR': 'Technicolor Advanced HDR', 14 | ] 15 | 16 | Map vid = video.first() 17 | 18 | if (bitdepth > 8) { 19 | switch (vid) { 20 | case { vid =~ /\bHDR_Format_Commercial/ }: 21 | vid['HDR_Format_Commercial'] 22 | break 23 | 24 | case { vid =~ /\bHDR_/ }: 25 | String fHDR = any 26 | { vid['HDR_Format'] } 27 | { vid['HDR_Format/String'] } 28 | // { vid['HDR_Format/String'] } 29 | // { vid['HDR_Format_Compatibility'] } 30 | // following for both HDR10+ (misses compatibility) and Dolby Vision 31 | // { vid['HDR_Format_Version'] } 32 | // following only for Dolby Vision 33 | // { vid['HDR_Format_Profile'] } 34 | // { vid['HDR_Format_Level'] } 35 | // { vid['HDR_Format_Settings'] } 36 | 37 | hdr_out = HDRMap.get(fHDR, fHDR) 38 | if (hdr_out.findMatch(/vision/)) { 39 | dv_info = allOf 40 | { 'P' } 41 | { vid['HDR_Format_Profile'].match(/[dh][ve][hvca][e13v]\.\d(\d)/) } 42 | { 43 | '.' 
+ vid['HDR_Format_Compatibility'].match(/HDR10|SDR/) 44 | .replace('HDR10', '1').replace('SDR', '2') 45 | } 46 | .join() 47 | hdr_out = "$hdr_out $dv_info" 48 | } 49 | hdr_out 50 | break 51 | case { vid['transfer_characteristics'].findMatch(/HLG/) && vid['colour_primaries'] == 'BT.2020' }: 52 | 'HLG10' // HLG 53 | break 54 | case { vid['transfer_characteristics'] == 'PQ' && vid['colour_primaries'] == 'BT.2020' }: 55 | 'HDR10' // PQ10 or HDR 56 | break 57 | default: 58 | "$bitdepth-bit" 59 | break 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /partials/repackPart.groovy: -------------------------------------------------------------------------------- 1 | def ed = fn.findAll(/(?i)repack|proper/)*.upper().join(".") 2 | // def ed = allOf{fn.match(/repack|proper/)}{f.dir.path.match(/repack|proper/)}*.upper().join(".") 3 | if (ed) { ".$ed" } -------------------------------------------------------------------------------- /partials/stdPart.groovy: -------------------------------------------------------------------------------- 1 | import java.util.regex.Pattern 2 | 3 | import groovy.json.JsonSlurper 4 | import groovy.json.JsonOutput 5 | 6 | class Common { 7 | 8 | Map replaceMap = [ 9 | (~/[`´‘’ʻ""“”]/): "'", 10 | '|': ' - ', 11 | ':': '\u2236', 12 | // ':': "\u2236", // "∶" Ratio symbol 13 | // ':': "\uFF1A", // ":" Fullwidth Colon 14 | // ':': "\uFE55", // "﹕" Small Colon 15 | // '/': "\u002F", // "/" Solidus 16 | // '/': "\u29F8", // "⧸" Big Solidus 17 | // '/': "\u2215", // "∕" Division Slash 18 | // '/': "\u2044", // "⁄" Fraction Slash 19 | // '/': "\u2571", // "╱" Box Drawings Light Diagonal Upper Right to Lower Left 20 | // '?': "\uFF1F", // "?" Fullwidth Question Mark 21 | '?': '\uFE56', // '﹖' Small Question Mark 22 | '*': '\u204E', // '⁎' low asterisk 23 | (~/[*\p{Zs}]+/): ' ', 24 | (~/\b[IiVvXx]+\b/): { String it -> it.upper() }, 25 | (~/\b[0-9](?i:th|nd|rd)\b/): { String it -> it.lower() } 26 | ] 27 | 28 | Closure clsReplace = { String origin -> 29 | String tmpStd = origin 30 | replaceMap.each { k1, v -> 31 | [k1].flatten().each { k2 -> 32 | pattern = k2.class == Pattern ? 
k2 : Pattern.quote(k2) 33 | if (v.class == Closure) { 34 | tmpStd = tmpStd.replaceAll(pattern, (Closure) v) 35 | } else { 36 | tmpStd = tmpStd.replaceAll(pattern, v) 37 | } 38 | } 39 | } 40 | if (movie) { 41 | return tmpStd.replaceTrailingBrackets() 42 | } else if (anime || episode) { 43 | return tmpStd 44 | } 45 | return tmpStd 46 | } 47 | 48 | Closure isLatin = { 49 | java.text.Normalizer.normalize(it, java.text.Normalizer.Form.NFD) 50 | .replaceAll(/\p{InCombiningDiacriticalMarks}+/, '') ==~ /^\p{InBasicLatin}+$/ 51 | } 52 | 53 | Closure translJap = { original -> 54 | /* rate limited to 100 per day I believe, please be careful */ 55 | Object url = new URL('https://api.kuroshiro.org/convert') 56 | Map requestHeaders = [:] 57 | Map postBody = [:] 58 | postBody.str = original 59 | postBody.to = 'romaji' 60 | postBody.mode = 'spaced' 61 | postBody.romajiSystem = 'hepburn' 62 | Object postResponse = url.post( 63 | JsonOutput.toJson(postBody).getBytes('UTF-8'), 64 | 'application/json', 65 | requestHeaders 66 | ) 67 | Object json = new JsonSlurper().parseText(postResponse.text) 68 | return json.result 69 | } 70 | 71 | Closure transl = { 72 | if (languages.first().iso_639_2B == 'jpn') { 73 | translJap(it) 74 | } else { 75 | it.transliterate('Any-Latin; NFD; NFC; Title') 76 | } 77 | } 78 | 79 | 80 | // def isEng = any{ audio.language ==~ /en/ }{ true } 81 | // def isJpn = any{ languages.first().iso_639_2B == "jpn" || net.filebot.Language.findLanguage(audio.language.first()).iso_639_2B == "jpn" }{false} 82 | 83 | // Boolean isEng = any{ audio.language.any{ it ==~ /en/ } }{ audio.language ==~ /en/ }{ true } 84 | // Boolean isJpn = any{ languages.first().ISO2 ==~ /ja/ }{ audio.language.first() ==~ /ja/ }{ false } 85 | 86 | // // WARNING: any db.{AniDB,TheTVDB} binding requires FileBot 4.8.6 or above 87 | // String mainTitle = any{ db.TMDb.n }{ db.TheTVDB.n }{ norm(n).colon(" - ").replaceTrailingBrackets() } 88 | // String primTitle = norm(primaryTitle).colon(" - ").replaceTrailingBrackets() 89 | 90 | } 91 | 92 | String.metaClass.stdReplace { Map replacer -> 93 | String tmpStd = delegate 94 | replacer.each { k1, v -> 95 | [k1].flatten().each { k2 -> 96 | pattern = k2.class == Pattern ? 
k2 : Pattern.quote(k2) 97 | if (v.class == Closure) { 98 | tmpStd = tmpStd.replaceAll(pattern, (Closure) v) 99 | } else { 100 | tmpStd = tmpStd.replaceAll(pattern, v) 101 | } 102 | } 103 | } 104 | if (movie) { 105 | return tmpStd.replaceTrailingBrackets() 106 | } else if (anime || episode) { 107 | return tmpStd 108 | } 109 | return tmpStd 110 | } 111 | 112 | String.metaClass.surround { l = "(", r = ")" -> 113 | l + delegate + r 114 | } 115 | 116 | /* alternative to the above, with defaults, usable with any Type 117 | String surround(s, l = "(", r = ")") { 118 | l + s + r 119 | } 120 | */ 121 | 122 | return new Common() 123 | -------------------------------------------------------------------------------- /playbook.yml: -------------------------------------------------------------------------------- 1 | # deploying to server 2 | # ansible-playbook -i , playbook.yml 3 | - hosts: all 4 | tasks: 5 | - name: test 6 | ansible.builtin.include_tasks: test.yml 7 | args: 8 | apply: 9 | tags: 10 | - test 11 | delegate_to: 127.0.0.1 12 | tags: 13 | - test 14 | loop: 15 | - { input: 'Chainsaw Man', output: 'Anime/Chainsaw Man (2022)/Season 1 (2022)/Chainsaw Man - 1x01 (01) - DOG & CHAINSAW', db: 'AniDB', format: 'animeFormat.groovy', extra: "--filter 's == 1 && e == 1'" } 16 | 17 | - name: copy qbittorrent script 18 | copy: 19 | src: qbittorrent-postprocess 20 | dest: /srv/pv/scripts/qbittorrent-postprocess 21 | backup: true 22 | tags: deploy 23 | 24 | - name: copy partials 25 | copy: 26 | src: "partials" 27 | dest: "/srv/pv/scripts/partials" 28 | backup: true 29 | tags: deploy 30 | 31 | - name: template files out to destination 32 | ansible.builtin.template: 33 | src: "{{ item }}" 34 | dest: "/srv/pv/scripts/{{ item | basename | splitext | first }}" 35 | backup: true 36 | vars: 37 | scriptDir: "/scripts" 38 | loop: "{{ lookup('fileglob', '*.groovy.j2', wantlist=True) }}" 39 | tags: deploy 40 | -------------------------------------------------------------------------------- /post-script.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -u 2 | 3 | file=$1 # {f} 4 | location=$2 # {f.dir.dir} 5 | database=${3} # {info.DataBase} 6 | id=${4:-x} # {info.id} 7 | name="${5}" # {info.Name} 8 | 9 | if [[ ! 
-f "$1" ]]; then 10 | echo "No ouput file" 11 | exit 0 12 | fi 13 | 14 | chmod 664 "${file}" 15 | export JAVA_OPTS="-Xmx128M" 16 | filebot -script fn:suball --def maxAgeDaysLimit=false maxAgeDays=3000d "${file}" 17 | echo "${location/mnt/downloads}" 18 | 19 | [ ${id} == "x" ] && exit 0 20 | 21 | if [[ "${database}" = "AniDB" ]] 22 | then 23 | id=$(http --body :8081/medusa/api/v1/"${MEDUSA_API_KEY}"/ cmd=='shows' sort==name | jq --arg n "${name}" '.data | .[$n].indexerid') 24 | fi 25 | 26 | http --check-status --ignore-stdin --body --pretty=format \ 27 | :8081/medusa/api/v1/${MEDUSA_API_KEY}/ \ 28 | cmd=="show.refresh" \ 29 | indexerid=="${id}" 30 | 31 | printf "\n" 32 | 33 | #check_show="$(http --check-status --ignore-stdin --body :8081/api/v1/"${MEDUSA_API_KEY}" cmd==shows)" 34 | #echo "${check_show}" 35 | #parse_check="$(echo "${check_show}" | jq ".data | map(select(.tvdbid == "${tvdbid}")) | length > 0")" 36 | #echo "${parse_check}" 37 | 38 | #if [[ "${parse_check}" = true ]] 39 | #then 40 | # http --check-status --ignore-stdin --body \ 41 | # :8081/api/v1/${MEDUSA_API_KEY}/ \ 42 | # cmd=="show.refresh" \ 43 | # indexerid=="${tvdbid}" 44 | #else 45 | # docker exec rpi3_medusa \ 46 | # http --check-status --ignore-stdin --body \ 47 | # :8081/api/v1/${MEDUSA_API_KEY}/ \ 48 | # cmd==show.addexisting \ 49 | # indexerid==${tvdbid} \ 50 | # location=="${location}" \ 51 | # tvdbid==${tvdbid} 52 | #fi 53 | -------------------------------------------------------------------------------- /qbittorrent-postprocess: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | version="0.1.0" 3 | # 4 | # This is an optional arguments-only example of Argbash potential 5 | # 6 | # ARG_OPTIONAL_SINGLE([qb_name],[N],[torrent name]) 7 | # ARG_OPTIONAL_SINGLE([qb_category],[L],[torrent category]) 8 | # ARG_OPTIONAL_SINGLE([qb_tags],[G],[separated by comma]) 9 | # ARG_OPTIONAL_SINGLE([qb_content],[F],[content path, same as root for multifile torrent]) 10 | # ARG_OPTIONAL_SINGLE([qb_root],[R],[root path, first torrent subdirectory path]) 11 | # ARG_OPTIONAL_SINGLE([qb_save],[D],[save path]) 12 | # ARG_OPTIONAL_SINGLE([qb_num],[C],[number of files]) 13 | # ARG_OPTIONAL_SINGLE([qb_size],[Z],[torrent size (bytes)]) 14 | # ARG_OPTIONAL_SINGLE([qb_tracker],[T],[current tracker]) 15 | # ARG_OPTIONAL_SINGLE([qb_hash],[I],[torrent info hash]) 16 | # ARG_USE_ENV([SCRIPTS],[/scripts],[the default mount path for scripts]) 17 | 18 | # ARG_USE_PROG([FILEBOT], [/usr/bin/filebot], [filebot executable]) 19 | 20 | # ARG_HELP([qbittorrent filebot postprocess script]) 21 | # ARG_VERSION([echo "$(basename "$0")" v$version]) 22 | # ARGBASH_GO() 23 | # needed because of Argbash --> m4_ignore([ 24 | 25 | ### START OF CODE GENERATED BY Argbash v2.9.0 one line above ### 26 | 27 | # When called, the process ends. 28 | # Args: 29 | # $1: The exit message (print to stderr) 30 | # $2: The exit code (default is 1) 31 | # if env var _PRINT_HELP is set to 'yes', the usage is print to stderr (prior to ) 32 | # Example: 33 | # test -f "$_arg_infile" || _PRINT_HELP=yes die "Can't continue, have to supply file as an argument, got '$_arg_infile'" 4 34 | die() 35 | { 36 | local _ret="${2:-1}" 37 | test "${_PRINT_HELP:-no}" = yes && print_help >&2 38 | echo "$1" >&2 39 | exit "${_ret}" 40 | } 41 | 42 | # Function that evaluates whether a value passed to it begins by a character 43 | # that is a short option of an argument the script knows about. 
44 | # This is required in order to support getopts-like short options grouping. 45 | begins_with_short_option() 46 | { 47 | local first_option all_short_options='NLGFRDCZTIhv' 48 | first_option="${1:0:1}" 49 | test "$all_short_options" = "${all_short_options/$first_option/}" && return 1 || return 0 50 | } 51 | 52 | # THE DEFAULTS INITIALIZATION - OPTIONALS 53 | _arg_qb_name= 54 | _arg_qb_category= 55 | _arg_qb_tags= 56 | _arg_qb_content= 57 | _arg_qb_root= 58 | _arg_qb_save= 59 | _arg_qb_num= 60 | _arg_qb_size= 61 | _arg_qb_tracker= 62 | _arg_qb_hash= 63 | 64 | # Function that prints general usage of the script. 65 | # This is useful if users asks for it, or if there is an argument parsing error (unexpected / spurious arguments) 66 | # and it makes sense to remind the user how the script is supposed to be called. 67 | print_help () 68 | { 69 | printf '%s\n' "qbittorrent filebot postprocess script" 70 | printf 'Usage: %s [-N|--qb_name ] [-L|--qb_category ] [-G|--qb_tags ] [-F|--qb_content ] [-R|--qb_root ] [-D|--qb_save ] [-C|--qb_num ] [-Z|--qb_size ] [-T|--qb_tracker ] [-I|--qb_hash ] [-h|--help] [-v|--version]\n' "$0" 71 | printf '\t%s\n' "-N,--qb_name: torrent name (no default)" 72 | printf '\t%s\n' "-L,--qb_category: torrent category (no default)" 73 | printf '\t%s\n' "-G,--qb_tags: separated by comma (no default)" 74 | printf '\t%s\n' "-F,--qb_content: content path, same as root for multifile torrent (no default)" 75 | printf '\t%s\n' "-R,--qb_root: root path, first torrent subdirectory path (no default)" 76 | printf '\t%s\n' "-D,--qb_save: save path (no default)" 77 | printf '\t%s\n' "-C,--qb_num: number of files (no default)" 78 | printf '\t%s\n' "-Z,--qb_size: torrent size (bytes) (no default)" 79 | printf '\t%s\n' "-T,--qb_tracker: current tracker (no default)" 80 | printf '\t%s\n' "-I,--qb_hash: torrent info hash (no default)" 81 | printf '\t%s\n' "-h, --help: Prints help" 82 | printf '\t%s\n' "-v, --version: Prints version" 83 | printf '\nEnvironment variables that are supported:\n' 84 | printf '\t%s\n' "SCRIPTS: the default mount path for scripts. (default: '/scripts')" 85 | } 86 | 87 | # The parsing of the command-line 88 | parse_commandline () 89 | { 90 | while test $# -gt 0 91 | do 92 | _key="$1" 93 | case "$_key" in 94 | # We support whitespace as a delimiter between option argument and its value. 95 | # Therefore, we expect the --qb_name or -N value. 96 | # so we watch for --qb_name and -N. 97 | # Since we know that we got the long or short option, 98 | # we just reach out for the next argument to get the value. 99 | -N|--qb_name) 100 | test $# -lt 2 && die "Missing value for the optional argument '$_key'." 1 101 | _arg_qb_name="$2" 102 | shift 103 | ;; 104 | # We support the = as a delimiter between option argument and its value. 105 | # Therefore, we expect --qb_name=value, so we watch for --qb_name=* 106 | # For whatever we get, we strip '--qb_name=' using the ${var##--qb_name=} notation 107 | # to get the argument value 108 | --qb_name=*) 109 | _arg_qb_name="${_key##--qb_name=}" 110 | ;; 111 | # We support getopts-style short arguments grouping, 112 | # so as -N accepts value, we allow it to be appended to it, so we watch for -N* 113 | # and we strip the leading -N from the argument string using the ${var##-N} notation. 114 | -N*) 115 | _arg_qb_name="${_key##-N}" 116 | ;; 117 | # See the comment of option '--qb_name' to see what's going on here - principle is the same. 
118 | -L|--qb_category) 119 | test $# -lt 2 && die "Missing value for the optional argument '$_key'." 1 120 | _arg_qb_category="$2" 121 | shift 122 | ;; 123 | # See the comment of option '--qb_name=' to see what's going on here - principle is the same. 124 | --qb_category=*) 125 | _arg_qb_category="${_key##--qb_category=}" 126 | ;; 127 | # See the comment of option '-N' to see what's going on here - principle is the same. 128 | -L*) 129 | _arg_qb_category="${_key##-L}" 130 | ;; 131 | # See the comment of option '--qb_name' to see what's going on here - principle is the same. 132 | -G|--qb_tags) 133 | test $# -lt 2 && die "Missing value for the optional argument '$_key'." 1 134 | _arg_qb_tags="$2" 135 | shift 136 | ;; 137 | # See the comment of option '--qb_name=' to see what's going on here - principle is the same. 138 | --qb_tags=*) 139 | _arg_qb_tags="${_key##--qb_tags=}" 140 | ;; 141 | # See the comment of option '-N' to see what's going on here - principle is the same. 142 | -G*) 143 | _arg_qb_tags="${_key##-G}" 144 | ;; 145 | # See the comment of option '--qb_name' to see what's going on here - principle is the same. 146 | -F|--qb_content) 147 | test $# -lt 2 && die "Missing value for the optional argument '$_key'." 1 148 | _arg_qb_content="$2" 149 | shift 150 | ;; 151 | # See the comment of option '--qb_name=' to see what's going on here - principle is the same. 152 | --qb_content=*) 153 | _arg_qb_content="${_key##--qb_content=}" 154 | ;; 155 | # See the comment of option '-N' to see what's going on here - principle is the same. 156 | -F*) 157 | _arg_qb_content="${_key##-F}" 158 | ;; 159 | # See the comment of option '--qb_name' to see what's going on here - principle is the same. 160 | -R|--qb_root) 161 | test $# -lt 2 && die "Missing value for the optional argument '$_key'." 1 162 | _arg_qb_root="$2" 163 | shift 164 | ;; 165 | # See the comment of option '--qb_name=' to see what's going on here - principle is the same. 166 | --qb_root=*) 167 | _arg_qb_root="${_key##--qb_root=}" 168 | ;; 169 | # See the comment of option '-N' to see what's going on here - principle is the same. 170 | -R*) 171 | _arg_qb_root="${_key##-R}" 172 | ;; 173 | # See the comment of option '--qb_name' to see what's going on here - principle is the same. 174 | -D|--qb_save) 175 | test $# -lt 2 && die "Missing value for the optional argument '$_key'." 1 176 | _arg_qb_save="$2" 177 | shift 178 | ;; 179 | # See the comment of option '--qb_name=' to see what's going on here - principle is the same. 180 | --qb_save=*) 181 | _arg_qb_save="${_key##--qb_save=}" 182 | ;; 183 | # See the comment of option '-N' to see what's going on here - principle is the same. 184 | -D*) 185 | _arg_qb_save="${_key##-D}" 186 | ;; 187 | # See the comment of option '--qb_name' to see what's going on here - principle is the same. 188 | -C|--qb_num) 189 | test $# -lt 2 && die "Missing value for the optional argument '$_key'." 1 190 | _arg_qb_num="$2" 191 | shift 192 | ;; 193 | # See the comment of option '--qb_name=' to see what's going on here - principle is the same. 194 | --qb_num=*) 195 | _arg_qb_num="${_key##--qb_num=}" 196 | ;; 197 | # See the comment of option '-N' to see what's going on here - principle is the same. 198 | -C*) 199 | _arg_qb_num="${_key##-C}" 200 | ;; 201 | # See the comment of option '--qb_name' to see what's going on here - principle is the same. 202 | -Z|--qb_size) 203 | test $# -lt 2 && die "Missing value for the optional argument '$_key'." 
1 204 | _arg_qb_size="$2" 205 | shift 206 | ;; 207 | # See the comment of option '--qb_name=' to see what's going on here - principle is the same. 208 | --qb_size=*) 209 | _arg_qb_size="${_key##--qb_size=}" 210 | ;; 211 | # See the comment of option '-N' to see what's going on here - principle is the same. 212 | -Z*) 213 | _arg_qb_size="${_key##-Z}" 214 | ;; 215 | # See the comment of option '--qb_name' to see what's going on here - principle is the same. 216 | -T|--qb_tracker) 217 | test $# -lt 2 && die "Missing value for the optional argument '$_key'." 1 218 | _arg_qb_tracker="$2" 219 | shift 220 | ;; 221 | # See the comment of option '--qb_name=' to see what's going on here - principle is the same. 222 | --qb_tracker=*) 223 | _arg_qb_tracker="${_key##--qb_tracker=}" 224 | ;; 225 | # See the comment of option '-N' to see what's going on here - principle is the same. 226 | -T*) 227 | _arg_qb_tracker="${_key##-T}" 228 | ;; 229 | # See the comment of option '--qb_name' to see what's going on here - principle is the same. 230 | -I|--qb_hash) 231 | test $# -lt 2 && die "Missing value for the optional argument '$_key'." 1 232 | _arg_qb_hash="$2" 233 | shift 234 | ;; 235 | # See the comment of option '--qb_name=' to see what's going on here - principle is the same. 236 | --qb_hash=*) 237 | _arg_qb_hash="${_key##--qb_hash=}" 238 | ;; 239 | # See the comment of option '-N' to see what's going on here - principle is the same. 240 | -I*) 241 | _arg_qb_hash="${_key##-I}" 242 | ;; 243 | # The version argurment doesn't accept a value, 244 | # we expect the --version or -v, so we watch for them. 245 | -v|--version) 246 | echo "$(basename "$0")" v"$version" 247 | exit 0 248 | ;; 249 | # We support getopts-style short arguments clustering, 250 | # so as -v doesn't accept value, other short options may be appended to it, so we watch for -v*. 251 | # After stripping the leading -v from the argument, we have to make sure 252 | # that the first character that follows coresponds to a short option. 253 | -v*) 254 | echo "$(basename "$0")" v"$version" 255 | exit 0 256 | ;; 257 | # See the comment of option '--version' to see what's going on here - principle is the same. 258 | -h|--help) 259 | print_help 260 | exit 0 261 | ;; 262 | # See the comment of option '-v' to see what's going on here - principle is the same. 263 | -h*) 264 | print_help 265 | exit 0 266 | ;; 267 | *) 268 | _PRINT_HELP=yes die "FATAL ERROR: Got an unexpected argument '$1'" 1 269 | ;; 270 | esac 271 | shift 272 | done 273 | } 274 | 275 | # Now call all the functions defined above that are needed to get the job done 276 | parse_commandline "$@" 277 | 278 | # OTHER STUFF GENERATED BY Argbash 279 | test -n "$SCRIPTS" || SCRIPTS="/scripts" 280 | 281 | ### END OF CODE GENERATED BY Argbash (sortof) ### ]) 282 | # [ <-- needed because of Argbash 283 | 284 | # FileBot Configuration 285 | shopt -s nocasematch 286 | test -n "$OUT_DIR" || OUT_DIR="." 
287 | test -n "$FILEBOT" || FILEBOT=$(which filebot) || FILEBOT="/usr/bin/filebot" 288 | test -n "$PPR_LOG" || PPR_LOG=/config/postprocess.log 289 | MEDIA_OUTPUT="$OUT_DIR/Media" 290 | 291 | log() { 292 | echo "$(date -Iseconds) [$(basename "$0")]: $*" | tee -a "$PPR_LOG" 293 | } 294 | 295 | log "-------------------------------------" 296 | log "--- RUN $(date -Iseconds) ---" 297 | log "Value of SCRIPTS: $SCRIPTS" 298 | log "Value of OUT_DIR: $OUT_DIR" 299 | log "Value of FILEBOT: $FILEBOT" 300 | log "Value of --qb_name: $_arg_qb_name" 301 | log "Value of --qb_category: $_arg_qb_category" 302 | log "Value of --qb_tags: $_arg_qb_tags" 303 | log "Value of --qb_content: $_arg_qb_content" 304 | log "Value of --qb_root: $_arg_qb_root" 305 | log "Value of --qb_save: $_arg_qb_save" 306 | log "Value of --qb_num: $_arg_qb_num" 307 | log "Value of --qb_size: $_arg_qb_size" 308 | log "Value of --qb_hash: $_arg_qb_hash" 309 | 310 | # TODO: https://www.filebot.net/forums/viewtopic.php?p=43015#p43017 311 | # The --def ut_kind=single option instructs the amc script to use both 312 | # ut_dir and ut_file to create a file path, and that requires both of 313 | # these options to be set, and AFAIK is only useful for uT integration. 314 | # [[ $_arg_qb_num -eq 1 ]] && _qb_multi="single" || _qb_multi="multi" 315 | _qb_multi="multi" 316 | log "Value of _qb_multi: $_qb_multi" 317 | 318 | [[ "$_arg_qb_category" =~ (tv_shows|tv|anime|movies|movie) ]] || \ 319 | { log 'FileBot not handling this category, skipping' ; exit 0; } 320 | 321 | # LINKS=("$(find "$_arg_qb_root" -type l)") 322 | # NOT reliable and returns array of length 1 even when empty 323 | 324 | # https://stackoverflow.com/a/54561526/4078543 325 | mapfile -d '' LINKS < <(find "$_arg_qb_root" -type l -print0) 326 | if [[ "${#LINKS[@]}" -gt 0 ]] 327 | then 328 | log 'Links found in torrent directory, skipping' 329 | exit 0 330 | fi 331 | 332 | # PAUSE the torrent before processing 333 | set -x 334 | http --pretty=format --headers --ignore-stdin ":${WEBUI_PORT}/api/v2/torrents/pause" hashes=="$_arg_qb_hash" 335 | set -x 336 | 337 | FB=("$FILEBOT" -script fn:amc --action keeplink --output "$MEDIA_OUTPUT" --conflict skip \ 338 | --filter \"'!readLines('\'"$SCRIPTS"'/excludes.txt'\'').contains(n)'\" \ 339 | -non-strict --log-file amc.log --def excludeList=".excludes" \ 340 | --def ut_dir="$_arg_qb_root" ut_kind="$_qb_multi" ut_title="$_arg_qb_name" ut_label="$_arg_qb_category" \ 341 | --def @"$SCRIPTS"/notify.txt \ 342 | --def minLengthMS=300000 \ 343 | --def movieDB=TheMovieDB seriesDB=TheMovieDB::TV animeDB=AniDB musicDB=ID3 \ 344 | --def movieFormat=@"$SCRIPTS"/movieFormat.groovy \ 345 | --def seriesFormat=@"$SCRIPTS"/seriesFormat.groovy \ 346 | --def animeFormat=@"$SCRIPTS"/animeFormat.groovy) 347 | 348 | log "${FB[@]}" 349 | 350 | # sudo -H -u devster -g devster -- 351 | "${FB[@]}" 352 | 353 | # RESTART the torrent after processing 354 | set -x 355 | http --pretty=format --headers --ignore-stdin ":${WEBUI_PORT}/api/v2/torrents/resume" hashes=="$_arg_qb_hash" 356 | set +x 357 | 358 | # ] <-- needed because of Argbash -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | ansible~=7.1.0 2 | ansible-lint~=6.10.2 3 | black~=22.12.0 4 | -------------------------------------------------------------------------------- /seriesFormat.groovy.j2: -------------------------------------------------------------------------------- 1 | { 2 | 
def normTV = { 3 | it.replaceAll(/[`´‘’ʻ""“”]/, "'") 4 | .replaceAll(/[|]/, " - ") 5 | .replaceAll(/[?]/, "\uFE56") // "﹖" Small Question Mark 6 | .replaceAll(/[\*]/, "\u204E") // "⁎" low asterisk 7 | .replaceAll(/[*\p{Zs}]+/, " ") 8 | .replaceAll(/\b[IiVvXx]+\b/, { it.upper() }) 9 | .replaceAll(/\b[0-9](?i:th|nd|rd)\b/, { it.lower() }) 10 | } 11 | 12 | Boolean isEng = any{ audio.language.any{ it ==~ /en/ } }{ audio.language ==~ /en/ }{true} 13 | 14 | allOf 15 | {"TV Shows"} 16 | { 17 | allOf 18 | { (!isEng && (audio.language != null)) ? 19 | normTV(localize[audio.language[0]].n).colon(" - ").replaceTrailingBrackets() : 20 | normTV(n).colon(" - ").replaceTrailingBrackets() } 21 | { def firstYear = episodelist.find{ it.regular }.airdate.year 22 | "($firstYear)" } 23 | .join(" ") 24 | } 25 | { episode.special ? "Specials" : allOf{"Season"}{s}.join(" ") } 26 | /* allOf{"Season"}{s}{sy}.join(" ") --- {sc >= 10 ? s.pad(2) : s} */ 27 | { 28 | allOf 29 | { 30 | if (!isEng && (audio.language != null)) { 31 | normTV(localize[audio.language[0]].n).colon("\u2236 ").replaceTrailingBrackets() 32 | } else { 33 | normTV(n).colon("\u2236 ").replaceTrailingBrackets() 34 | } 35 | } 36 | { episode.special ? "S00E" + special.pad(2) : s00e00 } 37 | { 38 | allOf 39 | // { t.replacePart(replacement = ", Part $1") } 40 | { 41 | if (!isEng && (audio.language != null)) { 42 | normTV(localize[audio.language[0]].t).colon("\u2236 ").slash("\u2571") 43 | } else { 44 | normTV(t).colon("\u2236 ").slash("\u2571") // ╱ is the replacement for slash 45 | } 46 | } 47 | {"PT $pi"} 48 | { 49 | allOf 50 | {" ["} 51 | { 52 | allOf 53 | { // Video 54 | // net.filebot.media.VideoFormat.DEFAULT_GROUPS.guessFormat(dim[0], dim[1]) 55 | allOf 56 | { vf } 57 | { vc } 58 | { include '{{ scriptDir }}/partials/hdrPart.groovy' } 59 | .join(" ") 60 | } 61 | { include '{{ scriptDir }}/partials/audioPart.groovy' } 62 | { include '{{ scriptDir }}/partials/extraSource.groovy' } 63 | .join(" - ") 64 | } 65 | {"]"} 66 | { include '{{ scriptDir }}/partials/repackPart.groovy' } 67 | { include '{{ scriptDir }}/partials/groupPart.groovy' } 68 | {subt} 69 | .join("") 70 | } 71 | .join(" ") 72 | } 73 | .join(" - ") 74 | } 75 | .join("/") 76 | } 77 | -------------------------------------------------------------------------------- /subs: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | file=$1 4 | shift 5 | chmod 664 $file 6 | 7 | for lang in "$@" ; do 8 | filebot -script fn:suball --lang $lang --def maxAgeDays=7 $file 9 | subliminal --addict7ed $ADDIC7ED_USER $ADDIC7ED_PASS --opensubtitles $OPENSUBS_USER $OPENSUBS_PASS \ 10 | download -l $lang -hi --refiner metadata -p addic7ed -p podnapisi -p shooter -p subscenter -p thesubdb -p tvsubtitles $file 11 | done 12 | -------------------------------------------------------------------------------- /test.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: test output 3 | ansible.builtin.command: 4 | cmd: /usr/local/bin/filebot -list --q {{ item.input }} --db {{ item.db }} --format {{ item.format }} {{ item.extra | d() }} 5 | changed_when: false 6 | register: result 7 | 8 | - name: test 9 | ansible.builtin.assert: 10 | that: result.stdout_lines[-1] == item.output 11 | -------------------------------------------------------------------------------- /trakt.groovy: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env filebot -script 2 | 3 | /** 4 | * 
trakt API v2 URL. 5 | */ 6 | def API_HOST = "api.trakt.tv"; 7 | def API_URL = "https://" + API_HOST + "/"; 8 | def API_VERSION = "2"; 9 | 10 | def SITE_URL = "https://trakt.tv"; 11 | def OAUTH2_AUTHORIZATION_URL = SITE_URL + "/oauth/authorize"; 12 | def OAUTH2_TOKEN_URL = SITE_URL + "/oauth/token"; 13 | 14 | def HEADER_AUTHORIZATION = "Authorization"; 15 | def HEADER_CONTENT_TYPE = "Content-Type"; 16 | def CONTENT_TYPE_JSON = "application/json"; 17 | def HEADER_TRAKT_API_VERSION = "trakt-api-version"; 18 | def HEADER_TRAKT_API_KEY = "trakt-api-key"; 19 | 20 | // series name => series key (e.g. Doctor Who (2005) => doctorwho) 21 | def collationKey = { s -> s == null ? '' : s.removeAll(/^(?i)(The|A)\b/).removeAll(/\(?\d{4}\)?$/).removeAll(/\W/).lower() } 22 | 23 | args.getFiles().findAll{ it.isVideo() && parseEpisodeNumber(it) && detectSeriesName(it) }.groupBy{ detectSeriesName(it) }.each{ series, files -> 24 | def show = myshows.find{ collationKey(it.name) == collationKey(series) } 25 | if (show == null && mesadd) { 26 | show = mes.getShows().find{ collationKey(it.name) == collationKey(series) } 27 | if (show == null) { 28 | println "[failure] '$series' not found" 29 | return 30 | } 31 | mes.addShow(show.id) 32 | println "[added] $show.name" 33 | } 34 | 35 | files.each{ 36 | if (show != null) { 37 | def sxe = parseEpisodeNumber(it) 38 | mes.update(show.id, sxe.season, sxe.episode, mesupdate, mesvalue) 39 | println "[$mesupdate] $show.name $sxe [$it.name]" 40 | } else { 41 | println "[failure] '$series' has not been added [$it.name]" 42 | } 43 | } 44 | } 45 | 46 | /**************************************************************************** 47 | * Trakt 48 | ****************************************************************************/ 49 | 50 | class MyEpisodesScraper { 51 | this.apikey = apikey 52 | def username 53 | def password 54 | 55 | def cache = Cache.getCache('myepisodes', CacheType.Weekly) 56 | def session = [:] 57 | 58 | def login = { 59 | def response = Jsoup.connect('http://www.myepisodes.com/login.php').data('username', username, 'password', password, 'action', 'Login', 'u', '').method(Method.POST).execute() 60 | session << response.cookies() 61 | return response.parse() 62 | } 63 | 64 | def get = { url -> 65 | if (session.isEmpty()) { 66 | login() 67 | } 68 | 69 | def response = Jsoup.connect(url).cookies(session).method(Method.GET).execute() 70 | session << response.cookies() 71 | def html = response.parse() 72 | 73 | if (html.select('#frmLogin')) { 74 | session.clear() 75 | throw new Exception('Login failed') 76 | } 77 | 78 | return html 79 | } 80 | 81 | def getShows = { 82 | def shows = cache.get('MyEpisodes.Shows') 83 | if (shows == null) { 84 | shows = ['other', 'A'..'Z'].flatten().findResults{ section -> 85 | get("http://myepisodes.com/shows.php?list=${section}").select('a').findResults{ a -> 86 | try { 87 | return [id:a.absUrl('href').match(/showid=(\d+)/).toInteger(), name:a.text().trim()] 88 | } catch(e) { 89 | return null 90 | } 91 | } 92 | }.flatten().sort{ it.name } 93 | cache.put('MyEpisodes.Shows', shows) 94 | } 95 | return shows 96 | } 97 | 98 | def getShowList = { 99 | get("http://www.myepisodes.com/shows.php?type=manage").select('option').findResults{ option -> 100 | try { 101 | return [id:option.attr('value').toInteger(), name:option.text().trim()] 102 | } catch(e) { 103 | return null 104 | } 105 | } 106 | } 107 | 108 | def addShow = { showid -> 109 | get("http://www.myepisodes.com/views.php?type=manageshow&mode=add&showid=${showid}") 110 | } 111 | 112 | def 
update = { showid, season, episode, tick = 'acquired', value = '1' -> 113 | get("http://www.myepisodes.com/myshows.php?action=Update&showid=${showid}&season=${season}&episode=${episode}&${tick}=${value}") 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /transmission-postprocess.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -xu 2 | 3 | # Input Parameters 4 | TR_APP_VERSION="$TR_APP_VERSION" 5 | TR_TIME_LOCALTIME="$TR_TIME_LOCALTIME" 6 | TR_TORRENT_HASH="$TR_TORRENT_HASH" 7 | TR_TORRENT_ID="$TR_TORRENT_ID" 8 | 9 | ARG_PATH="$TR_TORRENT_DIR/$TR_TORRENT_NAME" 10 | ARG_NAME="$TR_TORRENT_NAME" 11 | ARG_LABEL="N/A" 12 | 13 | # Configuration 14 | MOUNT="/mnt" 15 | CONFIG_OUTPUT="/$MOUNT/Media" 16 | FILEBOT="/usr/local/bin/filebot" 17 | 18 | if [[ "$TR_TORRENT_DIR" =~ ^$MOUNT/usbhdd.* ]] 19 | then 20 | exit 0 21 | fi 22 | 23 | if [[ "$TR_TORRENT_DIR" =~ ^$MOUNT/bellatrix/downloads/books.* ]] 24 | then 25 | exit 0 26 | fi 27 | 28 | LINKS=($(find "$ARG_PATH" -type l)) 29 | if [[ ${#LINKS[@]} -gt 0 ]] 30 | then 31 | exit 0 32 | fi 33 | 34 | transmission-remote -t ${TR_TORRENT_ID} -S 35 | 36 | export JAVA_OPTS="-Xmx256M" 37 | if [[ "$TR_TORRENT_DIR" =~ ^$MOUNT/bellatrix/downloads/(tv_shows|anime).* ]] 38 | then 39 | sudo -H -u devster -g devster -- $FILEBOT -script fn:amc --action keeplink --output "$CONFIG_OUTPUT" --conflict skip \ 40 | --filter '!readLines("$MOUNT/antares/scripts/tv_excludes.txt").contains(n)' \ 41 | -non-strict --log-file amc.log --def excludeList=".excludes" \ 42 | --def ut_dir="$ARG_PATH" ut_kind="multi" ut_title="$ARG_NAME" ut_label="$ARG_LABEL" \ 43 | --def @$MOUNT/scripts/notify.txt \ 44 | --def movieFormat=@$MOUNT/scripts/movieFormat.groovy \ 45 | --def seriesFormat=@$MOUNT/scripts/seriesFormat.groovy \ 46 | --def animeFormat=@$MOUNT/scripts/animeFormat.groovy 47 | else 48 | sudo -H -u devster -g devster -- $FILEBOT -script fn:amc --action keeplink --output "$CONFIG_OUTPUT" --conflict skip \ 49 | --filter '!readLines("$MOUNT/scripts/movie_excludes.txt").contains(n)' \ 50 | --log-file amc.log --def subtitles=en artwork=y excludeList=".excludes" \ 51 | ut_dir="$ARG_PATH" ut_kind="multi" ut_title="$ARG_NAME" ut_label="$ARG_LABEL" \ 52 | exec="chmod 664 {quote file}" \ 53 | --def @$MOUNT/scripts/notify.txt \ 54 | --def movieFormat=@$MOUNT/scripts/movieFormat.groovy \ 55 | --def seriesFormat=@$MOUNT/scripts/seriesFormat.groovy \ 56 | --def animeFormat=@$MOUNT/scripts/animeFormat.groovy 57 | fi 58 | 59 | transmission-remote -t ${TR_TORRENT_ID} -s 60 | -------------------------------------------------------------------------------- /websources.txt: -------------------------------------------------------------------------------- 1 | 9NOW 2 | AE 3 | AUBC 4 | AMBC 5 | AS 6 | AJAZ 7 | ALL4 8 | AMZN 9 | AMC 10 | ATK 11 | ANPL 12 | ANLB 13 | AOL 14 | ATVP 15 | ARD 16 | iP 17 | BKPL 18 | BOOM 19 | BRAV 20 | CMOR 21 | CNLP 22 | CN 23 | CBC 24 | CBS 25 | 4OD 26 | CHGD 27 | CMAX 28 | CNBC 29 | CC 30 | CCGC 31 | COOK 32 | CMT 33 | CRKL 34 | CR 35 | CSPN 36 | CTV 37 | CUR 38 | CW 39 | CWS 40 | DSKI 41 | DCU 42 | DHF 43 | DEST 44 | DDY 45 | DTV 46 | DISC 47 | DSNY 48 | DSNP 49 | DIY 50 | DOCC 51 | DPLY 52 | DRPO 53 | ETV 54 | ETTV 55 | EPIX 56 | ESPN 57 | ESQ 58 | FAM 59 | FJR 60 | FOOD 61 | FOX 62 | FPT 63 | FTV 64 | FREE 65 | FUNI 66 | FYI 67 | GLBL 68 | GLOB 69 | GO90 70 | PLAY 71 | HLMK 72 | HBO 73 | HMAX 74 | HGTV 75 | HIDI 76 | HIST 77 | HULU 78 | TOU 79 | IFC 80 | ID 81 | iT 82 | 
ITV 83 | KNPY 84 | Sports 85 | KNOW 86 | LIFE 87 | LN 88 | MNBC 89 | MTOD 90 | MTV 91 | NATG 92 | NBA 93 | NBC 94 | NF 95 | NFLN 96 | NFL 97 | GC 98 | NICK 99 | NRK 100 | PMNT 101 | PCOK 102 | PLUZ 103 | PBS 104 | PBSK 105 | PSN 106 | POGO 107 | PA 108 | PUHU 109 | QIBI 110 | RKTN 111 | RSTR 112 | RTE 113 | SBS 114 | SESO 115 | SHMI 116 | SHO 117 | SPIK 118 | SNET 119 | SPRT 120 | STAN 121 | STZ 122 | SVT 123 | SWER 124 | SYFY 125 | TBS 126 | TEN 127 | TFOU 128 | TIMV 129 | TLC 130 | TRVL 131 | TUBI 132 | TV3 133 | TV4 134 | TVL 135 | VH1 136 | VICE 137 | VMEO 138 | UFC 139 | UKTV 140 | UNIV 141 | USAN 142 | VLCT 143 | VIAP 144 | VRV 145 | WNET 146 | WME 147 | WWEN 148 | XBOX 149 | YHOO 150 | RED 151 | ZDF 152 | -------------------------------------------------------------------------------- /websources_legend.txt: -------------------------------------------------------------------------------- 1 | 9Now 9NOW 2 | A&E AE 3 | ABC (AU) iView AUBC 4 | ABC (US) AMBC 5 | Adult Swim AS 6 | Al Jazeera English AJAZ 7 | All4 (Channel 4, ex-4oD) ALL4 8 | Amazon AMZN 9 | AMC AMC 10 | America's Test Kitchen ATK 11 | Animal Planet ANPL 12 | AnimeLab ANLB 13 | AOL AOL 14 | Apple TV+ ATVP 15 | ARD ARD 16 | BBC iPlayer iP 17 | Blackpills BKPL 18 | Boomerang BOOM 19 | BravoTV BRAV 20 | C More CMOR 21 | Canal+ CNLP 22 | Cartoon Network CN 23 | CBC CBC 24 | CBS CBS 25 | Channel 4 4OD 26 | CHRGD CHGD 27 | Cinemax CMAX 28 | CNBC CNBC 29 | Comedy Central CC 30 | Comedians in Cars Getting Coffee CCGC 31 | Cooking Channel COOK 32 | Country Music Television CMT 33 | Crackle CRKL 34 | Crunchy Roll CR 35 | CSpan CSPN 36 | CTV CTV 37 | CuriosityStream CUR 38 | The CW CW 39 | CWSeed CWS 40 | Daisuki DSKI 41 | DC Universe DCU 42 | Deadhouse Films DHF 43 | Destination America DEST 44 | Digiturk Dilediğin Yerde DDY 45 | DirecTV Now DTV 46 | Discovery Channel DISC 47 | Disney DSNY 48 | Disney+ DSNP 49 | DIY Network DIY 50 | Doc Club DOCC 51 | DPlay DPLY 52 | Dropout DRPO 53 | E! 
ETV 54 | El Trece ETTV 55 | EPIX EPIX 56 | ESPN ESPN 57 | Esquire ESQ 58 | Family FAM 59 | Family Jr FJR 60 | Food Network FOOD 61 | Fox FOX 62 | FPT Play FPT 63 | France.tv FTV 64 | Freeform FREE 65 | Funimation FUNI 66 | FYI Network FYI 67 | Global GLBL 68 | GloboSat Play GLOB 69 | go90 GO90 70 | Google Play PLAY 71 | Hallmark HLMK 72 | HBO HBO 73 | HBO Max HMAX 74 | HGTV HGTV 75 | HIDIVE HIDI 76 | History Channel HIST 77 | Hulu HULU 78 | Ici TOU.TV TOU 79 | IFC IFC 80 | Investigation Discovery ID 81 | iTunes iT 82 | ITV ITV 83 | Kanopy KNPY 84 | Kayo Sports 85 | Knowledge Network KNOW 86 | Lifetime LIFE 87 | Loving Nature LN 88 | MSNBC MNBC 89 | Motor Trend OnDemand MTOD 90 | MTV MTV 91 | National Geographic NATG 92 | NBA League Pass NBA 93 | NBC NBC 94 | Netflix NF 95 | NFL Now NFLN 96 | NFL Network NFL 97 | NHL GameCenter GC 98 | Nickelodeon NICK 99 | Norsk Rikskringkasting NRK 100 | Paramount Network PMNT 101 | Peacock PCOK 102 | Pluzz PLUZ 103 | PBS PBS 104 | PBS Kids PBSK 105 | Playstation Network PSN 106 | PokerGo POGO 107 | Project Alpha PA 108 | puhutv PUHU 109 | Quibi QIBI 110 | Rakuten TV RKTN 111 | Rooster Teeth (Staff approval needed) RSTR 112 | RTÉ RTE 113 | SBS (AU) SBS 114 | Seeso SESO 115 | Shomi SHMI 116 | Showtime SHO 117 | Spike SPIK 118 | Sportsnet SNET 119 | Sprout SPRT 120 | Stan STAN 121 | Starz STZ 122 | Sveriges Television SVT 123 | SwearNet SWER 124 | SyFy SYFY 125 | TBS TBS 126 | TenPlay TEN 127 | TFOU TFOU 128 | TIMvision TIMV 129 | TLC TLC 130 | Travel Channel TRVL 131 | TubiTV TUBI 132 | TV3 (IE) TV3 133 | TV4 (SE) TV4 134 | TVLand TVL 135 | VH1 VH1 136 | Viceland VICE 137 | Vimeo (Staff approval needed) VMEO 138 | UFC UFC 139 | UKTV UKTV 140 | Univision UNIV 141 | USA Network USAN 142 | Velocity VLCT 143 | Viaplay (originals only) VIAP 144 | VRV VRV 145 | W Network WNET 146 | WatchMe WME 147 | WWE Network WWEN 148 | Xbox Video XBOX 149 | Yahoo YHOO 150 | YouTube Red RED 151 | ZDF ZDF 152 | -------------------------------------------------------------------------------- /xem.groovy: -------------------------------------------------------------------------------- 1 | // test with filebot -list --q "Monogatari" --db AniDB --mapper xem.groovy 2 | import groovy.json.JsonSlurper 3 | import net.filebot.Cache 4 | import net.filebot.CacheType 5 | 6 | Closure request = { Map headers = [:], String base = "http://thexem.de", String path, Map params -> 7 | Cache cache = net.filebot.Cache.getCache('xem', CacheType.Daily) 8 | URL baseURL = new URL(base) 9 | String query = params.collect { k, v -> "$k=$v" }.join('&') 10 | Object response = new URL(baseURL, "$path?$query").get(headers) 11 | response 12 | // TODO: daily caching 13 | // def content = cache.text(url, String.&toURL).get() 14 | } 15 | 16 | String origin = anime ? "anidb" : "tvdb" 17 | Integer seas = anime ? 
1 : episode?.season 18 | 19 | Object hasMap = request("/map/havemap", ["origin": origin]) 20 | Map jHasMap = new JsonSlurper().parseText(hasMap.text) 21 | Boolean item = jHasMap.data.any{ it == id.toString() } 22 | if (item) { 23 | Object names = request("/map/names", [ 24 | "origin": origin, 25 | "id": id, 26 | "defaultNames": 1, 27 | ]) 28 | Map jName = new JsonSlurper().parseText(names.text) 29 | ArrayList reflect = jName.data.collect{ 30 | if (it.value instanceof Map) { 31 | def name = it.value.entrySet().value 32 | [(it.key): name.flatten()] 33 | } else if (it.value instanceof String) { 34 | [(it.key): it.value] 35 | } 36 | } 37 | 38 | // String newN = reflect.findAll{ it instanceof String }.first() 39 | String newN = reflect.findAll{ it.all }?.all.first() 40 | Integer foundS = reflect.findAll{ 41 | it.entrySet().value.any{ v -> v =~ /(?i)$episode.seriesName/ } 42 | }.first().entrySet().key.first().toInteger() 43 | // Integer foundS = item.findAll{ it instanceof Map }*.find{ k, v -> k.match(/$n/) }.find{ it != null }?.value 44 | Integer newS = (foundS < 0) ? seas : foundS 45 | 46 | Map old = [ 47 | ep: episode.episode ? episode.episode : episode.special, 48 | se: episode.special ? 0 : newS, 49 | ] 50 | // assuming TVDB destination, could be included in the query 51 | Object mapping = request("/map/single", [ 52 | "origin": origin, 53 | "id": id, 54 | "season": old.se, 55 | "episode": old.ep, 56 | ]) 57 | Map jMapping = new JsonSlurper().parseText(mapping.text) 58 | // also assuming TVDB destination 59 | if (jMapping.data.isEmpty()) { 60 | return episode 61 | } 62 | def result = jMapping.data.entrySet().findAll{ it.key.matches(/tvdb.*/) } 63 | if (result.size() < 2) { 64 | return new net.filebot.web.Episode(newN, newS, result.first().value.episode, episode?.title, result.first().value.absolute, episode?.special, episode?.airdate, episode.id, episode.seriesInfo) 65 | } else { 66 | def multi = [] 67 | for ( i in 0..result.size()-1 ) { 68 | // hopefully all multi-episodes are just multi-part because I couldn't find a way to merge titles 69 | multi << new net.filebot.web.Episode(newN, newS, result[i].value.episode, episode?.title, result[i].value.absolute, episode?.special, episode?.airdate, episode.id, episode.seriesInfo) 70 | } 71 | return new net.filebot.web.MultiEpisode(*multi) 72 | } 73 | } 74 | // hopefully return the episode untouched if not matched 75 | return episode 76 | 77 | /* 78 | { 79 | seriesName=Better Off Ted, 80 | airdate=2009-03-18, 81 | special=null, 82 | title=Pilot, 83 | class=class net.filebot.web.Episode, 84 | absolute=1, 85 | episode=1, 86 | id=413862, 87 | numbers=[1, 1, null, 1], 88 | season=1, 89 | seriesInfo=TheTVDB::84021, 90 | seriesNames=[ 91 | Better Off Ted, 92 | Better Off Ted (2009), 93 | Better Off Ted - Scientificamente pazzi, 94 | Dilinyósok, 95 | Давай ещё, 96 | Тэд, אי אפשר בלי טד, 97 | Ted a spol. 98 | ] 99 | } 100 | */ --------------------------------------------------------------------------------
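The request closure at the top of xem.groovy leaves its caching unfinished (`// TODO: daily caching`) and only hints at the intended call in a commented-out line, `cache.text(url, String.&toURL).get()`. A minimal sketch of how that hint could be wired in, assuming FileBot's `Cache.text(key, toURL)` returns a daily-refreshed text resource as that comment suggests (this is illustrative only, not part of the repository):

```groovy
import net.filebot.Cache
import net.filebot.CacheType

// Sketch only: same shape as the request closure in xem.groovy, minus the unused
// headers parameter (none of the calls in xem.groovy pass custom headers).
Closure request = { String base = "http://thexem.de", String path, Map params ->
	Cache cache = Cache.getCache('xem', CacheType.Daily)
	String query = params.collect { k, v -> "$k=$v" }.join('&')
	String url = new URL(new URL(base), "$path?$query").toString()
	// cache.text maps the key to a URL and hands back the response body,
	// re-fetching it at most once per day (assumed Cache behaviour)
	return cache.text(url, String.&toURL).get()
}
```

Note that this variant returns the body as a String, so the callers' `hasMap.text` / `names.text` / `mapping.text` accesses would become direct `parseText(request(...))` calls.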