├── .mailmap ├── CONTRIBUTORS ├── embedupload.sh ├── CONTRIBUTING.md ├── pastebin.sh ├── tempshare.sh ├── 115.sh ├── espafiles.sh ├── promptfile.sh ├── filebin_ca.sh ├── gfile_ru.sh ├── firedrive.sh ├── anonfiles.sh ├── nakido.sh ├── flashx.sh ├── bayimg.sh ├── uplea.sh ├── tempsend.sh ├── fileover.sh ├── fileparadox.sh ├── hexupload.sh ├── filemonkey.sh ├── jheberg.sh ├── filedais.sh ├── nitroflare.sh ├── yourvideohost.sh ├── videowood_tv.sh ├── netkups.sh ├── data_hu.sh ├── vidzi_tv.sh ├── directmirror.sh ├── uploadboy.sh ├── hotlink_cc.sh ├── vid_ag.sh ├── datafile.sh ├── filecore.sh ├── thefilebay.sh ├── uloz_to.sh ├── catshare.sh ├── 4share_vn.sh ├── rapidu.sh ├── gamefront.sh ├── tezfiles.sh ├── dataport_cz.sh ├── sharehost.sh └── config /.mailmap: -------------------------------------------------------------------------------- 1 | Calvin Spencer Kwok 2 | capkokoon 3 | David Kurz 4 | GaspardT 5 | idlelop 6 | ljsdoug 7 | 8 | Oscar Padilla 9 | 10 | 11 | Valérian Rousset 12 | Vitaly Shukela 13 | -------------------------------------------------------------------------------- /CONTRIBUTORS: -------------------------------------------------------------------------------- 1 | Alex Rea 2 | Andres Vargas 3 | Antoine Girard 4 | Arnau Sanchez 5 | Bach Le 6 | Baptiste 7 | Bastien Cecchinato 8 | Ben Zho <0xbzho@gmail.com> 9 | Calvin Spencer Kwok (rEtSaMfF) 10 | capkokoon 11 | Chu Chong Meng Steven 12 | David Kurz (MDXDave) 13 | ericb48 14 | Fabiano Francesconi 15 | GaspardT (Fullmono) 16 | Golam Sarwar 17 | Hervé 18 | idleloop 19 | Jakub Wilk 20 | Jan 21 | Jason 22 | Julien Rolland 23 | kidburglar 24 | ljsdoug 25 | Matthieu Crapet 26 | Maurus Cuelenaere 27 | Nicolas Michaux 28 | Oscar Padilla (dataoscar,padillao) 29 | Pavel Alexeev 30 | Petr Pulpán 31 | pink 32 | ? 33 | Raziel-23 34 | roadman17 35 | RunningDroid 36 | Ryan 37 | Simon Lipp 38 | Soonbesleeping 39 | StalkR 40 | Stefan Meier 41 | Tapiwa Kelvin 42 | Thomas Jensen 43 | Tony Lainson 44 | Valérian Rousset (tharvik) 45 | Vitaly Shukela 46 | Walid Iguer 47 | Wesley Barroso 48 | zodman 49 | 50 | And also testers, bug reporters, premium account providers. THANK YOU! 51 | 52 | # vim: set fenc=utf-8: 53 | -------------------------------------------------------------------------------- /embedupload.sh: -------------------------------------------------------------------------------- 1 | # Plowshare embedupload.com module 2 | # Copyright (c) 2013 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see . 
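# Every module in this tree follows the pattern seen in embedupload.sh below:
# declare MODULE_<NAME>_REGEXP_URL plus per-action settings, then implement
# <name>_download/_upload/_list/_probe as needed. A minimal sketch
# (hypothetical 'example' module, not part of this repository):
#
#   MODULE_EXAMPLE_REGEXP_URL='http://\(www\.\)\?example\.com/'
#   MODULE_EXAMPLE_DOWNLOAD_OPTIONS=""
#   MODULE_EXAMPLE_DOWNLOAD_RESUME=no
#   MODULE_EXAMPLE_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no
#
#   # $1: cookie file, $2: url, stdout: real file download link
#   example_download() {
#       local PAGE
#       PAGE=$(curl "$2") || return
#       parse_attr 'download' href <<< "$PAGE"
#   }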
18 | 19 | MODULE_EMBEDUPLOAD_REGEXP_URL='http://\(www\.\)\?embedupload\.com/' 20 | 21 | MODULE_EMBEDUPLOAD_LIST_OPTIONS="" 22 | MODULE_EMBEDUPLOAD_LIST_HAS_SUBFOLDERS=no 23 | 24 | # List links from an embedupload link 25 | # $1: embedupload link 26 | # $2: recurse subfolders (ignored here) 27 | # stdout: list of links 28 | embedupload_list() { 29 | local URL=$1 30 | local PAGE LINKS LINK NAME 31 | 32 | local -r NOT_AUTHORIZED_PATTERN='not authorized' 33 | 34 | if matchi 'embedupload.com/?d=' "$URL"; then 35 | # Handle folders: get all URLs in there and resolve them 36 | PAGE=$(curl "$URL") || return 37 | LINKS=$(parse_all_attr 'class=.DownloadNow.' href <<< "$PAGE") || return 38 | 39 | NAME=$(parse 'class=.form-title.' '^[[:space:]]*\([^<]\+\)' 1 <<< "$PAGE") 40 | NAME=${NAME% } 41 | 42 | # Sub-link 43 | elif matchi 'embedupload.com/?[[:alpha:]][[:alpha:]]=' "$URL"; then 44 | LINKS=$URL 45 | NAME=$(parse_quiet . '?\(..\)=' <<< "$URL") 46 | else 47 | log_error 'Bad link format' 48 | return $ERR_FATAL 49 | fi 50 | 51 | for URL in $LINKS; do 52 | PAGE=$(curl "$URL" | strip_html_comments) || return 53 | 54 | # You should click on the download link 55 | LINK=$(parse_tag 'target=' a <<< "$PAGE") || continue 56 | 57 | # Ignore URLs we are not authorized for 58 | if ! matchi "$NOT_AUTHORIZED_PATTERN" "$LINK"; then 59 | echo "$LINK" 60 | echo "$NAME" 61 | fi 62 | done 63 | } 64 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | ## How to contribute to Plowshare legacy modules 2 | 3 | #### Foreword 4 | 5 | This documentation is related to [Plowshare legacy modules](https://github.com/mcrapet/plowshare-modules-legacy), not [Plowshare core](https://github.com/mcrapet/plowshare). 6 | Modules and core are kept in two separate git repositories. 7 | 8 | Plowshare without modules is worthless! 9 | 10 | #### **How can you help us?** 11 | 12 | - [x] Report dead hosters 13 | - [x] Report broken modules 14 | - [x] Report misspellings, typos 15 | - [x] Contribute new modules (one hoster equals one Plowshare module) 16 | 17 | You may or may not provide a `.patch`, depending on your skills. 18 | 19 | #### **How to submit a bug?** 20 | 21 | Before reporting an issue, please check the following points: 22 | 23 | * **Ensure the bug was not already reported** by searching on GitHub under [Issues](https://github.com/mcrapet/plowshare-modules-legacy/issues). 24 | * Be sure to use the latest revision of [plowshare-modules-legacy](https://github.com/mcrapet/plowshare-modules-legacy). 25 | * Try your download URL (or your file upload) in a real browser (Firefox, Opera, etc.). The upstream website may be temporarily down. 26 | * Your IP might be blacklisted or, in some rare cases, the service may not be available in your country. Try with a foreign HTTP proxy. 27 | 28 | If you passed all the checks above, [create a new issue](https://github.com/mcrapet/plowshare-modules-legacy/issues/new). Be sure to include the **module name in the title**. 29 | 30 | Information to mention in the issue content: 31 | * Explain your issue as precisely as possible, in English. 32 | * Attach a full log (using the `-v4` command line switch), see below. 33 | * Anonymous, free account or premium account? 34 | * Plowshare (core) version. For example: `v2.1.2`.
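For example, to capture a download log or an upload log (`&>` redirects both stdout and stderr into the file):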
35 | 36 | ``` 37 | plowdown -v4 -r0 --no-plowsharerc --no-color <LINK> &>log.txt 38 | ``` 39 | 40 | ``` 41 | plowup -v4 -r0 --no-plowsharerc --no-color <MODULE> <FILE> &>log.txt 42 | ``` 43 | 44 | *Attention*: Generated logs can contain your credentials (account login data specified with the `-a` or `-b` command line switches). 45 | Be sure to remove them before posting. 46 | 47 | #### **How to submit a patch?** 48 | 49 | Before submitting your patch, check that your work complies with the 50 | [code policy](https://github.com/mcrapet/plowshare/wiki/Modules) (refer to the last chapters). 51 | 52 | If this is okay, you can create a [new pull request](https://github.com/mcrapet/plowshare-modules-legacy/pulls/). 53 | 54 | Thanks! 55 | -------------------------------------------------------------------------------- /pastebin.sh: -------------------------------------------------------------------------------- 1 | # Plowshare pastebin.com module 2 | # Copyright (c) 2013 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see <http://www.gnu.org/licenses/>. 18 | 19 | MODULE_PASTEBIN_REGEXP_URL='http://\(www\.\)\?pastebin\.com/' 20 | 21 | MODULE_PASTEBIN_LIST_OPTIONS=" 22 | COUNT,,count,n=COUNT,Take COUNT pastes when listing user folder. Default is 100 (first web page)." 23 | MODULE_PASTEBIN_LIST_HAS_SUBFOLDERS=no 24 | 25 | # Static function. Process a single note 26 | # $1: pastebin url 27 | pastebin_single_list() { 28 | local URL=$1 29 | local PAGE LINKS 30 | 31 | if [ "${URL:0:1}" = '/' ]; then 32 | URL="http://pastebin.com/raw.php?i=${URL#/}" 33 | elif match '/[[:alnum:]]\+$' "$URL"; then 34 | URL=$(echo "$URL" | replace 'pastebin.com/' 'pastebin.com/raw.php?i=') 35 | elif ! match 'raw\.php?i=' "$URL"; then 36 | log_error 'Bad link format' 37 | return $ERR_FATAL 38 | fi 39 | 40 | PAGE=$(curl "$URL") || return 41 | LINKS=$(parse_all . '\(https\?://[^[:space:]]\+\)' <<< "$PAGE") || return 42 | 43 | # TODO: filter crappy links (length <15 chars, ...)
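# One way the TODO above could go (a sketch, not part of the module):
# drop any candidate link shorter than 15 characters before submitting.
#   LINKS=$(awk 'length >= 15' <<< "$LINKS")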
44 | 45 | list_submit "$LINKS" 46 | } 47 | 48 | # List all links in a pastebin note 49 | # $1: pastebin url 50 | # stdout: list of links 51 | pastebin_list() { 52 | local -r URL=${1%/} 53 | local PASTES PASTE 54 | 55 | # User folder: 56 | # - http://pastebin.com/u/username 57 | if match '/u/[[:alnum:]]\+$' "$URL"; then 58 | local HTML 59 | HTML=$(curl "$URL") || return 60 | 61 | if [ -n "$COUNT" ]; then 62 | if (( COUNT > 100 )); then 63 | COUNT=100 64 | log_error "Too big integer value for --count, set it to $COUNT" 65 | fi 66 | else 67 | COUNT=100 68 | fi 69 | 70 | log_debug "user folder: listing first $COUNT items (if available)" 71 | PASTES=$(parse_all_attr 'title=.Public paste' href <<< "$HTML" | \ 72 | first_line $COUNT) || return 73 | else 74 | PASTES=$URL 75 | fi 76 | 77 | # Accepted link format 78 | # - /xyz 79 | # - http://pastebin.com/xyz 80 | # - http://pastebin.com/raw.php?i=xyz 81 | while IFS= read -r PASTE; do 82 | pastebin_single_list "$PASTE" || continue 83 | done <<< "$PASTES" 84 | } 85 | -------------------------------------------------------------------------------- /tempshare.sh: -------------------------------------------------------------------------------- 1 | # Plowshare temp-share.com module 2 | # Copyright (c) 2016 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see . 18 | 19 | MODULE_TEMPSHARE_REGEXP_URL='https\?://temp-share\.com/' 20 | 21 | MODULE_TEMPSHARE_DOWNLOAD_OPTIONS="" 22 | MODULE_TEMPSHARE_DOWNLOAD_RESUME=yes 23 | MODULE_TEMPSHARE_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no 24 | MODULE_TEMPSHARE_DOWNLOAD_FINAL_LINK_NEEDS_EXTRA=(--referer) 25 | MODULE_TEMPSHARE_DOWNLOAD_SUCCESSIVE_INTERVAL= 26 | 27 | MODULE_TEMPSHARE_PROBE_OPTIONS="" 28 | 29 | # Output a tempshare file download URL 30 | # $1: cookie file (unused here) 31 | # $2: tempshare url 32 | # stdout: real file download link 33 | tempshare_download() { 34 | local URL PAGE FILE_URL FILE_NAME PUBKEY 35 | 36 | # Get a canonical URL for this file. 37 | URL=$(curl -I "$2" | grep_http_header_location_quiet) || return 38 | [ -n "$URL" ] || URL=$2 39 | readonly URL 40 | 41 | PAGE=$(curl "$URL" | break_html_lines) || return 42 | 43 | if ! match 'data-url' "$PAGE"; then 44 | return $ERR_LINK_DEAD 45 | fi 46 | 47 | FILE_URL=$(parse_attr 'data-url' <<< "$PAGE") || return 48 | FILE_NAME=$(parse_tag 'h1' <<< "$PAGE") || return 49 | PUBKEY=$(parse "id='publickey'" "value='\([^']\+\)" <<< "$PAGE") || return 50 | 51 | # Mandatory! 
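# (Reassigning this array with the concrete page URL lets the core replay
# '--referer' when it fetches the final link; presumably the host refuses
# requests that lack the original page as referer.)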
52 | MODULE_TEMPSHARE_DOWNLOAD_FINAL_LINK_NEEDS_EXTRA=(--referer "$URL") 53 | 54 | echo "$FILE_URL/$PUBKEY" 55 | echo "$FILE_NAME" 56 | } 57 | 58 | # Probe a download URL 59 | # $1: cookie file (unused here) 60 | # $2: tempshare url 61 | # $3: requested capability list 62 | # stdout: 1 capability per line 63 | tempshare_probe() { 64 | local -r URL=$2 65 | local -r REQ_IN=$3 66 | local PAGE FILE_SIZE REQ_OUT 67 | 68 | PAGE=$(curl -L "$URL" | break_html_lines) || return 69 | 70 | if ! match 'data-url' "$PAGE"; then 71 | return $ERR_LINK_DEAD 72 | fi 73 | 74 | REQ_OUT=c 75 | 76 | if [[ $REQ_IN = *f* ]]; then 77 | parse_tag 'h1' <<< "$PAGE" && REQ_OUT="${REQ_OUT}f" 78 | fi 79 | 80 | if [[ $REQ_IN = *s* ]]; then 81 | FILE_SIZE=$(parse '(.*)' '(\([^)]\+\)' <<< "$PAGE") \ 82 | && FILE_SIZE=$(replace 'B' 'iB' <<< $FILE_SIZE) \ 83 | && translate_size "$FILE_SIZE" && REQ_OUT="${REQ_OUT}s" 84 | fi 85 | 86 | if [[ $REQ_IN = *i* ]]; then 87 | parse 'data-url' 'f/\(.\+\)/download' <<< "$PAGE" \ 88 | && REQ_OUT="${REQ_OUT}i" 89 | fi 90 | 91 | echo $REQ_OUT 92 | } 93 | -------------------------------------------------------------------------------- /115.sh: -------------------------------------------------------------------------------- 1 | # Plowshare 115.com module 2 | # Copyright (c) 2010-2012 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see . 18 | 19 | MODULE_115_REGEXP_URL='http://\([[:alnum:]]\+\.\)\?115\.com/file/' 20 | 21 | MODULE_115_DOWNLOAD_OPTIONS=" 22 | AUTH,a,auth,a=USER:PASSWORD,User account (mandatory)" 23 | MODULE_115_DOWNLOAD_RESUME=no 24 | MODULE_115_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=unused 25 | MODULE_115_DOWNLOAD_SUCCESSIVE_INTERVAL= 26 | 27 | # Output a 115.com file download URL 28 | # $1: cookie file 29 | # $2: 115.com url 30 | # stdout: real file download link 31 | 115_download() { 32 | local COOKIEFILE=$1 33 | local URL=$2 34 | local PAGE JSON LINKS HEADERS DIRECT FILENAME U1 U2 35 | 36 | if [ -z "$AUTH" ]; then 37 | log_error 'Anonymous users cannot download links' 38 | return $ERR_LINK_NEED_PERMISSIONS 39 | fi 40 | 41 | LOGIN_DATA=$(echo \ 42 | 'login[account]=$USER&login[passwd]=$PASSWORD&back=http%3A%2F%2Fwww.115.com&goto=http%3A%2F%2F115.com' | uri_encode) 43 | post_login "$AUTH" "$COOKIEFILE" "$LOGIN_DATA" 'http://passport.115.com/?ac=login' '-L' >/dev/null || return 44 | 45 | PAGE=$(curl -L -b "$COOKIEFILE" "$URL" | break_html_lines) || return 46 | 47 | if matchi "file_size:[[:space:]]*'0B'," "$PAGE"; then 48 | return $ERR_LINK_DEAD 49 | fi 50 | 51 | U1=$(echo "$PAGE" | parse_all 'url:' "'\(/?ct=download[^']*\)" | last_line) || return 52 | U2=$(echo "$PAGE" | parse 'GetMyDownloadAddress(' "('\([^']*\)") || return 53 | 54 | 55 | # {"state":true,"urls":[{"client":1,"url":"http:\/\/119. ... 56 | JSON=$(curl -b "$COOKIEFILE" "http://115.com$U1$U2") || return 57 | 58 | if ! 
match_json_true state "$JSON"; then 59 | log_error 'Bad state. Site updated?' 60 | return $ERR_FATAL 61 | fi 62 | 63 | LINKS=$(echo "$JSON" | parse_json 'url' split) || return 64 | 65 | # There are usually mirrors (do a HTTP HEAD request to check dead mirror) 66 | while read URL; do 67 | HEADERS=$(curl -I "$URL") || return 68 | 69 | FILENAME=$(echo "$HEADERS" | grep_http_header_content_disposition) 70 | if [ -n "$FILENAME" ]; then 71 | echo "$URL" 72 | echo "$FILENAME" 73 | return 0 74 | fi 75 | 76 | DIRECT=$(echo "$HEADERS" | grep_http_header_content_type) || return 77 | if [ "$DIRECT" = 'application/octet-stream' ]; then 78 | echo "$URL" 79 | return 0 80 | fi 81 | done <<< "$LINKS" 82 | 83 | log_debug 'all mirrors are dead' 84 | return $ERR_FATAL 85 | } 86 | -------------------------------------------------------------------------------- /espafiles.sh: -------------------------------------------------------------------------------- 1 | # Plowshare espafiles.com module 2 | # Copyright (c) 2016 dataoscar 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see . 18 | 19 | MODULE_ESPAFILES_REGEXP_URL='https\?://\(www\.\)\?espafiles\.com/' 20 | 21 | MODULE_ESPAFILES_DOWNLOAD_OPTIONS="" 22 | MODULE_ESPAFILES_DOWNLOAD_RESUME=no 23 | MODULE_ESPAFILES_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=unused 24 | MODULE_ESPAFILES_DOWNLOAD_FINAL_LINK_NEEDS_EXTRA=() 25 | MODULE_ESPAFILES_DOWNLOAD_SUCCESSIVE_INTERVAL= 26 | 27 | MODULE_ESPAFILES_PROBE_OPTIONS="" 28 | 29 | # Output a espafiles file download URL 30 | # $1: cookie file (unused here) 31 | # $2: url 32 | # stdout: real file download link 33 | espafiles_download() { 34 | local URL PAGE FILE_URL FILE_NAME FINAL_URL DL_LINE 35 | 36 | # Get a canonical URL for this file. 37 | URL=$(curl -I "$2" | grep_http_header_location_quiet) || return 38 | [ -n "$URL" ] || URL=$2 39 | readonly URL 40 | 41 | PAGE=$(curl "$URL" ) || return 42 | 43 | if ! match 'big_button' "$PAGE"; then 44 | return $ERR_LINK_DEAD 45 | fi 46 | 47 | FILE_NAME=$(parse 'Nombre:' 'span>\([^<]\+\)' <<< "$PAGE") || return 48 | DL_LINE=$(parse 'big_button' '^\(.*\)$' <<< "$PAGE") || return 49 | FINAL_URL=$(parse_attr 'href' <<< "$DL_LINE") || return 50 | FILE_URL=$(curl --referer "$URL" -I "$FINAL_URL" | grep_http_header_location) || return 51 | 52 | # Mandatory! 53 | MODULE_ESPAFILES_DOWNLOAD_FINAL_LINK_NEEDS_EXTRA=(--referer "$URL") 54 | 55 | echo "$FILE_URL" 56 | echo "$FILE_NAME" 57 | } 58 | 59 | # Probe an espafiles download URL 60 | # $1: cookie file (unused here) 61 | # $2: url 62 | # $3: requested capability list 63 | # stdout: 1 capability per line 64 | espafiles_probe() { 65 | local -r URL=$2 66 | local -r REQ_IN=$3 67 | local PAGE FILE_SIZE DL_LINE REQ_OUT 68 | 69 | PAGE=$(curl -L "$URL" ) || return 70 | 71 | if ! 
match 'big_button' "$PAGE"; then 72 | return $ERR_LINK_DEAD 73 | fi 74 | 75 | REQ_OUT=c 76 | 77 | if [[ $REQ_IN = *f* ]]; then 78 | parse 'Nombre:' 'span>\([^<]\+\)' <<< "$PAGE" \ 79 | && REQ_OUT="${REQ_OUT}f" 80 | fi 81 | 82 | if [[ $REQ_IN = *s* ]]; then 83 | FILE_SIZE=$(parse 'Size:' 'span>\([^<]\+\)' <<< "$PAGE") \ 84 | && translate_size "$FILE_SIZE" && REQ_OUT="${REQ_OUT}s" 85 | fi 86 | 87 | if [[ $REQ_IN = *i* ]]; then 88 | DL_LINE=$(parse 'big_button' '^\(.*\)$' <<< "$PAGE") \ 89 | && parse . 'href=".*get\/\([^"]*\)' <<< "$DL_LINE" \ 90 | && REQ_OUT="${REQ_OUT}i" 91 | fi 92 | 93 | echo $REQ_OUT 94 | } 95 | -------------------------------------------------------------------------------- /promptfile.sh: -------------------------------------------------------------------------------- 1 | # Plowshare promptfile.com module 2 | # Copyright (c) 2014 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see . 18 | 19 | MODULE_PROMPTFILE_REGEXP_URL='http://\(www\.\)\?promptfile\.com/' 20 | 21 | MODULE_PROMPTFILE_DOWNLOAD_OPTIONS="" 22 | MODULE_PROMPTFILE_DOWNLOAD_RESUME=yes 23 | MODULE_PROMPTFILE_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no 24 | MODULE_PROMPTFILE_DOWNLOAD_SUCCESSIVE_INTERVAL= 25 | MODULE_PROMPTFILE_DOWNLOAD_FINAL_LINK_NEEDS_EXTRA= 26 | 27 | MODULE_PROMPTFILE_PROBE_OPTIONS="" 28 | 29 | # Output a promptfile.com download URL 30 | # $1: cookie file 31 | # $2: promptfile url 32 | # stdout: real file download link 33 | promptfile_download() { 34 | local -r COOKIE_FILE=$1 35 | local -r URL=$2 36 | local -r BASE_URL='http://www.promptfile.com' 37 | local PAGE CHASH FILE_URL_POINTER FILE_URL FILE_NAME 38 | 39 | PAGE=$(curl -c "$COOKIE_FILE" --location "$URL") || return 40 | 41 | if match 'The file you requested does not exist or has been removed' "$PAGE"; then 42 | return $ERR_LINK_DEAD 43 | fi 44 | 45 | CHASH=$(grep_form_by_order "$PAGE" 1 | \ 46 | parse_form_input_by_name 'chash') || return 47 | 48 | PAGE=$(curl -c "$COOKIE_FILE" --location \ 49 | --data "chash=$CHASH" "$URL") || return 50 | 51 | if match '+"player\.swf"' "$PAGE"; then 52 | FILE_URL_POINTER=$(parse 'url: ' '\(http.*\).,' <<< "$PAGE") || return 53 | else 54 | FILE_URL_POINTER=$(parse_attr 'download_btn.>' 'href' <<< "$PAGE") || return 55 | fi 56 | 57 | FILE_URL=$(curl --include "$FILE_URL_POINTER" | \ 58 | grep_http_header_location) || return 59 | 60 | FILE_NAME=$(parse_attr 'span' 'title' <<< "$PAGE") 61 | 62 | echo "$FILE_URL" 63 | echo "$FILE_NAME" 64 | } 65 | 66 | # Probe a download URL 67 | # $1: cookie file (unused here) 68 | # $2: promptfile.com url 69 | # $3: requested capability list 70 | # stdout: 1 capability per line 71 | promptfile_probe() { 72 | local -r URL=$2 73 | local -r REQ_IN=$3 74 | local PAGE REQ_OUT FILE_SIZE 75 | 76 | PAGE=$(curl --location "$URL") || return 77 | 78 | # Got sometimes: HTTP/1.1 504 Gateway Time-out 79 | [ -z "$PAGE" ] && return $ERR_NETWORK 80 
| 81 | if match 'The file you requested does not exist or has been removed' "$PAGE"; then 82 | return $ERR_LINK_DEAD 83 | fi 84 | 85 | REQ_OUT=c 86 | 87 | # There is only one tag in the entire page! 88 | if [[ $REQ_IN = *f* ]]; then 89 | parse_attr 'span' 'title' <<< "$PAGE" && REQ_OUT="${REQ_OUT}f" 90 | fi 91 | 92 | if [[ $REQ_IN = *s* ]]; then 93 | FILE_SIZE=$(parse 'span' '(\([[:digit:].]\+[[:space:]]*[KMG]B\))' <<< "$PAGE") && \ 94 | translate_size "$FILE_SIZE" && REQ_OUT="${REQ_OUT}s" 95 | fi 96 | 97 | echo $REQ_OUT 98 | } 99 | -------------------------------------------------------------------------------- /filebin_ca.sh: -------------------------------------------------------------------------------- 1 | # Plowshare filebin.ca module 2 | # Copyright (c) 2013 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see . 18 | 19 | MODULE_FILEBIN_CA_REGEXP_URL='https\?://\(www\.\)\?filebin\.ca/[[:alnum:]]\+' 20 | 21 | MODULE_FILEBIN_CA_DOWNLOAD_OPTIONS="" 22 | MODULE_FILEBIN_CA_DOWNLOAD_RESUME=yes 23 | MODULE_FILEBIN_CA_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no 24 | MODULE_FILEBIN_CA_DOWNLOAD_SUCCESSIVE_INTERVAL= 25 | 26 | MODULE_FILEBIN_CA_UPLOAD_OPTIONS="" 27 | MODULE_FILEBIN_CA_UPLOAD_REMOTE_SUPPORT=no 28 | 29 | MODULE_FILEBIN_CA_PROBE_OPTIONS="" 30 | 31 | # Output a filebin.ca file download URL 32 | # $1: cookie file (unused here) 33 | # $2: filebin.ca url 34 | # stdout: real file download link 35 | filebin_ca_download() { 36 | local -r URL=$2 37 | 38 | # Nothing to do, links are direct! 39 | echo "$URL" 40 | } 41 | 42 | # Upload a file to filebin.ca 43 | # Official sources: https://github.com/slepp/filebin.ca 44 | # $1: cookie file (unused here) 45 | # $2: input file (with full path) 46 | # $3: remote filename 47 | filebin_ca_upload() { 48 | local -r FILE=$2 49 | local -r DESTFILE=$3 50 | local -r BASE_URL='http://filebin.ca/upload.php' 51 | local DATA STATUS 52 | 53 | # No API key for now.. 54 | DATA=$(curl_with_log \ 55 | -F "file=@$FILE;filename=$DESTFILE" \ 56 | "$BASE_URL") || return 57 | 58 | if [ -z "$DATA" ]; then 59 | log_error 'Remote error: empty result not expected. Server busy?' 
60 | return $ERR_LINK_TEMP_UNAVAILABLE 61 | fi 62 | 63 | # Result sample: 64 | # status:wjBQjTib7TH 65 | # url:http://filebin.ca/wjBQjTib7TH/foo.zip 66 | STATUS=$(parse '^status:' '^status:\(.*\)' <<< "$DATA") || return 67 | if [ "$STATUS" = 'error' -o "$STATUS" = 'fail' ]; then 68 | log_error 'Remote error' 69 | return $ERR_FATAL 70 | fi 71 | 72 | parse '^url:' '^url:\(http://[^[:space:]]\+\)' <<< "$DATA" 73 | } 74 | 75 | # Probe a download URL 76 | # $1: cookie file (unused here) 77 | # $2: filebin.ca url 78 | # $3: requested capability list 79 | # stdout: 1 capability per line 80 | filebin_ca_probe() { 81 | local -r URL=$2 82 | local -r REQ_IN=$3 83 | local HEADERS REQ_OUT 84 | 85 | # Content-Type: application/octet-stream 86 | # Content-Disposition: attachment; filename="foo" 87 | # Content-length: 123456 88 | HEADERS=$(curl --head "$URL") || return 89 | 90 | if match '404 Not Found' "$HEADERS"; then 91 | return $ERR_LINK_DEAD 92 | fi 93 | 94 | REQ_OUT=c 95 | 96 | if [[ $REQ_IN = *f* ]]; then 97 | grep_http_header_content_disposition <<< "$HEADERS" && REQ_OUT="${REQ_OUT}f" 98 | fi 99 | 100 | if [[ $REQ_IN = *s* ]]; then 101 | grep_http_header_content_length <<< "$HEADERS" && REQ_OUT="${REQ_OUT}s" 102 | fi 103 | 104 | echo $REQ_OUT 105 | } 106 | -------------------------------------------------------------------------------- /gfile_ru.sh: -------------------------------------------------------------------------------- 1 | # Plowshare gfile.ru module 2 | # Copyright (c) 2013 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see . 18 | 19 | MODULE_GFILE_RU_REGEXP_URL='http://\(www\.\)\?gfile\.ru/' 20 | 21 | MODULE_GFILE_RU_DOWNLOAD_OPTIONS="" 22 | MODULE_GFILE_RU_DOWNLOAD_RESUME=yes 23 | MODULE_GFILE_RU_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no 24 | MODULE_GFILE_RU_DOWNLOAD_SUCCESSIVE_INTERVAL= 25 | 26 | MODULE_GFILE_RU_UPLOAD_OPTIONS="" 27 | MODULE_GFILE_RU_UPLOAD_REMOTE_SUPPORT=no 28 | 29 | MODULE_GFILE_RU_PROBE_OPTIONS="" 30 | 31 | # Output a gfile.ru file download URL 32 | # $1: cookie file (unused here) 33 | # $2: gfile.ru url 34 | # stdout: real file download link 35 | gfile_ru_download() { 36 | local -r URL=$2 37 | local PAGE FILE_URL FILE_NAME 38 | 39 | PAGE=$(curl "$URL") || return 40 | 41 | if match '
404' "$PAGE"; then 42 | return $ERR_LINK_DEAD 43 | fi 44 | 45 | # Note: 'slink' seems to be an alternate link 46 | FILE_URL=$(parse '^"link"' ':"\([^"]\+\)' <<< "$PAGE") || return 47 | FILE_NAME=$(parse '^"title"' ':"\([^"]\+\)' <<< "$PAGE") || return 48 | 49 | echo "http://gfile.ru$FILE_URL" 50 | echo "$FILE_NAME" 51 | } 52 | 53 | # Upload a file to gfile.ru 54 | # $1: cookie file 55 | # $2: input file (with full path) 56 | # $3: remote filename 57 | gfile_ru_upload() { 58 | local -r COOKIE_FILE=$1 59 | local -r FILE=$2 60 | local -r DESTFILE=$3 61 | local -r BASE_URL='http://www.gfile.ru/upload/' 62 | local PAGE SZ 63 | 64 | local -r MAX_SIZE=104857600 # 100 MiB 65 | SZ=$(get_filesize "$FILE") 66 | if [ "$SZ" -gt "$MAX_SIZE" ]; then 67 | log_debug "file is bigger than $MAX_SIZE" 68 | return $ERR_SIZE_LIMIT_EXCEEDED 69 | fi 70 | 71 | # Note: Use cookie to forward PHPSESSID for second (redirection) page 72 | PAGE=$(curl_with_log -c "$COOKIE_FILE" -L \ 73 | -F "file=@$FILE;filename=$DESTFILE" \ 74 | "$BASE_URL") || return 75 | 76 | parse_attr '=.link_container' value <<< "$PAGE" 77 | } 78 | 79 | # Probe a download URL 80 | # $1: cookie file (unused here) 81 | # $2: gfile.ru url 82 | # $3: requested capability list 83 | # stdout: 1 capability per line 84 | gfile_ru_probe() { 85 | local -r URL=$2 86 | local -r REQ_IN=$3 87 | local PAGE REQ_OUT FILE_SIZE 88 | 89 | PAGE=$(curl "$URL") || return 90 | 91 | if match '404
' "$PAGE"; then 92 | return $ERR_LINK_DEAD 93 | fi 94 | 95 | REQ_OUT=c 96 | 97 | if [[ $REQ_IN = *f* ]]; then 98 | parse '^"title"' ':"\([^"]\+\)' <<< "$PAGE" && REQ_OUT="${REQ_OUT}f" 99 | fi 100 | 101 | if [[ $REQ_IN = *s* ]]; then 102 | FILE_SIZE=$(parse '=.linkplace' '^[[:space:]]*\([^<]\+\)' 1 <<< "$PAGE") && \ 103 | translate_size "$FILE_SIZE" && REQ_OUT="${REQ_OUT}s" 104 | fi 105 | 106 | echo $REQ_OUT 107 | } 108 | -------------------------------------------------------------------------------- /firedrive.sh: -------------------------------------------------------------------------------- 1 | # Plowshare firedrive.com module 2 | # Copyright (c) 2014 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see . 18 | 19 | MODULE_FIREDRIVE_REGEXP_URL='http://\(www\.\)\?firedrive\.com/file/' 20 | 21 | MODULE_FIREDRIVE_DOWNLOAD_OPTIONS="" 22 | MODULE_FIREDRIVE_DOWNLOAD_RESUME=yes 23 | MODULE_FIREDRIVE_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no 24 | MODULE_FIREDRIVE_DOWNLOAD_SUCCESSIVE_INTERVAL= 25 | 26 | MODULE_FIREDRIVE_PROBE_OPTIONS="" 27 | 28 | # Output a firedrive file download URL 29 | # $1: cookie file 30 | # $2: firedrive url 31 | # stdout: real file download link 32 | firedrive_download() { 33 | local -r COOKIE_FILE=$1 34 | local -r URL=$2 35 | local PAGE FORM_HTML DL_KEY FILE_URL FILE_NAME 36 | 37 | PAGE=$(curl -c "$COOKIE_FILE" "$URL") || return 38 | 39 | # 404: This file might have been moved, replaced or deleted 40 | if match 'class=.removed_file_image.>' "$PAGE"; then 41 | return $ERR_LINK_DEAD 42 | # This file is private and only viewable by the owner 43 | elif match 'class=.private_file_image.>' "$PAGE"; then 44 | return $ERR_LINK_NEED_PERMISSIONS 45 | fi 46 | 47 | FILE_NAME=$(parse_tag 'class="external_title_left"' 'div' <<< "$PAGE") || return 48 | 49 | if ! match ' class="download_' "$PAGE"; then 50 | FORM_HTML=$(grep_form_by_id "$PAGE" 'confirm_form') || return 51 | DL_KEY=$(parse_form_input_by_name 'confirm' <<< "$FORM_HTML") || return 52 | 53 | PAGE=$(curl -b "$COOKIE_FILE" --referer "$URL" \ 54 | --data-urlencode "confirm=$DL_KEY" "$URL") || return 55 | fi 56 | 57 | FILE_URL=$(parse_attr 'Download This File' href <<< "$PAGE") || return 58 | 59 | PAGE=$(curl --include -b "$COOKIE_FILE" "$FILE_URL") || return 60 | FILE_URL=$(grep_http_header_location <<< "$PAGE") || return 61 | 62 | echo "$FILE_URL" 63 | echo "$FILE_NAME" 64 | } 65 | 66 | # Probe a download URL 67 | # $1: cookie file (unused here) 68 | # $2: firedrive url 69 | # $3: requested capability list 70 | # stdout: 1 capability per line 71 | firedrive_probe() { 72 | local -r URL=$2 73 | local -r REQ_IN=$3 74 | local PAGE REQ_OUT FILE_NAME FILE_SIZE 75 | 76 | PAGE=$(curl "$URL") || return 77 | 78 | #
79 | #
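# (In the 'information_content' block parsed further below, occurrence 1
# holds the file name and occurrence 3 the file size, each with a trailing
# blank; hence the "${FILE_NAME% }" / "${FILE_SIZE% }" trims.)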
80 | 81 | # 404: This file might have been moved, replaced or deleted 82 | if match 'class=.removed_file_image.>' "$PAGE"; then 83 | return $ERR_LINK_DEAD 84 | # This file is private and only viewable by the owner 85 | elif match 'class=.private_file_image.>' "$PAGE"; then 86 | return $ERR_LINK_NEED_PERMISSIONS 87 | fi 88 | 89 | REQ_OUT=c 90 | 91 | if [[ $REQ_IN = *f* ]]; then 92 | FILE_NAME=$(parse 'id=.information_content.>' \ 93 | '[[:space:]]\?\([^<]\+\)' 1 <<< "$PAGE") && \ 94 | echo "${FILE_NAME% }" && REQ_OUT="${REQ_OUT}f" 95 | fi 96 | 97 | if [[ $REQ_IN = *s* ]]; then 98 | FILE_SIZE=$(parse 'id=.information_content.>' \ 99 | '[[:space:]]\?\([^<]\+\)' 3 <<< "$PAGE") && \ 100 | translate_size "${FILE_SIZE% }" && REQ_OUT="${REQ_OUT}s" 101 | fi 102 | 103 | echo $REQ_OUT 104 | } 105 | -------------------------------------------------------------------------------- /anonfiles.sh: -------------------------------------------------------------------------------- 1 | # Plowshare anonfiles.com module 2 | # Copyright (c) 2012-2013 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see <http://www.gnu.org/licenses/>. 18 | 19 | MODULE_ANONFILES_REGEXP_URL='https\?://\([[:alnum:]]\+\.\)\?anonfiles\.com/' 20 | 21 | MODULE_ANONFILES_DOWNLOAD_OPTIONS="" 22 | MODULE_ANONFILES_DOWNLOAD_RESUME=yes 23 | MODULE_ANONFILES_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no 24 | MODULE_ANONFILES_DOWNLOAD_FINAL_LINK_NEEDS_EXTRA=() 25 | MODULE_ANONFILES_DOWNLOAD_SUCCESSIVE_INTERVAL= 26 | 27 | MODULE_ANONFILES_UPLOAD_OPTIONS="" 28 | MODULE_ANONFILES_UPLOAD_REMOTE_SUPPORT=no 29 | 30 | MODULE_ANONFILES_PROBE_OPTIONS="" 31 | 32 | # Output an AnonFiles.com file download URL 33 | # $1: cookie file (unused here) 34 | # $2: anonfiles url 35 | # stdout: real file download link 36 | anonfiles_download() { 37 | local -r URL=$2 38 | local PAGE FILE_URL FILENAME 39 | 40 | PAGE=$(curl -L "$URL") || return 41 | 42 | if match '404 - File Not Found<\|>File does not exist\.<' "$PAGE"; then 43 | return $ERR_LINK_DEAD 44 | fi 45 | 46 | FILE_URL=$(echo "$PAGE" | parse_attr_quiet 'download_button' href) 47 | 48 | if [ -z "$FILE_URL" ]; then 49 | FILE_URL=$(echo "$PAGE" | \ 50 | parse_attr_quiet 'image_preview' src) || return 51 | fi 52 | 53 | 54 | FILENAME=$(echo "$PAGE" | parse_tag '. -------------------------------------------------------------------------------- /nakido.sh: -------------------------------------------------------------------------------- 18 | 19 | MODULE_NAKIDO_REGEXP_URL='https\?://\(www\.\)\?nakido\.com/' 20 | 21 | MODULE_NAKIDO_DOWNLOAD_OPTIONS="" 22 | MODULE_NAKIDO_DOWNLOAD_RESUME=no 23 | MODULE_NAKIDO_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=yes 24 | MODULE_NAKIDO_DOWNLOAD_SUCCESSIVE_INTERVAL= 25 | 26 | MODULE_NAKIDO_PROBE_OPTIONS="" 27 | 28 | # Static function. Extract file key from download link. 29 | # $1: Nakido download URL 30 | # - http://www.nakido.com/HHHH... 31 | # stdout: file key 32 | nakido_extract_key() { 33 | local ID=$(parse_quiet .
'/\([[:xdigit:]]\{40\}\)$' <<< "$1") 34 | if [ -z "$ID" ]; then 35 | log_error 'Cannot extract file key, check your link url' 36 | return $ERR_FATAL 37 | else 38 | log_debug "File key: '$ID'" 39 | echo "$ID" 40 | fi 41 | } 42 | 43 | # Output a nakido file download URL 44 | # $1: cookie file 45 | # $2: nakido url 46 | # stdout: real file download link 47 | nakido_download() { 48 | local -r COOKIE_FILE=$1 49 | local URL=$2 50 | local -r BASE_URL='http://www.nakido.com' 51 | local PAGE FILE_KEY FILE_NAME FILE_NAME2 FILE_URL HASH I 52 | 53 | FILE_KEY=$(nakido_extract_key "$URL") || return 54 | 55 | # Get 'session' cookie 56 | PAGE=$(curl -c "$COOKIE_FILE" -b 'lang=en-us' -b "$COOKIE_FILE" \ 57 | --referer "$BASE_URL/$FILE_KEY" \ 58 | "$BASE_URL/dl?filekey=$FILE_KEY&action=add") || return 59 | 60 | # URL encoded (%xx) 61 | FILE_NAME=$(parse 'Nakido\.downloads\[' \ 62 | "^Nakido\.downloads\['${FILE_KEY}f'\]='\([^']\+\)" <<< "$PAGE") || return 63 | FILE_NAME2=$(parse_tag 'class=.link.' a <<< "$PAGE") 64 | 65 | HASH=$(parse 'Nakido\.downloads\[' \ 66 | "\]='\([[:xdigit:]]\+\)';[[:cntrl:]]$" <<< "$PAGE") || return 67 | log_debug "File hash: '$HASH'" 68 | 69 | for I in 2 3 4; do 70 | PAGE=$(curl -b "$COOKIE_FILE" -b 'lang=en-us' \ 71 | --referer "$BASE_URL/dl?filekey=$FILE_KEY&action=add" \ 72 | "$BASE_URL/dl/ticket?f=$FILE_KEY&o=$HASH") || return 73 | 74 | # Returns: 75 | # E6AC634B1946F301DD17617E51067985DC1866BA#3903 76 | if match "$FILE_KEY#0$" "$PAGE"; then 77 | log_debug 'Wait complete!' 78 | break 79 | elif match "$FILE_KEY#" "$PAGE"; then 80 | local WAIT=${PAGE#*#} 81 | wait $((WAIT + 1)) || return 82 | else 83 | log_error "Unexpected response: $PAGE" 84 | return $ERR_FATAL 85 | fi 86 | done 87 | 88 | PAGE=$(curl -I -b "$COOKIE_FILE" -b 'lang=en-us' \ 89 | "$BASE_URL/$FILE_KEY/$FILE_NAME") || return 90 | FILE_URL=$(grep_http_header_location <<< "$PAGE") || return 91 | 92 | echo "$FILE_URL" 93 | echo "$FILE_NAME2" 94 | } 95 | 96 | # Probe a download URL 97 | # $1: cookie file (unused here) 98 | # $2: nakido url 99 | # $3: requested capability list 100 | # stdout: 1 capability per line 101 | nakido_probe() { 102 | local -r URL=$2 103 | local -r REQ_IN=$3 104 | local PAGE REQ_OUT 105 | 106 | PAGE=$(curl -L -b 'lang=en-us' "$URL") || return 107 | 108 | #
The page you have requested is not exists 109 | if match ' page you have requested is not exist' "$PAGE"; then 110 | return $ERR_LINK_DEAD 111 | fi 112 | 113 | REQ_OUT=c 114 | 115 | if [[ $REQ_IN = *f* ]]; then 116 | parse_tag h1 <<< "$PAGE" && REQ_OUT="${REQ_OUT}f" 117 | fi 118 | 119 | echo $REQ_OUT 120 | } 121 | -------------------------------------------------------------------------------- /flashx.sh: -------------------------------------------------------------------------------- 1 | # Plowshare flashx.tv module 2 | # Copyright (c) 2014 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see . 18 | 19 | MODULE_FLASHX_REGEXP_URL='http://\(www\.\)\?flashx\.tv/' 20 | 21 | MODULE_FLASHX_DOWNLOAD_OPTIONS="" 22 | MODULE_FLASHX_DOWNLOAD_RESUME=yes 23 | MODULE_FLASHX_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no 24 | MODULE_FLASHX_DOWNLOAD_SUCCESSIVE_INTERVAL= 25 | MODULE_FLASHX_DOWNLOAD_FINAL_LINK_NEEDS_EXTRA= 26 | 27 | MODULE_FLASHX_PROBE_OPTIONS="" 28 | 29 | # Output a flashx file download URL 30 | # $1: cookie file 31 | # $2: flashx url 32 | # stdout: real file download link 33 | flashx_download() { 34 | local -r COOKIE_FILE=$1 35 | local -r URL=$2 36 | local -r BASE_URL='http://www.flashx.tv' 37 | local PAGE VIDEO_ID SMIL_URL LINK_BASE LINK_ID FILE_NAME FILE_URL 38 | 39 | detect_javascript || return 40 | 41 | VIDEO_ID=$(parse . '[/?]\([a-z0-9]*\)' <<< "$URL") 42 | log_debug "Video ID: $VIDEO_ID" 43 | 44 | PAGE=$(curl "$BASE_URL/$VIDEO_ID") || return 45 | 46 | # pattern still valid? 47 | if match 'Video not found, deleted, abused or wrong link\|Video not found, deleted or abused, sorry!' 
\ 48 | "$PAGE"; then 49 | return $ERR_LINK_DEAD 50 | fi 51 | 52 | if match 'ERROR - 404 - FILE NOT FOUND' "$PAGE"; then 53 | return $ERR_LINK_DEAD 54 | fi 55 | 56 | FILE_NAME=$(parse_form_input_by_name 'fname' <<< "$PAGE") || return 57 | log_debug "file name: $FILE_NAME" 58 | 59 | # using embedded video page is easier 60 | PAGE=$(curl "$BASE_URL/embed-$VIDEO_ID.html") || return 61 | JS=$(grep_script_by_order "$PAGE" -2) || return 62 | JS=${JS#*>} 63 | JS=${JS%<*} 64 | 65 | SMIL_URL=$(javascript <<< "empty = function(f) {}; 66 | setup = function(opts) { 67 | print(opts.sources[0].file); 68 | } 69 | var jwplayer = function(tag) { 70 | return { 71 | setup: setup, 72 | onTime: empty, 73 | onSeek: empty, 74 | onPlay: empty, 75 | onComplete: empty, 76 | }; 77 | } 78 | $JS") || return 79 | log_debug smil url: "$SMIL_URL" 80 | 81 | PAGE=$(curl "$SMIL_URL") || return 82 | LINK_BASE=$(parse base '://\([^:/]*\)' <<< "$PAGE") || return 83 | # first link is usually the one with the best quality 84 | LINK_ID=$(parse 'video src' '?h=\([a-z0-9]*\)' <<< "$PAGE") || return 85 | FILE_URL="http://$LINK_BASE/$LINK_ID/video.mp4" 86 | 87 | echo "$FILE_URL" 88 | echo "$FILE_NAME" 89 | } 90 | 91 | # Probe a download URL 92 | # $1: cookie file (unused here) 93 | # $2: flashx.tv url 94 | # $3: requested capability list 95 | # stdout: 1 capability per line 96 | flashx_probe() { 97 | local -r URL=$2 98 | local -r REQ_IN=$3 99 | local -r BASE_URL='http://www.flashx.tv' 100 | local PAGE VIDEO_ID REQ_OUT FILE_NAME 101 | 102 | VIDEO_ID=$(parse . '[/?]\([a-z0-9]*\)' <<< "$URL") 103 | PAGE=$(curl "$BASE_URL/$VIDEO_ID") || return 104 | 105 | # pattern still valid? 106 | if match 'Video not found, deleted, abused or wrong link\|Video not found, deleted or abused, sorry!' \ 107 | "$PAGE"; then 108 | return $ERR_LINK_DEAD 109 | fi 110 | 111 | if match 'ERROR - 404 - FILE NOT FOUND' "$PAGE"; then 112 | return $ERR_LINK_DEAD 113 | fi 114 | 115 | REQ_OUT=c 116 | 117 | if [[ $REQ_IN = *f* ]]; then 118 | FILE_NAME=$(parse_form_input_by_name 'fname' <<< "$PAGE") && \ 119 | echo "$FILE_NAME" && REQ_OUT="${REQ_OUT}f" 120 | fi 121 | 122 | echo $REQ_OUT 123 | } 124 | -------------------------------------------------------------------------------- /bayimg.sh: -------------------------------------------------------------------------------- 1 | # Plowshare bayimg.com module 2 | # Copyright (c) 2013 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see . 
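# bayimg notes: downloads need no cookie (images are public); uploads accept
# an optional admin code (bayimg_upload generates an 8-character one with
# 'random a 8' when none is given), and bayimg_delete posts that same code
# to remove the file.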
18 | 19 | MODULE_BAYIMG_REGEXP_URL='https\?://\(www\.\)\?bayimg\.com/' 20 | 21 | MODULE_BAYIMG_DOWNLOAD_OPTIONS="" 22 | MODULE_BAYIMG_DOWNLOAD_RESUME=yes 23 | MODULE_BAYIMG_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no 24 | MODULE_BAYIMG_DOWNLOAD_SUCCESSIVE_INTERVAL= 25 | 26 | MODULE_BAYIMG_UPLOAD_OPTIONS=" 27 | ADMIN_CODE,,admin-code,s=ADMIN_CODE,Admin code (used for file deletion) 28 | TAGS,,tags,l=LIST,Provide list of tags (comma separated)" 29 | MODULE_BAYIMG_UPLOAD_REMOTE_SUPPORT=no 30 | 31 | MODULE_BAYIMG_DELETE_OPTIONS=" 32 | LINK_PASSWORD,p,link-password,S=PASSWORD,Admin password (mandatory)" 33 | MODULE_BAYIMG_PROBE_OPTIONS="" 34 | 35 | # Output a bayimg.com file download URL 36 | # $1: cookie file (unused here) 37 | # $2: bayimg url 38 | # stdout: real file download link 39 | bayimg_download() { 40 | local -r URL=$2 41 | local PAGE FILE_URL FILE_NAME 42 | 43 | PAGE=$(curl -L "$URL") || return 44 | 45 | if match '404 . Not Found' "$PAGE"; then 46 | return $ERR_LINK_DEAD 47 | fi 48 | 49 | FILE_URL=$(parse_attr 'toggleResize(' src <<< "$PAGE") || return 50 | 51 | # Filename is not always displayed 52 | FILE_NAME=$(parse_quiet '>Filename:' '
Filename:[[:space:]]\([^<]\+\)' <<< "$PAGE") 53 | 54 | echo "http:$FILE_URL" 55 | test -z "$FILE_NAME" || echo "$FILE_NAME" 56 | } 57 | 58 | # Upload a file to bayimg.com 59 | # $1: cookie file (unused here) 60 | # $2: input file (with full path) 61 | # $3: remote filename 62 | # stdout: download link + admin code 63 | bayimg_upload() { 64 | local -r FILE=$2 65 | local -r DESTFILE=$3 66 | local PAGE FILE_URL 67 | 68 | if [ -n "$ADMIN_CODE" ]; then 69 | # No known restrictions (length limitation or forbidden characters) 70 | : 71 | else 72 | ADMIN_CODE=$(random a 8) 73 | fi 74 | 75 | PAGE=$(curl_with_log -F "tags=${TAGS[*]}" \ 76 | -F "code=$ADMIN_CODE" \ 77 | -F "file=@$FILE;filename=$DESTFILE" \ 78 | 'http://bayimg.com/upload') || return 79 | 80 | FILE_URL=$(parse_attr 'image-setting' href <<< "$PAGE") || return 81 | 82 | echo "http:$FILE_URL" 83 | echo 84 | echo "$ADMIN_CODE" 85 | } 86 | 87 | # Delete a file on bayimg (requires an admin code) 88 | # $1: cookie file (unused here) 89 | # $2: delete link 90 | bayimg_delete() { 91 | local -r URL=$2 92 | local PAGE REDIR 93 | 94 | if [ -z "$LINK_PASSWORD" ]; then 95 | LINK_PASSWORD=$(prompt_for_password) || return 96 | fi 97 | 98 | PAGE=$(curl -i "$URL" -d "code=$LINK_PASSWORD") || return 99 | 100 | if match 'REMOVAL CODE' "$PAGE"; then 101 | return $ERR_LINK_PASSWORD_REQUIRED 102 | fi 103 | 104 | REDIR=$(grep_http_header_location_quiet <<< "$PAGE") 105 | if [ "$REDIR" = '/' ]; then 106 | return 0 107 | fi 108 | 109 | return $ERR_LINK_DEAD 110 | } 111 | 112 | # Probe a download URL 113 | # $1: cookie file (unused here) 114 | # $2: bayimg url 115 | # $3: requested capability list 116 | # stdout: 1 capability per line 117 | bayimg_probe() { 118 | local -r URL=$2 119 | local -r REQ_IN=$3 120 | local PAGE REQ_OUT 121 | 122 | PAGE=$(curl -L "$URL") || return 123 | 124 | if match '404 . Not Found' "$PAGE"; then 125 | return $ERR_LINK_DEAD 126 | fi 127 | 128 | REQ_OUT=c 129 | 130 | if [[ $REQ_IN = *f* ]]; then 131 | parse '>Filename:' '
Filename:[[:space:]]\([^<]\+\)' <<< "$PAGE" && \ 132 | REQ_OUT="${REQ_OUT}f" 133 | fi 134 | 135 | echo $REQ_OUT 136 | } 137 | -------------------------------------------------------------------------------- /uplea.sh: -------------------------------------------------------------------------------- 1 | # Plowshare uplea.com module 2 | # Copyright (c) 2015 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see . 18 | 19 | MODULE_UPLEA_REGEXP_URL='https\?://\(www\.\)\?uplea\.com/' 20 | 21 | MODULE_UPLEA_DOWNLOAD_OPTIONS="" 22 | MODULE_UPLEA_DOWNLOAD_RESUME=yes 23 | MODULE_UPLEA_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no 24 | MODULE_UPLEA_DOWNLOAD_SUCCESSIVE_INTERVAL= 25 | 26 | MODULE_UPLEA_PROBE_OPTIONS="" 27 | 28 | # Output an Uplea file download URL 29 | # $1: cookie file (unused here) 30 | # $2: uplea url 31 | # stdout: real file download link 32 | uplea_download() { 33 | local -r COOKIE_FILE=$1 34 | local -r URL=$2 35 | local -r BASE_URL='https://uplea.com' 36 | local PAGE WAIT_URL WAIT_TIME FILE_URL FILE_NAME 37 | 38 | PAGE=$(curl -b "$COOKIE_FILE" -c "$COOKIE_FILE" "$URL") || return 39 | 40 | if match '>You followed an invalid or expired link\.<' "$PAGE"; then 41 | return $ERR_LINK_DEAD 42 | fi 43 | 44 | WAIT_URL=$(parse '>[[:space:]]*Free download[[:space:]]*<' '=.\([^"]*\)' -1 <<< "$PAGE") || return 45 | 46 | PAGE=$(curl -b "$COOKIE_FILE" "$BASE_URL$WAIT_URL") || return 47 | 48 | if match 'You need to have a Premium subscription to download this file' "$PAGE"; then 49 | return $ERR_LINK_NEED_PERMISSIONS 50 | fi 51 | 52 | # jQuery("DIV#timeBeforeNextUpload").jCountdown({ 53 | WAIT_TIME=$(parse_quiet '#timeBeforeNextUpload' ':\([[:digit:]]\+\)' 1 <<< "$PAGE") 54 | if [[ $WAIT_TIME -gt 0 ]]; then 55 | echo $WAIT_TIME 56 | return $ERR_LINK_TEMP_UNAVAILABLE 57 | fi 58 | 59 | FILE_URL=$(parse_attr '=.button-download' href <<< "$PAGE") || return 60 | FILE_NAME=$(parse_tag '=.gold-text' span <<< "$PAGE") 61 | 62 | # Detect email protection (filename contains @) 63 | if match 'href="/cdn-cgi/l/email-protection"' "$FILE_NAME"; then 64 | FILE_NAME= 65 | fi 66 | 67 | # $('#ulCounter').ulCounter({'timer':10}); 68 | WAIT_TIME=$(parse '#ulCounter' ':\([[:digit:]]\+\)' <<< "$PAGE") || WAIT_TIME=10 69 | wait $((WAIT_TIME)) || return 70 | 71 | echo "$FILE_URL" 72 | echo "$FILE_NAME" 73 | } 74 | 75 | # Probe a download URL. Use official API: http://uplea.com/api 76 | # $1: cookie file (unused here) 77 | # $2: Uplea url 78 | # $3: requested capability list 79 | # stdout: 1 capability per line 80 | uplea_probe() { 81 | local -r URL=$2 82 | local -r REQ_IN=$3 83 | local JSON ERR REQ_OUT STATUS PAGE FILE_SIZE 84 | local -r BASE_URL='http://api.uplea.com/api/check-my-links' 85 | 86 | JSON=$(curl -F "json={ \"links\": [ \"$URL\" ] }" "$BASE_URL") || return 87 | 88 | if ! 
match_json_true 'status' "$JSON"; then 89 | ERR=$(parse_json_quiet 'error' <<< "$PAGE") 90 | log_error "Unexpected remote error: $ERR" 91 | return $ERR_FATAL 92 | fi 93 | 94 | JSON=$(parse_json 'result' <<< "$JSON") 95 | STATUS=$(parse_json 'status' <<< "$JSON") 96 | 97 | # 'DELETED' 98 | if [ "$STATUS" != 'OK' ]; then 99 | return $ERR_LINK_DEAD 100 | fi 101 | 102 | REQ_OUT=c 103 | 104 | # Note: Can't manage $ERR_LINK_NEED_PERMISSIONS with this link checker API. 105 | PAGE=$(curl -L "$URL") 106 | 107 | if [[ $REQ_IN = *f* ]]; then 108 | parse '^Download your file:' '>\([^<]\+\)' 3 <<< "$PAGE" && REQ_OUT="${REQ_OUT}f" 109 | fi 110 | 111 | if [[ $REQ_IN = *s* ]]; then 112 | FILE_SIZE=$(parse '^Download your file:' '>\([^<]\+\)' 4 <<< "$PAGE") && \ 113 | translate_size "${FILE_SIZE/o/B}" && REQ_OUT="${REQ_OUT}s" 114 | fi 115 | 116 | if [[ $REQ_IN = *i* ]]; then 117 | parse . '/\([[:alnum:]]\+\)$' <<< "$URL" && REQ_OUT="${REQ_OUT}i" 118 | fi 119 | 120 | echo $REQ_OUT 121 | } 122 | -------------------------------------------------------------------------------- /tempsend.sh: -------------------------------------------------------------------------------- 1 | # Plowshare tempsend.com module 2 | # Copyright (c) 2014-2016 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see . 18 | 19 | MODULE_TEMPSEND_REGEXP_URL='https\?://\(www\.\)\?tempsend\.com/' 20 | 21 | MODULE_TEMPSEND_DOWNLOAD_OPTIONS="" 22 | MODULE_TEMPSEND_DOWNLOAD_RESUME=no 23 | MODULE_TEMPSEND_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=unused 24 | MODULE_TEMPSEND_DOWNLOAD_SUCCESSIVE_INTERVAL= 25 | 26 | MODULE_TEMPSEND_UPLOAD_OPTIONS=" 27 | NOSSL,,nossl,,Use HTTP upload url instead of HTTPS 28 | TTL,,ttl,n=SECS,Expiration period (in seconds). Default is 86400 (one day)." 
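# Allowed --ttl values match the site's expiry choices validated in
# tempsend_upload below: 3600 (1 hour), 86400 (1 day), 604800 (1 week) or
# 2678400 (31 days); any other value fails with ERR_BAD_COMMAND_LINE.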
29 | MODULE_TEMPSEND_UPLOAD_REMOTE_SUPPORT=no 30 | 31 | MODULE_TEMPSEND_PROBE_OPTIONS="" 32 | 33 | # Output a tempsend.com file download URL 34 | # $1: cookie file (unused here) 35 | # $2: tempsend.com url 36 | # stdout: real file download link 37 | tempsend_download() { 38 | local URL=$2 39 | local PAGE FILE_URL 40 | 41 | PAGE=$(curl -L "$URL") || return 42 | FILE_URL=$(parse_attr 'title=.Download' 'href' <<< "$PAGE") || return 43 | 44 | echo "http://tempsend.com$FILE_URL" 45 | } 46 | 47 | # Upload a file to tempsend.com 48 | # $1: cookie file (unused here) 49 | # $2: input file (with full path) 50 | # $3: remote filename 51 | # stdout: download 52 | tempsend_upload() { 53 | local -r FILE=$2 54 | local -r DESTFILE=$3 55 | local BASE_URL='https://tempsend.com/send' 56 | local PAGE FILE_URL DELAY V 57 | 58 | [ -n "$NOSSL" ] && BASE_URL='http://tempsend.com/send' 59 | 60 | if [ -n "$TTL" ]; then 61 | # curl http://tempsend.com | grep option 62 | local -a VALUES=(3600 86400 604800 2678400) 63 | 64 | DELAY=0 65 | 66 | for V in ${VALUES[@]}; do 67 | if [[ $V -eq $TTL ]]; then 68 | DELAY=$V 69 | break; 70 | fi 71 | done 72 | 73 | if [[ $DELAY -eq 0 ]]; then 74 | log_error 'Bad value to --ttl, allowed values are: '${VALUES[*]}'.' 75 | return $ERR_BAD_COMMAND_LINE 76 | fi 77 | else 78 | DELAY=2678400 79 | fi 80 | 81 | PAGE=$(curl_with_log -L \ 82 | -F "file=@$FILE;filename=$DESTFILE" \ 83 | -F "expire=$DELAY" "$BASE_URL") || return 84 | 85 | # Sanity check 86 | if [ "$PAGE" == 'Could not connect to database' ]; then 87 | log_error "Remote error: $PAGE" 88 | return $ERR_LINK_TEMP_UNAVAILABLE 89 | fi 90 | 91 | if FILE_URL=$(parse_tag 'title=.Link to' a <<< "$PAGE"); then 92 | echo "$FILE_URL" 93 | return 0 94 | fi 95 | 96 | if match '>Not FoundDownload' "$PAGE"; then 116 | return $ERR_LINK_DEAD 117 | fi 118 | 119 | REQ_OUT=c 120 | 121 | if [[ $REQ_IN = *f* ]]; then 122 | FILE_NAME=$(parse_tag '="Download[[:space:]]' a <<< "$PAGE") && \ 123 | echo "$FILE_NAME" && REQ_OUT="${REQ_OUT}f" 124 | fi 125 | 126 | if [[ $REQ_IN = *i* ]]; then 127 | FID=$(parse . '/\([[:alnum:]]*\)$' <<< "$URL") && \ 128 | echo "$FID" && REQ_OUT="${REQ_OUT}i" 129 | fi 130 | 131 | if [[ $REQ_IN = *s* ]]; then 132 | FILE_SIZE=$(parse '[[:digit:]][[:space:]]downloads<' '^[^[:space:]]*[[:space:]]-[[:space:]]*\([^-]\+\)' <<< "$PAGE") && \ 133 | translate_size "$FILE_SIZE" && REQ_OUT="${REQ_OUT}s" 134 | fi 135 | 136 | echo $REQ_OUT 137 | } 138 | -------------------------------------------------------------------------------- /fileover.sh: -------------------------------------------------------------------------------- 1 | # Plowshare fileover.net module 2 | # Copyright (c) 2013 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see . 
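# How this module obtains a link (per the code below): read the forced wait
# and file id from the page, fetch a session hash from /ax/timereq.flo,
# sleep out the countdown, then post the solved reCAPTCHA together with the
# file id and hash to /ax/timepoll.flo, which yields the download link.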
18 | 19 | MODULE_FILEOVER_REGEXP_URL='https\?://\(www\.\)\?fileover\.net/' 20 | 21 | MODULE_FILEOVER_DOWNLOAD_OPTIONS="" 22 | MODULE_FILEOVER_DOWNLOAD_RESUME=no 23 | MODULE_FILEOVER_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no 24 | MODULE_FILEOVER_DOWNLOAD_SUCCESSIVE_INTERVAL= 25 | 26 | MODULE_FILEOVER_PROBE_OPTIONS="" 27 | 28 | # Output a fileover.net file download URL 29 | # $1: cookie file (unused here) 30 | # $2: fileover.net url 31 | # stdout: real file download link 32 | fileover_download() { 33 | local COOKIE_FILE=$1 34 | local URL=$2 35 | local BASE_URL='http://fileover.net' 36 | local PAGE FILE_ID FILE_NAME WAIT JSON HASH FILE_URL 37 | 38 | PAGE=$(curl -L "$URL") || return 39 | 40 | # The following file is unavailable 41 | # The file was completely removed from our servers.
42 | if match 'file is unavailable\|file was completely removed' "$PAGE"; then 43 | return $ERR_LINK_DEAD 44 | fi 45 | 46 | FILE_ID=$(parse '/ax/time' "'[[:space:]]*+[[:space:]]*\([[:digit:]]\+\)" <<< "$PAGE") || return 47 | log_debug "File ID: '$FILE_ID'" 48 | 49 | FILE_NAME=$(parse_tag h1 <<< "$PAGE") || return 50 | 51 | # You have to wait: 14 minutes 57 seconds. 52 | if matchi 'You have to wait' "$PAGE"; then 53 | local MINS SECS 54 | MINS=$(parse_quiet 'u have to wait' ':[[:space:]]*\([[:digit:]]\+\) minute' <<< "$PAGE") 55 | SECS=$(parse_quiet 'u have to wait' '[[:space:]]\+\([[:digit:]]\+\) second' <<< "$PAGE") 56 | 57 | echo $(( MINS * 60 + SECS )) 58 | return $ERR_LINK_TEMP_UNAVAILABLE 59 | fi 60 | 61 | # Wait Time: 20s
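# (illustration; assumes core's parse_tag [REGEX] TAG reads HTML on stdin
# and prints the inner text of the matching tag) Against markup like the
# "Wait Time" snippet documented above, the extraction below behaves
# roughly like this - the sample input is made up:
#   parse_tag '^[[:space:]]\+' span <<< '    <span>20</span>'   # prints: 20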
62 | WAIT=$(parse_tag '^[[:space:]]\+' span <<< "$PAGE") || return 63 | 64 | # {"hash":"df65ff1c76bdacbe92816971651b91cd"} 65 | JSON=$(curl -H 'X-Requested-With: XMLHttpRequest' \ 66 | --referer "$URL" "$BASE_URL/ax/timereq.flo?$FILE_ID") || return 67 | 68 | HASH=$(parse_json hash <<< "$JSON") || return 69 | log_debug "Hash: '$HASH'" 70 | 71 | wait "$WAIT" || return 72 | 73 | PAGE=$(curl "$BASE_URL/ax/timepoll.flo?file=$FILE_ID&hash=$HASH") || return 74 | 75 | # reCaptcha part 76 | local PUBKEY WCI CHALLENGE WORD ID 77 | PUBKEY='6LfT08MSAAAAAP7dyRaVw9N-ZaMy0SK6Nw1chr7i' 78 | WCI=$(recaptcha_process $PUBKEY) || return 79 | { read WORD; read CHALLENGE; read ID; } <<< "$WCI" 80 | 81 | PAGE=$(curl -d "file=$FILE_ID" \ 82 | -d "recaptcha_challenge_field=$CHALLENGE" \ 83 | -d "recaptcha_response_field=$WORD" -d "hash=$HASH" \ 84 | "$BASE_URL/ax/timepoll.flo") || return 85 | 86 | if match '/recaptcha/' "$PAGE"; then 87 | captcha_nack $ID 88 | log_error 'Wrong captcha' 89 | return $ERR_CAPTCHA 90 | fi 91 | 92 | captcha_ack $ID 93 | log_debug 'Correct captcha' 94 | 95 | # Click here to Download 96 | FILE_URL=$(parse_attr 'Download' href <<< "$PAGE") || return 97 | 98 | echo "$FILE_URL" 99 | echo "$FILE_NAME" 100 | } 101 | 102 | # Probe a download URL 103 | # $1: cookie file (unused here) 104 | # $2: fileover url 105 | # $3: requested capability list 106 | # stdout: 1 capability per line 107 | fileover_probe() { 108 | local -r URL=$2 109 | local -r REQ_IN=$3 110 | local PAGE REQ_OUT 111 | 112 | PAGE=$(curl -L "$URL") || return 113 | 114 | # The following file is unavailable 115 | # The file was completely removed from our servers.
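# (note) probe functions implement a small capability protocol: $3 (REQ_IN)
# is a string of single-letter capabilities requested by the caller -
# c (link alive), f (file name), i (file id), s (file size) - and the
# function echoes one value per line followed by the letters it actually
# delivered (REQ_OUT), which is why the probes in these modules all start
# from REQ_OUT=c.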
116 | if match 'file is unavailable\|file was completely removed' "$PAGE"; then 117 | return $ERR_LINK_DEAD 118 | fi 119 | 120 | REQ_OUT=c 121 | 122 | if [[ $REQ_IN = *f* ]]; then 123 | parse_tag '<h1' h1 <<< "$PAGE" && REQ_OUT="${REQ_OUT}f" 124 | fi 125 | 126 | echo $REQ_OUT 127 | } 128 | -------------------------------------------------------------------------------- /fileparadox.sh: -------------------------------------------------------------------------------- 1 | # Plowshare fileparadox.in module 2 | # Copyright (c) 2013 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see <http://www.gnu.org/licenses/>. 18 | 19 | MODULE_FILEPARADOX_REGEXP_URL='https\?://\(www\.\)\?fileparadox\.in/' 20 | 21 | MODULE_FILEPARADOX_UPLOAD_OPTIONS=" 22 | AUTH_FREE,b,auth-free,a=USER:PASSWORD,Free account 23 | LINK_PASSWORD,p,link-password,S=PASSWORD,Protect a link with a password 24 | TOEMAIL,,email-to,e=EMAIL,<To> field for notification email" 25 | MODULE_FILEPARADOX_UPLOAD_REMOTE_SUPPORT=no 26 | 27 | # Static function. Proceed with login. 28 | # $1: authentication 29 | # $2: cookie file 30 | # $3: base URL 31 | fileparadox_login() { 32 | local -r AUTH=$1 33 | local -r COOKIE_FILE=$2 34 | local -r BASE_URL=$3 35 | local LOGIN_DATA LOGIN_RESULT STATUS NAME 36 | 37 | LOGIN_DATA='op=login&login=$USER&password=$PASSWORD&redirect=' 38 | LOGIN_RESULT=$(post_login "$AUTH" "$COOKIE_FILE" "$LOGIN_DATA$BASE_URL/?op=my_account" \ 39 | "$BASE_URL" -L -b 'lang=english') || return 40 | 41 | # If successful, two entries are added into cookie file: login and xfss 42 | STATUS=$(parse_cookie_quiet 'xfss' < "$COOKIE_FILE") 43 | if [ -z "$STATUS" ]; then 44 | return $ERR_LOGIN_FAILED 45 | fi 46 | 47 | NAME=$(parse_cookie 'login' < "$COOKIE_FILE") 48 | log_debug "Successfully logged in as $NAME member" 49 | } 50 | 51 | # Upload a file to fileparadox.in 52 | # $1: cookie file 53 | # $2: input file (with full path) 54 | # $3: remote filename 55 | # stdout: download link 56 | fileparadox_upload() { 57 | local -r COOKIE_FILE=$1 58 | local -r FILE=$2 59 | local -r DEST_FILE=$3 60 | local -r BASE_URL='http://fileparadox.in' 61 | local PAGE UPLOAD_ID USER_TYPE DL_URL DEL_URL 62 | 63 | # Sanity check 64 | [ -n "$AUTH_FREE" ] || return $ERR_LINK_NEED_PERMISSIONS 65 | 66 | fileparadox_login "$AUTH_FREE" "$COOKIE_FILE" "$BASE_URL" || return 67 | 68 | PAGE=$(curl -c "$COOKIE_FILE" -b 'lang=english' -b "$COOKIE_FILE" "$BASE_URL") || return 69 | 70 | local FORM_HTML FORM_ACTION FORM_TMP_SRV FORM_UTYPE FORM_SESS 71 | FORM_HTML=$(grep_form_by_name "$PAGE" 'file') || return 72 | FORM_ACTION=$(echo "$FORM_HTML" | parse_form_action) || return 73 | FORM_TMP_SRV=$(echo "$FORM_HTML" | parse_form_input_by_name 'srv_tmp_url') || return 74 | FORM_UTYPE=$(echo "$FORM_HTML" | parse_form_input_by_name 'upload_type') || return 75 | FORM_SESS=$(echo "$FORM_HTML" | parse_form_input_by_name_quiet 'sess_id') || return 76 | 77 | UPLOAD_ID=$(random dec 12) 78 | USER_TYPE=reg 79 | 80 | PAGE=$(curl_with_log \ 81 | -F "upload_type=$FORM_UTYPE" \ 82 | -F "sess_id=$FORM_SESS" \ 83 | -F "srv_tmp_url=$FORM_TMP_SRV" \ 84 | -F "file_0=@$FILE;filename=$DEST_FILE" \ 85 | --form-string "link_rcpt=$TOEMAIL" \ 86 | --form-string "link_pass=$LINK_PASSWORD" \ 87 | -F 'tos=1' \ 88 | -F 'submit_btn=' \ 89 | "${FORM_ACTION}${UPLOAD_ID}&js_on=1&utype=${USER_TYPE}&upload_type=$FORM_UTYPE" | \ 90 | break_html_lines) || return 91 | 92 | local FORM2_ACTION FORM2_FN FORM2_ST FORM2_OP 93 | FORM2_ACTION=$(echo "$PAGE" | parse_form_action) || return 94 | FORM2_FN=$(echo "$PAGE" | parse_tag 'fn.>' textarea) 95 | FORM2_ST=$(echo "$PAGE" | parse_tag 'st.>' textarea) 96 | FORM2_OP=$(echo "$PAGE" | parse_tag 'op.>' textarea) 97 | 98 | if [ "$FORM2_ST" = 'OK' ]; then 99 | PAGE=$(curl -b 'lang=english' \ 100 | -d "fn=$FORM2_FN" -d "st=$FORM2_ST" -d "op=$FORM2_OP" \ 101 | "$FORM2_ACTION" | break_html_lines) || return 102 | 103 | DL_URL=$(parse 'Download Link' '>\(.*\)$' 2 <<< "$PAGE") || return 104 | DEL_URL=$(parse 'Delete Link' '>\(.*\)$' 2 <<< "$PAGE")
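# (note, hedged) plowup consumes an upload function's stdout positionally:
# the download link first, then optional extra lines such as a delete link
# and a link password - the order of the three echo statements that follow.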
105 | 106 | echo "$DL_URL" 107 | echo "$DEL_URL" 108 | echo "$LINK_PASSWORD" 109 | return 0 110 | fi 111 | 112 | log_error "Unexpected status: $FORM2_ST" 113 | return $ERR_FATAL 114 | } 115 | -------------------------------------------------------------------------------- /hexupload.sh: -------------------------------------------------------------------------------- 1 | # Plowshare hexupload.com module 2 | # Copyright (c) 2016 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see <http://www.gnu.org/licenses/>. 18 | 19 | MODULE_HEXUPLOAD_REGEXP_URL='https\?://\(www\.\)\?hexupload\.com/' 20 | 21 | MODULE_HEXUPLOAD_DOWNLOAD_OPTIONS="" 22 | MODULE_HEXUPLOAD_DOWNLOAD_RESUME=yes 23 | MODULE_HEXUPLOAD_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no 24 | MODULE_HEXUPLOAD_DOWNLOAD_SUCCESSIVE_INTERVAL= 25 | 26 | MODULE_HEXUPLOAD_PROBE_OPTIONS="" 27 | 28 | # Output a hexupload file download URL 29 | # $1: cookie file 30 | # $2: hexupload url 31 | # stdout: real file download link 32 | hexupload_download() { 33 | local -r COOKIE_FILE=$1 34 | local URL PAGE 35 | local FORM_HTML FORM_OP FORM_USR FORM_ID FORM_FNAME 36 | local FORM_REF FORM_METHOD_F FORM_RAND FORM_METHOD_P FORM_DD 37 | 38 | # Get a canonical URL for this file.
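# (note) curl -I sends a HEAD request, so only response headers travel over
# the wire; grep_http_header_location_quiet prints the Location header when
# the site redirects and nothing at all otherwise, which is why the next
# lines fall back to the caller-supplied "$2" before freezing URL with
# readonly.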
39 | URL=$(curl -I "$2" | grep_http_header_location_quiet) || return 40 | [ -n "$URL" ] || URL=$2 41 | readonly URL 42 | 43 | PAGE=$(curl -b "$COOKIE_FILE" -c "$COOKIE_FILE" "$URL") || return 44 | 45 | if match ">File Not Found<" "$PAGE"; then 46 | return $ERR_LINK_DEAD 47 | fi 48 | 49 | FORM_HTML=$(grep_form_by_order "$PAGE" 4) || return 50 | FORM_OP=$(parse_form_input_by_name 'op' <<< "$FORM_HTML") || return 51 | FORM_USR=$(parse_form_input_by_name_quiet 'usr_login' <<< "$FORM_HTML") 52 | FORM_ID=$(parse_form_input_by_name 'id' <<< "$FORM_HTML") || return 53 | FORM_FNAME=$(parse_form_input_by_name 'fname' <<< "$FORM_HTML") || return 54 | FORM_REF=$(parse_form_input_by_name_quiet 'referer' <<< "$FORM_HTML") 55 | FORM_METHOD_F=$(parse_form_input_by_name_quiet 'method_free' <<< "$FORM_HTML") 56 | 57 | PAGE=$(curl -b "$COOKIE_FILE" \ 58 | -d "op=$FORM_OP" \ 59 | -d "usr_login=$FORM_USR" \ 60 | -d "id=$FORM_ID" \ 61 | -d "fname=$FORM_FNAME" \ 62 | -d "referer=$FORM_REF" \ 63 | -d "method_free=$FORM_METHOD_F" \ 64 | "$URL") || return 65 | 66 | FORM_HTML=$(grep_form_by_name "$PAGE" 'F1') || return 67 | FORM_OP=$(parse_form_input_by_name 'op' <<< "$FORM_HTML") || return 68 | FORM_ID=$(parse_form_input_by_name 'id' <<< "$FORM_HTML") || return 69 | FORM_RAND=$(parse_form_input_by_name 'rand' <<< "$FORM_HTML") || return 70 | FORM_REF=$(parse_form_input_by_name_quiet 'referer' <<< "$FORM_HTML") 71 | FORM_METHOD_F=$(parse_form_input_by_name_quiet 'method_free' <<< "$FORM_HTML") 72 | FORM_METHOD_P=$(parse_form_input_by_name_quiet 'method_premium' <<< "$FORM_HTML") 73 | FORM_DD=$(parse_form_input_by_name_quiet 'down_direct' <<< "$FORM_HTML") 74 | 75 | PAGE=$(curl -b "$COOKIE_FILE" \ 76 | -d "op=$FORM_OP" \ 77 | -d "id=$FORM_ID" \ 78 | -d "rand=$FORM_RAND" \ 79 | -d "referer=$FORM_REF" \ 80 | -d "method_free=$FORM_METHOD_F" \ 81 | -d "method_premium=$FORM_METHOD_P" \ 82 | -d "down_direct=$FORM_DD" \ 83 | "$URL") || return 84 | 85 | parse_attr 'dl-last\.png' 'href' <<< "$PAGE" || return 86 | } 87 | 88 | # Probe a download URL 89 | # $1: cookie file (unused here) 90 | # $2: hexupload url 91 | # $3: requested capability list 92 | # stdout: 1 capability per line 93 | hexupload_probe() { 94 | local -r URL=$2 95 | local -r REQ_IN=$3 96 | local PAGE FILE_SIZE REQ_OUT 97 | 98 | PAGE=$(curl -L "$URL") || return 99 | 100 | if match ">File Not Found<" "$PAGE"; then 101 | return $ERR_LINK_DEAD 102 | fi 103 | 104 | REQ_OUT=c 105 | 106 | if [[ $REQ_IN = *f* ]]; then 107 | parse 'File:' 'File:[^>]*>\([^<]*\)<' <<< "$PAGE" && REQ_OUT="${REQ_OUT}f" 108 | fi 109 | 110 | if [[ $REQ_IN = *s* ]]; then 111 | FILE_SIZE=$(parse 'File:' '\[.*>\([[:digit:]].*B\)<.*\]' <<< "$PAGE") \ 112 | && FILE_SIZE=$(replace 'B' 'iB' <<< $FILE_SIZE) \ 113 | && translate_size "$FILE_SIZE" && REQ_OUT="${REQ_OUT}s" 114 | fi 115 | 116 | if [[ $REQ_IN = *i* ]]; then 117 | parse . 'com/\([[:alnum:]]\+\)' <<< "$URL" && REQ_OUT="${REQ_OUT}i" 118 | fi 119 | 120 | echo $REQ_OUT 121 | } 122 | -------------------------------------------------------------------------------- /filemonkey.sh: -------------------------------------------------------------------------------- 1 | # Plowshare filemonkey.in module 2 | # Copyright (c) 2014 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 
10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see . 18 | 19 | MODULE_FILEMONKEY_REGEXP_URL='https\?://\(www\.\)\?filemonkey\.in/' 20 | 21 | MODULE_FILEMONKEY_UPLOAD_OPTIONS=" 22 | FOLDER,,folder,s=FOLDER,Folder to upload files into (root folder child ONLY!) 23 | CREATE_FOLDER,,create,,Create folder if it does not exist 24 | AUTH_FREE,b,auth-free,a=EMAIL:PASSWORD,Free account" 25 | MODULE_FILEMONKEY_UPLOAD_REMOTE_SUPPORT=no 26 | 27 | # Static function. Proceed with login 28 | # $1: authentication 29 | # $2: cookie file 30 | # $3: base url 31 | filemonkey_login() { 32 | local -r AUTH=$1 33 | local -r COOKIE_FILE=$2 34 | local -r BASE_URL=$3 35 | local LOGIN_DATA PAGE STATUS ERR 36 | 37 | LOGIN_DATA='email=$USER&password=$PASSWORD' 38 | PAGE=$(post_login "$AUTH" "$COOKIE_FILE" "$LOGIN_DATA" \ 39 | "$BASE_URL/login") || return 40 | 41 | STATUS=$(parse_cookie_quiet 'logincookie' < "$COOKIE_FILE") 42 | if [ -z "$STATUS" ]; then 43 | ERR=$(parse_tag_quiet 'alert-danger' div <<< "$PAGE") 44 | log_debug "Remote error: '$ERR'" 45 | return $ERR_LOGIN_FAILED 46 | fi 47 | } 48 | 49 | # Upload a file to Filemonkey.in 50 | # $1: cookie file 51 | # $2: input file (with full path) 52 | # $3: remote filename 53 | # stdout: download link 54 | filemonkey_upload() { 55 | local -r COOKIE_FILE=$1 56 | local -r FILE=$2 57 | local -r DESTFILE=$3 58 | local -r BASE_URL='https://www.filemonkey.in' 59 | local PAGE API_KEY FID UPLOAD_URL JSON STATUS 60 | 61 | # Sanity check 62 | [ -n "$AUTH_FREE" ] || return $ERR_LINK_NEED_PERMISSIONS 63 | 64 | if [ -n "$CREATE_FOLDER" -a -z "$FOLDER" ]; then 65 | log_error '--folder option required' 66 | return $ERR_BAD_COMMAND_LINE 67 | fi 68 | 69 | filemonkey_login "$AUTH_FREE" "$COOKIE_FILE" "$BASE_URL" || return 70 | 71 | PAGE=$(curl -b "$COOKIE_FILE" "$BASE_URL/manage") || return 72 | 73 | # Get upload url, apikey and folder 74 | API_KEY=$(parse "'apikey'" ":[[:space:]]*'\([^']\+\)" <<< "$PAGE") || return 75 | log_debug "apikey: '$API_KEY'" 76 | 77 | if [ -z "$FOLDER" ]; then 78 | FID=$(parse "'folder'" ":[[:space:]]*'\([^']\+\)" <<< "$PAGE") || return 79 | log_debug "root folder: '$FID'" 80 | else 81 | FID=$(parse_attr_quiet ">$FOLDER<" data-pk <<< "$PAGE") 82 | 83 | # Create a folder (root folder is parent) 84 | # POST /manage?folder=xxx 85 | if [ -z "$FID" ]; then 86 | if [ -n "$CREATE_FOLDER" ]; then 87 | PAGE=$(curl -b "$COOKIE_FILE" --referer "$BASE_URL/manage" \ 88 | -d "newfolder_name=$FOLDER" \ 89 | -d 'action=createfolder' \ 90 | "$BASE_URL/manage") || return 91 | 92 | if [ -z "$PAGE" ]; then 93 | log_error 'An error has occurred. Remote folder already exists?' 94 | return $ERR_FATAL 95 | fi 96 | 97 | FID=$(parse_attr ">$FOLDER<" data-pk <<< "$PAGE") || return 98 | else 99 | log_error 'Folder does not seem to exist. Use --create switch.' 
100 | return $ERR_FATAL 101 | fi 102 | fi 103 | log_debug "child folder: '$FID'" 104 | fi 105 | 106 | UPLOAD_URL=$(parse '://dl-' "=[[:space:]]*'\([^']\+\)" <<< "$PAGE") || return 107 | log_debug "upload url: '$UPLOAD_URL'" 108 | 109 | # No cookie required here 110 | # Answers: 111 | # {"status":"success","response":{"filename":"foo.zip","extid":"ki1tqa3u369b46s7","md5":"13f5efdc3b88c4076f80b9615bf12312"}} 112 | # {"status":"error","error":"duplicate_file_in_folder"} 113 | JSON=$(curl_with_log --referer "$BASE_URL/manage" -H "Origin: $BASE_URL" \ 114 | -F "apikey=$API_KEY" \ 115 | -F "folder=$FID" \ 116 | -F "file=@$FILE;filename=$DESTFILE" "$UPLOAD_URL") || return 117 | 118 | STATUS=$(parse_json 'status' <<< "$JSON") || return 119 | 120 | if [ "$STATUS" != 'success' ]; then 121 | local ERR=$(parse_json 'error' <<< "$JSON") 122 | log_error "Remote error: '$ERR'" 123 | return $ERR_FATAL 124 | fi 125 | 126 | STATUS=$(parse_json 'extid' <<< "$JSON") || return 127 | 128 | echo "$BASE_URL/file/$STATUS" 129 | } 130 | -------------------------------------------------------------------------------- /jheberg.sh: -------------------------------------------------------------------------------- 1 | # Plowshare jheberg.net module 2 | # Copyright (c) 2012-2013 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see . 18 | 19 | MODULE_JHEBERG_REGEXP_URL='http://\(www\.\)\?jheberg\.net/' 20 | 21 | MODULE_JHEBERG_UPLOAD_OPTIONS=" 22 | AUTH,a,auth,a=USER:PASSWORD,User account" 23 | MODULE_JHEBERG_UPLOAD_REMOTE_SUPPORT=no 24 | 25 | MODULE_JHEBERG_LIST_OPTIONS="" 26 | MODULE_JHEBERG_LIST_HAS_SUBFOLDERS=no 27 | 28 | MODULE_JHEBERG_PROBE_OPTIONS= 29 | 30 | # Upload a file to Jheberg.net 31 | # $1: cookie file (for account only) 32 | # $2: input file (with full path) 33 | # $3: remote filename 34 | # stdout: jheberg.net download link 35 | jheberg_upload() { 36 | local -r COOKIE_FILE=$1 37 | local -r FILE=$2 38 | local -r DESTFILE=$3 39 | local -r API_URL='http://www.jheberg.net/api' 40 | local PAGE UPLOAD_URL JSON USER PASSWORD 41 | 42 | # Note: official API does not allow hoster selection (yet). 
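# (illustration, not captured from the live API - the host name is made up)
# The server-selection call below boils down to an exchange like:
#   $ curl -L 'http://www.jheberg.net/api/get/server/'
#   {"url": "http://dl-03.jheberg.net/"}
# parse_json 'url' extracts the field and the literal 'api/upload/' is
# appended, yielding the multipart endpoint passed to curl_with_log.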
43 | 44 | PAGE=$(curl -L "$API_URL/get/server/") || return 45 | UPLOAD_URL="$(echo "$PAGE" | parse_json 'url')api/upload/" || return 46 | log_debug "Upload URL: $UPLOAD_URL" 47 | 48 | if [ -n "$AUTH" ]; then 49 | split_auth "$AUTH" USER PASSWORD || return 50 | JSON=$(curl_with_log -F "file=@$FILE;filename=$DESTFILE" \ 51 | -F "username=$USER" -F "password=$PASSWORD" \ 52 | "$UPLOAD_URL") || return 53 | else 54 | JSON=$(curl_with_log -F "file=@$FILE;filename=$DESTFILE" \ 55 | "$UPLOAD_URL") || return 56 | fi 57 | 58 | if match_json_true 'error' "$JSON"; then 59 | local ERR=$(echo "$JSON" | parse_json 'error_string') 60 | if matchi 'bad credentials' "$ERR"; then 61 | return $ERR_LOGIN_FAILED 62 | else 63 | log_error "Remote error: $ERR" 64 | return $ERR_FATAL 65 | fi 66 | fi 67 | 68 | echo "$JSON" | parse_json 'url' || return 69 | } 70 | 71 | # List links from a Jheberg link 72 | # $1: jheberg link 73 | # $2: recurse subfolders (ignored here) 74 | # stdout: list of links 75 | jheberg_list() { 76 | local -r URL=${1/\/captcha\///download/} 77 | local -r BASE_URL='http://www.jheberg.net' 78 | local JSON NAMES DL_ID URL2 HOSTER 79 | 80 | JSON=$(curl -L --get --data "id=$(uri_encode_strict <<< "$URL")" \ 81 | "$BASE_URL/api/verify/file/") || return 82 | 83 | if [ -z "$JSON" ] || match '^<' "$JSON"; then 84 | return $ERR_LINK_DEAD 85 | fi 86 | } 87 | -------------------------------------------------------------------------------- /filedais.sh: -------------------------------------------------------------------------------- 1 | # Plowshare filedais.com module 2 | # Copyright (c) 2016 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see <http://www.gnu.org/licenses/>. 18 | 19 | MODULE_FILEDAIS_REGEXP_URL='http://\(www\.\)\?\(filedais\|anafile\)\.com/.\+.html\?' 20 | MODULE_FILEDAIS_DOWNLOAD_OPTIONS="" 21 | MODULE_FILEDAIS_DOWNLOAD_RESUME=yes 22 | MODULE_FILEDAIS_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no 23 | MODULE_FILEDAIS_DOWNLOAD_SUCCESSIVE_INTERVAL= 24 | 25 | MODULE_FILEDAIS_PROBE_OPTIONS="" 26 | 27 | # Output a filedais/anafile.com file download URL 28 | # $1: cookie file 29 | # $2: filedais/anafile.com url 30 | # stdout: real file download link 31 | filedais_download() { 32 | local -r COOKIE_FILE=$1 33 | local -r URL=$2 34 | local -r BASE_URL=$(basename_url "$URL") 35 | local PAGE LOCATION WAIT_TIME FILE_URL FORM_HTML 36 | local PHP_URL FILE_ID FILE_NAME FORM_METHOD DANGER_DIV 37 | 38 | PAGE=$(curl -L -i -c "$COOKIE_FILE" -b "$COOKIE_FILE" "$URL") || return 39 | 40 | if match '
Software error:
' "$PAGE"; then 41 | return $ERR_LINK_TEMP_UNAVAILABLE 42 | elif match 'File Not Found' "$PAGE"; then 43 | return $ERR_LINK_DEAD 44 | fi 45 | 46 | LOCATION=$(grep_http_header_location_quiet <<< "$PAGE") 47 | if match_remote_url "$LOCATION"; then 48 | PHP_URL=$LOCATION 49 | else 50 | PHP_URL=$BASE_URL$LOCATION 51 | fi 52 | 53 | FORM_HTML=$(grep_form_by_order "$PAGE" 1) || return 54 | FILE_ID=$(echo "$FORM_HTML" | parse_form_input_by_name 'id') 55 | FILE_NAME=$(echo "$FORM_HTML" | parse_form_input_by_name 'fname') 56 | FORM_METHOD=$(echo "$FORM_HTML" | parse_form_input_by_name 'method_free') 57 | 58 | PAGE=$(curl -i -c "$COOKIE_FILE" -b "$COOKIE_FILE" \ 59 | -d 'op=download1' \ 60 | -d "id=$FILE_ID" \ 61 | -d "fname=$FILE_NAME" \ 62 | -d "method_free=$FORM_METHOD" \ 63 | $PHP_URL) || return 64 | 65 | DANGER_DIV=$(parse_tag_quiet 'class="alert alert-danger"' 'div' <<< "$PAGE") 66 | if [[ -n "$DANGER_DIV" ]]; then 67 | if match 'You have to wait' "$DANGER_DIV"; then 68 | WAIT_TIME=$(parse_quiet . ' ([0-9]+) seconds' <<< "$DANGER_DIV") 69 | if [[ -n "$WAIT_TIME" ]]; then 70 | echo $WAIT_TIME 71 | fi 72 | return $ERR_LINK_TEMP_UNAVAILABLE 73 | else 74 | log_debug "unexpected alert-danger: $DANGER_DIV" 75 | fi 76 | fi 77 | 78 | FORM_HTML=$(grep_form_by_order "$PAGE" 1) || return 79 | RAND=$(echo "$FORM_HTML" | parse_form_input_by_name 'rand') || return 80 | 81 | WAIT_TIME=$(parse_tag 'Wait' 'span' <<< "$PAGE") || return 82 | wait $WAIT_TIME || return 83 | 84 | local PUBKEY WCI CHALLENGE WORD ID 85 | PUBKEY=$(parse '\/recaptcha\/api\/' '?k=\([^"]\+\)' <<< "$PAGE" ) 86 | WCI=$(recaptcha_process $PUBKEY) || return 87 | { read WORD; read CHALLENGE; read ID; } <<< "$WCI" 88 | 89 | PAGE=$(curl -i -c "$COOKIE_FILE" -b "$COOKIE_FILE" \ 90 | -d 'op=download2' \ 91 | -d "id=$FILE_ID" \ 92 | -d "referer=$PHP_URL" \ 93 | -d "recaptcha_challenge_field=$CHALLENGE" \ 94 | -d "recaptcha_response_field=$WORD" \ 95 | -d 'down_script=1' \ 96 | -d "rand=$RAND" \ 97 | -d "method_free=$FORM_METHOD" \ 98 | $PHP_URL) || return 99 | 100 | if match '>Wrong captcha
' "$PAGE"; then 101 | captcha_nack "$ID" 102 | return $ERR_CAPTCHA 103 | fi 104 | 105 | captcha_ack "$ID" 106 | 107 | parse_attr 'id="download1"' 'href' <<<"$PAGE" 108 | return 0 109 | } 110 | 111 | # # Probe a download URL 112 | # # $1: cookie file 113 | # # $2: filedais.com/anafile.com url 114 | # # $3: requested capability list 115 | # # stdout: 1 capability per line 116 | filedais_probe() { 117 | local -r COOKIE_FILE=$1 118 | local -r URL=$2 119 | local -r REQ_IN=$3 120 | local PAGE REQ_OUT FORM_HTML 121 | 122 | PAGE=$(curl -L -c "$COOKIE_FILE" -b "$COOKIE_FILE" "$URL") || return 123 | 124 | if matchi '>File Not Found<' "$PAGE"; then 125 | return $ERR_LINK_DEAD 126 | fi 127 | 128 | # TODO: use op=checkfiles if we need size 129 | FORM_HTML=$(grep_form_by_order "$PAGE" 1) || return 130 | REQ_OUT='c' 131 | 132 | if [[ "$REQ_IN" = *f* ]]; then 133 | parse_form_input_by_name 'fname' <<< "$FORM_HTML" && REQ_OUT="${REQ_OUT}f" 134 | fi 135 | 136 | if [[ "$REQ_IN" = *i* ]]; then 137 | parse_form_input_by_name 'id' <<< "$FORM_HTML" && REQ_OUT="${REQ_OUT}i" 138 | fi 139 | 140 | echo $REQ_OUT 141 | } 142 | -------------------------------------------------------------------------------- /nitroflare.sh: -------------------------------------------------------------------------------- 1 | # Plowshare nitroflare.com module 2 | # Copyright (c) 2016 Plowshare team 3 | # 4 | # This file is part of Plowshare. 5 | # 6 | # Plowshare is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # Plowshare is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with Plowshare. If not, see . 18 | 19 | MODULE_NITROFLARE_REGEXP_URL='https\?://\(www\.\)\?nitroflare\.com/' 20 | 21 | MODULE_NITROFLARE_DOWNLOAD_OPTIONS="" 22 | MODULE_NITROFLARE_DOWNLOAD_RESUME=no 23 | MODULE_NITROFLARE_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no 24 | MODULE_NITROFLARE_DOWNLOAD_SUCCESSIVE_INTERVAL= 25 | 26 | MODULE_NITROFLARE_PROBE_OPTIONS="" 27 | 28 | # Output a nitroflare file download URL 29 | # $1: cookie file 30 | # $2: nitroflare url 31 | # stdout: real file download link 32 | nitroflare_download() { 33 | local -r COOKIE_FILE=$1 34 | local -r BASE_URL='http://nitroflare.com/' 35 | local URL PAGE RAND_HASH FREE_URL FILE_ID RESP WAIT_TIME FILE_URL 36 | 37 | # Get a canonical URL for this file. 38 | URL=$(curl -I "$2" | grep_http_header_location_quiet) || return 39 | [ -n "$URL" ] || URL=$2 40 | readonly URL 41 | 42 | PAGE=$(curl -c "$COOKIE_FILE" -i -L "$URL") || return 43 | 44 | if match "File doesn't exist\|This file has been removed\|404 Not Found" "$PAGE"; then 45 | return $ERR_LINK_DEAD 46 | fi 47 | 48 | # Register randHash in a cookie file. 49 | RAND_HASH=$(random 'H' 32) || return 50 | curl -b "$COOKIE_FILE" \ 51 | -c "$COOKIE_FILE" \ 52 | -d "randHash=$RAND_HASH" \ 53 | "$BASE_URL/ajax/randHash.php" > /dev/null || return 54 | 55 | FREE_URL=$(parse_attr '
' 'title' <<< "$PAGE" && REQ_OUT="${REQ_OUT}f" 127 | fi 128 | 129 | if [[ $REQ_IN = *s* ]]; then 130 | FILE_SIZE=$(parse_tag '' 'span' <<< "$PAGE") \ 131 | && FILE_SIZE=$(replace 'B' 'iB' <<< $FILE_SIZE) \ 132 | && translate_size "$FILE_SIZE" && REQ_OUT="${REQ_OUT}s" 133 | fi 134 | 135 | if [[ $REQ_IN = *i* ]]; then 136 | parse '. 18 | 19 | MODULE_YOURVIDEOHOST_REGEXP_URL='https\?://\(www\.\)\?yourvideohost\.com/' 20 | 21 | MODULE_YOURVIDEOHOST_DOWNLOAD_OPTIONS=" 22 | AUTH,a,auth,a=USER:PASSWORD,Premium account" 23 | MODULE_YOURVIDEOHOST_DOWNLOAD_RESUME=yes 24 | MODULE_YOURVIDEOHOST_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no 25 | MODULE_YOURVIDEOHOST_DOWNLOAD_SUCCESSIVE_INTERVAL= 26 | 27 | MODULE_YOURVIDEOHOST_PROBE_OPTIONS="" 28 | 29 | # Static function. Proceed with login 30 | # $1: credentials string 31 | # $2: cookie file 32 | # $3: base url 33 | yourvideohost_login() { 34 | local AUTH=$1 35 | local COOKIE_FILE=$2 36 | local BASE_URL=$3 37 | 38 | local LOGIN_DATA LOGIN_RESULT NAME ERR 39 | 40 | LOGIN_DATA='op=login&redirect=&login=$USER&password=$PASSWORD' 41 | LOGIN_RESULT=$(post_login "$AUTH" "$COOKIE_FILE" "$LOGIN_DATA" "$BASE_URL") || return 42 | 43 | # Set-Cookie: login xfsts 44 | NAME=$(parse_cookie_quiet 'login' < "$COOKIE_FILE") 45 | if [ -n "$NAME" ]; then 46 | log_debug "Successfully logged in as $NAME member" 47 | return 0 48 | fi 49 | 50 | # Try to parse error 51 | # Incorrect Username or Password
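# (illustration; the markup is inferred from the parse pattern, not taken
# from the site) A failed login is flagged roughly as:
#   <b class="err">Incorrect Username or Password</b>
# parse_tag_quiet pulls out the tag body, and the dots in 'class=.err.'
# act as single-character wildcards standing in for the quote marks, which
# keeps the pattern easy to single-quote.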
52 | ERR=$(parse_tag_quiet 'class=.err.>' b <<< "$LOGIN_RESULT") 53 | [ -n "$ERR" ] && log_error "Unexpected remote error: $ERR" 54 | 55 | return $ERR_LOGIN_FAILED 56 | } 57 | 58 | # Output a yourvideohost file download URL 59 | # $1: cookie file (unused here) 60 | # $2: yourvideohost url 61 | # stdout: real file download link 62 | yourvideohost_download() { 63 | local -r COOKIE_FILE=$1 64 | local -r URL=$2 65 | local -r BASE_URL='http://yourvideohost.com' 66 | local PAGE FILE_URL WAIT_TIME JS 67 | local FORM_HTML FORM_OP FORM_ID FORM_USR FORM_REF FORM_FNAME FORM_HASH FORM_SUBMIT 68 | 69 | detect_javascript || return 70 | 71 | if [ -n "$AUTH" ]; then 72 | yourvideohost_login "$AUTH" "$COOKIE_FILE" "$BASE_URL" || return 73 | else 74 | return $ERR_LINK_NEED_PERMISSIONS 75 | fi 76 | 77 | PAGE=$(curl -b "$COOKIE_FILE" -c "$COOKIE_FILE" "$URL") || return 78 | 79 | FORM_HTML=$(grep_form_by_order "$PAGE" 1) || return 80 | FORM_OP=$(parse_form_input_by_name 'op' <<< "$FORM_HTML") || return 81 | FORM_ID=$(parse_form_input_by_name 'id' <<< "$FORM_HTML") || return 82 | FORM_USR=$(parse_form_input_by_name_quiet 'usr_login' <<< "$FORM_HTML") 83 | FORM_REF=$(parse_form_input_by_name_quiet 'referer' <<< "$FORM_HTML") 84 | FORM_FNAME=$(parse_form_input_by_name 'fname' <<< "$FORM_HTML") || return 85 | FORM_HASH=$(parse_form_input_by_name 'hash' <<< "$FORM_HTML") || return 86 | FORM_SUBMIT=$(parse_form_input_by_name 'imhuman' <<< "$FORM_HTML") || return 87 | 88 | # Wait 3 seconds 89 | WAIT_TIME=$(parse_tag 'countdown_str' 'span' <<< "$PAGE") 90 | wait $((WAIT_TIME)) || return 91 | 92 | PAGE=$(curl -b "$COOKIE_FILE" -b 'lang=english' \ 93 | -d "op=$FORM_OP" -d "id=$FORM_ID" -d "usr_login=$FORM_USR" \ 94 | -d "referer=$FORM_REF" -d "fname=$FORM_FNAME" \ 95 | -d "hash=$FORM_HASH" -d "imhuman=$FORM_SUBMIT" \ 96 | "$URL") || return 97 | 98 | # Obfuscated javascript 99 | #