├── .github ├── ISSUE_TEMPLATE │ ├── config.yml │ └── bug-report.yaml ├── dependabot.yml ├── FUNDING.yml └── pull_request_template.md ├── res ├── icon_16.png ├── icon_32.png ├── icon_48.png ├── icon_64.png └── cultured_downloader.ico ├── src ├── utils │ ├── icon.png │ ├── notify.go │ ├── browser.go │ ├── json.go │ ├── http.go │ ├── logger.go │ ├── file_extractor.go │ ├── log_error.go │ ├── constants.go │ ├── files.go │ └── cookie.go ├── api │ ├── pixiv │ │ ├── models │ │ │ ├── common.go │ │ │ ├── pixiv_mobile.go │ │ │ └── pixiv_web.go │ │ ├── web │ │ │ ├── pixiv_web.go │ │ │ ├── args.go │ │ │ └── process.go │ │ ├── common │ │ │ ├── utils.go │ │ │ └── headers.go │ │ ├── args.go │ │ ├── ugoira │ │ │ ├── ugoira.go │ │ │ ├── ffmpeg.go │ │ │ └── process.go │ │ ├── mobile │ │ │ ├── process.go │ │ │ ├── pixiv_mobile.go │ │ │ ├── args.go │ │ │ └── oauth.go │ │ └── pixiv.go │ ├── fantia │ │ ├── fantia.go │ │ ├── models │ │ │ └── fantia.go │ │ ├── args.go │ │ └── process.go │ ├── kemono │ │ ├── models │ │ │ └── kemono.go │ │ ├── kemono.go │ │ ├── process.go │ │ └── args.go │ ├── pixivfanbox │ │ ├── pixiv_fanbox.go │ │ ├── models │ │ │ └── pixiv_fanbox.go │ │ └── args.go │ └── cookie.go ├── cultured_downloader.go ├── request │ ├── model.go │ ├── args.go │ └── download.go ├── spinner │ ├── spinner_types.go │ └── spinner.go ├── gdrive │ ├── models │ │ └── gdrive.go │ ├── process.go │ ├── gdrive.go │ └── api.go ├── versioninfo.rc ├── configs │ └── configs.go ├── cmds │ ├── root.go │ ├── textparser │ │ ├── text_parser.go │ │ ├── fantia.go │ │ ├── kemono.go │ │ ├── pixiv_fanbox.go │ │ └── pixiv.go │ ├── kemono.go │ ├── pixiv_fanbox.go │ ├── cmds.go │ └── fantia.go ├── make.ps1 └── go.mod ├── .gitignore └── CONTRIBUTING.md /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: true 2 | -------------------------------------------------------------------------------- /res/icon_16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KJHJason/Cultured-Downloader-CLI/HEAD/res/icon_16.png -------------------------------------------------------------------------------- /res/icon_32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KJHJason/Cultured-Downloader-CLI/HEAD/res/icon_32.png -------------------------------------------------------------------------------- /res/icon_48.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KJHJason/Cultured-Downloader-CLI/HEAD/res/icon_48.png -------------------------------------------------------------------------------- /res/icon_64.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KJHJason/Cultured-Downloader-CLI/HEAD/res/icon_64.png -------------------------------------------------------------------------------- /src/utils/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KJHJason/Cultured-Downloader-CLI/HEAD/src/utils/icon.png -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *test*.* 2 | *.json 3 | !spinners.json 4 | *.log 5 | *.syso 6 | *.exe 7 | src/bin 8 | bin/hash.txt 9 | 
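# Note: the !spinners.json pattern above re-includes spinners.json, which the *.json rule would otherwise ignore; src/spinner/spinner_types.go embeds that file via go:embed, so it must stay tracked.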
-------------------------------------------------------------------------------- /res/cultured_downloader.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KJHJason/Cultured-Downloader-CLI/HEAD/res/cultured_downloader.ico -------------------------------------------------------------------------------- /src/api/pixiv/models/common.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | type Ugoira struct { 4 | Url string 5 | FilePath string 6 | Frames map[string]int64 7 | } 8 | 9 | type UgoiraFramesJson []struct { 10 | File string `json:"file"` 11 | Delay float64 `json:"delay"` 12 | } 13 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | - If you would like to make a pull request, please keep your code simple enough for me to understand. I am still a student learning programming, so if I cannot understand what your code is doing, I will be unable to maintain it. 4 | 5 | - Please do not change the syntax purely for the sake of "best practices" unless it makes a meaningful difference, such as in performance. 6 | 7 | - Ensure that your updated code **runs**. -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: gomod 9 | directory: /src/ 10 | schedule: 11 | interval: daily 12 | -------------------------------------------------------------------------------- /src/cultured_downloader.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "github.com/KJHJason/Cultured-Downloader-CLI/cmds" 5 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 6 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 7 | ) 8 | 9 | func main() { 10 | request.CheckInternetConnection() 11 | if err := request.CheckVer(); err != nil { 12 | utils.LogError(err, "", false, utils.ERROR) 13 | } 14 | 15 | if err := utils.DeleteEmptyAndOldLogs(); err != nil { 16 | utils.LogError(err, "", false, utils.ERROR) 17 | } 18 | 19 | cmds.RootCmd.Execute() 20 | } 21 | -------------------------------------------------------------------------------- /src/request/model.go: -------------------------------------------------------------------------------- 1 | package request 2 | 3 | import "net/http" 4 | 5 | type ToDownload struct { 6 | Url string 7 | FilePath string 8 | } 9 | 10 | type DlOptions struct { 11 | // MaxConcurrency is the maximum number of concurrent downloads 12 | MaxConcurrency int 13 | 14 | // Cookies is a list of cookies to be used in the download process 15 | Cookies []*http.Cookie 16 | 17 | // Headers is a map of headers to be used in the download process 18 | Headers map[string]string 19 | 20 | // UseHttp3 is a flag to enable HTTP/3 21 | // Otherwise, HTTP/2 will be used by default 22 | UseHttp3 bool 23 | } 24 | 
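// Example usage (an illustrative sketch rather than part of the original file; it assumes the DownloadUrls helper from download.go with the (urls, dlOptions, config) signature that callers such as pixiv_fanbox.go use): // // dlOptions := &DlOptions{ // MaxConcurrency: 4, // download at most 4 files at once // Cookies: sessionCookies, // cookies previously loaded for the target site // Headers: map[string]string{"Referer": "https://www.fanbox.cc/"}, // hypothetical header values // UseHttp3: false, // fall back to HTTP/2 // } // DownloadUrls( // []*ToDownload{{Url: fileUrl, FilePath: savePath}}, // dlOptions, // config, // a *configs.Config holding the download settings // )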
-------------------------------------------------------------------------------- /src/api/pixiv/web/pixiv_web.go: -------------------------------------------------------------------------------- 1 | package pixivweb 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 7 | ) 8 | 9 | const ( 10 | ILLUST = iota 11 | MANGA 12 | UGOIRA 13 | ) 14 | 15 | // This is due to Pixiv's strict rate limiting. 16 | // 17 | // Without delays, the user might get 429 too many requests 18 | // or the user's account might get suspended. 19 | // 20 | // Additionally, pixiv.net is protected by cloudflare, so 21 | // to prevent the user's IP reputation from going down, delays are added. 22 | // 23 | // More info: https://github.com/Nandaka/PixivUtil2/issues/477 24 | func pixivSleep() { 25 | time.Sleep(utils.GetRandomTime(0.5, 1.0)) 26 | } 27 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: [KJHJason] 4 | patreon: # Replace with a single Patreon username 5 | open_collective: # Replace with a single Open Collective username 6 | ko_fi: kjhjason 7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 9 | liberapay: # Replace with a single Liberapay username 10 | issuehunt: # Replace with a single IssueHunt username 11 | otechie: # Replace with a single Otechie username 12 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry 13 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] 14 | -------------------------------------------------------------------------------- /src/api/pixiv/common/utils.go: -------------------------------------------------------------------------------- 1 | package pixivcommon 2 | 3 | import "github.com/KJHJason/Cultured-Downloader-CLI/utils" 4 | 5 | // Convert the page number to the offset as one page will have 60 illustrations. 6 | // 7 | // Usually for paginated results from Pixiv's mobile API, checkPixivMax should be set to true. 8 | func ConvertPageNumToOffset(minPageNum, maxPageNum, perPage int, checkPixivMax bool) (int, int) { 9 | minOffset, maxOffset := utils.ConvertPageNumToOffset( 10 | minPageNum, 11 | maxPageNum, 12 | perPage, 13 | ) 14 | if checkPixivMax { 15 | // Check if the offset is larger than Pixiv's max offset 16 | if maxOffset > 5000 { 17 | maxOffset = 5000 18 | } 19 | if minOffset > 5000 { 20 | minOffset = 5000 21 | } 22 | } 23 | return minOffset, maxOffset 24 | } 25 | -------------------------------------------------------------------------------- /src/spinner/spinner_types.go: -------------------------------------------------------------------------------- 1 | // Spinners from https://github.com/sindresorhus/cli-spinners/blob/main/spinners.json 2 | package spinner 3 | 4 | import ( 5 | _ "embed" 6 | "encoding/json" 7 | ) 8 | 9 | type SpinnerInfo struct { 10 | Interval int64 11 | Frames []string 12 | } 13 | 14 | //go:embed spinners.json 15 | var spinnersJson []byte 16 | 17 | // GetSpinnerTypes returns a map of spinnerInfo 18 | // that contains the interval and frames for each spinner. 
19 | func GetSpinnerTypes() map[string]SpinnerInfo { 20 | if spinnerTypes != nil { 21 | // Return the cached spinner types 22 | return spinnerTypes 23 | } 24 | 25 | var spinners map[string]SpinnerInfo 26 | if err := json.Unmarshal(spinnersJson, &spinners); err != nil { 27 | panic(err) 28 | } 29 | 30 | spinnerTypes = spinners // cache the parsed map so the nil check above short-circuits future calls 31 | return spinnerTypes 32 | } 33 | -------------------------------------------------------------------------------- /src/api/pixiv/common/headers.go: -------------------------------------------------------------------------------- 1 | package pixivcommon 2 | 3 | import ( 4 | "fmt" 5 | 6 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 7 | ) 8 | 9 | // Returns a defined request header needed to communicate with Pixiv's API 10 | func GetPixivRequestHeaders() map[string]string { 11 | return map[string]string{ 12 | "Origin": utils.PIXIV_URL, 13 | "Referer": utils.PIXIV_URL, 14 | } 15 | } 16 | 17 | // Get the Pixiv illust page URL for the referral header value 18 | func GetIllustUrl(illustId string) string { 19 | return fmt.Sprintf( 20 | "%s/artworks/%s", 21 | utils.PIXIV_URL, 22 | illustId, 23 | ) 24 | } 25 | 26 | // Get the Pixiv user page URL for the referral header value 27 | func GetUserUrl(userId string) string { 28 | return fmt.Sprintf( 29 | "%s/users/%s", 30 | utils.PIXIV_URL, 31 | userId, 32 | ) 33 | } 34 | -------------------------------------------------------------------------------- /src/gdrive/models/gdrive.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | type GDriveFile struct { 4 | Kind string `json:"kind"` 5 | Id string `json:"id"` 6 | Name string `json:"name"` 7 | Size string `json:"size"` 8 | MimeType string `json:"mimeType"` 9 | Md5Checksum string `json:"md5Checksum"` 10 | } 11 | 12 | type GDriveFolder struct { 13 | Kind string `json:"kind"` 14 | IncompleteSearch bool `json:"incompleteSearch"` 15 | Files []GDriveFile `json:"files"` 16 | NextPageToken string `json:"nextPageToken"` 17 | } 18 | 19 | type GDriveToDl struct { 20 | Id string 21 | Type string 22 | FilePath string 23 | } 24 | 25 | type GdriveFileToDl struct { 26 | Id string 27 | Name string 28 | Size string 29 | MimeType string 30 | Md5Checksum string 31 | FilePath string 32 | } 33 | 34 | type GdriveError struct { 35 | Err error 36 | FilePath string 37 | } 38 | -------------------------------------------------------------------------------- /src/versioninfo.rc: -------------------------------------------------------------------------------- 1 | 1 VERSIONINFO 2 | FILEVERSION 1,2,5,0 3 | PRODUCTVERSION 1,2,5,0 4 | FILEFLAGSMASK 0X3FL 5 | FILEFLAGS 0L 6 | FILEOS 0X40004L 7 | FILETYPE 0X1 8 | FILESUBTYPE 0 9 | BEGIN 10 | BLOCK "StringFileInfo" 11 | BEGIN 12 | BLOCK "040904B0" 13 | BEGIN 14 | VALUE "CompanyName", "" 15 | VALUE "FileDescription", "CLI tool of Cultured Downloader" 16 | VALUE "FileVersion", "1.2.5" 17 | VALUE "InternalName", "Cultured Downloader CLI" 18 | VALUE "LegalCopyright", "KJHJason.
GPL-3.0 license" 19 | VALUE "OriginalFilename", "cultured-downloader-cli.exe" 20 | VALUE "ProductName", "Cultured Downloader CLI" 21 | VALUE "ProductVersion", "1.2.5" 22 | END 23 | END 24 | BLOCK "VarFileInfo" 25 | BEGIN 26 | VALUE "Translation", 0x0409, 0x04B0 27 | END 28 | END 29 | 30 | 1 ICON "../res/cultured_downloader.ico" -------------------------------------------------------------------------------- /src/api/fantia/fantia.go: -------------------------------------------------------------------------------- 1 | package fantia 2 | 3 | import ( 4 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 5 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 6 | ) 7 | 8 | // Start the download process for Fantia 9 | func FantiaDownloadProcess(fantiaDl *FantiaDl, fantiaDlOptions *FantiaDlOptions) { 10 | if !fantiaDlOptions.DlThumbnails && !fantiaDlOptions.DlImages && !fantiaDlOptions.DlAttachments { 11 | return 12 | } 13 | 14 | if len(fantiaDl.FanclubIds) > 0 { 15 | fantiaDl.getCreatorsPosts(fantiaDlOptions) 16 | } 17 | 18 | var gdriveLinks []*request.ToDownload 19 | var downloadedPosts bool 20 | if len(fantiaDl.PostIds) > 0 { 21 | fantiaDl.dlFantiaPosts(fantiaDlOptions) 22 | downloadedPosts = true 23 | } 24 | 25 | if fantiaDlOptions.GdriveClient != nil && len(gdriveLinks) > 0 { 26 | fantiaDlOptions.GdriveClient.DownloadGdriveUrls(gdriveLinks, fantiaDlOptions.Configs) 27 | downloadedPosts = true 28 | } 29 | 30 | if downloadedPosts { 31 | utils.AlertWithoutErr(utils.Title, "Downloaded all posts from Fantia!") 32 | } else { 33 | utils.AlertWithoutErr(utils.Title, "No posts to download from Fantia!") 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/configs/configs.go: -------------------------------------------------------------------------------- 1 | package configs 2 | 3 | import ( 4 | "os" 5 | "os/exec" 6 | 7 | "github.com/fatih/color" 8 | ) 9 | 10 | type Config struct { 11 | // DownloadPath will be used as the base path for all downloads 12 | DownloadPath string 13 | 14 | // FfmpegPath is the path to the FFmpeg binary 15 | FfmpegPath string 16 | 17 | // OverwriteFiles is a flag to overwrite existing files 18 | // If false, the download process will be skipped if the file already exists 19 | OverwriteFiles bool 20 | 21 | // Log any detected URLs of the post content that are being downloaded 22 | // Despite the variable name, it only logs URLs to any supported 23 | // external file hosting providers such as MEGA, Google Drive, etc. 
24 | LogUrls bool 25 | 26 | // UserAgent is the user agent to be used in the download process 27 | UserAgent string 28 | } 29 | 30 | func (c *Config) ValidateFfmpeg() { 31 | _, ffmpegErr := exec.LookPath(c.FfmpegPath) 32 | if ffmpegErr != nil { 33 | color.Red("FFmpeg is not installed.\nPlease install it from https://ffmpeg.org/ and either use the --ffmpeg_path flag or add the FFmpeg path to your PATH environment variable or alias depending on your OS.") 34 | os.Exit(1) 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /src/api/pixiv/models/pixiv_mobile.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | type PixivOauthJson struct { 4 | AccessToken string `json:"access_token"` 5 | ExpiresIn float64 `json:"expires_in"` 6 | } 7 | 8 | type PixivOauthFlowJson struct { 9 | RefreshToken string `json:"refresh_token"` 10 | } 11 | 12 | type UgoiraJson struct { 13 | Metadata struct { 14 | Frames UgoiraFramesJson `json:"frames"` 15 | ZipUrls struct { 16 | Medium string `json:"medium"` 17 | } `json:"zip_urls"` 18 | } `json:"ugoira_metadata"` 19 | } 20 | 21 | type PixivMobileIllustJson struct { 22 | Id int `json:"id"` 23 | Title string `json:"title"` 24 | Type string `json:"type"` 25 | 26 | User struct { 27 | Name string `json:"name"` 28 | } `json:"user"` 29 | 30 | MetaSinglePage struct { 31 | OriginalImageUrl string `json:"original_image_url"` 32 | } `json:"meta_single_page"` 33 | 34 | MetaPages []struct { 35 | ImageUrls struct { 36 | Original string `json:"original"` 37 | } `json:"image_urls"` 38 | } `json:"meta_pages"` 39 | } 40 | 41 | type PixivMobileArtworkJson struct { 42 | Illust *PixivMobileIllustJson `json:"illust"` 43 | } 44 | type PixivMobileArtworksJson struct { 45 | Illusts []*PixivMobileIllustJson `json:"illusts"` 46 | NextUrl *string `json:"next_url"` 47 | } 48 | -------------------------------------------------------------------------------- /src/api/pixiv/models/pixiv_web.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | type ArtworkDetails struct { 4 | Body struct { 5 | UserName string `json:"userName"` 6 | Title string `json:"title"` 7 | IllustType int64 `json:"illustType"` 8 | } 9 | } 10 | 11 | type PixivWebArtworkUgoiraJson struct { 12 | Body struct { 13 | Src string `json:"src"` 14 | OriginalSrc string `json:"originalSrc"` 15 | MimeType string `json:"mime_type"` 16 | Frames UgoiraFramesJson `json:"frames"` 17 | } `json:"body"` 18 | } 19 | 20 | type PixivWebArtworkJson struct { 21 | Body []struct { 22 | Urls struct { 23 | ThumbMini string `json:"thumb_mini"` 24 | Small string `json:"small"` 25 | Regular string `json:"regular"` 26 | Original string `json:"original"` 27 | } `json:"urls"` 28 | Width int `json:"width"` 29 | Height int `json:"height"` 30 | } `json:"body"` 31 | } 32 | 33 | type PixivTag struct { 34 | Body struct { 35 | IllustManga struct { 36 | Data []struct { 37 | Id string `json:"id"` 38 | } `json:"data"` 39 | } `json:"illustManga"` 40 | } `json:"body"` 41 | } 42 | 43 | type PixivWebIllustratorJson struct { 44 | Body struct { 45 | Illusts interface{} `json:"illusts"` 46 | Manga interface{} `json:"manga"` 47 | } `json:"body"` 48 | } 49 | -------------------------------------------------------------------------------- /src/api/fantia/models/fantia.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | type FantiaContent struct { 4 | // Any 
attachments such as pdfs that are in their dedicated section 5 | AttachmentURI string `json:"attachment_uri"` 6 | 7 | // For images that are uploaded to their own section 8 | PostContentPhotos []struct { 9 | ID int `json:"id"` 10 | URL struct { 11 | Original string `json:"original"` 12 | } `json:"url"` 13 | } `json:"post_content_photos"` 14 | 15 | // For images that are embedded in the post content blocks. 16 | // Could also contain links to other external file hosting providers. 17 | Comment string `json:"comment"` 18 | 19 | // For attachments such as pdfs that are embedded in the post content 20 | DownloadUri string `json:"download_uri"` 21 | Filename string `json:"filename"` 22 | } 23 | 24 | type FantiaPost struct { 25 | Post struct { 26 | ID int `json:"id"` 27 | Comment string `json:"comment"` // the main post content 28 | Title string `json:"title"` 29 | Thumb struct { 30 | Original string `json:"original"` 31 | } `json:"thumb"` 32 | Fanclub struct { 33 | User struct { 34 | Name string `json:"name"` 35 | } `json:"user"` 36 | } `json:"fanclub"` 37 | Status string `json:"status"` 38 | PostContents []FantiaContent `json:"post_contents"` 39 | } `json:"post"` 40 | Redirect string `json:"redirect"` // if the request gets flagged by the system, it will redirect to this recaptcha url 41 | } 42 | -------------------------------------------------------------------------------- /src/api/kemono/models/kemono.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | type MainKemonoJson struct { 4 | Added string `json:"added"` 5 | Attachments []struct { 6 | Name string `json:"name"` 7 | Path string `json:"path"` 8 | } `json:"attachments"` 9 | Content string `json:"content"` 10 | Edited string `json:"edited"` 11 | Embed struct { 12 | Description string `json:"description"` 13 | Subject string `json:"subject"` 14 | Url string `json:"url"` 15 | } `json:"embed"` 16 | File struct { 17 | // usually the post thumbnail 18 | Name string `json:"name"` 19 | Path string `json:"path"` 20 | } `json:"file"` 21 | Id string `json:"id"` 22 | Published string `json:"published"` 23 | Service string `json:"service"` 24 | SharedFile bool `json:"shared_file"` 25 | Title string `json:"title"` 26 | User string `json:"user"` 27 | } 28 | 29 | type KemonoJson []*MainKemonoJson 30 | 31 | type KemonoFavCreatorJson []struct { 32 | FavedSeq int `json:"faved_seq"` 33 | Id string `json:"id"` 34 | Indexed string `json:"indexed"` 35 | Name string `json:"name"` 36 | Service string `json:"service"` 37 | Updated string `json:"updated"` 38 | } 39 | 40 | type KemonoPostToDl struct { 41 | Service string 42 | CreatorId string 43 | PostId string 44 | Tld string 45 | } 46 | 47 | type KemonoCreatorToDl struct { 48 | Service string 49 | CreatorId string 50 | PageNum string 51 | Tld string 52 | } 53 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | # Addition to code 2 | 3 | ## Type of additions (Tick those that apply): 4 | 5 | 6 | 7 | - [ ] Bug fix (non-breaking change which fixes an issue) 8 | - [ ] New feature (non-breaking change which adds functionality) 9 | 10 | ## Program Version 11 | 12 | Version: 13 | 14 | ## Summary of changes: 15 | 16 | Please add your summary here...
17 | 18 | ## Checklist (Tick those that apply): 19 | 20 | 21 | 22 | - [ ] I have read the [contribution guidelines](https://github.com/KJHJason/Cultured-Downloader-CLI/blob/main/CONTRIBUTING.md) and have adhered to them 23 | - [ ] I have performed a self-review of my code 24 | - [ ] I have commented my code, particularly in hard-to-understand areas 25 | - [ ] I have made corresponding changes to the documentation 26 | - [ ] My changes generate no new warnings 27 | - [ ] I have added tests that prove my fix is effective or that my feature works 28 | - [ ] Any dependent changes have been merged and published in downstream modules 29 | - [ ] I have checked my code and corrected any spelling mistakes 30 | 31 | ## Screenshots (if any): 32 | 33 | 34 | | Original | Updated | 35 | | ------------------- |:------------------:| 36 | | original screenshot | updated screenshot | -------------------------------------------------------------------------------- /src/utils/notify.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "bytes" 5 | _ "embed" 6 | "fmt" 7 | "io" 8 | "os" 9 | "path/filepath" 10 | 11 | "github.com/gen2brain/beeep" 12 | ) 13 | 14 | var ( 15 | //go:embed icon.png 16 | iconImg []byte 17 | iconPath = filepath.Join(APP_PATH, "icon.png") 18 | ) 19 | 20 | const Title = "Cultured Downloader CLI" 21 | 22 | func writeIcon() error { 23 | defer func() { 24 | if iconImg != nil { 25 | iconImg = nil 26 | } 27 | }() 28 | 29 | if PathExists(iconPath) { 30 | return nil 31 | } 32 | 33 | f, err := os.Create(iconPath) 34 | if err != nil { 35 | return err 36 | } 37 | defer f.Close() // ensure the created icon file is closed even if the copy below fails 38 | if _, err = io.Copy(f, bytes.NewReader(iconImg)); err != nil { 39 | return err 40 | } 41 | return nil 42 | } 43 | 44 | // Alert shows a notification on the user's system with the given title and message. 45 | func Alert(title, message string) error { 46 | if err := writeIcon(); err != nil { 47 | return fmt.Errorf( 48 | "error %d: unable to write notification icon => %v", 49 | UNEXPECTED_ERROR, 50 | err, 51 | ) 52 | } 53 | 54 | if err := beeep.Alert(title, message, iconPath); err != nil { 55 | return fmt.Errorf( 56 | "error %d: unable to show notification => %v", 57 | UNEXPECTED_ERROR, 58 | err, 59 | ) 60 | } 61 | 62 | return nil 63 | } 64 | 65 | // AlertWithoutErr is the same as Alert but 66 | // if an error occurs, it will log it instead of returning it. 67 | func AlertWithoutErr(title, message string) { 68 | if err := Alert(title, message); err != nil { 69 | LogError(err, "", false, ERROR) 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/utils/browser.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "context" 5 | "net/http" 6 | "time" 7 | 8 | "github.com/chromedp/cdproto/cdp" 9 | "github.com/chromedp/cdproto/network" 10 | "github.com/chromedp/chromedp" 11 | ) 12 | 13 | func SetChromedpAllocCookies(cookies []*http.Cookie) chromedp.Action { 14 | return chromedp.ActionFunc(func(ctx context.Context) error { 15 | for _, cookie := range cookies { 16 | var expr cdp.TimeSinceEpoch 17 | if cookie.Expires.IsZero() { 18 | expr = cdp.TimeSinceEpoch(time.Now().Add(365 * 24 * time.Hour)) 19 | } else { 20 | expr = cdp.TimeSinceEpoch(cookie.Expires) 21 | } 22 | 23 | err := network.SetCookie(cookie.Name, cookie.Value). 24 | WithExpires(&expr). 25 | WithDomain(cookie.Domain). 26 | WithPath(cookie.Path). 27 | WithHTTPOnly(cookie.HttpOnly).
28 | WithSecure(cookie.Secure). 29 | Do(ctx) 30 | if err != nil { 31 | return err 32 | } 33 | } 34 | return nil 35 | }) 36 | } 37 | 38 | func GetDefaultChromedpAlloc(userAgent string) (context.Context, context.CancelFunc) { 39 | opts := append(chromedp.DefaultExecAllocatorOptions[:], 40 | chromedp.UserAgent(userAgent), 41 | ) 42 | return chromedp.NewExecAllocator(context.Background(), opts...) 43 | } 44 | 45 | func ExecuteChromedpActions(allocCtx context.Context, allocCancelFn context.CancelFunc, actions ...chromedp.Action) error { 46 | if allocCtx == nil { 47 | allocCtx = context.Background() 48 | } 49 | 50 | taskCtx, cancel := chromedp.NewContext(allocCtx) 51 | defer cancel() 52 | 53 | return chromedp.Run(taskCtx, actions...) 54 | } 55 | -------------------------------------------------------------------------------- /src/cmds/root.go: -------------------------------------------------------------------------------- 1 | package cmds 2 | 3 | import ( 4 | "fmt" 5 | 6 | "github.com/fatih/color" 7 | "github.com/spf13/cobra" 8 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 9 | ) 10 | 11 | var ( 12 | downloadPath string 13 | RootCmd = &cobra.Command{ 14 | Use: "cultured-downloader-cli", 15 | Version: fmt.Sprintf( 16 | "%s by KJHJason\n%s", 17 | utils.VERSION, 18 | "GitHub Repo: https://github.com/KJHJason/Cultured-Downloader-CLI", 19 | ), 20 | Short: "Download images, videos, etc. from various websites like Fantia.", 21 | Long: "Cultured Downloader CLI is a command-line tool for downloading images, videos, etc. from various websites like Pixiv, Pixiv Fanbox, Fantia, and more.", 22 | Run: func(cmd *cobra.Command, args []string) { 23 | if downloadPath != "" { 24 | err := utils.SetDefaultDownloadPath(downloadPath) 25 | if err != nil { 26 | color.Red(err.Error()) 27 | } else { 28 | color.Green("Download path set to: %s", downloadPath) 29 | } 30 | } 31 | }, 32 | } 33 | ) 34 | 35 | func init() { 36 | RootCmd.Flags().StringVarP( 37 | &downloadPath, 38 | "dl_path", 39 | "p", 40 | "", 41 | utils.CombineStringsWithNewline( 42 | "Configure the path to download the files to and save it for future runs.", 43 | "Otherwise, the program will use the current working directory.", 44 | "Note:", 45 | "If you had used the \"-download_path\" flag before or", 46 | "had used the Cultured Downloader Python program, the program will automatically use the path you had set.", 47 | ), 48 | ) 49 | RootCmd.CompletionOptions.HiddenDefaultCmd = true 50 | } 51 | -------------------------------------------------------------------------------- /src/gdrive/process.go: -------------------------------------------------------------------------------- 1 | package gdrive 2 | 3 | import ( 4 | "path/filepath" 5 | "strings" 6 | 7 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 8 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 9 | ) 10 | 11 | // Processes the post's text content and detects any external download links in it 12 | func ProcessPostText(postBodyStr, postFolderPath string, downloadGdrive bool, logUrls bool) []*request.ToDownload { 13 | if postBodyStr == "" { 14 | return nil 15 | } 16 | 17 | // split the text by newlines 18 | postBodySlice := strings.FieldsFunc( 19 | postBodyStr, 20 | func(c rune) bool { 21 | return c == '\n' 22 | }, 23 | ) 24 | loggedPassword := false 25 | var detectedGdriveLinks []*request.ToDownload 26 | for _, text := range postBodySlice { 27 | if utils.DetectPasswordInText(text) && !loggedPassword { 28 | // Log the entire post text if it contains a password 29 | filePath :=
filepath.Join(postFolderPath, utils.PASSWORD_FILENAME) 30 | if !utils.PathExists(filePath) { 31 | loggedPassword = true 32 | utils.LogMessageToPath( 33 | "Found potential password in the post:\n\n" + postBodyStr, 34 | filePath, 35 | utils.ERROR, 36 | ) 37 | } 38 | } 39 | 40 | if logUrls { 41 | utils.DetectOtherExtDLLink(text, postFolderPath) 42 | } 43 | if utils.DetectGDriveLinks(text, postFolderPath, false, logUrls) && downloadGdrive { 44 | detectedGdriveLinks = append(detectedGdriveLinks, &request.ToDownload{ 45 | Url: text, 46 | FilePath: filepath.Join(postFolderPath, utils.GDRIVE_FOLDER), 47 | }) 48 | } 49 | } 50 | return detectedGdriveLinks 51 | } 52 | -------------------------------------------------------------------------------- /src/api/pixiv/args.go: -------------------------------------------------------------------------------- 1 | package pixiv 2 | 3 | import "github.com/KJHJason/Cultured-Downloader-CLI/utils" 4 | 5 | // PixivDl contains the IDs of the Pixiv artworks and 6 | // illustrators and Tag Names to download. 7 | type PixivDl struct { 8 | ArtworkIds []string 9 | 10 | IllustratorIds []string 11 | IllustratorPageNums []string 12 | 13 | TagNames []string 14 | TagNamesPageNums []string 15 | } 16 | 17 | // ValidateArgs validates the IDs of the Pixiv artworks and illustrators to download. 18 | // 19 | // It also validates the page numbers of the tag names to download. 20 | // 21 | // Should be called after initialising the struct. 22 | func (p *PixivDl) ValidateArgs() { 23 | utils.ValidateIds(p.ArtworkIds) 24 | utils.ValidateIds(p.IllustratorIds) 25 | p.ArtworkIds = utils.RemoveSliceDuplicates(p.ArtworkIds) 26 | 27 | if len(p.IllustratorPageNums) > 0 { 28 | utils.ValidatePageNumInput( 29 | len(p.IllustratorIds), 30 | p.IllustratorPageNums, 31 | []string{ 32 | "Number of illustrators ID(s) and illustrators' page numbers must be equal.", 33 | }, 34 | ) 35 | } else { 36 | p.IllustratorPageNums = make([]string, len(p.IllustratorIds)) 37 | } 38 | p.IllustratorIds, p.IllustratorPageNums = utils.RemoveDuplicateIdAndPageNum( 39 | p.IllustratorIds, 40 | p.IllustratorPageNums, 41 | ) 42 | 43 | if len(p.TagNamesPageNums) > 0 { 44 | utils.ValidatePageNumInput( 45 | len(p.TagNames), 46 | p.TagNamesPageNums, 47 | []string{ 48 | "Number of tag names and tag names' page numbers must be equal.", 49 | }, 50 | ) 51 | } else { 52 | p.TagNamesPageNums = make([]string, len(p.TagNames)) 53 | } 54 | p.TagNames, p.TagNamesPageNums = utils.RemoveDuplicateIdAndPageNum( 55 | p.TagNames, 56 | p.TagNamesPageNums, 57 | ) 58 | } 59 | -------------------------------------------------------------------------------- /src/cmds/textparser/text_parser.go: -------------------------------------------------------------------------------- 1 | package textparser 2 | 3 | import ( 4 | "bufio" 5 | "os" 6 | "io" 7 | "fmt" 8 | 9 | "github.com/fatih/color" 10 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 11 | ) 12 | 13 | const PAGE_NUM_REGEX_GRP_NAME = "pageNum" 14 | 15 | var PAGE_NUM_REGEX_STR = fmt.Sprintf( 16 | `(?:; (?P<%s>[1-9]\d*(?:-[1-9]\d*)?))?`, 17 | PAGE_NUM_REGEX_GRP_NAME, 18 | ) 19 | 20 | // openTextFile opens the text file at the given path and returns a os.File and a bufio.Reader. 21 | // 22 | // If an error occurs, the program will exit with an error message and status code 1. 
23 | func openTextFile(textFilePath, website string) (*os.File, *bufio.Reader) { 24 | f, err := os.Open(textFilePath) 25 | if err != nil { 26 | errMsg := fmt.Sprintf( 27 | "error %d: failed to open %s text file at %s, more info => %v", 28 | utils.OS_ERROR, 29 | website, 30 | textFilePath, 31 | err, 32 | ) 33 | color.Red(errMsg) 34 | os.Exit(1) 35 | } 36 | return f, bufio.NewReader(f) 37 | } 38 | 39 | // readLine reads a line from the given reader and returns the line as a slice of bytes. 40 | // 41 | // If the reader reaches EOF, the second return value will be true. Otherwise, it will be false. 42 | // However, if an error occurs, the program will exit with an error message and status code 1. 43 | func readLine(reader *bufio.Reader, textFilePath, website string) ([]byte, bool) { 44 | lineBytes, err := utils.ReadLine(reader) 45 | if err != nil { 46 | if err == io.EOF { 47 | return nil, true 48 | } 49 | errMsg := fmt.Sprintf( 50 | "error %d: failed to read %s text file at %s, more info => %v", 51 | utils.OS_ERROR, 52 | website, 53 | textFilePath, 54 | err, 55 | ) 56 | color.Red(errMsg) 57 | os.Exit(1) 58 | } 59 | return lineBytes, false 60 | } 61 | -------------------------------------------------------------------------------- /src/api/pixivfanbox/pixiv_fanbox.go: -------------------------------------------------------------------------------- 1 | package pixivfanbox 2 | 3 | import ( 4 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 5 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 6 | ) 7 | 8 | // Start the download process for Pixiv Fanbox 9 | func PixivFanboxDownloadProcess(pixivFanboxDl *PixivFanboxDl, pixivFanboxDlOptions *PixivFanboxDlOptions) { 10 | if !pixivFanboxDlOptions.DlThumbnails && !pixivFanboxDlOptions.DlImages && !pixivFanboxDlOptions.DlAttachments && !pixivFanboxDlOptions.DlGdrive { 11 | return 12 | } 13 | 14 | if len(pixivFanboxDl.CreatorIds) > 0 { 15 | pixivFanboxDl.getCreatorsPosts( 16 | pixivFanboxDlOptions, 17 | ) 18 | } 19 | 20 | var urlsToDownload, gdriveUrlsToDownload []*request.ToDownload 21 | if len(pixivFanboxDl.PostIds) > 0 { 22 | urlsToDownload, gdriveUrlsToDownload = pixivFanboxDl.getPostDetails( 23 | pixivFanboxDlOptions, 24 | ) 25 | } 26 | 27 | var downloadedPosts bool 28 | if len(urlsToDownload) > 0 { 29 | downloadedPosts = true 30 | request.DownloadUrls( 31 | urlsToDownload, 32 | &request.DlOptions{ 33 | MaxConcurrency: utils.PIXIV_MAX_CONCURRENT_DOWNLOADS, 34 | Headers: GetPixivFanboxHeaders(), 35 | Cookies: pixivFanboxDlOptions.SessionCookies, 36 | UseHttp3: false, 37 | }, 38 | pixivFanboxDlOptions.Configs, 39 | ) 40 | } 41 | if pixivFanboxDlOptions.GdriveClient != nil && len(gdriveUrlsToDownload) > 0 { 42 | downloadedPosts = true 43 | pixivFanboxDlOptions.GdriveClient.DownloadGdriveUrls(gdriveUrlsToDownload, pixivFanboxDlOptions.Configs) 44 | } 45 | 46 | if downloadedPosts { 47 | utils.AlertWithoutErr(utils.Title, "Downloaded all posts from Pixiv Fanbox!") 48 | } else { 49 | utils.AlertWithoutErr(utils.Title, "No posts to download from Pixiv Fanbox!") 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/make.ps1: -------------------------------------------------------------------------------- 1 | Write-Output "Building Cultured Downloader CLI for Windows, Linux, and macOS..."
2 | 3 | # Make the bin directory quietly if it doesn't exist 4 | if (!(Test-Path -Path "bin")) { 5 | New-Item -ItemType Directory -Path "bin" | Out-Null 6 | } 7 | 8 | Remove-Item -Path "bin/hash.txt" -Force -ErrorAction SilentlyContinue 9 | "SHA256 Hashes`r`n" | Out-File -FilePath "bin/hash.txt" 10 | 11 | function GetHash($path, $os, $arch) { 12 | $hash = Get-FileHash -Algorithm SHA256 $path | Select-Object -ExpandProperty Hash 13 | 14 | $bits = "64-bit" 15 | if ($arch -eq "386") { 16 | $bits = "32-bit" 17 | } 18 | 19 | $filename = Split-Path -Path $path -Leaf 20 | $osTitle = $os.Substring(0,1).ToUpper() + $os.Substring(1) 21 | $hashMsg = "$filename ($os-$arch/$osTitle $bits):`r`n- $hash`r`n" 22 | 23 | # write to bin/hash.txt 24 | $hashMsg | Out-File -FilePath "bin/hash.txt" -Append 25 | } 26 | 27 | # github.com/josephspurrier/goversioninfo/cmd/goversioninfo 28 | $verInfoName = "versioninfo.syso" 29 | $verInfoRc = "versioninfo.rc" 30 | windres -i $verInfoRc -O coff -o $verInfoName 31 | 32 | $env:GOOS = "windows" 33 | $env:GOARCH = "amd64" 34 | $binaryPath = "bin/cultured-downloader-cli.exe" 35 | go build -o $binaryPath 36 | GetHash $binaryPath "windows" "amd64" 37 | Remove-Item -Path $verInfoName -Force -ErrorAction SilentlyContinue 38 | 39 | $env:GOOS = "linux" 40 | $binaryPath = "bin/cultured-downloader-cli-linux" 41 | go build -o $binaryPath 42 | GetHash $binaryPath "linux" "amd64" 43 | 44 | $env:GOOS = "darwin" 45 | $binaryPath = "bin/cultured-downloader-cli-darwin" 46 | go build -o $binaryPath 47 | GetHash $binaryPath "darwin" "amd64" 48 | 49 | # reset the environment variables 50 | $env:GOOS = "windows" 51 | Write-Output "Finished building Cultured Downloader CLI for Windows, Linux, and macOS." 52 | -------------------------------------------------------------------------------- /src/api/pixiv/ugoira/ugoira.go: -------------------------------------------------------------------------------- 1 | package ugoira 2 | 3 | import ( 4 | "fmt" 5 | "os" 6 | "strings" 7 | 8 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 9 | "github.com/fatih/color" 10 | ) 11 | 12 | // UgoiraOptions is the struct that contains the 13 | // configs for the processing of the ugoira images after downloading from Pixiv. 14 | type UgoiraOptions struct { 15 | DeleteZip bool 16 | Quality int 17 | OutputFormat string 18 | } 19 | 20 | var UGOIRA_ACCEPTED_EXT = []string{ 21 | ".gif", 22 | ".apng", 23 | ".webp", 24 | ".webm", 25 | ".mp4", 26 | } 27 | 28 | // ValidateArgs validates the arguments of the ugoira process options. 29 | // 30 | // Should be called after initialising the struct.
31 | func (u *UgoiraOptions) ValidateArgs() { 32 | u.OutputFormat = strings.ToLower(u.OutputFormat) 33 | 34 | // u.Quality is only for .mp4 and .webm 35 | if u.OutputFormat == ".mp4" && (u.Quality < 0 || u.Quality > 51) { 36 | color.Red( 37 | fmt.Sprintf( 38 | "pixiv error %d: Ugoira quality of %d is not allowed", 39 | utils.INPUT_ERROR, 40 | u.Quality, 41 | ), 42 | ) 43 | color.Red("Ugoira quality for FFmpeg must be between 0 and 51 for .mp4") 44 | os.Exit(1) 45 | } else if u.OutputFormat == ".webm" && (u.Quality < 0 || u.Quality > 63) { 46 | color.Red( 47 | fmt.Sprintf( 48 | "pixiv error %d: Ugoira quality of %d is not allowed", 49 | utils.INPUT_ERROR, 50 | u.Quality, 51 | ), 52 | ) 53 | color.Red("Ugoira quality for FFmpeg must be between 0 and 63 for .webm") 54 | os.Exit(1) 55 | } 56 | 57 | utils.ValidateStrArgs( 58 | u.OutputFormat, 59 | UGOIRA_ACCEPTED_EXT, 60 | []string{ 61 | fmt.Sprintf( 62 | "pixiv error %d: Output extension %q is not allowed for ugoira conversion", 63 | utils.INPUT_ERROR, 64 | u.OutputFormat, 65 | ), 66 | }, 67 | ) 68 | } 69 | -------------------------------------------------------------------------------- /src/utils/json.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "bytes" 5 | "encoding/json" 6 | "fmt" 7 | "net/http" 8 | "os" 9 | "path/filepath" 10 | "time" 11 | 12 | "github.com/fatih/color" 13 | ) 14 | 15 | func logJsonResponse(body []byte) { 16 | var prettyJson bytes.Buffer 17 | err := json.Indent(&prettyJson, body, "", "    ") 18 | if err != nil { 19 | color.Red( 20 | fmt.Sprintf( 21 | "error %d: failed to indent JSON response body due to %v", 22 | JSON_ERROR, 23 | err, 24 | ), 25 | ) 26 | return 27 | } 28 | 29 | filename := fmt.Sprintf("saved_%s.json", time.Now().Format("2006-01-02_15-04-05")) 30 | filePath := filepath.Join("json", filename) 31 | os.MkdirAll(filepath.Dir(filePath), 0755) 32 | err = os.WriteFile(filePath, prettyJson.Bytes(), 0666) 33 | if err != nil { 34 | color.Red( 35 | fmt.Sprintf( 36 | "error %d: failed to write JSON response body to file due to %v", 37 | UNEXPECTED_ERROR, 38 | err, 39 | ), 40 | ) 41 | } 42 | } 43 | 44 | // Reads the response body and unmarshals it into the given interface 45 | func LoadJsonFromResponse(res *http.Response, format any) error { 46 | body, err := ReadResBody(res) 47 | if err != nil { 48 | return err 49 | } 50 | 51 | // write to file if debug mode is on 52 | if DEBUG_MODE { 53 | logJsonResponse(body) 54 | } 55 | 56 | if err = json.Unmarshal(body, &format); err != nil { 57 | return fmt.Errorf( 58 | "error %d: failed to unmarshal json response from %s due to %v\nBody: %s", 59 | RESPONSE_ERROR, 60 | res.Request.URL.String(), 61 | err, 62 | string(body), 63 | ) 64 | } 65 | return nil 66 | } 67 | 68 | func LoadJsonFromBytes(body []byte, format any) error { 69 | if err := json.Unmarshal(body, &format); err != nil { 70 | return fmt.Errorf( 71 | "error %d: failed to unmarshal json due to %v\nBody: %s", 72 | JSON_ERROR, 73 | err, 74 | string(body), 75 | ) 76 | } 77 | return nil 78 | } 79 | -------------------------------------------------------------------------------- /src/utils/http.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "fmt" 5 | "io" 6 | "net/http" 7 | "net/url" 8 | "path/filepath" 9 | "strings" 10 | ) 11 | 12 | // Returns a boolean value indicating whether the specified site supports HTTP/3 13 | // 14
| // Usually, the API endpoints of a site do not support HTTP/3, so the isApi parameter must be provided. 15 | func IsHttp3Supported(site string, isApi bool) bool { 16 | switch site { 17 | case FANTIA: 18 | return !isApi 19 | case PIXIV_FANBOX: 20 | return false 21 | case PIXIV: 22 | return !isApi 23 | case PIXIV_MOBILE: 24 | return true 25 | case KEMONO, KEMONO_BACKUP: 26 | return false 27 | default: 28 | panic( 29 | fmt.Errorf( 30 | "error %d, invalid site, %q in IsHttp3Supported", 31 | DEV_ERROR, 32 | site, 33 | ), 34 | ) 35 | } 36 | } 37 | 38 | // Returns the last part of the given URL string 39 | func GetLastPartOfUrl(url string) string { 40 | removedParams := strings.SplitN(url, "?", 2) 41 | splittedUrl := strings.Split(removedParams[0], "/") 42 | return splittedUrl[len(splittedUrl)-1] 43 | } 44 | 45 | // Returns the path without the file extension 46 | func RemoveExtFromFilename(filename string) string { 47 | return strings.TrimSuffix(filename, filepath.Ext(filename)) 48 | } 49 | 50 | // Converts a map of string parameters into a URL query string 51 | func ParamsToString(params map[string]string) string { 52 | paramsStr := "" 53 | for key, value := range params { 54 | paramsStr += fmt.Sprintf("%s=%s&", key, url.QueryEscape(value)) 55 | } 56 | return strings.TrimSuffix(paramsStr, "&") // remove the trailing & (also safe when params is empty) 57 | } 58 | 59 | // Reads and returns the response body in bytes and closes it 60 | func ReadResBody(res *http.Response) ([]byte, error) { 61 | defer res.Body.Close() 62 | body, err := io.ReadAll(res.Body) 63 | if err != nil { 64 | return nil, fmt.Errorf( 65 | "error %d: failed to read response body from %s due to %v", 66 | RESPONSE_ERROR, 67 | res.Request.URL.String(), 68 | err, 69 | ) 70 | } 71 | return body, nil 72 | } 73 | -------------------------------------------------------------------------------- /src/cmds/textparser/fantia.go: -------------------------------------------------------------------------------- 1 | package textparser 2 | 3 | import ( 4 | "fmt" 5 | "strings" 6 | "regexp" 7 | 8 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 9 | ) 10 | 11 | var ( 12 | F_POST_URL_REGEX = regexp.MustCompile( 13 | `^https://fantia\.jp/posts/(?P<postId>\d+)$`, 14 | ) 15 | F_POST_REGEX_POST_ID_INDEX = F_POST_URL_REGEX.SubexpIndex("postId") 16 | F_FANCLUB_URL_REGEX = regexp.MustCompile( 17 | // ^https://fantia\.jp/fanclubs/(?P<fanclubId>\d+)(?:/posts)?(?:; (?P<pageNum>[1-9]\d*(?:-[1-9]\d*)?))?$ 18 | fmt.Sprintf( 19 | `^https://fantia\.jp/fanclubs/(?P<fanclubId>\d+)(?:/posts)?%s$`, 20 | PAGE_NUM_REGEX_STR, 21 | ), 22 | ) 23 | F_FANCLUB_REGEX_FANCLUB_ID_INDEX = F_FANCLUB_URL_REGEX.SubexpIndex("fanclubId") 24 | F_FANCLUB_REGEX_PAGE_NUM_INDEX = F_FANCLUB_URL_REGEX.SubexpIndex(PAGE_NUM_REGEX_GRP_NAME) 25 | ) 26 | 27 | type parsedFantiaFanclub struct { 28 | FanclubId string 29 | PageNum string 30 | } 31 | 32 | // ParseFantiaTextFile parses the text file at the given path and returns a slice of post IDs and a slice of parsedFantiaFanclub.
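// // Each line of the text file should contain a single Fantia URL; for example (with illustrative IDs): // https://fantia.jp/posts/1234567 // https://fantia.jp/fanclubs/123456; 2-5 // where the optional "; 2-5" suffix (matched by PAGE_NUM_REGEX_STR) selects the page range to download for a fanclub URL.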
33 | func ParseFantiaTextFile(textFilePath string) ([]string, []*parsedFantiaFanclub) { 34 | f, reader := openTextFile( 35 | textFilePath, 36 | utils.FANTIA, 37 | ) 38 | defer f.Close() 39 | 40 | var postIds []string 41 | var fanclubIds []*parsedFantiaFanclub 42 | for { 43 | lineBytes, isEof := readLine(reader, textFilePath, utils.FANTIA) 44 | if isEof { 45 | break 46 | } 47 | 48 | url := strings.TrimSpace(string(lineBytes)) 49 | if url == "" { 50 | continue 51 | } 52 | 53 | if matched := F_POST_URL_REGEX.FindStringSubmatch(url); matched != nil { 54 | postIds = append(postIds, matched[F_POST_REGEX_POST_ID_INDEX]) 55 | continue 56 | } 57 | 58 | if matched := F_FANCLUB_URL_REGEX.FindStringSubmatch(url); matched != nil { 59 | fanclubIds = append(fanclubIds, &parsedFantiaFanclub{ 60 | FanclubId: matched[F_FANCLUB_REGEX_FANCLUB_ID_INDEX], 61 | PageNum: matched[F_FANCLUB_REGEX_PAGE_NUM_INDEX], 62 | }) 63 | continue 64 | } 65 | } 66 | 67 | return postIds, fanclubIds 68 | } 69 | -------------------------------------------------------------------------------- /src/cmds/textparser/kemono.go: -------------------------------------------------------------------------------- 1 | package textparser 2 | 3 | import ( 4 | "strings" 5 | "regexp" 6 | 7 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 8 | "github.com/KJHJason/Cultured-Downloader-CLI/api/kemono" 9 | "github.com/KJHJason/Cultured-Downloader-CLI/api/kemono/models" 10 | ) 11 | 12 | var ( 13 | K_POST_URL_REGEX = regexp.MustCompile(kemono.BASE_REGEX_STR + kemono.BASE_POST_SUFFIX_REGEX_STR) 14 | K_POST_REGEX_SERVICE_INDEX = K_POST_URL_REGEX.SubexpIndex(kemono.SERVICE_GROUP_NAME) 15 | K_POST_REGEX_CREATOR_ID_INDEX = K_POST_URL_REGEX.SubexpIndex(kemono.CREATOR_ID_GROUP_NAME) 16 | K_POST_REGEX_POST_ID_INDEX = K_POST_URL_REGEX.SubexpIndex(kemono.POST_ID_GROUP_NAME) 17 | 18 | K_CREATOR_URL_REGEX = regexp.MustCompile(kemono.BASE_REGEX_STR + PAGE_NUM_REGEX_STR) 19 | K_CREATOR_REGEX_CREATOR_ID_INDEX = K_CREATOR_URL_REGEX.SubexpIndex(kemono.CREATOR_ID_GROUP_NAME) 20 | K_CREATOR_REGEX_PAGE_NUM_INDEX = K_CREATOR_URL_REGEX.SubexpIndex(PAGE_NUM_REGEX_GRP_NAME) 21 | ) 22 | 23 | // ParseKemonoTextFile parses the text file at the given path and returns a slice of KemonoPostToDl and a slice of KemonoCreatorToDl. 
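// // Each line should contain a single URL matching kemono.BASE_REGEX_STR; assuming the usual Kemono Party URL layout, accepted lines would look like (illustrative IDs): // https://kemono.party/patreon/user/123456/post/7654321 // https://kemono.party/patreon/user/123456; 2-5 // where the optional "; 2-5" suffix selects the page range to download for a creator URL.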
24 | func ParseKemonoTextFile(textFilePath string) ([]*models.KemonoPostToDl, []*models.KemonoCreatorToDl) { 25 | kemonoSite := utils.KEMONO // site name used in the error messages of the helper functions 26 | f, reader := openTextFile( 27 | textFilePath, 28 | kemonoSite, 29 | ) 30 | defer f.Close() 31 | 32 | var postsToDl []*models.KemonoPostToDl 33 | var creatorsToDl []*models.KemonoCreatorToDl 34 | for { 35 | lineBytes, isEof := readLine(reader, textFilePath, kemonoSite) 36 | if isEof { 37 | break 38 | } 39 | 40 | url := strings.TrimSpace(string(lineBytes)) 41 | if url == "" { 42 | continue 43 | } 44 | 45 | if matched := K_POST_URL_REGEX.FindStringSubmatch(url); matched != nil { 46 | postsToDl = append(postsToDl, &models.KemonoPostToDl{ 47 | Service: matched[K_POST_REGEX_SERVICE_INDEX], 48 | CreatorId: matched[K_POST_REGEX_CREATOR_ID_INDEX], 49 | PostId: matched[K_POST_REGEX_POST_ID_INDEX], 50 | }) 51 | continue 52 | } 53 | 54 | if matched := K_CREATOR_URL_REGEX.FindStringSubmatch(url); matched != nil { 55 | creatorsToDl = append(creatorsToDl, &models.KemonoCreatorToDl{ 56 | Service: matched[K_POST_REGEX_SERVICE_INDEX], // same submatch index as the creator regex since both regexes share kemono.BASE_REGEX_STR 57 | CreatorId: matched[K_CREATOR_REGEX_CREATOR_ID_INDEX], 58 | PageNum: matched[K_CREATOR_REGEX_PAGE_NUM_INDEX], 59 | }) 60 | continue 61 | } 62 | } 63 | 64 | return postsToDl, creatorsToDl 65 | } 66 | -------------------------------------------------------------------------------- /src/utils/logger.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "fmt" 5 | "log" 6 | "io" 7 | "os" 8 | ) 9 | 10 | const ( 11 | // Log levels 12 | INFO = iota 13 | ERROR 14 | DEBUG 15 | ) 16 | 17 | type logger struct { 18 | infoLogger *log.Logger 19 | errorLogger *log.Logger 20 | debugLogger *log.Logger 21 | } 22 | 23 | var loggerPrefix = fmt.Sprintf("Cultured Downloader CLI V%s ", VERSION) 24 | func NewLogger(out io.Writer) *logger { 25 | if out == nil { 26 | out = os.Stdout 27 | } 28 | 29 | return &logger{ 30 | infoLogger: log.New(out, loggerPrefix + "[INFO]: ", log.Ldate|log.Ltime), 31 | errorLogger: log.New(out, loggerPrefix + "[ERROR]: ", log.Ldate|log.Ltime), 32 | debugLogger: log.New(out, loggerPrefix + "[DEBUG]: ", log.Ldate|log.Ltime), 33 | } 34 | } 35 | 36 | func (l *logger) SetOutput(w io.Writer) { 37 | l.infoLogger.SetOutput(w) 38 | l.errorLogger.SetOutput(w) 39 | l.debugLogger.SetOutput(w) 40 | } 41 | 42 | // LogBasedOnLvlf logs a message based on the log level passed in 43 | // 44 | // You can use this function to log a message with a format string 45 | // 46 | // However, please ensure that the 47 | // lvl passed in is valid (i.e. INFO, ERROR, or DEBUG), otherwise this function will panic 48 | func (l *logger) LogBasedOnLvlf(lvl int, format string, args ...any) { 49 | switch lvl { 50 | case INFO: 51 | l.Infof(format, args...) 52 | case ERROR: 53 | l.Errorf(format, args...) 54 | case DEBUG: 55 | l.Debugf(format, args...) 56 | default: 57 | panic( 58 | fmt.Sprintf( 59 | "error %d: invalid log level %d passed to LogBasedOnLvlf()", 60 | DEV_ERROR, 61 | lvl, 62 | ), 63 | ) 64 | } 65 | } 66 | 67 | // LogBasedOnLvl is a wrapper for LogBasedOnLvlf() that takes a string instead of a format string 68 | // 69 | // However, please ensure that the 70 | // lvl passed in is valid (i.e. INFO, ERROR, or DEBUG), otherwise this function will panic 71 | func (l *logger) LogBasedOnLvl(lvl int, msg string) { 72 | l.LogBasedOnLvlf(lvl, "%s", msg) // pass msg as a value so any % verbs in it are not misinterpreted 73 | } 74 | 75 | func (l *logger) Debug(args ...any) { 76 | l.debugLogger.Println(args...)
77 | } 78 | 79 | func (l *logger) Debugf(format string, args ...any) { 80 | l.debugLogger.Printf(format, args...) 81 | } 82 | 83 | func (l *logger) Info(args ...any) { 84 | l.infoLogger.Println(args...) 85 | } 86 | 87 | func (l *logger) Infof(format string, args ...any) { 88 | l.infoLogger.Printf(format, args...) 89 | } 90 | 91 | func (l *logger) Error(args ...any) { 92 | l.errorLogger.Println(args...) 93 | } 94 | 95 | func (l *logger) Errorf(format string, args ...any) { 96 | l.errorLogger.Printf(format, args...) 97 | } 98 | -------------------------------------------------------------------------------- /src/api/kemono/kemono.go: -------------------------------------------------------------------------------- 1 | package kemono 2 | 3 | import ( 4 | "github.com/KJHJason/Cultured-Downloader-CLI/configs" 5 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 6 | "github.com/KJHJason/Cultured-Downloader-CLI/spinner" 7 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 8 | ) 9 | 10 | func KemonoDownloadProcess(config *configs.Config, kemonoDl *KemonoDl, dlOptions *KemonoDlOptions, dlFav bool) { 11 | if !dlOptions.DlAttachments && !dlOptions.DlGdrive { 12 | return 13 | } 14 | 15 | var toDownload, gdriveLinks []*request.ToDownload 16 | if dlFav { 17 | progress := spinner.New( 18 | spinner.REQ_SPINNER, 19 | "fgHiYellow", 20 | "Getting favourites from Kemono Party...", 21 | "Finished getting favourites from Kemono Party!", 22 | "Something went wrong while getting favourites from Kemono Party.\nPlease refer to the logs for more details.", 23 | 0, 24 | ) 25 | progress.Start() 26 | favToDl, favGdriveLinks, err := getFavourites( 27 | utils.DOWNLOAD_PATH, 28 | dlOptions, 29 | ) 30 | hasErr := (err != nil) 31 | if hasErr { 32 | utils.LogError(err, "", false, utils.ERROR) 33 | } else { 34 | toDownload = favToDl 35 | gdriveLinks = favGdriveLinks 36 | } 37 | progress.Stop(hasErr) 38 | } 39 | 40 | if len(kemonoDl.PostsToDl) > 0 { 41 | postsToDl, gdriveLinksToDl := getMultiplePosts( 42 | kemonoDl.PostsToDl, 43 | utils.DOWNLOAD_PATH, 44 | dlOptions, 45 | ) 46 | toDownload = append(toDownload, postsToDl...) 47 | gdriveLinks = append(gdriveLinks, gdriveLinksToDl...) 48 | } 49 | if len(kemonoDl.CreatorsToDl) > 0 { 50 | creatorsToDl, gdriveLinksToDl := getMultipleCreators( 51 | kemonoDl.CreatorsToDl, 52 | utils.DOWNLOAD_PATH, 53 | dlOptions, 54 | ) 55 | toDownload = append(toDownload, creatorsToDl...) 56 | gdriveLinks = append(gdriveLinks, gdriveLinksToDl...) 
57 | } 58 | 59 | var downloadedPosts bool 60 | if len(toDownload) > 0 { 61 | downloadedPosts = true 62 | request.DownloadUrls( 63 | toDownload, 64 | &request.DlOptions{ 65 | MaxConcurrency: utils.PIXIV_MAX_CONCURRENT_DOWNLOADS, 66 | Cookies: dlOptions.SessionCookies, 67 | UseHttp3: utils.IsHttp3Supported(utils.KEMONO, false), 68 | }, 69 | config, 70 | ) 71 | } 72 | if dlOptions.GdriveClient != nil && len(gdriveLinks) > 0 { 73 | downloadedPosts = true 74 | dlOptions.GdriveClient.DownloadGdriveUrls(gdriveLinks, config) 75 | } 76 | 77 | if downloadedPosts { 78 | utils.AlertWithoutErr(utils.Title, "Downloaded all posts from Kemono Party!") 79 | } else { 80 | utils.AlertWithoutErr(utils.Title, "No posts to download from Kemono Party!") 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /src/api/pixivfanbox/models/pixiv_fanbox.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | import "encoding/json" 4 | 5 | type CreatorPaginatedPostsJson struct { 6 | Body []string `json:"body"` 7 | } 8 | 9 | type FanboxCreatorPostsJson struct { 10 | Body struct { 11 | Items []struct { 12 | Id string `json:"id"` 13 | } `json:"items"` 14 | } `json:"body"` 15 | } 16 | 17 | type FanboxPostJson struct { 18 | Body struct { 19 | Id string `json:"id"` 20 | Title string `json:"title"` 21 | Type string `json:"type"` 22 | CreatorId string `json:"creatorId"` 23 | CoverImageUrl string `json:"coverImageUrl"` 24 | Body json.RawMessage `json:"body"` 25 | } `json:"body"` 26 | } 27 | 28 | type FanboxFilePostJson struct { 29 | Text string `json:"text"` 30 | Files []struct { 31 | ID string `json:"id"` 32 | Name string `json:"name"` 33 | Extension string `json:"extension"` 34 | Size int `json:"size"` 35 | Url string `json:"url"` 36 | } `json:"files"` 37 | } 38 | 39 | type FanboxImagePostJson struct { 40 | Text string `json:"text"` 41 | Images []struct { 42 | ID string `json:"id"` 43 | Extension string `json:"extension"` 44 | Width int `json:"width"` 45 | Height int `json:"height"` 46 | OriginalUrl string `json:"originalUrl"` 47 | ThumbnailUrl string `json:"thumbnailUrl"` 48 | } `json:"images"` 49 | } 50 | 51 | type FanboxTextPostJson struct { 52 | Text string `json:"text"` 53 | } 54 | 55 | type FanboxArticleBlocks []struct { 56 | Type string `json:"type"` 57 | Text string `json:"text,omitempty"` 58 | ImageID string `json:"imageId,omitempty"` 59 | Styles []struct { 60 | Type string `json:"type"` 61 | Offset int `json:"offset"` 62 | Length int `json:"length"` 63 | } `json:"styles,omitempty"` 64 | Links []struct { 65 | Offset int `json:"offset"` 66 | Length int `json:"length"` 67 | Url string `json:"url"` 68 | } `json:"links,omitempty"` 69 | FileID string `json:"fileId,omitempty"` 70 | } 71 | 72 | type FanboxArticleJson struct { 73 | Blocks FanboxArticleBlocks `json:"blocks"` 74 | ImageMap map[string]struct { 75 | ID string `json:"id"` 76 | Extension string `json:"extension"` 77 | Width int `json:"width"` 78 | Height int `json:"height"` 79 | OriginalUrl string `json:"originalUrl"` 80 | ThumbnailUrl string `json:"thumbnailUrl"` 81 | } `json:"imageMap"` 82 | FileMap map[string]struct { 83 | ID string `json:"id"` 84 | Name string `json:"name"` 85 | Extension string `json:"extension"` 86 | Size int `json:"size"` 87 | Url string `json:"url"` 88 | } `json:"fileMap"` 89 | } 90 | -------------------------------------------------------------------------------- /src/cmds/textparser/pixiv_fanbox.go: 
-------------------------------------------------------------------------------- 1 | package textparser 2 | 3 | import ( 4 | "fmt" 5 | "strings" 6 | "regexp" 7 | 8 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 9 | ) 10 | 11 | const PF_BASE_REGEX_STR = `https://(?:www\.fanbox\.cc/@(?P<creatorId1>[\w.-]+)|(?P<creatorId2>[\w.-]+)\.fanbox\.cc)` 12 | 13 | var ( 14 | PF_POST_URL_REGEX = regexp.MustCompile( 15 | // ^https://(?:www\.fanbox\.cc/@(?P<creatorId1>[\w.-]+)|(?P<creatorId2>[\w.-]+)\.fanbox\.cc)/posts/(?P<postId>\d+)$ 16 | fmt.Sprintf( 17 | `^%s/posts/(?P<postId>\d+)$`, 18 | PF_BASE_REGEX_STR, 19 | ), 20 | ) 21 | PF_POST_REGEX_POST_ID_INDEX = PF_POST_URL_REGEX.SubexpIndex("postId") 22 | PF_CREATOR_URL_REGEX = regexp.MustCompile( 23 | // ^https://(?:www\.fanbox\.cc/@(?P<creatorId1>[\w.-]+)|(?P<creatorId2>[\w.-]+)\.fanbox\.cc)(?:/posts)?(?:; (?P<pageNum>[1-9]\d*(?:-[1-9]\d*)?))?$ 24 | fmt.Sprintf( 25 | `^%s(?:/posts)?%s$`, 26 | PF_BASE_REGEX_STR, 27 | PAGE_NUM_REGEX_STR, 28 | ), 29 | ) 30 | PF_CREATOR_REGEX_CREATOR_ID_INDEX_1 = PF_CREATOR_URL_REGEX.SubexpIndex("creatorId1") 31 | PF_CREATOR_REGEX_CREATOR_ID_INDEX_2 = PF_CREATOR_URL_REGEX.SubexpIndex("creatorId2") 32 | PF_CREATOR_REGEX_PAGE_NUM_INDEX = PF_CREATOR_URL_REGEX.SubexpIndex(PAGE_NUM_REGEX_GRP_NAME) 33 | ) 34 | 35 | type parsedPixivFanboxCreator struct { 36 | CreatorId string 37 | PageNum string 38 | } 39 | 40 | // ParsePixivFanboxTextFile parses the text file at the given path and returns a slice of post IDs and a slice of parsedPixivFanboxCreator. 41 | func ParsePixivFanboxTextFile(textFilePath string) ([]string, []*parsedPixivFanboxCreator) { 42 | lowercaseFanbox := strings.ToLower(utils.PIXIV_FANBOX_TITLE) 43 | f, reader := openTextFile( 44 | textFilePath, 45 | lowercaseFanbox, 46 | ) 47 | defer f.Close() 48 | 49 | var postIds []string 50 | var creatorIds []*parsedPixivFanboxCreator 51 | for { 52 | lineBytes, isEof := readLine(reader, textFilePath, lowercaseFanbox) 53 | if isEof { 54 | break 55 | } 56 | 57 | url := strings.TrimSpace(string(lineBytes)) 58 | if url == "" { 59 | continue 60 | } 61 | 62 | if matched := PF_POST_URL_REGEX.FindStringSubmatch(url); matched != nil { 63 | postIds = append(postIds, matched[PF_POST_REGEX_POST_ID_INDEX]) 64 | continue 65 | } 66 | 67 | if matched := PF_CREATOR_URL_REGEX.FindStringSubmatch(url); matched != nil { 68 | creatorId := matched[PF_CREATOR_REGEX_CREATOR_ID_INDEX_1] 69 | if creatorId == "" { 70 | creatorId = matched[PF_CREATOR_REGEX_CREATOR_ID_INDEX_2] 71 | } 72 | 73 | creatorIds = append(creatorIds, &parsedPixivFanboxCreator{ 74 | CreatorId: creatorId, 75 | PageNum: matched[PF_CREATOR_REGEX_PAGE_NUM_INDEX], 76 | }) 77 | continue 78 | } 79 | } 80 | 81 | return postIds, creatorIds 82 | } 83 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug-report.yaml: -------------------------------------------------------------------------------- 1 | name: Bug Report 2 | description: File a bug report here 3 | title: "[Bug]: (Enter a short summary of the bug here)" 4 | labels: ["bug"] 5 | assignees: 6 | - KJHJason 7 | 8 | body: 9 | - type: markdown 10 | attributes: 11 | value: | 12 | Thanks for taking the time to fill out this bug report! 13 | 14 | - type: dropdown 15 | id: os 16 | attributes: 17 | label: Which Operating System are you running the program on? 18 | multiple: true 19 | options: 20 | - Windows 21 | - Linux 22 | - MacOS 23 | validations: 24 | required: true 25 | 26 | - type: input 27 | id: program_version 28 | attributes: 29 | label: Program Version 30 | description: What version of the program are you using?
(Can be found when running the program with the -v/--version flag) 31 | validations: 32 | required: true 33 | 34 | - type: textarea 35 | id: steps_to_reproduce 36 | attributes: 37 | label: Steps to reproduce the bug 38 | description: This will assist me in finding where it went wrong. 39 | placeholder: Please enter the steps to reproduce the bug that you have encountered... 40 | value: | 41 | 1. 42 | 2. 43 | 3. 44 | 4. 45 | validations: 46 | required: true 47 | 48 | - type: textarea 49 | id: expected_behaviour 50 | attributes: 51 | label: Expected behaviour 52 | description: A clear and concise description of what you expected to happen. 53 | placeholder: Explain what should have happened instead... 54 | validations: 55 | required: true 56 | 57 | - type: dropdown 58 | id: error_code 59 | attributes: 60 | label: What error code did you receive? (If any) 61 | multiple: false 62 | options: 63 | - "N/A" 64 | - "1000 (Developer Error)" 65 | - "1001 (Unexpected Error)" 66 | - "1002 (OS Error)" 67 | - "1003 (Input Error)" 68 | - "1004 (CMD Error)" 69 | - "1005 (Connection Error)" 70 | - "1006 (Response Error)" 71 | - "1007 (Download Error)" 72 | - "1008 (JSON Error)" 73 | - "1009 (HTML Error)" 74 | - "1010 (Captcha Error)" 75 | validations: 76 | required: true 77 | 78 | - type: textarea 79 | id: error_log_text 80 | attributes: 81 | label: Relevant log output (If any) 82 | description: Please copy and paste the text in the error log that was generated. 83 | validations: 84 | required: false 85 | 86 | - type: textarea 87 | id: screenshots 88 | attributes: 89 | label: Screenshots (Optional) 90 | description: Screenshots can be uploaded by simply dragging an image file into this box 91 | validations: 92 | required: false 93 | -------------------------------------------------------------------------------- /src/api/pixiv/mobile/process.go: -------------------------------------------------------------------------------- 1 | package pixivmobile 2 | 3 | import ( 4 | "strconv" 5 | "path/filepath" 6 | 7 | "github.com/KJHJason/Cultured-Downloader-CLI/api/pixiv/models" 8 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 9 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 10 | ) 11 | 12 | // Processes the artwork JSON and returns a slice of ToDownload structs that contain the URLs of the images and their file paths 13 | func (pixiv *PixivMobile) processArtworkJson(artworkJson *models.PixivMobileIllustJson, downloadPath string) ([]*request.ToDownload, *models.Ugoira, error) { 14 | if artworkJson == nil { 15 | return nil, nil, nil 16 | } 17 | 18 | artworkId := strconv.Itoa(artworkJson.Id) 19 | artworkTitle := artworkJson.Title 20 | artworkType := artworkJson.Type 21 | illustratorName := artworkJson.User.Name 22 | artworkFolderPath := utils.GetPostFolder( 23 | filepath.Join(downloadPath, utils.PIXIV_TITLE), illustratorName, artworkId, artworkTitle, 24 | ) 25 | 26 | if artworkType == "ugoira" { 27 | ugoiraInfo, err := pixiv.getUgoiraMetadata(artworkId, artworkFolderPath) 28 | if err != nil { 29 | return nil, nil, err 30 | } 31 | return nil, ugoiraInfo, nil 32 | } 33 | 34 | var artworksToDownload []*request.ToDownload 35 | singlePageImageUrl := artworkJson.MetaSinglePage.OriginalImageUrl 36 | if singlePageImageUrl != "" { 37 | artworksToDownload = append(artworksToDownload, &request.ToDownload{ 38 | Url: singlePageImageUrl, 39 | FilePath: artworkFolderPath, 40 | }) 41 | } else { 42 | for _, image := range artworkJson.MetaPages { 43 | imageUrl := image.ImageUrls.Original 44 | artworksToDownload = append(artworksToDownload,
&request.ToDownload{ 45 | Url: imageUrl, 46 | FilePath: artworkFolderPath, 47 | }) 48 | } 49 | } 50 | return artworksToDownload, nil, nil 51 | } 52 | 53 | // The same as the processArtworkJson function but for multiple JSONs at once 54 | // (Those with the "illusts" key which holds a slice of maps containing the artwork JSON) 55 | func (pixiv *PixivMobile) processMultipleArtworkJson(resJson *models.PixivMobileArtworksJson, downloadPath string) ([]*request.ToDownload, []*models.Ugoira, []error) { 56 | if resJson == nil { 57 | return nil, nil, nil 58 | } 59 | 60 | artworksMaps := resJson.Illusts 61 | if len(artworksMaps) == 0 { 62 | return nil, nil, nil 63 | } 64 | 65 | var errSlice []error 66 | var ugoiraToDl []*models.Ugoira 67 | var artworksToDl []*request.ToDownload 68 | for _, artwork := range artworksMaps { 69 | artworks, ugoira, err := pixiv.processArtworkJson(artwork, downloadPath) 70 | if err != nil { 71 | errSlice = append(errSlice, err) 72 | continue 73 | } 74 | if ugoira != nil { 75 | ugoiraToDl = append(ugoiraToDl, ugoira) 76 | continue 77 | } 78 | artworksToDl = append(artworksToDl, artworks...) 79 | } 80 | return artworksToDl, ugoiraToDl, errSlice 81 | } 82 | -------------------------------------------------------------------------------- /src/api/pixiv/web/args.go: -------------------------------------------------------------------------------- 1 | package pixivweb 2 | 3 | import ( 4 | "fmt" 5 | "net/http" 6 | "strings" 7 | 8 | "github.com/KJHJason/Cultured-Downloader-CLI/api" 9 | "github.com/KJHJason/Cultured-Downloader-CLI/configs" 10 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 11 | ) 12 | 13 | // PixivWebDlOptions is the struct that contains the arguments of Pixiv download options. 14 | type PixivWebDlOptions struct { 15 | // Sort order of the results. Must be one of the values in ACCEPTED_SORT_ORDER below, e.g. "date" or "date_d". 16 | SortOrder string 17 | SearchMode string 18 | RatingMode string 19 | ArtworkType string 20 | 21 | Configs *configs.Config 22 | 23 | SessionCookies []*http.Cookie 24 | SessionCookieId string 25 | } 26 | 27 | var ( 28 | ACCEPTED_SORT_ORDER = []string{ 29 | "date", "date_d", 30 | "popular", "popular_d", 31 | "popular_male", "popular_male_d", 32 | "popular_female", "popular_female_d", 33 | } 34 | ACCEPTED_SEARCH_MODE = []string{ 35 | "s_tag", 36 | "s_tag_full", 37 | "s_tc", 38 | } 39 | ACCEPTED_RATING_MODE = []string{ 40 | "safe", 41 | "r18", 42 | "all", 43 | } 44 | ACCEPTED_ARTWORK_TYPE = []string{ 45 | "illust_and_ugoira", 46 | "manga", 47 | "all", 48 | } 49 | ) 50 | 51 | // ValidateArgs validates the arguments of the Pixiv download options. 52 | // 53 | // Should be called after initialising the struct.
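//
// A minimal usage sketch (a hedged example, not taken from the repo; the field
// values below are just illustrative picks from the ACCEPTED_* slices above):
//
//	p := &PixivWebDlOptions{
//	    SortOrder:   "date_d",
//	    SearchMode:  "s_tag_full",
//	    RatingMode:  "safe",
//	    ArtworkType: "all",
//	}
//	// validates the values above and, if SessionCookieId was set, the session cookie
//	p.ValidateArgs(utils.USER_AGENT)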
54 | func (p *PixivWebDlOptions) ValidateArgs(userAgent string) { 55 | p.SortOrder = strings.ToLower(p.SortOrder) 56 | utils.ValidateStrArgs( 57 | p.SortOrder, 58 | ACCEPTED_SORT_ORDER, 59 | []string{ 60 | fmt.Sprintf( 61 | "pixiv error %d: Sort order %s is not allowed", 62 | utils.INPUT_ERROR, 63 | p.SortOrder, 64 | ), 65 | }, 66 | ) 67 | 68 | p.SearchMode = strings.ToLower(p.SearchMode) 69 | utils.ValidateStrArgs( 70 | p.SearchMode, 71 | ACCEPTED_SEARCH_MODE, 72 | []string{ 73 | fmt.Sprintf( 74 | "pixiv error %d: Search mode %s is not allowed", 75 | utils.INPUT_ERROR, 76 | p.SearchMode, 77 | ), 78 | }, 79 | ) 80 | 81 | p.RatingMode = strings.ToLower(p.RatingMode) 82 | utils.ValidateStrArgs( 83 | p.RatingMode, 84 | ACCEPTED_RATING_MODE, 85 | []string{ 86 | fmt.Sprintf( 87 | "pixiv error %d: Rating mode %s is not allowed", 88 | utils.INPUT_ERROR, 89 | p.RatingMode, 90 | ), 91 | }, 92 | ) 93 | 94 | p.ArtworkType = strings.ToLower(p.ArtworkType) 95 | utils.ValidateStrArgs( 96 | p.ArtworkType, 97 | ACCEPTED_ARTWORK_TYPE, 98 | []string{ 99 | fmt.Sprintf( 100 | "pixiv error %d: Artwork type %s is not allowed", 101 | utils.INPUT_ERROR, 102 | p.ArtworkType, 103 | ), 104 | }, 105 | ) 106 | 107 | if p.SessionCookieId != "" { 108 | p.SessionCookies = []*http.Cookie{ 109 | api.VerifyAndGetCookie(utils.PIXIV, p.SessionCookieId, userAgent), 110 | } 111 | } 112 | } 113 | -------------------------------------------------------------------------------- /src/api/pixivfanbox/args.go: -------------------------------------------------------------------------------- 1 | package pixivfanbox 2 | 3 | import ( 4 | "net/http" 5 | "os" 6 | "regexp" 7 | 8 | "github.com/KJHJason/Cultured-Downloader-CLI/api" 9 | "github.com/KJHJason/Cultured-Downloader-CLI/configs" 10 | "github.com/KJHJason/Cultured-Downloader-CLI/gdrive" 11 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 12 | "github.com/fatih/color" 13 | ) 14 | 15 | // PixivFanboxDl is the struct that contains the IDs of the Pixiv Fanbox creators and posts to download. 16 | type PixivFanboxDl struct { 17 | CreatorIds []string 18 | CreatorPageNums []string 19 | 20 | PostIds []string 21 | } 22 | 23 | var creatorIdRegex = regexp.MustCompile(`^[\w.-]+$`) 24 | 25 | // ValidateArgs validates the IDs of the Pixiv Fanbox creators and posts to download. 26 | // 27 | // It also validates the page numbers of the creators to download. 28 | // 29 | // Should be called after initialising the struct. 30 | func (pf *PixivFanboxDl) ValidateArgs() { 31 | utils.ValidateIds(pf.PostIds) 32 | pf.PostIds = utils.RemoveSliceDuplicates(pf.PostIds) 33 | 34 | for _, creatorId := range pf.CreatorIds { 35 | if !creatorIdRegex.MatchString(creatorId) { 36 | color.Red( 37 | "error %d: invalid Pixiv Fanbox creator ID %q, must be alphanumeric with underscores, dashes, or periods", 38 | utils.INPUT_ERROR, 39 | creatorId, 40 | ) 41 | os.Exit(1) 42 | } 43 | } 44 | 45 | if len(pf.CreatorPageNums) > 0 { 46 | utils.ValidatePageNumInput( 47 | len(pf.CreatorIds), 48 | pf.CreatorPageNums, 49 | []string{ 50 | "Number of Pixiv Fanbox Creator ID(s) and page numbers must be equal.", 51 | }, 52 | ) 53 | } else { 54 | pf.CreatorPageNums = make([]string, len(pf.CreatorIds)) 55 | } 56 | pf.CreatorIds, pf.CreatorPageNums = utils.RemoveDuplicateIdAndPageNum( 57 | pf.CreatorIds, 58 | pf.CreatorPageNums, 59 | ) 60 | } 61 | 62 | // PixivFanboxDlOptions is the struct that contains the options for downloading from Pixiv Fanbox.
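//
// A minimal construction sketch (illustrative only; config is assumed to be an
// initialised *configs.Config from the caller):
//
//	pfOptions := &PixivFanboxDlOptions{
//	    DlThumbnails:  true,
//	    DlImages:      true,
//	    DlAttachments: true,
//	    DlGdrive:      false,
//	    Configs:       config,
//	}
//	pfOptions.ValidateArgs(config.UserAgent)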
63 | type PixivFanboxDlOptions struct { 64 | DlThumbnails bool 65 | DlImages bool 66 | DlAttachments bool 67 | DlGdrive bool 68 | 69 | Configs *configs.Config 70 | 71 | // GdriveClient is the Google Drive client to be 72 | // used in the download process for Pixiv Fanbox posts 73 | GdriveClient *gdrive.GDrive 74 | 75 | SessionCookieId string 76 | SessionCookies []*http.Cookie 77 | } 78 | 79 | // ValidateArgs validates the session cookie ID of the Pixiv Fanbox account to download from. 80 | // 81 | // Should be called after initialising the struct. 82 | func (pf *PixivFanboxDlOptions) ValidateArgs(userAgent string) { 83 | if pf.SessionCookieId != "" { 84 | pf.SessionCookies = []*http.Cookie{ 85 | api.VerifyAndGetCookie(utils.PIXIV_FANBOX, pf.SessionCookieId, userAgent), 86 | } 87 | } 88 | 89 | if pf.DlGdrive && pf.GdriveClient == nil { 90 | pf.DlGdrive = false 91 | } else if !pf.DlGdrive && pf.GdriveClient != nil { 92 | pf.GdriveClient = nil 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /src/cmds/textparser/pixiv.go: -------------------------------------------------------------------------------- 1 | package textparser 2 | 3 | import ( 4 | "fmt" 5 | "strings" 6 | "regexp" 7 | 8 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 9 | ) 10 | 11 | const P_BASE_REGEX_STR = `https://www\.pixiv\.net/(?:en/)?` 12 | 13 | var ( 14 | P_ILLUST_URL_REGEX = regexp.MustCompile( 15 | fmt.Sprintf( 16 | `^%sartworks/(?P<illustId>\d+)$`, 17 | P_BASE_REGEX_STR, 18 | ), 19 | ) 20 | P_ILLUST_REGEX_ID_INDEX = P_ILLUST_URL_REGEX.SubexpIndex("illustId") 21 | P_ARTIST_URL_REGEX = regexp.MustCompile( 22 | fmt.Sprintf( 23 | `^%susers/(?P<artistId>\d+)%s$`, 24 | P_BASE_REGEX_STR, 25 | PAGE_NUM_REGEX_STR, 26 | ), 27 | ) 28 | P_ARTIST_REGEX_ID_INDEX = P_ARTIST_URL_REGEX.SubexpIndex("artistId") 29 | P_ARTIST_REGEX_PAGE_NUM_INDEX = P_ARTIST_URL_REGEX.SubexpIndex(PAGE_NUM_REGEX_GRP_NAME) 30 | P_TAG_URL_REGEX = regexp.MustCompile( 31 | // ^https://www\.pixiv\.net/(?:en/)?tags/(?P<tag>[\w-%()]+)(?:/(?:artworks|illustrations|manga))?(?:\?[\w=&-.]+)?(?:; (?P<pageNum>[1-9]\d*(?:-[1-9]\d*)?))?$ 32 | "^" + P_BASE_REGEX_STR + `tags/(?P<tag>[\w-%()]+)(?:/(?:artworks|illustrations|manga))?(?:\?[\w=&-.]+)?` + PAGE_NUM_REGEX_STR + "$", 33 | ) 34 | P_TAG_REGEX_TAG_INDEX = P_TAG_URL_REGEX.SubexpIndex("tag") 35 | P_TAG_REGEX_PAGE_NUM_INDEX = P_TAG_URL_REGEX.SubexpIndex(PAGE_NUM_REGEX_GRP_NAME) 36 | ) 37 | 38 | type parsedPixivArtist struct { 39 | ArtistId string 40 | PageNum string 41 | } 42 | type parsedPixivTag struct { 43 | Tag string 44 | PageNum string 45 | } 46 | 47 | // ParsePixivTextFile parses the text file at the given path and returns a slice of post IDs, a slice of parsedPixivArtist, and a slice of parsedPixivTag.
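//
// A sketch of the line formats the regexes above accept, one URL per line in the
// text file (the IDs, tag, and page numbers are illustrative; the optional "; num"
// or "; min-max" suffix selects the page range):
//
//	https://www.pixiv.net/artworks/123456
//	https://www.pixiv.net/users/7890; 1-3
//	https://www.pixiv.net/en/tags/landscape/artworks; 2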
48 | func ParsePixivTextFile(textFilePath string) ([]string, []*parsedPixivArtist, []*parsedPixivTag) { 49 | f, reader := openTextFile( 50 | textFilePath, 51 | utils.PIXIV, 52 | ) 53 | defer f.Close() 54 | 55 | var postIds []string 56 | var artistIds []*parsedPixivArtist 57 | var tags []*parsedPixivTag 58 | for { 59 | lineBytes, isEof := readLine(reader, textFilePath, utils.PIXIV) 60 | if isEof { 61 | break 62 | } 63 | 64 | url := strings.TrimSpace(string(lineBytes)) 65 | if url == "" { 66 | continue 67 | } 68 | 69 | if matched := P_ILLUST_URL_REGEX.FindStringSubmatch(url); matched != nil { 70 | postIds = append(postIds, matched[P_ILLUST_REGEX_ID_INDEX]) 71 | continue 72 | } 73 | 74 | if matched := P_ARTIST_URL_REGEX.FindStringSubmatch(url); matched != nil { 75 | artistIds = append(artistIds, &parsedPixivArtist{ 76 | ArtistId: matched[P_ARTIST_REGEX_ID_INDEX], 77 | PageNum: matched[P_ARTIST_REGEX_PAGE_NUM_INDEX], 78 | }) 79 | continue 80 | } 81 | 82 | if matched := P_TAG_URL_REGEX.FindStringSubmatch(url); matched != nil { 83 | tags = append(tags, &parsedPixivTag{ 84 | Tag: matched[P_TAG_REGEX_TAG_INDEX], 85 | PageNum: matched[P_TAG_REGEX_PAGE_NUM_INDEX], 86 | }) 87 | continue 88 | } 89 | } 90 | 91 | return postIds, artistIds, tags 92 | } 93 | -------------------------------------------------------------------------------- /src/utils/file_extractor.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "io" 7 | "os" 8 | "path/filepath" 9 | 10 | "github.com/mholt/archiver/v4" 11 | ) 12 | 13 | type archiveExtractor struct { 14 | reader io.Reader 15 | readCloser io.ReadCloser 16 | ex archiver.Extractor 17 | } 18 | 19 | func extractFileLogic(ctx context.Context, src, dest string, extractor *archiveExtractor) error { 20 | handler := func(ctx context.Context, file archiver.File) error { 21 | extractedFilePath := filepath.Join(dest, file.NameInArchive) 22 | os.MkdirAll(filepath.Dir(extractedFilePath), 0755) 23 | 24 | af, err := file.Open() 25 | if err != nil { 26 | return err 27 | } 28 | defer af.Close() 29 | 30 | out, err := os.OpenFile( 31 | extractedFilePath, 32 | os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 33 | file.Mode(), 34 | ) 35 | if err != nil { 36 | return err 37 | } 38 | defer out.Close() 39 | 40 | _, err = io.Copy(out, af) 41 | if err != nil { 42 | return err 43 | } 44 | return nil 45 | } 46 | 47 | var input io.Reader 48 | if extractor.readCloser != nil { 49 | input = extractor.readCloser 50 | } else { 51 | input = extractor.reader 52 | } 53 | 54 | err := extractor.ex.Extract(ctx, input, nil, handler) 55 | if err != nil { 56 | if err == context.Canceled { 57 | // delete all the files that were extracted 58 | err := os.RemoveAll(dest) 59 | if err != nil { 60 | LogError(err, "", false, ERROR) 61 | } 62 | return err 63 | } 64 | return fmt.Errorf( 65 | "error %d: unable to extract zip file %s, more info => %v", 66 | OS_ERROR, 67 | src, 68 | err, 69 | ) 70 | } 71 | return nil 72 | } 73 | 74 | func getExtractor(f *os.File, src string) (*archiveExtractor, error) { 75 | format, archiveReader, err := archiver.Identify( 76 | filepath.Base(src), 77 | f, 78 | ) 79 | if err == archiver.ErrNoMatch { 80 | return nil, fmt.Errorf( 81 | "error %d: %s is not a valid zip file", 82 | OS_ERROR, 83 | src, 84 | ) 85 | } else if err != nil { 86 | return nil, err 87 | } 88 | 89 | var rc io.ReadCloser 90 | if decom, ok := format.(archiver.Decompressor); ok { 91 | rc, err = decom.OpenReader(archiveReader) 92 | if err != nil { 93 
| return nil, err 94 | } 95 | } 96 | 97 | ex, ok := format.(archiver.Extractor) 98 | if !ok { 99 | return nil, fmt.Errorf( 100 | "error %d: unable to extract zip file %s, more info => %v", 101 | UNEXPECTED_ERROR, 102 | src, 103 | err, 104 | ) 105 | } 106 | return &archiveExtractor{ 107 | reader: archiveReader, 108 | readCloser: rc, 109 | ex: ex, 110 | }, nil 111 | } 112 | 113 | func getErrIfNotIgnored(src string, ignoreIfMissing bool) error { 114 | if ignoreIfMissing { 115 | return nil 116 | } 117 | return fmt.Errorf( 118 | "error %d: %s does not exist", 119 | OS_ERROR, 120 | src, 121 | ) 122 | } 123 | 124 | // Extract all files from the given archive file to the given destination 125 | // 126 | // Code based on https://stackoverflow.com/a/24792688/2737403 127 | func ExtractFiles(ctx context.Context, src, dest string, ignoreIfMissing bool) error { 128 | if !PathExists(src) { 129 | return getErrIfNotIgnored(src, ignoreIfMissing) 130 | } 131 | 132 | f, err := os.Open(src) 133 | if err != nil { 134 | return fmt.Errorf( 135 | "error %d: unable to open zip file %s", 136 | OS_ERROR, 137 | src, 138 | ) 139 | } 140 | defer f.Close() 141 | 142 | extractor, err := getExtractor(f, src) 143 | if err != nil { 144 | return err 145 | } 146 | 147 | if extractor.readCloser != nil { 148 | defer extractor.readCloser.Close() 149 | } 150 | return extractFileLogic( 151 | ctx, 152 | src, 153 | dest, 154 | extractor, 155 | ) 156 | } 157 | -------------------------------------------------------------------------------- /src/api/pixiv/web/process.go: -------------------------------------------------------------------------------- 1 | package pixivweb 2 | 3 | import ( 4 | "net/http" 5 | 6 | "github.com/KJHJason/Cultured-Downloader-CLI/api/pixiv/models" 7 | "github.com/KJHJason/Cultured-Downloader-CLI/api/pixiv/ugoira" 8 | "github.com/KJHJason/Cultured-Downloader-CLI/api/pixiv/common" 9 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 10 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 11 | ) 12 | 13 | func processIllustratorPostJson(resJson *models.PixivWebIllustratorJson, pageNum string, pixivDlOptions *PixivWebDlOptions) ([]string, error) { 14 | minPage, maxPage, hasMax, err := utils.GetMinMaxFromStr(pageNum) 15 | if err != nil { 16 | return nil, err 17 | } 18 | minOffset, maxOffset := pixivcommon.ConvertPageNumToOffset(minPage, maxPage, utils.PIXIV_PER_PAGE, false) 19 | 20 | var artworkIds []string 21 | if pixivDlOptions.ArtworkType == "all" || pixivDlOptions.ArtworkType == "illust_and_ugoira" { 22 | illusts := resJson.Body.Illusts 23 | switch t := illusts.(type) { 24 | case map[string]interface{}: 25 | curOffset := 0 26 | for illustId := range t { 27 | curOffset++ 28 | if curOffset < minOffset { 29 | continue 30 | } 31 | if hasMax && curOffset > maxOffset { 32 | break 33 | } 34 | 35 | artworkIds = append(artworkIds, illustId) 36 | } 37 | default: // where there are no posts or has an unknown type 38 | break 39 | } 40 | } 41 | 42 | if pixivDlOptions.ArtworkType == "all" || pixivDlOptions.ArtworkType == "manga" { 43 | manga := resJson.Body.Manga 44 | switch t := manga.(type) { 45 | case map[string]interface{}: 46 | curOffset := 0 47 | for mangaId := range t { 48 | curOffset++ 49 | if curOffset < minOffset { 50 | continue 51 | } 52 | if hasMax && curOffset > maxOffset { 53 | break 54 | } 55 | 56 | artworkIds = append(artworkIds, mangaId) 57 | } 58 | default: // where there are no posts or has an unknown type 59 | break 60 | } 61 | } 62 | return artworkIds, nil 63 | } 64 | 65 | // Process the artwork 
details JSON and returns a slice of ToDownload structs 66 | // with their file paths, or a Ugoira struct (one of them will be nil depending on the artworkType) 67 | func processArtworkJson(res *http.Response, artworkType int64, postDownloadDir string) ([]*request.ToDownload, *models.Ugoira, error) { 68 | if artworkType == UGOIRA { 69 | var ugoiraJson models.PixivWebArtworkUgoiraJson 70 | if err := utils.LoadJsonFromResponse(res, &ugoiraJson); err != nil { 71 | return nil, nil, err 72 | } 73 | 74 | ugoiraMap := ugoiraJson.Body 75 | originalUrl := ugoiraMap.OriginalSrc 76 | ugoiraInfo := &models.Ugoira{ 77 | Url: originalUrl, 78 | FilePath: postDownloadDir, 79 | Frames: ugoira.MapDelaysToFilename(ugoiraMap.Frames), 80 | } 81 | return nil, ugoiraInfo, nil 82 | } 83 | 84 | var artworkUrls models.PixivWebArtworkJson 85 | if err := utils.LoadJsonFromResponse(res, &artworkUrls); err != nil { 86 | return nil, nil, err 87 | } 88 | 89 | var urlsToDownload []*request.ToDownload 90 | for _, artworkUrl := range artworkUrls.Body { 91 | urlsToDownload = append(urlsToDownload, &request.ToDownload{ 92 | Url: artworkUrl.Urls.Original, 93 | FilePath: postDownloadDir, 94 | }) 95 | } 96 | return urlsToDownload, nil, nil 97 | } 98 | 99 | // Process the tag search results JSON and returns a slice of artwork IDs 100 | func processTagJsonResults(res *http.Response) ([]string, error) { 101 | var pixivTagJson models.PixivTag 102 | if err := utils.LoadJsonFromResponse(res, &pixivTagJson); err != nil { 103 | return nil, err 104 | } 105 | 106 | artworksSlice := []string{} 107 | for _, illust := range pixivTagJson.Body.IllustManga.Data { 108 | artworksSlice = append(artworksSlice, illust.Id) 109 | } 110 | return artworksSlice, nil 111 | } 112 | -------------------------------------------------------------------------------- /src/gdrive/gdrive.go: -------------------------------------------------------------------------------- 1 | package gdrive 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "os" 7 | "regexp" 8 | 9 | "github.com/KJHJason/Cultured-Downloader-CLI/configs" 10 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 11 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 12 | "github.com/fatih/color" 13 | "google.golang.org/api/drive/v3" 14 | "google.golang.org/api/option" 15 | ) 16 | 17 | const ( 18 | HTTP3_SUPPORTED = true 19 | GDRIVE_ERROR_FILENAME = "gdrive_download.log" 20 | BASE_API_KEY_REGEX_STR = `AIza[\w-]{35}` 21 | 22 | // file fields to fetch from GDrive API: 23 | // https://developers.google.com/drive/api/v3/reference/files 24 | GDRIVE_FILE_FIELDS = "id,name,size,mimeType,md5Checksum" 25 | GDRIVE_FOLDER_FIELDS = "nextPageToken,files(id,name,size,mimeType,md5Checksum)" 26 | ) 27 | 28 | var ( 29 | API_KEY_REGEX = regexp.MustCompile(fmt.Sprintf(`^%s$`, BASE_API_KEY_REGEX_STR)) 30 | API_KEY_PARAM_REGEX = regexp.MustCompile(fmt.Sprintf(`key=%s`, BASE_API_KEY_REGEX_STR)) 31 | ) 32 | 33 | type GDrive struct { 34 | apiKey string // Google Drive API key to use 35 | client *drive.Service // Google Drive service client (if using service account credentials) 36 | apiUrl string // https://www.googleapis.com/drive/v3/files 37 | timeout int // timeout in seconds for GDrive API v3 38 | downloadTimeout int // timeout in seconds for GDrive file downloads 39 | maxDownloadWorkers int // max concurrent workers for downloading files 40 | } 41 | 42 | // Returns a GDrive structure with the given API key and max download workers 43 | func GetNewGDrive(apiKey, jsonPath string, config *configs.Config, maxDownloadWorkers int) *GDrive { 44 | if
jsonPath != "" && apiKey != "" { 45 | color.Red("Both Google Drive API key and service account credentials file cannot be used at the same time.") 46 | os.Exit(1) 47 | } else if jsonPath == "" && apiKey == "" { 48 | color.Red("Google Drive API key or service account credentials file is required.") 49 | os.Exit(1) 50 | } 51 | 52 | gdrive := &GDrive{ 53 | apiUrl: "https://www.googleapis.com/drive/v3/files", 54 | timeout: 15, 55 | downloadTimeout: 900, // 15 minutes 56 | maxDownloadWorkers: maxDownloadWorkers, 57 | } 58 | if apiKey != "" { 59 | gdrive.apiKey = apiKey 60 | gdriveIsValid, err := gdrive.GDriveKeyIsValid(config.UserAgent) 61 | if err != nil { 62 | color.Red(err.Error()) 63 | os.Exit(1) 64 | } else if !gdriveIsValid { 65 | color.Red("Google Drive API key is invalid.") 66 | os.Exit(1) 67 | } 68 | return gdrive 69 | } 70 | 71 | if !utils.PathExists(jsonPath) { 72 | color.Red("Unable to access Drive API due to missing credentials file: %s", jsonPath) 73 | os.Exit(1) 74 | } 75 | srv, err := drive.NewService(context.Background(), option.WithCredentialsFile(jsonPath)) 76 | if err != nil { 77 | color.Red("Unable to access Drive API due to %v", err) 78 | os.Exit(1) 79 | } 80 | gdrive.client = srv 81 | return gdrive 82 | } 83 | 84 | // Checks if the given Google Drive API key is valid 85 | // 86 | // Will return true if the given Google Drive API key is valid 87 | func (gdrive *GDrive) GDriveKeyIsValid(userAgent string) (bool, error) { 88 | match := API_KEY_REGEX.MatchString(gdrive.apiKey) 89 | if !match { 90 | return false, nil 91 | } 92 | 93 | params := map[string]string{"key": gdrive.apiKey} 94 | res, err := request.CallRequest( 95 | &request.RequestArgs{ 96 | Url: gdrive.apiUrl, 97 | Method: "GET", 98 | Timeout: gdrive.timeout, 99 | Params: params, 100 | UserAgent: userAgent, 101 | Http2: !HTTP3_SUPPORTED, 102 | Http3: HTTP3_SUPPORTED, 103 | }, 104 | ) 105 | if err != nil { 106 | return false, fmt.Errorf( 107 | "gdrive error %d: failed to check if Google Drive API key is valid, more info => %v", 108 | utils.CONNECTION_ERROR, 109 | err, 110 | ) 111 | } 112 | res.Body.Close() 113 | return res.StatusCode != 400, nil 114 | } 115 | -------------------------------------------------------------------------------- /src/go.mod: -------------------------------------------------------------------------------- 1 | module github.com/KJHJason/Cultured-Downloader-CLI 2 | 3 | go 1.22 4 | 5 | require ( 6 | github.com/PuerkitoBio/goquery v1.9.2 7 | github.com/chromedp/cdproto v0.0.0-20240512230644-b3296df1660c 8 | github.com/chromedp/chromedp v0.9.5 9 | github.com/fatih/color v1.17.0 10 | github.com/gen2brain/beeep v0.0.0-20240112042604-c7bb2cd88fea 11 | github.com/mholt/archiver/v4 v4.0.0-alpha.8 12 | github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c 13 | github.com/quic-go/quic-go v0.43.1 14 | github.com/spf13/cobra v1.8.0 15 | google.golang.org/api v0.180.0 16 | ) 17 | 18 | require ( 19 | cloud.google.com/go/auth v0.4.1 // indirect 20 | cloud.google.com/go/auth/oauth2adapt v0.2.2 // indirect 21 | cloud.google.com/go/compute/metadata v0.3.0 // indirect 22 | github.com/andybalholm/brotli v1.1.0 // indirect 23 | github.com/andybalholm/cascadia v1.3.2 // indirect 24 | github.com/bodgit/plumbing v1.3.0 // indirect 25 | github.com/bodgit/sevenzip v1.5.1 // indirect 26 | github.com/bodgit/windows v1.0.1 // indirect 27 | github.com/chromedp/sysutil v1.0.0 // indirect 28 | github.com/dsnet/compress v0.0.1 // indirect 29 | github.com/felixge/httpsnoop v1.0.4 // indirect 30 | github.com/go-logr/logr 
v1.4.1 // indirect 31 | github.com/go-logr/stdr v1.2.2 // indirect 32 | github.com/go-task/slim-sprig/v3 v3.0.0 // indirect 33 | github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 // indirect 34 | github.com/gobwas/httphead v0.1.0 // indirect 35 | github.com/gobwas/pool v0.2.1 // indirect 36 | github.com/gobwas/ws v1.4.0 // indirect 37 | github.com/godbus/dbus/v5 v5.1.0 // indirect 38 | github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect 39 | github.com/golang/protobuf v1.5.4 // indirect 40 | github.com/golang/snappy v0.0.4 // indirect 41 | github.com/google/pprof v0.0.0-20240509144519-723abb6459b7 // indirect 42 | github.com/google/s2a-go v0.1.7 // indirect 43 | github.com/google/uuid v1.6.0 // indirect 44 | github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect 45 | github.com/googleapis/gax-go/v2 v2.12.4 // indirect 46 | github.com/hashicorp/errwrap v1.1.0 // indirect 47 | github.com/hashicorp/go-multierror v1.1.1 // indirect 48 | github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect 49 | github.com/inconshreveable/mousetrap v1.1.0 // indirect 50 | github.com/josharian/intern v1.0.0 // indirect 51 | github.com/klauspost/compress v1.17.8 // indirect 52 | github.com/klauspost/pgzip v1.2.6 // indirect 53 | github.com/mailru/easyjson v0.7.7 // indirect 54 | github.com/mattn/go-colorable v0.1.13 // indirect 55 | github.com/mattn/go-isatty v0.0.20 // indirect 56 | github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d // indirect 57 | github.com/nwaples/rardecode/v2 v2.0.0-beta.2 // indirect 58 | github.com/onsi/ginkgo/v2 v2.17.3 // indirect 59 | github.com/pierrec/lz4/v4 v4.1.21 // indirect 60 | github.com/quic-go/qpack v0.4.0 // indirect 61 | github.com/spf13/pflag v1.0.5 // indirect 62 | github.com/tadvi/systray v0.0.0-20190226123456-11a2b8fa57af // indirect 63 | github.com/therootcompany/xz v1.0.1 // indirect 64 | github.com/ulikunitz/xz v0.5.12 // indirect 65 | go.opencensus.io v0.24.0 // indirect 66 | go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0 // indirect 67 | go.opentelemetry.io/otel v1.26.0 // indirect 68 | go.opentelemetry.io/otel/metric v1.26.0 // indirect 69 | go.opentelemetry.io/otel/trace v1.26.0 // indirect 70 | go.uber.org/mock v0.4.0 // indirect 71 | go4.org v0.0.0-20230225012048-214862532bf5 // indirect 72 | golang.org/x/crypto v0.23.0 // indirect 73 | golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 // indirect 74 | golang.org/x/mod v0.17.0 // indirect 75 | golang.org/x/net v0.25.0 // indirect 76 | golang.org/x/oauth2 v0.20.0 // indirect 77 | golang.org/x/sys v0.20.0 // indirect 78 | golang.org/x/text v0.15.0 // indirect 79 | golang.org/x/tools v0.21.0 // indirect 80 | google.golang.org/genproto/googleapis/api v0.0.0-20240429193739-8cf5692501f6 // indirect 81 | google.golang.org/genproto/googleapis/rpc v0.0.0-20240513163218-0867130af1f8 // indirect 82 | google.golang.org/grpc v1.64.0 // indirect 83 | google.golang.org/protobuf v1.34.1 // indirect 84 | ) 85 | -------------------------------------------------------------------------------- /src/api/pixiv/mobile/pixiv_mobile.go: -------------------------------------------------------------------------------- 1 | package pixivmobile 2 | 3 | import ( 4 | "fmt" 5 | "net/http" 6 | "os" 7 | "sync" 8 | "time" 9 | 10 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 11 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 12 | "github.com/fatih/color" 13 | ) 14 | 15 | type PixivMobile struct { 16 | // API information and its endpoints 17 | 
baseUrl string 18 | clientId string 19 | clientSecret string 20 | userAgent string 21 | authTokenUrl string 22 | loginUrl string 23 | redirectUri string 24 | refreshToken string 25 | 26 | // User given arguments 27 | apiTimeout int 28 | 29 | // Access token information 30 | accessTokenMu sync.Mutex 31 | accessTokenMap accessTokenInfo 32 | } 33 | 34 | // Get a new PixivMobile structure 35 | func NewPixivMobile(refreshToken string, timeout int) *PixivMobile { 36 | pixivMobile := &PixivMobile{ 37 | baseUrl: utils.PIXIV_MOBILE_URL, 38 | clientId: "MOBrBDS8blbauoSck0ZfDbtuzpyT", 39 | clientSecret: "lsACyCD94FhDUtGTXi3QzcFE2uU1hqtDaKeqrdwj", 40 | userAgent: "PixivIOSApp/7.13.3 (iOS 14.6; iPhone13,2)", 41 | authTokenUrl: "https://oauth.secure.pixiv.net/auth/token", 42 | loginUrl: utils.PIXIV_MOBILE_URL + "/web/v1/login", 43 | redirectUri: utils.PIXIV_MOBILE_URL + "/web/v1/users/auth/pixiv/callback", 44 | refreshToken: refreshToken, 45 | apiTimeout: timeout, 46 | } 47 | if refreshToken != "" { 48 | // refresh the access token and verify it 49 | err := pixivMobile.refreshAccessToken() 50 | if err != nil { 51 | color.Red(err.Error()) 52 | os.Exit(1) 53 | } 54 | } 55 | return pixivMobile 56 | } 57 | 58 | // This is due to Pixiv's strict rate limiting. 59 | // 60 | // Without delays, the user might get 429 too many requests 61 | // or the user's account might get suspended. 62 | // 63 | // Additionally, pixiv.net is protected by cloudflare, so 64 | // to prevent the user's IP reputation from going down, delays are added. 65 | func (pixiv *PixivMobile) Sleep() { 66 | time.Sleep(utils.GetRandomTime(1.0, 1.5)) 67 | } 68 | 69 | // Get the required headers to communicate with the Pixiv API 70 | func (pixiv *PixivMobile) getHeaders(additional map[string]string) map[string]string { 71 | headers := make(map[string]string) 72 | for k, v := range additional { 73 | headers[k] = v 74 | } 75 | 76 | baseHeaders := map[string]string{ 77 | "User-Agent": pixiv.userAgent, 78 | "App-OS": "ios", 79 | "App-OS-Version": "14.6", 80 | "Authorization": "Bearer " + pixiv.accessTokenMap.accessToken, 81 | } 82 | for k, v := range baseHeaders { 83 | headers[k] = v 84 | } 85 | return headers 86 | } 87 | 88 | 89 | // Sends a request to the Pixiv API and refreshes the access token if required 90 | // 91 | // Returns the JSON interface and errors if any 92 | func (pixiv *PixivMobile) SendRequest(reqArgs *request.RequestArgs) (*http.Response, error) { 93 | if reqArgs.Method == "" { 94 | reqArgs.Method = "GET" 95 | } 96 | if reqArgs.Timeout == 0 { 97 | reqArgs.Timeout = pixiv.apiTimeout 98 | } 99 | useHttp3 := utils.IsHttp3Supported(utils.PIXIV_MOBILE, true) 100 | reqArgs.Http3 = useHttp3 101 | reqArgs.Http2 = !useHttp3 102 | reqArgs.ValidateArgs() 103 | 104 | req, err := http.NewRequest(reqArgs.Method, reqArgs.Url, nil) 105 | if err != nil { 106 | return nil, err 107 | } 108 | 109 | refreshed, err := pixiv.refreshTokenIfReq() 110 | if err != nil { 111 | return nil, err 112 | } 113 | 114 | for k, v := range pixiv.getHeaders(reqArgs.Headers) { 115 | req.Header.Set(k, v) 116 | } 117 | request.AddParams(reqArgs.Params, req) 118 | 119 | var res *http.Response 120 | client := request.GetHttpClient(reqArgs) 121 | client.Timeout = time.Duration(reqArgs.Timeout) * time.Second 122 | for i := 1; i <= utils.RETRY_COUNTER; i++ { 123 | res, err = client.Do(req) 124 | if err == nil { 125 | if refreshed { 126 | continue 127 | } else if res.StatusCode == 200 || !reqArgs.CheckStatus { 128 | return res, nil 129 | } 130 | } 131 | 
time.Sleep(utils.GetRandomDelay()) 132 | } 133 | return nil, fmt.Errorf( 134 | "request to %s failed after %d retries", 135 | reqArgs.Url, 136 | utils.RETRY_COUNTER, 137 | ) 138 | } 139 | -------------------------------------------------------------------------------- /src/request/args.go: -------------------------------------------------------------------------------- 1 | package request 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "net/http" 7 | "strings" 8 | "regexp" 9 | 10 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 11 | ) 12 | 13 | type RequestHandler func (reqArgs *RequestArgs) (*http.Response, error) 14 | 15 | type RequestArgs struct { 16 | // Main Request Options 17 | Method string 18 | Url string 19 | Timeout int 20 | 21 | // Additional Request Options 22 | Headers map[string]string 23 | Params map[string]string 24 | Cookies []*http.Cookie 25 | UserAgent string 26 | DisableCompression bool 27 | 28 | // HTTP/2 and HTTP/3 Options 29 | Http2 bool 30 | Http3 bool 31 | 32 | // Check status will check the status code of the response for 200 OK. 33 | // If the status code is not 200 OK, it will retry several times and 34 | // if the status code is still not 200 OK, it will return an error. 35 | // Otherwise, it will return the response regardless of the status code. 36 | CheckStatus bool 37 | 38 | // Context is used to cancel the request if needed. 39 | // E.g. if the user presses Ctrl+C, we can use context.WithCancel(context.Background()) 40 | Context context.Context 41 | 42 | // RequestHandler is the main function that will be called to make the request. 43 | RequestHandler RequestHandler 44 | } 45 | 46 | var ( 47 | // Since the URLs below will be redirected to Fantia's AWS S3 URL, 48 | // we need to use HTTP/2 as it is not supported by HTTP/3 yet. 49 | FANTIA_ALBUM_URL = regexp.MustCompile( 50 | `^https://fantia.jp/posts/[\d]+/album_image`, 51 | ) 52 | FANTIA_DOWNLOAD_URL = regexp.MustCompile( 53 | `^https://fantia.jp/posts/[\d]+/download/[\d]+`, 54 | ) 55 | 56 | HTTP3_SUPPORT_ARR = [...]string{ 57 | "https://www.pixiv.net", 58 | "https://app-api.pixiv.net", 59 | 60 | "https://www.google.com", 61 | "https://drive.google.com", 62 | } 63 | ) 64 | 65 | func (args *RequestArgs) validateHttp3Arg() { 66 | if !args.Http2 && !args.Http3 { 67 | // if http2 and http3 are not enabled, 68 | // do a check to determine which protocol to use. 69 | if FANTIA_DOWNLOAD_URL.MatchString(args.Url) || FANTIA_ALBUM_URL.MatchString(args.Url) { 70 | args.Http2 = true 71 | } else { 72 | // check if the URL supports HTTP/3 first 73 | // before falling back to the default HTTP/2. 
74 | for _, domain := range HTTP3_SUPPORT_ARR { 75 | if strings.HasPrefix(args.Url, domain) { 76 | args.Http3 = true 77 | break 78 | } 79 | } 80 | // if HTTP/3 is not supported, fall back to HTTP/2 81 | if !args.Http3 { 82 | args.Http2 = true 83 | } 84 | } 85 | } else if args.Http2 && args.Http3 { 86 | panic( 87 | fmt.Errorf( 88 | "error %d: http2 and http3 cannot be enabled at the same time", 89 | utils.DEV_ERROR, 90 | ), 91 | ) 92 | } 93 | } 94 | 95 | func (args *RequestArgs) getDefaultArgs() { 96 | if args.RequestHandler == nil { 97 | args.RequestHandler = CallRequest 98 | } 99 | 100 | if args.Headers == nil { 101 | args.Headers = make(map[string]string) 102 | } 103 | 104 | if args.Params == nil { 105 | args.Params = make(map[string]string) 106 | } 107 | 108 | if args.Cookies == nil { 109 | args.Cookies = make([]*http.Cookie, 0) 110 | } 111 | 112 | if args.UserAgent == "" { 113 | args.UserAgent = utils.USER_AGENT 114 | } 115 | 116 | if args.Context == nil { 117 | args.Context = context.Background() 118 | } 119 | } 120 | 121 | // ValidateArgs validates the arguments of the request 122 | // 123 | // Will panic if the arguments are invalid as this is a developer error 124 | func (args *RequestArgs) ValidateArgs() { 125 | args.getDefaultArgs() 126 | args.validateHttp3Arg() 127 | 128 | if args.Method == "" { 129 | panic( 130 | fmt.Errorf( 131 | "error %d: method cannot be empty", 132 | utils.DEV_ERROR, 133 | ), 134 | ) 135 | } 136 | 137 | if args.Url == "" { 138 | panic( 139 | fmt.Errorf( 140 | "error %d: url cannot be empty", 141 | utils.DEV_ERROR, 142 | ), 143 | ) 144 | } 145 | 146 | if args.Timeout < 0 { 147 | panic( 148 | fmt.Errorf( 149 | "error %d: timeout cannot be negative", 150 | utils.DEV_ERROR, 151 | ), 152 | ) 153 | } else if args.Timeout == 0 { 154 | args.Timeout = 15 155 | } 156 | } 157 | -------------------------------------------------------------------------------- /src/api/fantia/args.go: -------------------------------------------------------------------------------- 1 | package fantia 2 | 3 | import ( 4 | "fmt" 5 | "net/http" 6 | "sync" 7 | 8 | "github.com/KJHJason/Cultured-Downloader-CLI/api" 9 | "github.com/KJHJason/Cultured-Downloader-CLI/configs" 10 | "github.com/KJHJason/Cultured-Downloader-CLI/gdrive" 11 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 12 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 13 | "github.com/PuerkitoBio/goquery" 14 | ) 15 | 16 | // FantiaDl is the struct that contains the 17 | // IDs of the Fantia fanclubs and posts to download. 18 | type FantiaDl struct { 19 | FanclubIds []string 20 | FanclubPageNums []string 21 | PostIds []string 22 | } 23 | 24 | // ValidateArgs validates the IDs of the Fantia fanclubs and posts to download. 25 | // 26 | // It also validates the page numbers of the fanclubs to download. 27 | // 28 | // Should be called after initialising the struct. 
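//
// A minimal initialisation sketch (the IDs and page ranges below are placeholders):
//
//	fantiaDl := &FantiaDl{
//	    FanclubIds:      []string{"123456"},
//	    FanclubPageNums: []string{"1-2"},
//	    PostIds:         []string{"7891011"},
//	}
//	fantiaDl.ValidateArgs()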
29 | func (f *FantiaDl) ValidateArgs() { 30 | utils.ValidateIds(f.PostIds) 31 | utils.ValidateIds(f.FanclubIds) 32 | f.PostIds = utils.RemoveSliceDuplicates(f.PostIds) 33 | 34 | if len(f.FanclubPageNums) > 0 { 35 | utils.ValidatePageNumInput( 36 | len(f.FanclubIds), 37 | f.FanclubPageNums, 38 | []string{ 39 | "Number of Fantia Fanclub ID(s) and page numbers must be equal.", 40 | }, 41 | ) 42 | } else { 43 | f.FanclubPageNums = make([]string, len(f.FanclubIds)) 44 | } 45 | 46 | f.FanclubIds, f.FanclubPageNums = utils.RemoveDuplicateIdAndPageNum( 47 | f.FanclubIds, 48 | f.FanclubPageNums, 49 | ) 50 | } 51 | 52 | // FantiaDlOptions is the struct that contains the options for downloading from Fantia. 53 | type FantiaDlOptions struct { 54 | DlThumbnails bool 55 | DlImages bool 56 | DlAttachments bool 57 | DlGdrive bool 58 | AutoSolveCaptcha bool // whether to use chromedp to solve reCAPTCHA automatically 59 | 60 | GdriveClient *gdrive.GDrive 61 | 62 | Configs *configs.Config 63 | 64 | SessionCookieId string 65 | SessionCookies []*http.Cookie 66 | 67 | csrfMu sync.Mutex 68 | CsrfToken string 69 | } 70 | 71 | // GetCsrfToken gets the CSRF token from Fantia's index HTML 72 | // which is required to communicate with their API. 73 | func (f *FantiaDlOptions) GetCsrfToken(userAgent string) error { 74 | f.csrfMu.Lock() 75 | defer f.csrfMu.Unlock() 76 | 77 | useHttp3 := utils.IsHttp3Supported(utils.FANTIA, false) 78 | res, err := request.CallRequest( 79 | &request.RequestArgs{ 80 | Method: "GET", 81 | Url: "https://fantia.jp/", 82 | Cookies: f.SessionCookies, 83 | Http2: !useHttp3, 84 | Http3: useHttp3, 85 | CheckStatus: true, 86 | UserAgent: userAgent, 87 | }, 88 | ) 89 | if err != nil { 90 | return fmt.Errorf( 91 | "fantia error %d, failed to get CSRF token from Fantia: %w", 92 | utils.CONNECTION_ERROR, 93 | err, 94 | ) 95 | } 96 | 97 | defer res.Body.Close() 98 | if res.StatusCode != 200 { 99 | return fmt.Errorf( 100 | "fantia error %d, failed to get CSRF token from Fantia: %w", 101 | utils.RESPONSE_ERROR, 102 | err, 103 | ) 104 | } 105 | 106 | // parse the response 107 | doc, err := goquery.NewDocumentFromReader(res.Body) 108 | if err != nil { 109 | return fmt.Errorf( 110 | "fantia error %d, failed to parse response body when getting CSRF token from Fantia: %w", 111 | utils.HTML_ERROR, 112 | err, 113 | ) 114 | } 115 | 116 | if csrfToken, ok := doc.Find("meta[name=csrf-token]").Attr("content"); !ok { 117 | // shouldn't happen but just in case if Fantia's csrf token changes 118 | docHtml, err := doc.Html() 119 | if err != nil { 120 | docHtml = "failed to get HTML" 121 | } 122 | return fmt.Errorf( 123 | "fantia error %d, failed to get CSRF Token from Fantia, please report this issue!\nHTML: %s", 124 | utils.HTML_ERROR, 125 | docHtml, 126 | ) 127 | } else { 128 | f.CsrfToken = csrfToken 129 | } 130 | return nil 131 | } 132 | 133 | // ValidateArgs validates the options for downloading from Fantia. 134 | // 135 | // Should be called after initialising the struct. 
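//
// A minimal usage sketch (hedged example; config is assumed to be an initialised
// *configs.Config and the cookie value is a placeholder):
//
//	dlOptions := &FantiaDlOptions{
//	    DlThumbnails:    true,
//	    DlImages:        true,
//	    DlAttachments:   true,
//	    Configs:         config,
//	    SessionCookieId: "<your Fantia session cookie value>",
//	}
//	if err := dlOptions.ValidateArgs(config.UserAgent); err != nil {
//	    utils.LogError(err, "", true, utils.ERROR)
//	}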
136 | func (f *FantiaDlOptions) ValidateArgs(userAgent string) error { 137 | if f.SessionCookieId != "" { 138 | f.SessionCookies = []*http.Cookie{ 139 | api.VerifyAndGetCookie(utils.FANTIA, f.SessionCookieId, userAgent), 140 | } 141 | } 142 | 143 | if f.DlGdrive && f.GdriveClient == nil { 144 | f.DlGdrive = false 145 | } else if !f.DlGdrive && f.GdriveClient != nil { 146 | f.GdriveClient = nil 147 | } 148 | 149 | return f.GetCsrfToken(userAgent) 150 | } 151 | -------------------------------------------------------------------------------- /src/api/kemono/process.go: -------------------------------------------------------------------------------- 1 | package kemono 2 | 3 | import ( 4 | "fmt" 5 | "strings" 6 | "regexp" 7 | "path/filepath" 8 | 9 | "github.com/KJHJason/Cultured-Downloader-CLI/api/kemono/models" 10 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 11 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 12 | "github.com/KJHJason/Cultured-Downloader-CLI/gdrive" 13 | ) 14 | 15 | var ( 16 | imgSrcTagRegex = regexp.MustCompile(`(?i)<img[^>]+src=(?:\\)?"(?P<imgSrc>[^">]+)(?:\\)?"[^>]*>`) 17 | imgSrcTagRegexIdx = imgSrcTagRegex.SubexpIndex("imgSrc") 18 | ) 19 | 20 | func getInlineImages(content, postFolderPath, tld string) []*request.ToDownload { 21 | var toDownload []*request.ToDownload 22 | for _, match := range imgSrcTagRegex.FindAllStringSubmatch(content, -1) { 23 | imgSrc := match[imgSrcTagRegexIdx] 24 | if imgSrc == "" { 25 | continue 26 | } 27 | toDownload = append(toDownload, &request.ToDownload{ 28 | Url: getKemonoUrl(tld) + imgSrc, 29 | FilePath: filepath.Join(postFolderPath, utils.IMAGES_FOLDER, utils.GetLastPartOfUrl(imgSrc)), 30 | }) 31 | } 32 | return toDownload 33 | } 34 | 35 | // Since the name of each attachment or file is not always the filename of the file as it could be a URL, 36 | // we need to check if the returned name value is a URL and if it is, we just return the postFolderPath as the file path. 37 | func getKemonoFilePath(postFolderPath, childDir, fileName string) string { 38 | if strings.HasPrefix(fileName, "http://") || strings.HasPrefix(fileName, "https://") { 39 | return filepath.Join(postFolderPath, childDir) 40 | } 41 | return filepath.Join(postFolderPath, childDir, fileName) 42 | } 43 | 44 | func processJson(resJson *models.MainKemonoJson, tld, downloadPath string, dlOptions *KemonoDlOptions) ([]*request.ToDownload, []*request.ToDownload) { 45 | var creatorNamePath string 46 | if creatorName, err := getCreatorName(resJson.Service, resJson.User, dlOptions); err != nil { 47 | err = fmt.Errorf( 48 | "error getting creator name for %q (%s)... falling back to creator ID!
(Details below)\n%v", 49 | resJson.User, 50 | resJson.Service, 51 | err, 52 | ) 53 | utils.LogError(err, "", false, utils.ERROR) 54 | creatorNamePath = resJson.User 55 | } else { 56 | creatorNamePath = fmt.Sprintf("%s [%s]", creatorName, resJson.User) 57 | } 58 | 59 | postFolderPath := utils.GetPostFolder( 60 | filepath.Join(downloadPath, "Kemono-Party", resJson.Service), 61 | creatorNamePath, 62 | resJson.Id, 63 | resJson.Title, 64 | ) 65 | 66 | var gdriveLinks []*request.ToDownload 67 | var toDownload []*request.ToDownload 68 | if dlOptions.DlAttachments { 69 | toDownload = getInlineImages(resJson.Content, postFolderPath, tld) 70 | for _, attachment := range resJson.Attachments { 71 | toDownload = append(toDownload, &request.ToDownload{ 72 | Url: getKemonoUrl(tld) + attachment.Path, 73 | FilePath: getKemonoFilePath(postFolderPath, utils.KEMONO_CONTENT_FOLDER, attachment.Name), 74 | }) 75 | } 76 | 77 | if resJson.Embed.Url != "" { 78 | embedsDirPath := filepath.Join(postFolderPath, utils.KEMONO_EMBEDS_FOLDER) 79 | if dlOptions.Configs.LogUrls { 80 | utils.DetectOtherExtDLLink(resJson.Embed.Url, embedsDirPath) 81 | } 82 | if utils.DetectGDriveLinks(resJson.Embed.Url, postFolderPath, true, dlOptions.Configs.LogUrls,) && dlOptions.DlGdrive { 83 | gdriveLinks = append(gdriveLinks, &request.ToDownload{ 84 | Url: resJson.Embed.Url, 85 | FilePath: embedsDirPath, 86 | }) 87 | } 88 | } 89 | 90 | if resJson.File.Path != "" { 91 | // usually is the thumbnail of the post 92 | toDownload = append(toDownload, &request.ToDownload{ 93 | Url: getKemonoUrl(tld) + resJson.File.Path, 94 | FilePath: getKemonoFilePath(postFolderPath, "", resJson.File.Name), 95 | }) 96 | } 97 | } 98 | 99 | contentGdriveLinks := gdrive.ProcessPostText( 100 | resJson.Content, 101 | postFolderPath, 102 | dlOptions.DlGdrive, 103 | dlOptions.Configs.LogUrls, 104 | ) 105 | gdriveLinks = append(gdriveLinks, contentGdriveLinks...) 106 | return toDownload, gdriveLinks 107 | } 108 | 109 | func processMultipleJson(resJson models.KemonoJson, tld, downloadPath string, dlOptions *KemonoDlOptions) ([]*request.ToDownload, []*request.ToDownload) { 110 | var urlsToDownload, gdriveLinks []*request.ToDownload 111 | for _, post := range resJson { 112 | toDownload, foundGdriveLinks := processJson(post, tld, downloadPath, dlOptions) 113 | urlsToDownload = append(urlsToDownload, toDownload...) 114 | gdriveLinks = append(gdriveLinks, foundGdriveLinks...) 
115 | } 116 | return urlsToDownload, gdriveLinks 117 | } 118 | -------------------------------------------------------------------------------- /src/utils/log_error.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "log" 7 | "os" 8 | "path/filepath" 9 | "strings" 10 | "sync" 11 | "time" 12 | 13 | "github.com/fatih/color" 14 | ) 15 | 16 | const LogSuffix = "\n\n" 17 | var ( 18 | mainLogger *logger 19 | logFolder = filepath.Join(APP_PATH, "logs") 20 | logFilePath = filepath.Join( 21 | logFolder, 22 | fmt.Sprintf( 23 | "cultured_downloader-cli_v%s_%s.log", 24 | VERSION, 25 | time.Now().Format("2006-01-02"), 26 | ), 27 | ) 28 | ) 29 | 30 | func init() { 31 | // will be opened throughout the program's runtime 32 | // hence, there is no need to call f.Close() at the end of this function 33 | os.MkdirAll(logFolder, 0755) 34 | f, fileErr := os.OpenFile( 35 | logFilePath, 36 | os.O_WRONLY|os.O_CREATE|os.O_APPEND, 37 | 0666, 38 | ) 39 | if fileErr != nil { 40 | fileErr = fmt.Errorf( 41 | "error opening log file: %v\nlog file path: %s", 42 | fileErr, 43 | logFilePath, 44 | ) 45 | log.Println(color.RedString(fileErr.Error())) 46 | os.Exit(1) 47 | } 48 | mainLogger = NewLogger(f) 49 | } 50 | 51 | // Delete all empty log files and log files 52 | // older than 30 days except for the current day's log file. 53 | func DeleteEmptyAndOldLogs() error { 54 | err := filepath.Walk(logFolder, func(path string, info os.FileInfo, err error) error { 55 | if err != nil { 56 | return err 57 | } 58 | 59 | if info.IsDir() || path == logFilePath { 60 | return nil 61 | } 62 | 63 | if info.Size() == 0 || info.ModTime().Before(time.Now().AddDate(0, 0, -30)) { 64 | return os.Remove(path) 65 | } 66 | 67 | return nil 68 | }) 69 | 70 | if err != nil { 71 | return err 72 | } 73 | 74 | return nil 75 | } 76 | 77 | // Thread-safe logging function that logs to the current day's versioned log file in the logs directory 78 | func LogError(err error, errorMsg string, exit bool, level int) { 79 | if err == nil && errorMsg == "" { 80 | return 81 | } 82 | 83 | if err != nil && errorMsg != "" { 84 | mainLogger.LogBasedOnLvl(level, err.Error() + LogSuffix) 85 | if errorMsg != "" { 86 | mainLogger.LogBasedOnLvlf(level, "Additional info: %v%s", errorMsg, LogSuffix) 87 | } 88 | } else if err != nil { 89 | mainLogger.LogBasedOnLvl(level, err.Error() + LogSuffix) 90 | } else { 91 | mainLogger.LogBasedOnLvlf(level, errorMsg + LogSuffix) 92 | } 93 | 94 | if exit { 95 | if err != nil { 96 | color.Red(err.Error()) 97 | } else { 98 | color.Red(errorMsg) 99 | } 100 | os.Exit(1) 101 | } 102 | } 103 | 104 | // Uses the thread-safe LogError() function to log a slice of errors or a channel of errors 105 | // 106 | // Also returns if any errors were due to context.Canceled which is caused by Ctrl + C.
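//
// A minimal usage sketch (errSlice and errChan are assumed to come from the caller;
// pass either a channel or a slice of errors, never both):
//
//	// log a slice of errors without exiting:
//	hasCancelled := LogErrors(false, nil, ERROR, errSlice...)
//
//	// or drain an error channel that the producer closes when done:
//	hasCancelled = LogErrors(false, errChan, ERROR)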
107 | func LogErrors(exit bool, errChan chan error, level int, errs ...error) bool { 108 | if errChan != nil && len(errs) > 0 { 109 | panic( 110 | fmt.Sprintf( 111 | "error %d: cannot pass both an error channel and a slice of errors to LogErrors()", 112 | DEV_ERROR, 113 | ), 114 | ) 115 | } 116 | 117 | hasCanceled := false 118 | if errChan != nil { 119 | for err := range errChan { 120 | if err == context.Canceled { 121 | if !hasCanceled { 122 | hasCanceled = true 123 | } 124 | continue 125 | } 126 | LogError(err, "", exit, level) 127 | } 128 | return hasCanceled 129 | } 130 | 131 | for _, err := range errs { 132 | if err == context.Canceled { 133 | if !hasCanceled { 134 | hasCanceled = true 135 | } 136 | continue 137 | } 138 | LogError(err, "", exit, level) 139 | } 140 | return hasCanceled 141 | } 142 | 143 | var logToPathMux sync.Mutex 144 | 145 | // Thread-safe logging function that logs to the provided file path 146 | func LogMessageToPath(message, filePath string, level int) { 147 | logToPathMux.Lock() 148 | defer logToPathMux.Unlock() 149 | 150 | os.MkdirAll(filepath.Dir(filePath), 0755) 151 | if PathExists(filePath) { 152 | logFileContents, err := os.ReadFile(filePath) 153 | if err != nil { 154 | err = fmt.Errorf( 155 | "error %d: failed to read log file, more info => %v\nfile path: %s\noriginal message: %s", 156 | OS_ERROR, 157 | err, 158 | filePath, 159 | message, 160 | ) 161 | LogError(err, "", false, ERROR) 162 | return 163 | } 164 | 165 | // check if the same message has already been logged 166 | if strings.Contains(string(logFileContents), message) { 167 | return 168 | } 169 | } 170 | 171 | logFile, err := os.OpenFile( 172 | filePath, 173 | os.O_RDWR|os.O_CREATE|os.O_APPEND, 174 | 0666, 175 | ) 176 | if err != nil { 177 | err = fmt.Errorf( 178 | "error %d: failed to open log file, more info => %v\nfile path: %s\noriginal message: %s", 179 | OS_ERROR, 180 | err, 181 | filePath, 182 | message, 183 | ) 184 | LogError(err, "", false, ERROR) 185 | return 186 | } 187 | defer logFile.Close() 188 | 189 | pathLogger := NewLogger(logFile) 190 | pathLogger.LogBasedOnLvl(level, message) 191 | } 192 | -------------------------------------------------------------------------------- /src/cmds/kemono.go: -------------------------------------------------------------------------------- 1 | package cmds 2 | 3 | import ( 4 | "github.com/KJHJason/Cultured-Downloader-CLI/api/kemono" 5 | "github.com/KJHJason/Cultured-Downloader-CLI/configs" 6 | "github.com/KJHJason/Cultured-Downloader-CLI/gdrive" 7 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 8 | "github.com/KJHJason/Cultured-Downloader-CLI/cmds/textparser" 9 | "github.com/spf13/cobra" 10 | ) 11 | 12 | var ( 13 | kemonoDlTextFile string 14 | kemonoCookieFile string 15 | kemonoSession string 16 | kemonoCreatorUrls []string 17 | kemonoPageNums []string 18 | kemonoPostUrls []string 19 | kemonoDlGdrive bool 20 | kemonoGdriveApiKey string 21 | kemonoGdriveServiceAccPath string 22 | kemonoDlAttachments bool 23 | kemonoOverwrite bool 24 | kemonoLogUrls bool 25 | kemonoDlFav bool 26 | kemonoUserAgent string 27 | kemonoCmd = &cobra.Command{ 28 | Use: "kemono", 29 | Short: "Download from Kemono Party", 30 | Long: "Supports downloads from creators and posts on Kemono Party.", 31 | Run: func(cmd *cobra.Command, args []string) { 32 | kemonoConfig := &configs.Config{ 33 | OverwriteFiles: kemonoOverwrite, 34 | UserAgent: kemonoUserAgent, 35 | LogUrls: kemonoLogUrls, 36 | } 37 | var gdriveClient *gdrive.GDrive 38 | if kemonoGdriveApiKey != "" || 
kemonoGdriveServiceAccPath != "" { 39 | gdriveClient = gdrive.GetNewGDrive( 40 | kemonoGdriveApiKey, 41 | kemonoGdriveServiceAccPath, 42 | kemonoConfig, 43 | utils.MAX_CONCURRENT_DOWNLOADS, 44 | ) 45 | } 46 | 47 | kemonoDl := &kemono.KemonoDl{ 48 | CreatorUrls: kemonoCreatorUrls, 49 | CreatorPageNums: kemonoPageNums, 50 | PostUrls: kemonoPostUrls, 51 | } 52 | if kemonoDlTextFile != "" { 53 | kemonoPostToDl, kemonoCreatorToDl := textparser.ParseKemonoTextFile(kemonoDlTextFile) 54 | kemonoDl.PostsToDl = kemonoPostToDl 55 | kemonoDl.CreatorsToDl = kemonoCreatorToDl 56 | } 57 | kemonoDl.ValidateArgs() 58 | 59 | kemonoDlOptions := &kemono.KemonoDlOptions{ 60 | DlAttachments: kemonoDlAttachments, 61 | DlGdrive: kemonoDlGdrive, 62 | Configs: kemonoConfig, 63 | SessionCookieId: kemonoSession, 64 | GdriveClient: gdriveClient, 65 | } 66 | if kemonoCookieFile != "" { 67 | cookies, err := utils.ParseNetscapeCookieFile( 68 | kemonoCookieFile, 69 | kemonoSession, 70 | utils.KEMONO, 71 | ) 72 | if err != nil { 73 | utils.LogError( 74 | err, 75 | "", 76 | true, 77 | utils.ERROR, 78 | ) 79 | } 80 | kemonoDlOptions.SessionCookies = cookies 81 | } 82 | 83 | kemonoDlOptions.ValidateArgs(kemonoUserAgent) 84 | 85 | utils.PrintWarningMsg() 86 | kemono.KemonoDownloadProcess( 87 | kemonoConfig, 88 | kemonoDl, 89 | kemonoDlOptions, 90 | kemonoDlFav, 91 | ) 92 | }, 93 | } 94 | ) 95 | 96 | func init() { 97 | multipleUrlsMsg := "Multiple URLs can be supplied by separating them with a comma.\n" + 98 | "Example: \"https://kemono.party/service/user/123,https://kemono.party/service/user/456\" (without the quotes)" 99 | kemonoCmd.Flags().StringVarP( 100 | &kemonoSession, 101 | "session", 102 | "s", 103 | "", 104 | utils.CombineStringsWithNewline( 105 | "Your Kemono Party \"session\" cookie value to use for the requests to Kemono Party.", 106 | "Required to get past Kemono Party's DDoS protection and to download from your favourites.", 107 | ), 108 | ) 109 | kemonoCmd.MarkFlagRequired("session") 110 | kemonoCmd.Flags().StringSliceVar( 111 | &kemonoCreatorUrls, 112 | "creator_url", 113 | []string{}, 114 | utils.CombineStringsWithNewline( 115 | "Kemono Party creator URL(s) to download from.", 116 | multipleUrlsMsg, 117 | ), 118 | ) 119 | kemonoCmd.Flags().StringSliceVar( 120 | &kemonoPageNums, 121 | "page_num", 122 | []string{}, 123 | utils.CombineStringsWithNewline( 124 | "Min and max page numbers to search for corresponding to the order of the supplied Kemono Party creator URL(s).", 125 | "Format: \"num\", \"minNum-maxNum\", or \"\" to download all pages", 126 | "Leave blank to download all pages from each creator on Kemono Party.", 127 | ), 128 | ) 129 | kemonoCmd.Flags().StringSliceVar( 130 | &kemonoPostUrls, 131 | "post_url", 132 | []string{}, 133 | utils.CombineStringsWithNewline( 134 | "Kemono Party post URL(s) to download.", 135 | multipleUrlsMsg, 136 | ), 137 | ) 138 | kemonoCmd.Flags().BoolVarP( 139 | &kemonoDlGdrive, 140 | "dl_gdrive", 141 | "g", 142 | true, 143 | "Whether to download the Google Drive links of a post on Kemono Party.", 144 | ) 145 | kemonoCmd.Flags().BoolVarP( 146 | &kemonoDlAttachments, 147 | "dl_attachments", 148 | "a", 149 | true, 150 | "Whether to download the attachments (images, zipped files, etc.)
of a post on Kemono Party.", 151 | ) 152 | } 153 | -------------------------------------------------------------------------------- /src/utils/constants.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "fmt" 5 | "net/http" 6 | "os" 7 | "path/filepath" 8 | "regexp" 9 | "runtime" 10 | ) 11 | 12 | const ( 13 | // Error codes 14 | DEV_ERROR = iota + 1000 15 | UNEXPECTED_ERROR 16 | OS_ERROR 17 | INPUT_ERROR 18 | CMD_ERROR 19 | CONNECTION_ERROR 20 | RESPONSE_ERROR 21 | DOWNLOAD_ERROR 22 | JSON_ERROR 23 | HTML_ERROR 24 | CAPTCHA_ERROR 25 | ) 26 | 27 | // Returns the path to the application's config directory 28 | func getAppPath() string { 29 | appPath, err := os.UserConfigDir() 30 | if err != nil { 31 | panic( 32 | fmt.Errorf( 33 | "error %d, failed to get user's config directory: %v", 34 | OS_ERROR, 35 | err, 36 | ), 37 | ) 38 | } 39 | return filepath.Join(appPath, "Cultured-Downloader") 40 | } 41 | 42 | const ( 43 | DEBUG_MODE = false // Will save a copy of all JSON responses from the API 44 | VERSION = "1.2.5" 45 | MAX_RETRY_DELAY = 3 46 | MIN_RETRY_DELAY = 1 47 | RETRY_COUNTER = 4 48 | MAX_CONCURRENT_DOWNLOADS = 4 49 | PIXIV_MAX_CONCURRENT_DOWNLOADS = 3 50 | MAX_API_CALLS = 10 51 | 52 | PAGE_NUM_REGEX_STR = `[1-9]\d*(-[1-9]\d*)?` 53 | DOWNLOAD_TIMEOUT = 25 * 60 // 25 minutes in seconds as downloads 54 | // can take quite a while for large files (especially for Pixiv) 55 | // However, the average max file size on these platforms is around 300MB. 56 | // Note: Fantia does have a max file size per post of 3GB if one has paid extra for it. 57 | 58 | FANTIA = "fantia" 59 | FANTIA_TITLE = "Fantia" 60 | FANTIA_URL = "https://fantia.jp" 61 | FANTIA_RECAPTCHA_URL = "https://fantia.jp/recaptcha" 62 | 63 | PIXIV = "pixiv" 64 | PIXIV_MOBILE = "pixiv_mobile" 65 | PIXIV_TITLE = "Pixiv" 66 | PIXIV_PER_PAGE = 60 67 | PIXIV_URL = "https://www.pixiv.net" 68 | PIXIV_API_URL = "https://www.pixiv.net/ajax" 69 | PIXIV_MOBILE_URL = "https://app-api.pixiv.net" 70 | 71 | PIXIV_FANBOX = "fanbox" 72 | PIXIV_FANBOX_TITLE = "Pixiv Fanbox" 73 | PIXIV_FANBOX_URL = "https://www.fanbox.cc" 74 | PIXIV_FANBOX_API_URL = "https://api.fanbox.cc" 75 | 76 | KEMONO = "kemono" 77 | KEMONO_SESSION_COOKIE_NAME = "session" 78 | KEMONO_COOKIE_DOMAIN = "kemono.party" 79 | KEMONO_BACKUP = "kemono_backup" 80 | KEMONO_COOKIE_BACKUP_DOMAIN = "kemono.su" 81 | KEMONO_TITLE = "Kemono Party" 82 | KEMONO_PER_PAGE = 50 83 | KEMONO_TLD = "party" 84 | KEMONO_BACKUP_TLD = "su" 85 | KEMONO_URL = "https://kemono.party" 86 | KEMONO_API_URL = "https://kemono.party/api/v1" 87 | BACKUP_KEMONO_URL = "https://kemono.su" 88 | BACKUP_KEMONO_API_URL = "https://kemono.su/api/v1" 89 | 90 | PASSWORD_FILENAME = "detected_passwords.txt" 91 | ATTACHMENT_FOLDER = "attachments" 92 | IMAGES_FOLDER = "images" 93 | 94 | KEMONO_EMBEDS_FOLDER = "embeds" 95 | KEMONO_CONTENT_FOLDER = "post_content" 96 | 97 | GDRIVE_URL = "https://drive.google.com" 98 | GDRIVE_FOLDER = "gdrive" 99 | GDRIVE_FILENAME = "detected_gdrive_links.txt" 100 | OTHER_LINKS_FILENAME = "detected_external_links.txt" 101 | ) 102 | 103 | type cookieInfo struct { 104 | Domain string 105 | Name string 106 | SameSite http.SameSite 107 | } 108 | 109 | // Although the variables below are not 110 | // constants, they are not supposed to be changed 111 | var ( 112 | USER_AGENT string 113 | 114 | APP_PATH = getAppPath() 115 | DOWNLOAD_PATH = GetDefaultDownloadPath() 116 | 117 | PAGE_NUM_REGEX = regexp.MustCompile( 118 | fmt.Sprintf(`^%s$`,
PAGE_NUM_REGEX_STR), 119 | ) 120 | NUMBER_REGEX = regexp.MustCompile(`^\d+$`) 121 | GDRIVE_URL_REGEX = regexp.MustCompile( 122 | `https://drive\.google\.com/(?P<type>file/d|drive/(u/\d+/)?folders)/(?P<id>[\w-]+)`, 123 | ) 124 | GDRIVE_REGEX_ID_INDEX = GDRIVE_URL_REGEX.SubexpIndex("id") 125 | GDRIVE_REGEX_TYPE_INDEX = GDRIVE_URL_REGEX.SubexpIndex("type") 126 | FANTIA_IMAGE_URL_REGEX = regexp.MustCompile( 127 | `original_url\":\"(?P<url>/posts/\d+/album_image\?query=[\w%-]*)\"`, 128 | ) 129 | FANTIA_REGEX_URL_INDEX = FANTIA_IMAGE_URL_REGEX.SubexpIndex("url") 130 | 131 | // For Pixiv Fanbox 132 | PASSWORD_TEXTS = []string{"パス", "Pass", "pass", "密码"} 133 | EXTERNAL_DOWNLOAD_PLATFORMS = []string{"mega", "gigafile", "dropbox", "mediafire"} 134 | ) 135 | 136 | func init() { 137 | var userAgent = map[string]string{ 138 | "linux": "Mozilla/5.0 (X11; Linux x86_64)", 139 | "darwin": "Mozilla/5.0 (Macintosh; Intel Mac OS X 12_6)", 140 | "windows": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", 141 | } 142 | userAgentOS, ok := userAgent[runtime.GOOS] 143 | if !ok { 144 | panic( 145 | fmt.Errorf( 146 | "error %d: Failed to get user agent OS as your OS, %q, is not supported", 147 | OS_ERROR, 148 | runtime.GOOS, 149 | ), 150 | ) 151 | } 152 | USER_AGENT = fmt.Sprintf("%s AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36", userAgentOS) 153 | } 154 | -------------------------------------------------------------------------------- /src/cmds/pixiv_fanbox.go: -------------------------------------------------------------------------------- 1 | package cmds 2 | 3 | import ( 4 | "github.com/KJHJason/Cultured-Downloader-CLI/api/pixivfanbox" 5 | "github.com/KJHJason/Cultured-Downloader-CLI/configs" 6 | "github.com/KJHJason/Cultured-Downloader-CLI/gdrive" 7 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 8 | "github.com/KJHJason/Cultured-Downloader-CLI/cmds/textparser" 9 | "github.com/spf13/cobra" 10 | ) 11 | 12 | var ( 13 | fanboxDlTextFile string 14 | fanboxCookieFile string 15 | fanboxSession string 16 | fanboxCreatorIds []string 17 | fanboxPageNums []string 18 | fanboxPostIds []string 19 | fanboxDlThumbnails bool 20 | fanboxDlImages bool 21 | fanboxDlAttachments bool 22 | fanboxDlGdrive bool 23 | fanboxGdriveApiKey string 24 | fanboxGdriveServiceAccPath string 25 | fanboxOverwriteFiles bool 26 | fanboxLogUrls bool 27 | fanboxUserAgent string 28 | pixivFanboxCmd = &cobra.Command{ 29 | Use: "pixiv_fanbox", 30 | Short: "Download from Pixiv Fanbox", 31 | Long: "Supports downloads from Pixiv Fanbox creators and individual posts.", 32 | Run: func(cmd *cobra.Command, args []string) { 33 | pixivFanboxConfig := &configs.Config{ 34 | OverwriteFiles: fanboxOverwriteFiles, 35 | UserAgent: fanboxUserAgent, 36 | LogUrls: fanboxLogUrls, 37 | } 38 | var gdriveClient *gdrive.GDrive 39 | if fanboxGdriveApiKey != "" || fanboxGdriveServiceAccPath != "" { 40 | gdriveClient = gdrive.GetNewGDrive( 41 | fanboxGdriveApiKey, 42 | fanboxGdriveServiceAccPath, 43 | pixivFanboxConfig, 44 | utils.MAX_CONCURRENT_DOWNLOADS, 45 | ) 46 | } 47 | 48 | if fanboxDlTextFile != "" { 49 | postIds, creatorInfoSlice := textparser.ParsePixivFanboxTextFile(fanboxDlTextFile) 50 | fanboxPostIds = append(fanboxPostIds, postIds...)
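// Merge the creator IDs and page numbers parsed from the text file with any supplied via the CLI flags.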
51 | 52 | for _, creatorInfo := range creatorInfoSlice { 53 | fanboxCreatorIds = append(fanboxCreatorIds, creatorInfo.CreatorId) 54 | fanboxPageNums = append(fanboxPageNums, creatorInfo.PageNum) 55 | } 56 | } 57 | pixivFanboxDl := &pixivfanbox.PixivFanboxDl{ 58 | CreatorIds: fanboxCreatorIds, 59 | CreatorPageNums: fanboxPageNums, 60 | PostIds: fanboxPostIds, 61 | } 62 | pixivFanboxDl.ValidateArgs() 63 | 64 | pixivFanboxDlOptions := &pixivfanbox.PixivFanboxDlOptions{ 65 | DlThumbnails: fanboxDlThumbnails, 66 | DlImages: fanboxDlImages, 67 | DlAttachments: fanboxDlAttachments, 68 | Configs: pixivFanboxConfig, 69 | GdriveClient: gdriveClient, 70 | DlGdrive: fanboxDlGdrive, 71 | SessionCookieId: fanboxSession, 72 | } 73 | if fanboxCookieFile != "" { 74 | cookies, err := utils.ParseNetscapeCookieFile( 75 | fanboxCookieFile, 76 | fanboxSession, 77 | utils.PIXIV_FANBOX, 78 | ) 79 | if err != nil { 80 | utils.LogError( 81 | err, 82 | "", 83 | true, 84 | utils.ERROR, 85 | ) 86 | } 87 | pixivFanboxDlOptions.SessionCookies = cookies 88 | } 89 | pixivFanboxDlOptions.ValidateArgs(fanboxUserAgent) 90 | 91 | utils.PrintWarningMsg() 92 | pixivfanbox.PixivFanboxDownloadProcess( 93 | pixivFanboxDl, 94 | pixivFanboxDlOptions, 95 | ) 96 | }, 97 | } 98 | ) 99 | 100 | func init() { 101 | mutlipleIdsMsg := getMultipleIdsMsg() 102 | pixivFanboxCmd.Flags().StringVarP( 103 | &fanboxSession, 104 | "session", 105 | "s", 106 | "", 107 | "Your \"FANBOXSESSID\" cookie value to use for the requests to Pixiv Fanbox.", 108 | ) 109 | pixivFanboxCmd.Flags().StringSliceVar( 110 | &fanboxCreatorIds, 111 | "creator_id", 112 | []string{}, 113 | utils.CombineStringsWithNewline( 114 | "Pixiv Fanbox Creator ID(s) to download from.", 115 | mutlipleIdsMsg, 116 | ), 117 | ) 118 | pixivFanboxCmd.Flags().StringSliceVar( 119 | &fanboxPageNums, 120 | "page_num", 121 | []string{}, 122 | utils.CombineStringsWithNewline( 123 | "Min and max page numbers to search for corresponding to the order of the supplied Pixiv Fanbox creator ID(s).", 124 | "Format: \"num\", \"minNum-maxNum\", or \"\" to download all pages", 125 | "Leave blank to download all pages from each creator.", 126 | ), 127 | ) 128 | pixivFanboxCmd.Flags().StringSliceVar( 129 | &fanboxPostIds, 130 | "post_id", 131 | []string{}, 132 | utils.CombineStringsWithNewline( 133 | "Pixiv Fanbox post ID(s) to download.", 134 | mutlipleIdsMsg, 135 | ), 136 | ) 137 | pixivFanboxCmd.Flags().BoolVarP( 138 | &fanboxDlThumbnails, 139 | "dl_thumbnails", 140 | "t", 141 | true, 142 | "Whether to download the thumbnail of a Pixiv Fanbox post.", 143 | ) 144 | pixivFanboxCmd.Flags().BoolVarP( 145 | &fanboxDlImages, 146 | "dl_images", 147 | "i", 148 | true, 149 | "Whether to download the images of a Pixiv Fanbox post.", 150 | ) 151 | pixivFanboxCmd.Flags().BoolVarP( 152 | &fanboxDlAttachments, 153 | "dl_attachments", 154 | "a", 155 | true, 156 | "Whether to download the attachments of a Pixiv Fanbox post.", 157 | ) 158 | pixivFanboxCmd.Flags().BoolVarP( 159 | &fanboxDlGdrive, 160 | "dl_gdrive", 161 | "g", 162 | true, 163 | "Whether to download the Google Drive links of a Pixiv Fanbox post.", 164 | ) 165 | } 166 | -------------------------------------------------------------------------------- /src/api/pixiv/mobile/args.go: -------------------------------------------------------------------------------- 1 | package pixivmobile 2 | 3 | import ( 4 | "fmt" 5 | "strings" 6 | 7 | "github.com/KJHJason/Cultured-Downloader-CLI/configs" 8 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 9 | 
"github.com/fatih/color" 10 | ) 11 | 12 | // PixivToDl is the struct that contains the arguments of Pixiv download options. 13 | type PixivMobileDlOptions struct { 14 | // Sort order of the results. Can be "date_desc" or "date_asc". 15 | SortOrder string 16 | SearchMode string 17 | RatingMode string 18 | ArtworkType string 19 | 20 | Configs *configs.Config 21 | 22 | MobileClient *PixivMobile 23 | RefreshToken string 24 | } 25 | 26 | var ( 27 | ACCEPTED_SORT_ORDER = []string{ 28 | "date", "date_d", 29 | "popular", "popular_d", 30 | "popular_male", "popular_male_d", 31 | "popular_female", "popular_female_d", 32 | } 33 | ACCEPTED_SEARCH_MODE = []string{ 34 | "s_tag", 35 | "s_tag_full", 36 | "s_tc", 37 | } 38 | ACCEPTED_RATING_MODE = []string{ 39 | "safe", 40 | "r18", 41 | "all", 42 | } 43 | ACCEPTED_ARTWORK_TYPE = []string{ 44 | "illust_and_ugoira", 45 | "manga", 46 | "all", 47 | } 48 | ) 49 | 50 | // ValidateArgs validates the arguments of the Pixiv download options. 51 | // 52 | // Should be called after initialising the struct. 53 | func (p *PixivMobileDlOptions) ValidateArgs(userAgent string) { 54 | p.SortOrder = strings.ToLower(p.SortOrder) 55 | utils.ValidateStrArgs( 56 | p.SortOrder, 57 | ACCEPTED_SORT_ORDER, 58 | []string{ 59 | fmt.Sprintf( 60 | "pixiv error %d: Sort order %s is not allowed", 61 | utils.INPUT_ERROR, 62 | p.SortOrder, 63 | ), 64 | }, 65 | ) 66 | 67 | p.SearchMode = strings.ToLower(p.SearchMode) 68 | utils.ValidateStrArgs( 69 | p.SearchMode, 70 | ACCEPTED_SEARCH_MODE, 71 | []string{ 72 | fmt.Sprintf( 73 | "pixiv error %d: Search order %s is not allowed", 74 | utils.INPUT_ERROR, 75 | p.SearchMode, 76 | ), 77 | }, 78 | ) 79 | 80 | p.RatingMode = strings.ToLower(p.RatingMode) 81 | utils.ValidateStrArgs( 82 | p.RatingMode, 83 | ACCEPTED_RATING_MODE, 84 | []string{ 85 | fmt.Sprintf( 86 | "pixiv error %d: Rating order %s is not allowed", 87 | utils.INPUT_ERROR, 88 | p.RatingMode, 89 | ), 90 | }, 91 | ) 92 | 93 | p.ArtworkType = strings.ToLower(p.ArtworkType) 94 | utils.ValidateStrArgs( 95 | p.ArtworkType, 96 | ACCEPTED_ARTWORK_TYPE, 97 | []string{ 98 | fmt.Sprintf( 99 | "pixiv error %d: Artwork type %s is not allowed", 100 | utils.INPUT_ERROR, 101 | p.ArtworkType, 102 | ), 103 | }, 104 | ) 105 | 106 | if p.RefreshToken != "" { 107 | p.MobileClient = NewPixivMobile(p.RefreshToken, 10) 108 | if p.RatingMode != "all" { 109 | color.Red( 110 | utils.CombineStringsWithNewline( 111 | fmt.Sprintf( 112 | "pixiv error %d: when using the refresh token, only \"all\" is supported for the --rating_mode flag.", 113 | utils.INPUT_ERROR, 114 | ), 115 | fmt.Sprintf( 116 | "hence, the rating mode will be updated from %q to \"all\"...\n", 117 | p.RatingMode, 118 | ), 119 | ), 120 | ) 121 | p.RatingMode = "all" 122 | } 123 | 124 | if p.ArtworkType == "illust_and_ugoira" { 125 | // convert "illust_and_ugoira" to "illust" 126 | // since the mobile API does not support "illust_and_ugoira" 127 | // However, there will still be ugoira posts in the results 128 | p.ArtworkType = "illust" 129 | } 130 | 131 | // Convert search mode to the correct value 132 | // based on the Pixiv's ajax web API 133 | switch p.SearchMode { 134 | case "s_tag": 135 | p.SearchMode = "partial_match_for_tags" 136 | case "s_tag_full": 137 | p.SearchMode = "exact_match_for_tags" 138 | case "s_tc": 139 | p.SearchMode = "title_and_caption" 140 | default: 141 | panic( 142 | fmt.Sprintf( 143 | "pixiv mobile error %d: invalid search mode %q", 144 | utils.DEV_ERROR, 145 | p.SearchMode, 146 | ), 147 | ) 148 | } 149 | 150 | // Convert 
the ajax web API's sort order value 151 | // to the equivalent mobile API value 152 | var newSortOrder string 153 | if strings.Contains(p.SortOrder, "popular") { 154 | newSortOrder = "popular_desc" // only supports popular_desc 155 | } else if p.SortOrder == "date_d" { 156 | newSortOrder = "date_desc" 157 | } else { 158 | newSortOrder = "date_asc" 159 | } 160 | 161 | if p.SortOrder != "date" && p.SortOrder != "date_d" && p.SortOrder != "popular_d" { 162 | var ajaxEquivalent string 163 | switch newSortOrder { 164 | case "popular_desc": 165 | ajaxEquivalent = "popular_d" 166 | case "date_desc": 167 | ajaxEquivalent = "date_d" 168 | case "date_asc": 169 | ajaxEquivalent = "date" 170 | default: 171 | panic( 172 | fmt.Sprintf( 173 | "pixiv error %d: unknown sort order %q in PixivMobileDlOptions.ValidateArgs()", 174 | utils.DEV_ERROR, 175 | newSortOrder, 176 | ), 177 | ) 178 | } 179 | 180 | color.Red( 181 | utils.CombineStringsWithNewline( 182 | fmt.Sprintf( 183 | "pixiv error %d: when using the refresh token, only \"date\", \"date_d\", \"popular_d\" are supported for the --sort_order flag.", 184 | utils.INPUT_ERROR, 185 | ), 186 | fmt.Sprintf( 187 | "hence, the sort order will be updated from %q to %q...\n", 188 | p.SortOrder, 189 | ajaxEquivalent, 190 | ), 191 | ), 192 | ) 193 | } 194 | p.SortOrder = newSortOrder 195 | } 196 | } 197 | -------------------------------------------------------------------------------- /src/cmds/cmds.go: -------------------------------------------------------------------------------- 1 | package cmds 2 | 3 | import ( 4 | "github.com/spf13/cobra" 5 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 6 | ) 7 | 8 | func getMultipleIdsMsg() string { 9 | return "For multiple IDs, separate them with a comma.\nExample: \"12345,67891\" (without the quotes)" 10 | } 11 | 12 | type textFilePath struct { 13 | variable *string 14 | desc string 15 | } 16 | type commonFlags struct { 17 | cmd *cobra.Command 18 | overwriteVar *bool 19 | cookieFileVar *string 20 | userAgentVar *string 21 | gdriveApiKeyVar *string 22 | gdriveServiceAccPathVar *string 23 | logUrlsVar *bool 24 | textFile textFilePath 25 | } 26 | 27 | func init() { 28 | commonCmdFlags := [...]commonFlags{ 29 | { 30 | cmd: fantiaCmd, 31 | overwriteVar: &fantiaOverwrite, 32 | cookieFileVar: &fantiaCookieFile, 33 | userAgentVar: &fantiaUserAgent, 34 | gdriveApiKeyVar: &fantiaGdriveApiKey, 35 | gdriveServiceAccPathVar: &fantiaGdriveServiceAccPath, 36 | logUrlsVar: &fantiaLogUrls, 37 | textFile: textFilePath { 38 | variable: &fantiaDlTextFile, 39 | desc: "Path to a text file containing Fanclub and/or post URL(s) to download from Fantia.", 40 | }, 41 | }, 42 | { 43 | cmd: pixivFanboxCmd, 44 | overwriteVar: &fanboxOverwriteFiles, 45 | cookieFileVar: &fanboxCookieFile, 46 | userAgentVar: &fanboxUserAgent, 47 | gdriveApiKeyVar: &fanboxGdriveApiKey, 48 | gdriveServiceAccPathVar: &fanboxGdriveServiceAccPath, 49 | logUrlsVar: &fanboxLogUrls, 50 | textFile: textFilePath { 51 | variable: &fanboxDlTextFile, 52 | desc: "Path to a text file containing creator and/or post URL(s) to download from Pixiv Fanbox.", 53 | }, 54 | }, 55 | { 56 | cmd: pixivCmd, 57 | overwriteVar: &pixivOverwrite, 58 | cookieFileVar: &pixivCookieFile, 59 | userAgentVar: &pixivUserAgent, 60 | textFile: textFilePath { 61 | variable: &pixivDlTextFile, 62 | desc: "Path to a text file containing artwork, illustrator, and tag name URL(s) to download from Pixiv.", 63 | }, 64 | }, 65 | { 66 | cmd: kemonoCmd, 67 | overwriteVar: &kemonoOverwrite, 68 | cookieFileVar: &kemonoCookieFile, 69 |
userAgentVar: &kemonoUserAgent, 70 | gdriveApiKeyVar: &kemonoGdriveApiKey, 71 | gdriveServiceAccPathVar: &kemonoGdriveServiceAccPath, 72 | logUrlsVar: &kemonoLogUrls, 73 | textFile: textFilePath { 74 | variable: &kemonoDlTextFile, 75 | desc: "Path to a text file containing creator and/or post URL(s) to download from Kemono Party.", 76 | }, 77 | }, 78 | } 79 | for _, cmdInfo := range commonCmdFlags { 80 | cmd := cmdInfo.cmd 81 | cmd.Flags().BoolVarP( 82 | cmdInfo.overwriteVar, 83 | "overwrite", 84 | "o", 85 | false, 86 | utils.CombineStringsWithNewline( 87 | "Overwrite any existing files if there is no Content-Length header in the response.", 88 | "Usually used for Pixiv Fanbox when there are incomplete downloads.", 89 | ), 90 | ) 91 | cmd.Flags().StringVarP( 92 | cmdInfo.userAgentVar, 93 | "user_agent", 94 | "u", 95 | "", 96 | "Set a custom User-Agent header to use when communicating with the API(s) or when downloading.", 97 | ) 98 | cmd.Flags().StringVarP( 99 | cmdInfo.textFile.variable, 100 | "txt_filepath", 101 | "p", 102 | "", 103 | cmdInfo.textFile.desc, 104 | ) 105 | cmd.Flags().StringVarP( 106 | cmdInfo.cookieFileVar, 107 | "cookie_file", 108 | "c", 109 | "", 110 | utils.CombineStringsWithNewline( 111 | "Pass in a file path to your saved Netscape/Mozilla generated cookie file to use when downloading.", 112 | "You can generate a cookie file by using the \"Get cookies.txt LOCALLY\" extension for your browser.", 113 | "Chrome Extension URL: https://chrome.google.com/webstore/detail/get-cookiestxt-locally/cclelndahbckbenkjhflpdbgdldlbecc", 114 | ), 115 | ) 116 | if cmdInfo.gdriveApiKeyVar != nil { 117 | cmd.Flags().StringVar( 118 | cmdInfo.gdriveApiKeyVar, 119 | "gdrive_api_key", 120 | "", 121 | utils.CombineStringsWithNewline( 122 | "Google Drive API key to use for downloading gdrive files.", 123 | "Guide: https://github.com/KJHJason/Cultured-Downloader/blob/main/doc/google_api_setup_guide.md", 124 | ), 125 | ) 126 | } 127 | if cmdInfo.gdriveServiceAccPathVar != nil { 128 | cmd.Flags().StringVar( 129 | cmdInfo.gdriveServiceAccPathVar, 130 | "gdrive_service_acc_path", 131 | "", 132 | utils.CombineStringsWithNewline( 133 | "Path to the Google Drive service account JSON file to use for downloading gdrive files.", 134 | "Generally, this is preferred over the API key as it is less likely to be flagged as bot traffic.", 135 | "Guide: https://github.com/KJHJason/Cultured-Downloader/blob/main/doc/google_api_setup_guide.md", 136 | ), 137 | ) 138 | } 139 | if cmdInfo.logUrlsVar != nil { 140 | cmd.Flags().BoolVarP( 141 | cmdInfo.logUrlsVar, 142 | "log_urls", 143 | "l", 144 | false, 145 | utils.CombineStringsWithNewline( 146 | "Log any detected URLs of the files that are being downloaded.", 147 | "Note that not all URLs are logged, only URLs to external file hosting providers like MEGA, Google Drive, etc. 
are logged.", 148 | ), 149 | ) 150 | } 151 | RootCmd.AddCommand(cmd) 152 | } 153 | } 154 | -------------------------------------------------------------------------------- /src/cmds/fantia.go: -------------------------------------------------------------------------------- 1 | package cmds 2 | 3 | import ( 4 | "github.com/KJHJason/Cultured-Downloader-CLI/api/fantia" 5 | "github.com/KJHJason/Cultured-Downloader-CLI/configs" 6 | "github.com/KJHJason/Cultured-Downloader-CLI/gdrive" 7 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 8 | "github.com/KJHJason/Cultured-Downloader-CLI/cmds/textparser" 9 | "github.com/spf13/cobra" 10 | ) 11 | 12 | var ( 13 | fantiaDlTextFile string 14 | fantiaCookieFile string 15 | fantiaSession string 16 | fantiaFanclubIds []string 17 | fantiaPageNums []string 18 | fantiaPostIds []string 19 | fantiaDlGdrive bool 20 | fantiaGdriveApiKey string 21 | fantiaGdriveServiceAccPath string 22 | fantiaDlThumbnails bool 23 | fantiaDlImages bool 24 | fantiaDlAttachments bool 25 | fantiaOverwrite bool 26 | fantiaAutoSolveCaptcha bool 27 | fantiaLogUrls bool 28 | fantiaUserAgent string 29 | fantiaCmd = &cobra.Command{ 30 | Use: "fantia", 31 | Short: "Download from Fantia", 32 | Long: "Supports downloads from Fantia Fanclubs and individual posts.", 33 | Run: func(cmd *cobra.Command, args []string) { 34 | if fantiaDlTextFile != "" { 35 | postIds, fanclubInfoSlice := textparser.ParseFantiaTextFile(fantiaDlTextFile) 36 | fantiaPostIds = append(fantiaPostIds, postIds...) 37 | 38 | for _, fanclubInfo := range fanclubInfoSlice { 39 | fantiaFanclubIds = append(fantiaFanclubIds, fanclubInfo.FanclubId) 40 | fantiaPageNums = append(fantiaPageNums, fanclubInfo.PageNum) 41 | } 42 | } 43 | 44 | fantiaConfig := &configs.Config{ 45 | OverwriteFiles: fantiaOverwrite, 46 | UserAgent: fantiaUserAgent, 47 | LogUrls: fantiaLogUrls, 48 | } 49 | 50 | var gdriveClient *gdrive.GDrive 51 | if fantiaGdriveApiKey != "" || fantiaGdriveServiceAccPath != "" { 52 | gdriveClient = gdrive.GetNewGDrive( 53 | fantiaGdriveApiKey, 54 | fantiaGdriveServiceAccPath, 55 | fantiaConfig, 56 | utils.MAX_CONCURRENT_DOWNLOADS, 57 | ) 58 | } 59 | 60 | fantiaDl := &fantia.FantiaDl{ 61 | FanclubIds: fantiaFanclubIds, 62 | FanclubPageNums: fantiaPageNums, 63 | PostIds: fantiaPostIds, 64 | } 65 | fantiaDl.ValidateArgs() 66 | 67 | fantiaDlOptions := &fantia.FantiaDlOptions{ 68 | DlThumbnails: fantiaDlThumbnails, 69 | DlImages: fantiaDlImages, 70 | DlAttachments: fantiaDlAttachments, 71 | DlGdrive: fantiaDlGdrive, 72 | AutoSolveCaptcha: fantiaAutoSolveCaptcha, 73 | GdriveClient: gdriveClient, 74 | Configs: fantiaConfig, 75 | SessionCookieId: fantiaSession, 76 | } 77 | if fantiaCookieFile != "" { 78 | cookies, err := utils.ParseNetscapeCookieFile( 79 | fantiaCookieFile, 80 | fantiaSession, 81 | utils.FANTIA, 82 | ) 83 | if err != nil { 84 | utils.LogError( 85 | err, 86 | "", 87 | true, 88 | utils.ERROR, 89 | ) 90 | } 91 | fantiaDlOptions.SessionCookies = cookies 92 | } 93 | 94 | err := fantiaDlOptions.ValidateArgs(fantiaUserAgent) 95 | if err != nil { 96 | utils.LogError( 97 | err, 98 | "", 99 | true, 100 | utils.ERROR, 101 | ) 102 | } 103 | 104 | utils.PrintWarningMsg() 105 | fantia.FantiaDownloadProcess( 106 | fantiaDl, 107 | fantiaDlOptions, 108 | ) 109 | }, 110 | } 111 | ) 112 | 113 | func init() { 114 | mutlipleIdsMsg := getMultipleIdsMsg() 115 | fantiaCmd.Flags().StringVarP( 116 | &fantiaSession, 117 | "session", 118 | "s", 119 | "", 120 | "Your \"_session_id\" cookie value to use for the requests to Fantia.", 121 | ) 
122 | fantiaCmd.Flags().StringSliceVar( 123 | &fantiaFanclubIds, 124 | "fanclub_id", 125 | []string{}, 126 | utils.CombineStringsWithNewline( 127 | "Fantia Fanclub ID(s) to download from.", 128 | mutlipleIdsMsg, 129 | ), 130 | ) 131 | fantiaCmd.Flags().StringSliceVar( 132 | &fantiaPageNums, 133 | "page_num", 134 | []string{}, 135 | utils.CombineStringsWithNewline( 136 | "Min and max page numbers to search for corresponding to the order of the supplied Fantia Fanclub ID(s).", 137 | "Format: \"num\", \"minNum-maxNum\", or \"\" to download all pages", 138 | "Leave blank to download all pages from each Fantia Fanclub.", 139 | ), 140 | ) 141 | fantiaCmd.Flags().StringSliceVar( 142 | &fantiaPostIds, 143 | "post_id", 144 | []string{}, 145 | utils.CombineStringsWithNewline( 146 | "Fantia post ID(s) to download.", 147 | mutlipleIdsMsg, 148 | ), 149 | ) 150 | fantiaCmd.Flags().BoolVarP( 151 | &fantiaDlGdrive, 152 | "dl_gdrive", 153 | "g", 154 | true, 155 | "Whether to download the Google Drive links of a post on Fantia.", 156 | ) 157 | fantiaCmd.Flags().BoolVarP( 158 | &fantiaDlThumbnails, 159 | "dl_thumbnails", 160 | "t", 161 | true, 162 | "Whether to download the thumbnail of a post on Fantia.", 163 | ) 164 | fantiaCmd.Flags().BoolVarP( 165 | &fantiaDlImages, 166 | "dl_images", 167 | "i", 168 | true, 169 | "Whether to download the images of a post on Fantia.", 170 | ) 171 | fantiaCmd.Flags().BoolVarP( 172 | &fantiaDlAttachments, 173 | "dl_attachments", 174 | "a", 175 | true, 176 | "Whether to download the attachments of a post on Fantia.", 177 | ) 178 | fantiaCmd.Flags().BoolVarP( 179 | &fantiaAutoSolveCaptcha, 180 | "auto_solve_recaptcha", 181 | "r", 182 | true, 183 | utils.CombineStringsWithNewline( 184 | "Whether to automatically solve the reCAPTCHA when it appears. 
If this flag is false, the program will not attempt to solve the reCAPTCHA automatically.", 185 | "Otherwise, if this flag is true and it fails to solve the reCAPTCHA, the program will ask you to solve it manually on your browser with", 186 | "the SAME supplied session by visiting " + utils.FANTIA_RECAPTCHA_URL, 187 | ), 188 | ) 189 | } 190 | -------------------------------------------------------------------------------- /src/utils/files.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "bufio" 5 | "encoding/json" 6 | "fmt" 7 | "os" 8 | "path/filepath" 9 | "strings" 10 | ) 11 | 12 | // Checks if a file or directory exists 13 | func PathExists(filepath string) bool { 14 | _, err := os.Stat(filepath) 15 | return !os.IsNotExist(err) 16 | } 17 | 18 | // Returns the file size based on the provided file path 19 | // 20 | // If the file does not exist or 21 | // there was an error opening the file at the given file path string, -1 is returned 22 | func GetFileSize(filePath string) (int64, error) { 23 | if !PathExists(filePath) { 24 | return -1, os.ErrNotExist 25 | } 26 | 27 | file, err := os.OpenFile(filePath, os.O_RDONLY, 0666) 28 | if err != nil { 29 | return -1, err 30 | } 31 | defer file.Close() 32 | fileInfo, err := file.Stat() 33 | if err != nil { 34 | return -1, err 35 | } 36 | return fileInfo.Size(), nil 37 | } 38 | // Uses bufio.Reader to read a line from a file and returns it as a byte slice 39 | // 40 | // Mostly thanks to https://devmarkpro.com/working-big-files-golang 41 | func ReadLine(reader *bufio.Reader) ([]byte, error) { 42 | var err error 43 | var isPrefix = true 44 | var totalLine, line []byte 45 | 46 | // Read until isPrefix is false as 47 | // that means the line has been fully read 48 | for isPrefix && err == nil { 49 | line, isPrefix, err = reader.ReadLine() 50 | totalLine = append(totalLine, line...) 51 | } 52 | return totalLine, err 53 | } 54 | 55 | // Used in CleanPathName to remove illegal characters in a path name 56 | func removeIllegalRuneInPath(r rune) rune { 57 | if strings.ContainsRune("<>:\"/\\|?*\n\r\t", r) { 58 | return '-' 59 | } else if r == '.' { 60 | return ',' 61 | } 62 | return r 63 | } 64 | 65 | // Removes any illegal characters in a path name 66 | // to prevent any error with file I/O using the path name 67 | func CleanPathName(pathName string) string { 68 | pathName = strings.TrimSpace(pathName) 69 | if len(pathName) > 255 { 70 | pathName = pathName[:255] 71 | } 72 | return strings.Map(removeIllegalRuneInPath, pathName) 73 | } 74 | 75 | // Returns a directory path for a post, artwork, etc.
76 | // based on the user's saved download path and the provided arguments 77 | func GetPostFolder(downloadPath, creatorName, postId, postTitle string) string { 78 | creatorName = CleanPathName(creatorName) 79 | postTitle = CleanPathName(postTitle) 80 | 81 | postFolderPath := filepath.Join( 82 | downloadPath, 83 | creatorName, 84 | fmt.Sprintf("[%s] %s", postId, postTitle), 85 | ) 86 | return postFolderPath 87 | } 88 | 89 | type ConfigFile struct { 90 | DownloadDir string `json:"download_directory"` 91 | Language string `json:"language"` 92 | } 93 | 94 | // Returns the download path from the config file 95 | func GetDefaultDownloadPath() string { 96 | configFilePath := filepath.Join(APP_PATH, "config.json") 97 | if !PathExists(configFilePath) { 98 | return "" 99 | } 100 | 101 | configFile, err := os.ReadFile(configFilePath) 102 | if err != nil { 103 | os.Remove(configFilePath) 104 | return "" 105 | } 106 | 107 | var config ConfigFile 108 | err = json.Unmarshal(configFile, &config) 109 | if err != nil { 110 | os.Remove(configFilePath) 111 | return "" 112 | } 113 | 114 | if !PathExists(config.DownloadDir) { 115 | return "" 116 | } 117 | return config.DownloadDir 118 | } 119 | 120 | // saves the new download path to the config file if it does not exist 121 | func saveConfig(newDownloadPath, configFilePath string) error { 122 | config := ConfigFile{ 123 | DownloadDir: newDownloadPath, 124 | Language: "en", 125 | } 126 | configFile, err := json.MarshalIndent(config, "", " ") 127 | if err != nil { 128 | return fmt.Errorf( 129 | "error %d: failed to marshal config file, more info => %v", 130 | JSON_ERROR, 131 | err, 132 | ) 133 | } 134 | 135 | err = os.WriteFile(configFilePath, configFile, 0666) 136 | if err != nil { 137 | return fmt.Errorf( 138 | "error %d: failed to write config file, more info => %v", 139 | OS_ERROR, 140 | err, 141 | ) 142 | } 143 | return nil 144 | } 145 | 146 | // saves the new download path to the config file and overwrites the old one 147 | func overwriteConfig(newDownloadPath, configFilePath string) error { 148 | // read the file 149 | configFile, err := os.ReadFile(configFilePath) 150 | if err != nil { 151 | return fmt.Errorf( 152 | "error %d: failed to read config file, more info => %v", 153 | OS_ERROR, 154 | err, 155 | ) 156 | } 157 | 158 | var config ConfigFile 159 | err = json.Unmarshal(configFile, &config) 160 | if err != nil { 161 | return fmt.Errorf( 162 | "error %d: failed to unmarshal config file, more info => %v", 163 | JSON_ERROR, 164 | err, 165 | ) 166 | } 167 | 168 | // update the file if the download directory is different 169 | if config.DownloadDir == newDownloadPath { 170 | return nil 171 | } 172 | 173 | config.DownloadDir = newDownloadPath 174 | configFile, err = json.MarshalIndent(config, "", " ") 175 | if err != nil { 176 | return fmt.Errorf( 177 | "error %d: failed to marshal config file, more info => %v", 178 | JSON_ERROR, 179 | err, 180 | ) 181 | } 182 | 183 | err = os.WriteFile(configFilePath, configFile, 0666) 184 | if err != nil { 185 | return fmt.Errorf( 186 | "error %d: failed to write config file, more info => %v", 187 | OS_ERROR, 188 | err, 189 | ) 190 | } 191 | return nil 192 | } 193 | 194 | // Configure and saves the config file with updated download path 195 | func SetDefaultDownloadPath(newDownloadPath string) error { 196 | if !PathExists(newDownloadPath) { 197 | return fmt.Errorf("error %d: download path does not exist, please create the directory and try again", INPUT_ERROR) 198 | } 199 | 200 | os.MkdirAll(APP_PATH, 0755) 201 | 
configFilePath := filepath.Join(APP_PATH, "config.json") 202 | if !PathExists(configFilePath) { 203 | return saveConfig(newDownloadPath, configFilePath) 204 | } 205 | return overwriteConfig(newDownloadPath, configFilePath) 206 | } 207 | -------------------------------------------------------------------------------- /src/api/fantia/process.go: -------------------------------------------------------------------------------- 1 | package fantia 2 | 3 | import ( 4 | "fmt" 5 | "net/http" 6 | "path/filepath" 7 | "strconv" 8 | 9 | "github.com/fatih/color" 10 | "github.com/KJHJason/Cultured-Downloader-CLI/api/fantia/models" 11 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 12 | "github.com/KJHJason/Cultured-Downloader-CLI/gdrive" 13 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 14 | "github.com/KJHJason/Cultured-Downloader-CLI/spinner" 15 | ) 16 | 17 | func dlImagesFromPost(content *models.FantiaContent, postFolderPath string) []*request.ToDownload { 18 | var urlsSlice []*request.ToDownload 19 | 20 | // download images that are uploaded to their own section 21 | postContentPhotos := content.PostContentPhotos 22 | for _, image := range postContentPhotos { 23 | imageUrl := image.URL.Original 24 | urlsSlice = append(urlsSlice, &request.ToDownload{ 25 | Url: imageUrl, 26 | FilePath: filepath.Join(postFolderPath, utils.IMAGES_FOLDER), 27 | }) 28 | } 29 | 30 | // for images that are embedded in the post content 31 | comment := content.Comment 32 | matchedStr := utils.FANTIA_IMAGE_URL_REGEX.FindAllStringSubmatch(comment, -1) 33 | for _, matched := range matchedStr { 34 | imageUrl := utils.FANTIA_URL + matched[utils.FANTIA_REGEX_URL_INDEX] 35 | urlsSlice = append(urlsSlice, &request.ToDownload{ 36 | Url: imageUrl, 37 | FilePath: filepath.Join(postFolderPath, utils.IMAGES_FOLDER), 38 | }) 39 | } 40 | return urlsSlice 41 | } 42 | 43 | func dlAttachmentsFromPost(content *models.FantiaContent, postFolderPath string) []*request.ToDownload { 44 | var urlsSlice []*request.ToDownload 45 | 46 | // get the attachment url string if it exists 47 | attachmentUrl := content.AttachmentURI 48 | if attachmentUrl != "" { 49 | attachmentUrlStr := utils.FANTIA_URL + attachmentUrl 50 | urlsSlice = append(urlsSlice, &request.ToDownload{ 51 | Url: attachmentUrlStr, 52 | FilePath: filepath.Join(postFolderPath, utils.ATTACHMENT_FOLDER), 53 | }) 54 | } else if content.DownloadUri != "" { 55 | // if the attachment url string does not exist, 56 | // then get the download url for the file 57 | downloadUrl := utils.FANTIA_URL + content.DownloadUri 58 | filename := content.Filename 59 | urlsSlice = append(urlsSlice, &request.ToDownload{ 60 | Url: downloadUrl, 61 | FilePath: filepath.Join(postFolderPath, utils.ATTACHMENT_FOLDER, filename), 62 | }) 63 | } 64 | return urlsSlice 65 | } 66 | 67 | var errRecaptcha = fmt.Errorf("recaptcha detected for the current session") 68 | 69 | // Process the JSON response from Fantia's API and 70 | // returns a slice of urls and a slice of gdrive urls to download from 71 | func processFantiaPost(res *http.Response, downloadPath string, dlOptions *FantiaDlOptions) ([]*request.ToDownload, []*request.ToDownload, error) { 72 | // processes a fantia post 73 | // returns a map containing the post id and the url to download the file from 74 | var postJson models.FantiaPost 75 | if err := utils.LoadJsonFromResponse(res, &postJson); err != nil { 76 | return nil, nil, err 77 | } 78 | 79 | if postJson.Redirect != "" { 80 | if postJson.Redirect != "/recaptcha" { 81 | return nil, nil, fmt.Errorf( 82 
| "fantia error %d: unknown redirect url, %q", 83 | utils.UNEXPECTED_ERROR, 84 | postJson.Redirect, 85 | ) 86 | } 87 | return nil, nil, errRecaptcha 88 | } 89 | 90 | post := postJson.Post 91 | postId := strconv.Itoa(post.ID) 92 | postTitle := post.Title 93 | creatorName := post.Fanclub.User.Name 94 | postFolderPath := utils.GetPostFolder( 95 | filepath.Join( 96 | downloadPath, 97 | utils.FANTIA_TITLE, 98 | ), 99 | creatorName, 100 | postId, 101 | postTitle, 102 | ) 103 | 104 | var urlsSlice []*request.ToDownload 105 | thumbnail := post.Thumb.Original 106 | if dlOptions.DlThumbnails && thumbnail != "" { 107 | urlsSlice = append(urlsSlice, &request.ToDownload{ 108 | Url: thumbnail, 109 | FilePath: postFolderPath, 110 | }) 111 | } 112 | 113 | gdriveLinks := gdrive.ProcessPostText( 114 | post.Comment, 115 | postFolderPath, 116 | dlOptions.DlGdrive, 117 | dlOptions.Configs.LogUrls, 118 | ) 119 | 120 | postContent := post.PostContents 121 | if postContent == nil { 122 | return urlsSlice, gdriveLinks, nil 123 | } 124 | for _, content := range postContent { 125 | commentGdriveLinks := gdrive.ProcessPostText( 126 | content.Comment, 127 | postFolderPath, 128 | dlOptions.DlGdrive, 129 | dlOptions.Configs.LogUrls, 130 | ) 131 | if len(commentGdriveLinks) > 0 { 132 | gdriveLinks = append(gdriveLinks, commentGdriveLinks...) 133 | } 134 | if dlOptions.DlImages { 135 | urlsSlice = append(urlsSlice, dlImagesFromPost(&content, postFolderPath)...) 136 | } 137 | if dlOptions.DlAttachments { 138 | urlsSlice = append(urlsSlice, dlAttachmentsFromPost(&content, postFolderPath)...) 139 | } 140 | } 141 | return urlsSlice, gdriveLinks, nil 142 | } 143 | 144 | type processIllustArgs struct { 145 | res *http.Response 146 | postId string 147 | postIdsLen int 148 | msgSuffix string 149 | } 150 | 151 | // Process the JSON response to get the urls to download 152 | func processIllustDetailApiRes(illustArgs *processIllustArgs, dlOptions *FantiaDlOptions) ([]*request.ToDownload, []*request.ToDownload, error) { 153 | progress := spinner.New( 154 | spinner.JSON_SPINNER, 155 | "fgHiYellow", 156 | fmt.Sprintf( 157 | "Processing retrieved JSON for post %s from Fantia %s...", 158 | illustArgs.postId, 159 | illustArgs.msgSuffix, 160 | ), 161 | fmt.Sprintf( 162 | "Finished processing retrieved JSON for post %s from Fantia %s!", 163 | illustArgs.postId, 164 | illustArgs.msgSuffix, 165 | ), 166 | fmt.Sprintf( 167 | "Something went wrong while processing retrieved JSON for post %s from Fantia %s.\nPlease refer to the logs for more details.", 168 | illustArgs.postId, 169 | illustArgs.msgSuffix, 170 | ), 171 | illustArgs.postIdsLen, 172 | ) 173 | progress.Start() 174 | urlsToDownload, gdriveLinks, err := processFantiaPost( 175 | illustArgs.res, 176 | utils.DOWNLOAD_PATH, 177 | dlOptions, 178 | ) 179 | if err != nil { 180 | if err == errRecaptcha { 181 | progress.StopWithFn(func() { 182 | color.Red("✗ reCAPTCHA detected for the current session...") 183 | }) 184 | } else { 185 | progress.Stop(true) 186 | } 187 | return nil, nil, err 188 | } 189 | progress.Stop(false) 190 | return urlsToDownload, gdriveLinks, nil 191 | } 192 | -------------------------------------------------------------------------------- /src/spinner/spinner.go: -------------------------------------------------------------------------------- 1 | package spinner 2 | 3 | import ( 4 | "os" 5 | "fmt" 6 | "sync" 7 | "time" 8 | 9 | "github.com/fatih/color" 10 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 11 | ) 12 | 13 | const ( 14 | CLEAR_LINE = "\033[K" 15 | 16 | // 
Common spinner types used in this program 17 | REQ_SPINNER = "pong" 18 | JSON_SPINNER = "aesthetic" 19 | DL_SPINNER = "material" 20 | ) 21 | 22 | var ( 23 | spinnerTypes map[string]SpinnerInfo 24 | colourMap = map[string]color.Attribute{ 25 | "black": color.FgBlack, 26 | "red": color.FgRed, 27 | "green": color.FgGreen, 28 | "yellow": color.FgYellow, 29 | "blue": color.FgBlue, 30 | "magenta": color.FgMagenta, 31 | "cyan": color.FgCyan, 32 | "white": color.FgWhite, 33 | 34 | "fgHiBlack": color.FgHiBlack, 35 | "fgHiRed": color.FgHiRed, 36 | "fgHiGreen": color.FgHiGreen, 37 | "fgHiYellow": color.FgHiYellow, 38 | "fgHiBlue": color.FgHiBlue, 39 | "fgHiMagenta": color.FgHiMagenta, 40 | "fgHiCyan": color.FgHiCyan, 41 | "fgHiWhite": color.FgHiWhite, 42 | } 43 | ) 44 | 45 | func init() { 46 | spinnerTypes = GetSpinnerTypes() 47 | spinnersJson = nil // free up memory since it is no longer needed 48 | } 49 | 50 | // ListSpinnerTypes lists all the supported spinner types 51 | func ListSpinnerTypes() { 52 | fmt.Println("Spinner types:") 53 | for spinnerType := range spinnerTypes { 54 | fmt.Printf( 55 | "%s\n", 56 | spinnerType, 57 | ) 58 | } 59 | } 60 | 61 | // ListColours lists all the supported colours 62 | func ListColours() { 63 | fmt.Println("Colours:") 64 | for colour := range colourMap { 65 | fmt.Printf( 66 | "%s\n", 67 | colour, 68 | ) 69 | } 70 | } 71 | 72 | // GetSpinner returns the spinner info of 73 | // the given spinner type string if it exists. 74 | // 75 | // If the spinner type string does not exist, the program will panic. 76 | func GetSpinner(spinnerType string) SpinnerInfo { 77 | if spinner, ok := spinnerTypes[spinnerType]; ok { 78 | return spinner 79 | } else { 80 | panic( 81 | fmt.Errorf( 82 | "error %d: spinner type %s not found", 83 | utils.DEV_ERROR, 84 | spinnerType, 85 | ), 86 | ) 87 | } 88 | } 89 | 90 | // Thread-safe spinner 91 | type Spinner struct { 92 | Spinner SpinnerInfo 93 | 94 | Colour *color.Color 95 | Msg string 96 | SuccessMsg string 97 | ErrMsg string 98 | 99 | count int 100 | maxCount int 101 | active bool 102 | mu *sync.RWMutex 103 | stop chan struct{} 104 | } 105 | 106 | // New creates a new spinner with the given spinner type, 107 | // colour, message, success message, error message and max count. 108 | // 109 | // For the spinner type and colour, please refer to the source code or 110 | // use ListSpinnerTypes() and ListColours() to print all the supported spinner types and colours. 
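//
// Illustrative example (mirrors the call sites elsewhere in this repository):
//
//	s := New(JSON_SPINNER, "fgHiYellow", "Processing...", "Done!", "Something went wrong!", 1)
//	s.Start()
//	// ... do some work ...
//	s.Stop(false) // prints the success message since hasErr is false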
111 | func New(spinnerType, colour, message, successMsg, errMsg string, maxCount int) *Spinner { 112 | colourAttribute, ok := colourMap[colour] 113 | if !ok { 114 | panic( 115 | fmt.Errorf( 116 | "error %d: colour %s not found", 117 | utils.DEV_ERROR, 118 | colour, 119 | ), 120 | ) 121 | } 122 | 123 | return &Spinner{ 124 | Spinner: GetSpinner(spinnerType), 125 | 126 | Colour: color.New(colourAttribute), 127 | Msg: message, 128 | SuccessMsg: successMsg, 129 | ErrMsg: errMsg, 130 | 131 | count: 0, 132 | maxCount: maxCount, 133 | active: false, 134 | mu: &sync.RWMutex{}, 135 | stop: make(chan struct{}, 1), 136 | } 137 | } 138 | 139 | // Starts the spinner 140 | func (s *Spinner) Start() { 141 | s.mu.Lock() 142 | if s.active { 143 | s.mu.Unlock() 144 | return 145 | } 146 | 147 | s.active = true 148 | s.mu.Unlock() 149 | 150 | go func() { 151 | for { 152 | for _, frame := range s.Spinner.Frames { 153 | select { 154 | case <-s.stop: 155 | return 156 | default: 157 | s.mu.Lock() 158 | if !s.active { 159 | s.mu.Unlock() 160 | return 161 | } 162 | 163 | s.Colour.Printf( 164 | "\r%s %s%s", 165 | frame, 166 | s.Msg, 167 | CLEAR_LINE, 168 | ) 169 | s.mu.Unlock() 170 | time.Sleep( 171 | time.Duration(s.Spinner.Interval) * time.Millisecond, 172 | ) 173 | } 174 | } 175 | } 176 | }() 177 | } 178 | 179 | // Add adds i to the spinner count 180 | func (s *Spinner) Add(i int) int { 181 | s.mu.Lock() 182 | defer s.mu.Unlock() 183 | 184 | if s.count >= s.maxCount { 185 | return s.count 186 | } 187 | 188 | s.count += i 189 | return s.count 190 | } 191 | 192 | // UpdateMsg changes the spinner message 193 | func (s *Spinner) UpdateMsg(msg string) { 194 | s.mu.Lock() 195 | defer s.mu.Unlock() 196 | 197 | s.Msg = msg 198 | } 199 | 200 | // MsgIncrement increments the spinner count and 201 | // updates the message with the new count based on the baseMsg. 202 | // 203 | // baseMsg should be a string with a single %d placeholder 204 | // e.g. s.MsgIncrement("Downloading %d files...") 205 | func (s *Spinner) MsgIncrement(baseMsg string) { 206 | s.UpdateMsg( 207 | fmt.Sprintf( 208 | baseMsg, 209 | s.Add(1), 210 | ), 211 | ) 212 | } 213 | 214 | func (s *Spinner) stopSpinner() { 215 | s.active = false 216 | if s.count != 0 { 217 | s.count = 0 218 | } 219 | s.stop <- struct{}{} 220 | close(s.stop) 221 | } 222 | 223 | // Stop stops the spinner and prints an outcome message 224 | func (s *Spinner) Stop(hasErr bool) { 225 | s.StopWithFn(func() { 226 | if hasErr && s.ErrMsg != "" { 227 | color.Red( 228 | "\r✗ %s%s\n", 229 | s.ErrMsg, 230 | CLEAR_LINE, 231 | ) 232 | } else if s.SuccessMsg != "" { 233 | color.Green( 234 | "\r✓ %s%s", 235 | s.SuccessMsg, 236 | CLEAR_LINE, 237 | ) 238 | } 239 | }) 240 | } 241 | 242 | // StopWithFn stops the spinner and then calls the given action function 243 | func (s *Spinner) StopWithFn(action func()) { 244 | s.mu.Lock() 245 | defer s.mu.Unlock() 246 | if !s.active { 247 | return 248 | } 249 | 250 | s.stopSpinner() 251 | action() 252 | } 253 | 254 | // KillProgram stops the spinner, 255 | // prints the given message and exits the program with code 2. 256 | // 257 | // Used for Ctrl + C interrupts.
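//
// Illustrative wiring (an assumption for illustration; the actual interrupt
// handling lives elsewhere in this repository):
//
//	sigs := make(chan os.Signal, 1)
//	signal.Notify(sigs, os.Interrupt)
//	go func() {
//		<-sigs
//		s.KillProgram("Stopped by user interrupt (Ctrl + C)...")
//	}()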
258 | func (s *Spinner) KillProgram(msg string) { 259 | s.mu.Lock() 260 | defer s.mu.Unlock() 261 | if !s.active { 262 | os.Exit(2) 263 | } 264 | 265 | s.stopSpinner() 266 | color.Red( 267 | "\r✗ %s%s\n", 268 | msg, 269 | CLEAR_LINE, 270 | ) 271 | os.Exit(2) 272 | } 273 | -------------------------------------------------------------------------------- /src/api/pixiv/mobile/oauth.go: -------------------------------------------------------------------------------- 1 | package pixivmobile 2 | 3 | import ( 4 | cryptorand "crypto/rand" 5 | "crypto/sha256" 6 | "encoding/base64" 7 | "fmt" 8 | "regexp" 9 | "time" 10 | 11 | "github.com/KJHJason/Cultured-Downloader-CLI/api/pixiv/models" 12 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 13 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 14 | "github.com/fatih/color" 15 | "github.com/pkg/browser" 16 | ) 17 | 18 | type accessTokenInfo struct { 19 | accessToken string // The access token that will be used to communicate with the Pixiv's Mobile API 20 | expiresAt time.Time // The time when the access token expires 21 | } 22 | 23 | // Perform a S256 transformation method on a byte array 24 | func S256(bytes []byte) string { 25 | hash := sha256.Sum256(bytes) 26 | return base64.RawURLEncoding.EncodeToString(hash[:]) 27 | } 28 | 29 | var pixivOauthCodeRegex = regexp.MustCompile(`^[\w-]{43}$`) 30 | 31 | // Start the OAuth flow to get the refresh token 32 | func (pixiv *PixivMobile) StartOauthFlow() error { 33 | // create a random 32 bytes that is cryptographically secure 34 | codeVerifierBytes := make([]byte, 32) 35 | _, err := cryptorand.Read(codeVerifierBytes) 36 | if err != nil { 37 | // should never happen but just in case 38 | return fmt.Errorf( 39 | "pixiv mobile error %d: failed to generate random bytes, more info => %v", 40 | utils.DEV_ERROR, 41 | err, 42 | ) 43 | } 44 | codeVerifier := base64.RawURLEncoding.EncodeToString(codeVerifierBytes) 45 | codeChallenge := S256([]byte(codeVerifier)) 46 | 47 | loginParams := map[string]string{ 48 | "code_challenge": codeChallenge, 49 | "code_challenge_method": "S256", 50 | "client": "pixiv-android", 51 | } 52 | 53 | loginUrl := pixiv.loginUrl + "?" 
+ utils.ParamsToString(loginParams) 54 | err = browser.OpenURL(loginUrl) 55 | if err != nil { 56 | color.Red("Pixiv: Failed to open browser: " + err.Error()) 57 | color.Red("Please open the following URL in your browser:") 58 | color.Red(loginUrl) 59 | } else { 60 | color.Green("Opened a new tab in your browser to\n" + loginUrl) 61 | } 62 | 63 | useHttp3 := utils.IsHttp3Supported(utils.PIXIV_MOBILE, true) 64 | color.Yellow("If unsure, follow the guide below:") 65 | color.Yellow("https://github.com/KJHJason/Cultured-Downloader/blob/main/doc/pixiv_oauth_guide.md\n") 66 | for { 67 | var code string 68 | fmt.Print(color.YellowString("Please enter the code you received from Pixiv: ")) 69 | _, err := fmt.Scanln(&code) 70 | fmt.Println() 71 | if err != nil { 72 | color.Red("Failed to read inputted code: " + err.Error()) 73 | continue 74 | } 75 | if !pixivOauthCodeRegex.MatchString(code) { 76 | color.Red("Invalid code format...") 77 | continue 78 | } 79 | 80 | res, err := request.CallRequestWithData( 81 | &request.RequestArgs{ 82 | Url: pixiv.authTokenUrl, 83 | Method: "POST", 84 | Timeout: pixiv.apiTimeout, 85 | CheckStatus: true, 86 | UserAgent: "PixivAndroidApp/5.0.234 (Android 11; Pixel 5)", 87 | Http2: !useHttp3, 88 | Http3: useHttp3, 89 | }, 90 | map[string]string{ 91 | "client_id": pixiv.clientId, 92 | "client_secret": pixiv.clientSecret, 93 | "code": code, 94 | "code_verifier": codeVerifier, 95 | "grant_type": "authorization_code", 96 | "include_policy": "true", 97 | "redirect_uri": pixiv.redirectUri, 98 | }, 99 | ) 100 | if err != nil { 101 | color.Red("Please check if the code you entered is correct.") 102 | continue 103 | } 104 | 105 | var oauthFlowJson models.PixivOauthFlowJson 106 | if err := utils.LoadJsonFromResponse(res, &oauthFlowJson); err != nil { 107 | color.Red(err.Error()) 108 | continue 109 | } 110 | 111 | refreshToken := oauthFlowJson.RefreshToken 112 | color.Green("Your Pixiv Refresh Token: " + refreshToken) 113 | color.Yellow("Please save your refresh token somewhere SECURE and do NOT share it with anyone!") 114 | return nil 115 | } 116 | } 117 | 118 | // Refresh the access token 119 | func (pixiv *PixivMobile) refreshAccessToken() error { 120 | pixiv.accessTokenMu.Lock() 121 | defer pixiv.accessTokenMu.Unlock() 122 | 123 | useHttp3 := utils.IsHttp3Supported(utils.PIXIV_MOBILE, true) 124 | res, err := request.CallRequestWithData( 125 | &request.RequestArgs{ 126 | Url: pixiv.authTokenUrl, 127 | Method: "POST", 128 | Timeout: pixiv.apiTimeout, 129 | UserAgent: pixiv.userAgent, 130 | Http2: !useHttp3, 131 | Http3: useHttp3, 132 | }, 133 | map[string]string{ 134 | "client_id": pixiv.clientId, 135 | "client_secret": pixiv.clientSecret, 136 | "grant_type": "refresh_token", 137 | "include_policy": "true", 138 | "refresh_token": pixiv.refreshToken, 139 | }, 140 | ) 141 | if err != nil || res.StatusCode != 200 { 142 | const errPrefix = "pixiv mobile error" 143 | if err == nil { 144 | res.Body.Close() 145 | err = fmt.Errorf( 146 | "%s %d: failed to refresh token due to %s response from Pixiv\n"+ 147 | "Please check your refresh token and try again or use the \"-pixiv_start_oauth\" flag to get a new refresh token", 148 | errPrefix, 149 | utils.RESPONSE_ERROR, 150 | res.Status, 151 | ) 152 | } else { 153 | err = fmt.Errorf( 154 | "%s %d: failed to refresh token due to %v\n"+ 155 | "Please check your internet connection and try again", 156 | errPrefix, 157 | utils.CONNECTION_ERROR, 158 | err, 159 | ) 160 | } 161 | return err 162 | } 163 | 164 | var oauthJson models.PixivOauthJson 165 
| if err := utils.LoadJsonFromResponse(res, &oauthJson); err != nil { 166 | return err 167 | } 168 | 169 | expiresIn := oauthJson.ExpiresIn - 15 // usually 3600 but minus 15 seconds to be safe 170 | pixiv.accessTokenMap.accessToken = oauthJson.AccessToken 171 | pixiv.accessTokenMap.expiresAt = time.Now().Add(time.Duration(expiresIn) * time.Second) 172 | return nil 173 | } 174 | 175 | // Reads the response JSON and checks if the access token has expired, 176 | // if so, refreshes the access token for future requests. 177 | // 178 | // Returns a boolean indicating if the access token was refreshed. 179 | func (pixiv *PixivMobile) refreshTokenIfReq() (bool, error) { 180 | if pixiv.accessTokenMap.accessToken != "" && pixiv.accessTokenMap.expiresAt.After(time.Now()) { 181 | return false, nil 182 | } 183 | 184 | err := pixiv.refreshAccessToken() 185 | if err != nil { 186 | return true, err 187 | } 188 | return true, nil 189 | } 190 | -------------------------------------------------------------------------------- /src/api/cookie.go: -------------------------------------------------------------------------------- 1 | package api 2 | 3 | import ( 4 | "fmt" 5 | "net/http" 6 | "os" 7 | "strings" 8 | "time" 9 | 10 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 11 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 12 | "github.com/fatih/color" 13 | ) 14 | 15 | // Returns a cookie with given value and website to be used in requests 16 | func GetCookie(sessionID, website string) *http.Cookie { 17 | if sessionID == "" { 18 | return &http.Cookie{} 19 | } 20 | 21 | sessionCookieInfo := utils.GetSessionCookieInfo(website) 22 | domain := sessionCookieInfo.Domain 23 | cookieName := sessionCookieInfo.Name 24 | sameSite := sessionCookieInfo.SameSite 25 | 26 | cookie := http.Cookie{ 27 | Name: cookieName, 28 | Value: sessionID, 29 | Domain: domain, 30 | Expires: time.Now().Add(365 * 24 * time.Hour), 31 | Path: "/", 32 | SameSite: sameSite, 33 | Secure: true, 34 | HttpOnly: true, 35 | } 36 | return &cookie 37 | } 38 | 39 | func getHeaders(website, userAgent string) map[string]string { 40 | headers := map[string]string{ 41 | "User-Agent": userAgent, 42 | } 43 | 44 | var referer, origin string 45 | switch website { 46 | case utils.PIXIV : 47 | referer = utils.PIXIV_URL 48 | origin = utils.PIXIV_URL 49 | case utils.PIXIV_FANBOX : 50 | referer = utils.PIXIV_FANBOX_URL 51 | origin = utils.PIXIV_FANBOX_URL 52 | case utils.FANTIA : 53 | referer = utils.FANTIA_URL 54 | origin = utils.FANTIA_URL 55 | case utils.KEMONO : 56 | referer = utils.KEMONO_URL 57 | origin = utils.KEMONO_URL 58 | case utils.KEMONO_BACKUP : 59 | referer = utils.BACKUP_KEMONO_URL 60 | origin = utils.BACKUP_KEMONO_URL 61 | default : 62 | // Shouldn't happen but could happen during development 63 | panic( 64 | fmt.Errorf( 65 | "error %d, invalid website, %q, in getHeaders", 66 | utils.DEV_ERROR, 67 | website, 68 | ), 69 | ) 70 | } 71 | 72 | headers["Referer"] = referer 73 | headers["Origin"] = origin 74 | return headers 75 | } 76 | 77 | // Verifies the given cookie by making a request to the website 78 | // and returns true if the cookie is valid 79 | func VerifyCookie(cookie *http.Cookie, website, userAgent string) (bool, error) { 80 | // sends a request to the website to verify the cookie 81 | var websiteUrl string 82 | switch website { 83 | case utils.FANTIA: 84 | websiteUrl = utils.FANTIA_URL + "/mypage/users/plans" 85 | case utils.PIXIV_FANBOX: 86 | websiteUrl = utils.PIXIV_FANBOX_URL + "/creators/supporting" 87 | case utils.PIXIV: 88 
| websiteUrl = utils.PIXIV_URL + "/dashboard" 89 | // case utils.KEMONO: Since kemono.party is no longer up and redirects to kemono.su 90 | // websiteUrl = utils.KEMONO_URL + "/favorites" 91 | case utils.KEMONO_BACKUP: 92 | websiteUrl = utils.BACKUP_KEMONO_URL + "/favorites" 93 | default: 94 | // Shouldn't happen but could happen during development 95 | panic( 96 | fmt.Errorf( 97 | "error %d, invalid website, %q, in VerifyCookie", 98 | utils.DEV_ERROR, 99 | website, 100 | ), 101 | ) 102 | } 103 | 104 | if cookie.Value == "" { 105 | return false, nil 106 | } 107 | 108 | useHttp3 := utils.IsHttp3Supported(website, false) 109 | cookies := []*http.Cookie{cookie} 110 | resp, err := request.CallRequest( 111 | &request.RequestArgs{ 112 | Method: "HEAD", 113 | Url: websiteUrl, 114 | Cookies: cookies, 115 | CheckStatus: true, 116 | Http3: useHttp3, 117 | Http2: !useHttp3, 118 | Headers: getHeaders(website, userAgent), 119 | }, 120 | ) 121 | if err != nil { 122 | return false, err 123 | } 124 | resp.Body.Close() 125 | 126 | // check if the cookie is valid 127 | resUrl := resp.Request.URL.String() 128 | if website == utils.FANTIA && strings.HasPrefix(resUrl, utils.FANTIA_RECAPTCHA_URL) { 129 | // This would still mean that the cookie is still valid. 130 | return true, nil 131 | } 132 | return resUrl == websiteUrl, nil 133 | } 134 | 135 | // Prints out the error message and exits the program if the cookie verification fails 136 | func processCookieVerification(website string, err error) { 137 | if err != nil { 138 | utils.LogError( 139 | err, 140 | "error occurred when trying to verify cookie.", 141 | true, 142 | utils.ERROR, 143 | ) 144 | color.Red( 145 | fmt.Sprintf( 146 | "error %d: could not verify %s cookie.\nPlease refer to the log file for more details.", 147 | utils.INPUT_ERROR, 148 | utils.GetReadableSiteStr(website), 149 | ), 150 | ) 151 | os.Exit(1) 152 | } 153 | } 154 | 155 | // Verifies the given cookie by making a request to the backup domain and checks if the cookie is valid 156 | func backupVerifyCookie(website, cookieValue, userAgent string) *http.Cookie { 157 | var backupWebsite string 158 | switch website { 159 | case utils.KEMONO: 160 | backupWebsite = utils.KEMONO_BACKUP 161 | default: 162 | // Shouldn't happen but could happen during development 163 | color.Red( 164 | fmt.Sprintf( 165 | "error %d: %s is not supported for cookie verification on a backup domain.", 166 | utils.DEV_ERROR, 167 | utils.GetReadableSiteStr(website), 168 | ), 169 | ) 170 | os.Exit(1) 171 | } 172 | 173 | cookie := GetCookie(cookieValue, backupWebsite) 174 | cookieIsValid, err := VerifyCookie(cookie, backupWebsite, userAgent) 175 | processCookieVerification(backupWebsite, err) 176 | if !cookieIsValid { 177 | color.Red( 178 | fmt.Sprintf( 179 | "error %d: %s cookie is invalid", 180 | utils.INPUT_ERROR, 181 | utils.GetReadableSiteStr(backupWebsite), 182 | ), 183 | ) 184 | os.Exit(1) 185 | } 186 | return cookie 187 | } 188 | 189 | // Verifies the given cookie by making a request to the website and checks if the cookie is valid 190 | // If the cookie is valid, the cookie will be returned 191 | // 192 | // However, if the cookie is invalid, an error message will be printed out and the program will shutdown 193 | func VerifyAndGetCookie(website, cookieValue, userAgent string) *http.Cookie { 194 | cookie := GetCookie(cookieValue, website) 195 | cookieIsValid, err := VerifyCookie(cookie, website, userAgent) 196 | processCookieVerification(website, err) 197 | 198 | if !cookieIsValid { 199 | if website != 
utils.KEMONO { 200 | color.Red( 201 | fmt.Sprintf( 202 | "error %d: %s cookie is invalid", 203 | utils.INPUT_ERROR, 204 | utils.GetReadableSiteStr(website), 205 | ), 206 | ) 207 | os.Exit(1) 208 | } else { 209 | // try to verify the cookie on the backup domain 210 | cookie = backupVerifyCookie(website, cookieValue, userAgent) 211 | } 212 | } 213 | return cookie 214 | } 215 | -------------------------------------------------------------------------------- /src/utils/cookie.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "bufio" 5 | "encoding/json" 6 | "fmt" 7 | "io" 8 | "net/http" 9 | "os" 10 | "path/filepath" 11 | "strconv" 12 | "strings" 13 | "time" 14 | 15 | "github.com/fatih/color" 16 | ) 17 | 18 | // Returns the cookie info for the specified site 19 | // 20 | // Will panic if the site does not match any of the cases 21 | func GetSessionCookieInfo(site string) *cookieInfo { 22 | switch site { 23 | case FANTIA: 24 | return &cookieInfo{ 25 | Domain: "fantia.jp", 26 | Name: "_session_id", 27 | SameSite: http.SameSiteLaxMode, 28 | } 29 | case PIXIV_FANBOX: 30 | return &cookieInfo{ 31 | Domain: ".fanbox.cc", 32 | Name: "FANBOXSESSID", 33 | SameSite: http.SameSiteNoneMode, 34 | } 35 | case PIXIV: 36 | return &cookieInfo{ 37 | Domain: ".pixiv.net", 38 | Name: "PHPSESSID", 39 | SameSite: http.SameSiteNoneMode, 40 | } 41 | case KEMONO: 42 | return &cookieInfo{ 43 | Domain: KEMONO_COOKIE_DOMAIN, 44 | Name: KEMONO_SESSION_COOKIE_NAME, 45 | SameSite: http.SameSiteNoneMode, 46 | } 47 | case KEMONO_BACKUP: 48 | return &cookieInfo{ 49 | Domain: KEMONO_COOKIE_BACKUP_DOMAIN, 50 | Name: KEMONO_SESSION_COOKIE_NAME, 51 | SameSite: http.SameSiteNoneMode, 52 | } 53 | default: 54 | panic( 55 | fmt.Errorf( 56 | "error %d, invalid site, %q in GetSessionCookieInfo", 57 | DEV_ERROR, 58 | site, 59 | ), 60 | ) 61 | } 62 | } 63 | 64 | // For the exported cookies in JSON instead of Netscape format 65 | type ExportedCookies []struct { 66 | Domain string `json:"domain"` 67 | Expire float64 `json:"expirationDate"` 68 | HttpOnly bool `json:"httpOnly"` 69 | Name string `json:"name"` 70 | Path string `json:"path"` 71 | Secure bool `json:"secure"` 72 | Value string `json:"value"` 73 | Session bool `json:"session"` 74 | } 75 | 76 | type cookieInfoArgs struct { 77 | name string 78 | sameSite http.SameSite 79 | } 80 | 81 | func parseTxtCookieFile(f *os.File, filePath string, cookieArgs *cookieInfoArgs) ([]*http.Cookie, error) { 82 | var cookies []*http.Cookie 83 | reader := bufio.NewReader(f) 84 | for { 85 | lineBytes, err := ReadLine(reader) 86 | if err != nil { 87 | if err == io.EOF { 88 | break 89 | } 90 | return nil, fmt.Errorf( 91 | "error %d: reading cookie file at %s, more info => %v", 92 | OS_ERROR, 93 | filePath, 94 | err, 95 | ) 96 | } 97 | 98 | line := strings.TrimSpace(string(lineBytes)) 99 | if line == "" || strings.HasPrefix(line, "#") { 100 | continue // skip empty lines and comments 101 | } 102 | 103 | // split the line 104 | cookieInfos := strings.Split(line, "\t") 105 | if len(cookieInfos) < 7 { 106 | continue // too few values will be ignored 107 | } 108 | 109 | cookieName := cookieInfos[5] 110 | if cookieName != cookieArgs.name { 111 | continue // not the session cookie 112 | } 113 | 114 | // parse the values 115 | cookie := http.Cookie{ 116 | Name: cookieName, 117 | Value: cookieInfos[6], 118 | Domain: cookieInfos[0], 119 | Path: cookieInfos[2], 120 | Secure: cookieInfos[3] == "TRUE", 121 | HttpOnly: true, 122 | SameSite: 
cookieArgs.sameSite, 123 | } 124 | 125 | expiresUnixStr := cookieInfos[4] 126 | if expiresUnixStr != "" { 127 | expiresUnixInt, err := strconv.Atoi(expiresUnixStr) 128 | if err != nil { 129 | // should never happen but just in case 130 | errMsg := fmt.Sprintf( 131 | "error %d: parsing cookie expiration time, %q, more info => %v", 132 | UNEXPECTED_ERROR, 133 | expiresUnixStr, 134 | err, 135 | ) 136 | color.Red(errMsg) 137 | continue 138 | } 139 | if expiresUnixInt > 0 { 140 | cookie.Expires = time.Unix(int64(expiresUnixInt), 0) 141 | } 142 | } 143 | cookies = append(cookies, &cookie) 144 | } 145 | return cookies, nil 146 | } 147 | 148 | func parseJsonCookieFile(f *os.File, filePath string, cookieArgs *cookieInfoArgs) ([]*http.Cookie, error) { 149 | var cookies []*http.Cookie 150 | var exportedCookies ExportedCookies 151 | if err := json.NewDecoder(f).Decode(&exportedCookies); err != nil { 152 | return nil, fmt.Errorf( 153 | "error %d: failed to decode cookie JSON file at %s, more info => %v", 154 | JSON_ERROR, 155 | filePath, 156 | err, 157 | ) 158 | } 159 | 160 | for _, cookie := range exportedCookies { 161 | if cookie.Name != cookieArgs.name { 162 | // not the session cookie 163 | continue 164 | } 165 | 166 | parsedCookie := &http.Cookie{ 167 | Name: cookie.Name, 168 | Value: cookie.Value, 169 | Domain: cookie.Domain, 170 | Path: cookie.Path, 171 | Secure: cookie.Secure, 172 | HttpOnly: cookie.HttpOnly, 173 | SameSite: cookieArgs.sameSite, 174 | } 175 | if !cookie.Session { 176 | parsedCookie.Expires = time.Unix(int64(cookie.Expire), 0) 177 | } 178 | 179 | cookies = append(cookies, parsedCookie) 180 | } 181 | return cookies, nil 182 | } 183 | 184 | // parse the Netscape cookie file generated by extensions like Get cookies.txt LOCALLY 185 | func ParseNetscapeCookieFile(filePath, sessionId, website string) ([]*http.Cookie, error) { 186 | if filePath != "" && sessionId != "" { 187 | return nil, fmt.Errorf( 188 | "error %d: cannot use both cookie file and session id flags", 189 | INPUT_ERROR, 190 | ) 191 | } 192 | 193 | sessionCookieInfo := GetSessionCookieInfo(website) 194 | sessionCookieName := sessionCookieInfo.Name 195 | sessionCookieSameSite := sessionCookieInfo.SameSite 196 | 197 | f, err := os.Open(filePath) 198 | if err != nil { 199 | return nil, fmt.Errorf( 200 | "error %d: opening cookie file at %s, more info => %v", 201 | OS_ERROR, 202 | filePath, 203 | err, 204 | ) 205 | } 206 | defer f.Close() 207 | 208 | cookieArgs := &cookieInfoArgs{ 209 | name: sessionCookieName, 210 | sameSite: sessionCookieSameSite, 211 | } 212 | var cookies []*http.Cookie 213 | if ext := filepath.Ext(filePath); ext == ".txt" { 214 | cookies, err = parseTxtCookieFile(f, filePath, cookieArgs) 215 | } else if ext == ".json" { 216 | cookies, err = parseJsonCookieFile(f, filePath, cookieArgs) 217 | } else { 218 | err = fmt.Errorf( 219 | "error %d: invalid cookie file extension, %q, at %s...\nOnly .txt and .json files are supported", 220 | INPUT_ERROR, 221 | ext, 222 | filePath, 223 | ) 224 | } 225 | 226 | if err != nil { 227 | return nil, err 228 | } 229 | 230 | if len(cookies) == 0 { 231 | return nil, fmt.Errorf( 232 | "error %d: no session cookie found in cookie file at %s for website %s", 233 | INPUT_ERROR, 234 | filePath, 235 | GetReadableSiteStr(website), 236 | ) 237 | } 238 | return cookies, nil 239 | } 240 | -------------------------------------------------------------------------------- /src/api/pixiv/ugoira/ffmpeg.go: -------------------------------------------------------------------------------- 1 | 
package ugoira 2 | 3 | import ( 4 | "fmt" 5 | "os" 6 | "os/exec" 7 | "path/filepath" 8 | "sort" 9 | "strconv" 10 | 11 | "github.com/KJHJason/Cultured-Downloader-CLI/api/pixiv/models" 12 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 13 | ) 14 | 15 | type ffmpegOptions struct { 16 | ffmpegPath string 17 | outputExt string 18 | concatDelayFilePath string 19 | sortedFilenames []string 20 | ugoiraQuality int 21 | outputPath string 22 | } 23 | 24 | func writeDelays(ugoiraInfo *models.Ugoira, imagesFolderPath string) (string, []string, error) { 25 | // sort the ugoira frames by their filename which are %6d.imageExt 26 | sortedFilenames := make([]string, 0, len(ugoiraInfo.Frames)) 27 | for fileName := range ugoiraInfo.Frames { 28 | sortedFilenames = append(sortedFilenames, fileName) 29 | } 30 | sort.Strings(sortedFilenames) 31 | 32 | // write the frames' variable delays to a text file 33 | baseFmtStr := "file '%s'\nduration %f\n" 34 | delaysText := "ffconcat version 1.0\n" 35 | for _, frameName := range sortedFilenames { 36 | delay := ugoiraInfo.Frames[frameName] 37 | delaySec := float64(delay) / 1000 38 | delaysText += fmt.Sprintf( 39 | baseFmtStr, 40 | filepath.Join(imagesFolderPath, frameName), delaySec, 41 | ) 42 | } 43 | // copy the last frame and add it to the end of the delays text file 44 | // https://video.stackexchange.com/questions/20588/ffmpeg-flash-frames-last-still-image-in-concat-sequence 45 | lastFilename := sortedFilenames[len(sortedFilenames)-1] 46 | delaysText += fmt.Sprintf( 47 | "file '%s'\nduration %f", 48 | filepath.Join(imagesFolderPath, lastFilename), 49 | 0.001, 50 | ) 51 | 52 | concatDelayFilePath := filepath.Join(imagesFolderPath, "delays.txt") 53 | f, err := os.Create(concatDelayFilePath) 54 | if err != nil { 55 | return "", nil, fmt.Errorf( 56 | "pixiv error %d: failed to create delays.txt, more info => %v", 57 | utils.OS_ERROR, 58 | err, 59 | ) 60 | } 61 | defer f.Close() 62 | 63 | _, err = f.WriteString(delaysText) 64 | if err != nil { 65 | return "", nil, fmt.Errorf( 66 | "pixiv error %d: failed to write delay string to delays.txt, more info => %v", 67 | utils.OS_ERROR, 68 | err, 69 | ) 70 | } 71 | return concatDelayFilePath, sortedFilenames, nil 72 | } 73 | 74 | func getFlagsForWebmAndMp4(outputExt string, ugoiraQuality int) []string { 75 | var args []string 76 | if outputExt == ".mp4" { 77 | // if converting to an mp4 file 78 | // crf range is 0-51 for .mp4 files 79 | if ugoiraQuality > 51 { 80 | ugoiraQuality = 51 81 | } else if ugoiraQuality < 0 { 82 | ugoiraQuality = 0 83 | } 84 | args = append( 85 | args, 86 | "-vf", "pad=ceil(iw/2)*2:ceil(ih/2)*2", // pad the video to be even 87 | "-crf", strconv.Itoa(ugoiraQuality), // set the quality 88 | ) 89 | } else { 90 | // crf range is 0-63 for .webm files 91 | if ugoiraQuality == 0 || ugoiraQuality < 0 { 92 | args = append(args, "-lossless", "1") 93 | } else if ugoiraQuality > 63 { 94 | args = append(args, "-crf", "63") 95 | } else { 96 | args = append(args, "-crf", strconv.Itoa(ugoiraQuality)) 97 | } 98 | } 99 | 100 | // if converting the ugoira to a webm or .mp4 file 101 | // then set the output video codec to vp9 or h264 respectively 102 | // - webm: https://trac.ffmpeg.org/wiki/Encode/VP9 103 | // - mp4: https://trac.ffmpeg.org/wiki/Encode/H.264 104 | var encoding string 105 | if outputExt == ".webm" { 106 | encoding = "libvpx-vp9" 107 | } else { // outputExt == ".mp4" 108 | encoding = "libx264" 109 | } 110 | 111 | args = append( 112 | args, 113 | "-pix_fmt", "yuv420p", // set the pixel format to yuv420p 
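// e.g. with outputExt == ".mp4" and ugoiraQuality == 20, the full flag set
// built by this function works out to:
//   -vf pad=ceil(iw/2)*2:ceil(ih/2)*2 -crf 20 -pix_fmt yuv420p -c:v libx264 -vsync passthrough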
114 | "-c:v", encoding, // video codec 115 | "-vsync", "passthrough", // Prevents frame dropping 116 | ) 117 | return args 118 | } 119 | 120 | func getFlagsForGif(options *ffmpegOptions, imagesFolderPath string) ([]string, error) { 121 | // Generate a palette for the gif using FFmpeg for better quality 122 | palettePath := filepath.Join(imagesFolderPath, "palette.png") 123 | ffmpegImages := "%" + fmt.Sprintf( 124 | "%dd%s", // Usually it's %6d.extension but just in case, measure the length of the filename 125 | len(utils.RemoveExtFromFilename(options.sortedFilenames[0])), 126 | filepath.Ext(options.sortedFilenames[0]), 127 | ) 128 | imagePaletteCmd := exec.Command( 129 | options.ffmpegPath, 130 | "-i", filepath.Join(imagesFolderPath, ffmpegImages), 131 | "-vf", "palettegen", 132 | palettePath, 133 | ) 134 | 135 | if utils.DEBUG_MODE { 136 | imagePaletteCmd.Stdout = os.Stdout 137 | imagePaletteCmd.Stderr = os.Stderr 138 | } 139 | err := imagePaletteCmd.Run() 140 | if err != nil { 141 | return nil, fmt.Errorf( 142 | "pixiv error %d: failed to generate palette for ugoira gif, more info => %v", 143 | utils.CMD_ERROR, 144 | err, 145 | ) 146 | } 147 | return []string{ 148 | "-loop", "0", // loop the gif 149 | "-i", palettePath, 150 | "-filter_complex", "paletteuse", 151 | }, nil 152 | } 153 | 154 | func getFfmpegFlagsForUgoira(options *ffmpegOptions, imagesFolderPath string) ([]string, error) { 155 | // FFmpeg flags: https://www.ffmpeg.org/ffmpeg.html 156 | args := []string{ 157 | "-y", // overwrite output file if it exists 158 | "-an", // disable audio 159 | "-f", "concat", // input is a concat file 160 | "-safe", "0", // allow absolute paths in the concat file 161 | "-i", options.concatDelayFilePath, // input file 162 | } 163 | switch options.outputExt { 164 | case ".webm", ".mp4": 165 | args = append(args, getFlagsForWebmAndMp4(options.outputExt, options.ugoiraQuality)...) 166 | case ".gif": 167 | gifArgs, err := getFlagsForGif(options, imagesFolderPath) 168 | if err != nil { 169 | return nil, err 170 | } 171 | args = append(args, gifArgs...) 
172 | case ".apng": 173 | args = append( 174 | args, 175 | "-plays", "0", // loop the apng 176 | "-vf", 177 | "setpts=PTS-STARTPTS,hqdn3d=1.5:1.5:6:6", // set the setpts filter and apply some denoising 178 | ) 179 | case ".webp": // outputExt == ".webp" 180 | args = append( 181 | args, 182 | "-pix_fmt", "yuv420p", // set the pixel format to yuv420p 183 | "-loop", "0", // loop the webp 184 | "-vsync", "passthrough", // Prevents frame dropping 185 | "-lossless", "1", // lossless compression 186 | ) 187 | default: 188 | panic( 189 | fmt.Sprintf( 190 | "pixiv error %d: Output extension %v is not allowed for ugoira conversion", 191 | utils.DEV_ERROR, 192 | options.outputExt, 193 | ), 194 | ) 195 | } 196 | if options.outputExt != ".webp" { 197 | args = append(args, "-quality", "best") 198 | } 199 | 200 | args = append(args, options.outputPath) 201 | return args, nil 202 | } 203 | -------------------------------------------------------------------------------- /src/api/pixiv/pixiv.go: -------------------------------------------------------------------------------- 1 | package pixiv 2 | 3 | import ( 4 | "fmt" 5 | 6 | "github.com/KJHJason/Cultured-Downloader-CLI/api/pixiv/models" 7 | "github.com/KJHJason/Cultured-Downloader-CLI/api/pixiv/ugoira" 8 | "github.com/KJHJason/Cultured-Downloader-CLI/api/pixiv/common" 9 | "github.com/KJHJason/Cultured-Downloader-CLI/api/pixiv/web" 10 | "github.com/KJHJason/Cultured-Downloader-CLI/api/pixiv/mobile" 11 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 12 | "github.com/KJHJason/Cultured-Downloader-CLI/spinner" 13 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 14 | ) 15 | 16 | func alertUser(artworksToDl []*request.ToDownload, ugoiraToDl []*models.Ugoira) { 17 | if len(artworksToDl) > 0 || len(ugoiraToDl) > 0 { 18 | utils.AlertWithoutErr(utils.Title, "Finished downloading artworks from Pixiv!") 19 | } else { 20 | utils.AlertWithoutErr(utils.Title, "No artworks to download from Pixiv!") 21 | } 22 | } 23 | 24 | // Start the download process for Pixiv 25 | func PixivWebDownloadProcess(pixivDl *PixivDl, pixivDlOptions *pixivweb.PixivWebDlOptions, pixivUgoiraOptions *ugoira.UgoiraOptions) { 26 | var ugoiraToDl []*models.Ugoira 27 | var artworksToDl []*request.ToDownload 28 | if len(pixivDl.IllustratorIds) > 0 { 29 | artworkIdsSlice := pixivweb.GetMultipleIllustratorPosts( 30 | pixivDl.IllustratorIds, 31 | pixivDl.IllustratorPageNums, 32 | utils.DOWNLOAD_PATH, 33 | pixivDlOptions, 34 | ) 35 | pixivDl.ArtworkIds = append(pixivDl.ArtworkIds, artworkIdsSlice...) 36 | pixivDl.ArtworkIds = utils.RemoveSliceDuplicates(pixivDl.ArtworkIds) 37 | } 38 | 39 | if len(pixivDl.ArtworkIds) > 0 { 40 | artworkSlice, ugoiraSlice := pixivweb.GetMultipleArtworkDetails( 41 | pixivDl.ArtworkIds, 42 | utils.DOWNLOAD_PATH, 43 | pixivDlOptions, 44 | ) 45 | artworksToDl = append(artworksToDl, artworkSlice...) 46 | ugoiraToDl = append(ugoiraToDl, ugoiraSlice...) 
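// At this point the artwork details are split into two queues: artworksToDl
// holds static files as direct URL-to-file-path pairs, while ugoiraToDl holds
// animated works whose zipped frames still need to be downloaded and
// converted with FFmpeg further below.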
47 | } 48 | 49 | if len(pixivDl.TagNames) > 0 { 50 | // loop through each tag and page number 51 | baseMsg := "Searching for artworks based on tag names on Pixiv [%d/" + fmt.Sprintf("%d]...", len(pixivDl.TagNames)) 52 | progress := spinner.New( 53 | "pong", 54 | "fgHiYellow", 55 | fmt.Sprintf( 56 | baseMsg, 57 | 0, 58 | ), 59 | fmt.Sprintf( 60 | "Finished searching for artworks based on %d tag names on Pixiv!", 61 | len(pixivDl.TagNames), 62 | ), 63 | fmt.Sprintf( 64 | "Finished with some errors while searching for artworks based on %d tag names on Pixiv!\nPlease refer to the logs for more details...", 65 | len(pixivDl.TagNames), 66 | ), 67 | len(pixivDl.TagNames), 68 | ) 69 | progress.Start() 70 | hasErr := false 71 | for idx, tagName := range pixivDl.TagNames { 72 | var artworksSlice []*request.ToDownload 73 | var ugoiraSlice []*models.Ugoira 74 | artworksSlice, ugoiraSlice, hasErr = pixivweb.TagSearch( 75 | tagName, 76 | utils.DOWNLOAD_PATH, 77 | pixivDl.TagNamesPageNums[idx], 78 | pixivDlOptions, 79 | ) 80 | artworksToDl = append(artworksToDl, artworksSlice...) 81 | ugoiraToDl = append(ugoiraToDl, ugoiraSlice...) 82 | progress.MsgIncrement(baseMsg) 83 | } 84 | progress.Stop(hasErr) 85 | } 86 | 87 | if len(artworksToDl) > 0 { 88 | request.DownloadUrls( 89 | artworksToDl, 90 | &request.DlOptions{ 91 | MaxConcurrency: utils.PIXIV_MAX_CONCURRENT_DOWNLOADS, 92 | Headers: pixivcommon.GetPixivRequestHeaders(), 93 | Cookies: pixivDlOptions.SessionCookies, 94 | UseHttp3: false, 95 | }, 96 | pixivDlOptions.Configs, 97 | ) 98 | } 99 | if len(ugoiraToDl) > 0 { 100 | ugoira.DownloadMultipleUgoira( 101 | &ugoira.UgoiraArgs{ 102 | UseMobileApi: false, 103 | ToDownload: ugoiraToDl, 104 | Cookies: pixivDlOptions.SessionCookies, 105 | }, 106 | pixivUgoiraOptions, 107 | pixivDlOptions.Configs, 108 | request.CallRequest, 109 | ) 110 | } 111 | 112 | alertUser(artworksToDl, ugoiraToDl) 113 | } 114 | 115 | // Start the download process for Pixiv 116 | func PixivMobileDownloadProcess(pixivDl *PixivDl, pixivDlOptions *pixivmobile.PixivMobileDlOptions, pixivUgoiraOptions *ugoira.UgoiraOptions) { 117 | var ugoiraToDl []*models.Ugoira 118 | var artworksToDl []*request.ToDownload 119 | if len(pixivDl.IllustratorIds) > 0 { 120 | artworkSlice, ugoiraSlice := pixivDlOptions.MobileClient.GetMultipleIllustratorPosts( 121 | pixivDl.IllustratorIds, 122 | pixivDl.IllustratorPageNums, 123 | utils.DOWNLOAD_PATH, 124 | pixivDlOptions.ArtworkType, 125 | ) 126 | artworksToDl = artworkSlice 127 | ugoiraToDl = ugoiraSlice 128 | } 129 | 130 | if len(pixivDl.ArtworkIds) > 0 { 131 | artworkSlice, ugoiraSlice := pixivDlOptions.MobileClient.GetMultipleArtworkDetails( 132 | pixivDl.ArtworkIds, 133 | utils.DOWNLOAD_PATH, 134 | ) 135 | artworksToDl = append(artworksToDl, artworkSlice...) 136 | ugoiraToDl = append(ugoiraToDl, ugoiraSlice...) 
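// Same split as in the web flow above: static files vs. ugoira. Unlike the
// web flow, the mobile flow authenticates with an OAuth access token
// (refreshed on demand by refreshTokenIfReq) rather than session cookies,
// which is why the download calls below attach no cookies.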
137 | } 138 | 139 | if len(pixivDl.TagNames) > 0 { 140 | // loop through each tag and page number 141 | baseMsg := "Searching for artworks based on tag names on Pixiv [%d/" + fmt.Sprintf("%d]...", len(pixivDl.TagNames)) 142 | progress := spinner.New( 143 | "pong", 144 | "fgHiYellow", 145 | fmt.Sprintf( 146 | baseMsg, 147 | 0, 148 | ), 149 | fmt.Sprintf( 150 | "Finished searching for artworks based on %d tag names on Pixiv!", 151 | len(pixivDl.TagNames), 152 | ), 153 | fmt.Sprintf( 154 | "Finished with some errors while searching for artworks based on %d tag names on Pixiv!\nPlease refer to the logs for more details...", 155 | len(pixivDl.TagNames), 156 | ), 157 | len(pixivDl.TagNames), 158 | ) 159 | progress.Start() 160 | hasErr := false 161 | for idx, tagName := range pixivDl.TagNames { 162 | var artworksSlice []*request.ToDownload 163 | var ugoiraSlice []*models.Ugoira 164 | artworksSlice, ugoiraSlice, hasErr = pixivDlOptions.MobileClient.TagSearch( 165 | tagName, 166 | utils.DOWNLOAD_PATH, 167 | pixivDl.TagNamesPageNums[idx], 168 | pixivDlOptions, 169 | ) 170 | artworksToDl = append(artworksToDl, artworksSlice...) 171 | ugoiraToDl = append(ugoiraToDl, ugoiraSlice...) 172 | progress.MsgIncrement(baseMsg) 173 | } 174 | progress.Stop(hasErr) 175 | } 176 | 177 | if len(artworksToDl) > 0 { 178 | request.DownloadUrls( 179 | artworksToDl, 180 | &request.DlOptions{ 181 | MaxConcurrency: utils.PIXIV_MAX_CONCURRENT_DOWNLOADS, 182 | Headers: pixivcommon.GetPixivRequestHeaders(), 183 | UseHttp3: false, 184 | }, 185 | pixivDlOptions.Configs, 186 | ) 187 | } 188 | if len(ugoiraToDl) > 0 { 189 | ugoira.DownloadMultipleUgoira( 190 | &ugoira.UgoiraArgs{ 191 | UseMobileApi: true, 192 | ToDownload: ugoiraToDl, 193 | Cookies: nil, 194 | }, 195 | pixivUgoiraOptions, 196 | pixivDlOptions.Configs, 197 | pixivDlOptions.MobileClient.SendRequest, 198 | ) 199 | } 200 | 201 | alertUser(artworksToDl, ugoiraToDl) 202 | } 203 | -------------------------------------------------------------------------------- /src/api/kemono/args.go: -------------------------------------------------------------------------------- 1 | package kemono 2 | 3 | import ( 4 | "fmt" 5 | "net/http" 6 | "os" 7 | "regexp" 8 | 9 | "github.com/KJHJason/Cultured-Downloader-CLI/api" 10 | "github.com/KJHJason/Cultured-Downloader-CLI/api/kemono/models" 11 | "github.com/KJHJason/Cultured-Downloader-CLI/configs" 12 | "github.com/KJHJason/Cultured-Downloader-CLI/gdrive" 13 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 14 | "github.com/fatih/color" 15 | ) 16 | 17 | const ( 18 | BASE_REGEX_STR = `https://kemono\.(?P<topLevelDomain>party|su)/(?P<service>patreon|fanbox|gumroad|subscribestar|dlsite|fantia|boosty)/user/(?P<creatorId>[\w-]+)` 19 | BASE_POST_SUFFIX_REGEX_STR = `/post/(?P<postId>\d+)` 20 | TLD_GROUP_NAME = "topLevelDomain" 21 | SERVICE_GROUP_NAME = "service" 22 | CREATOR_ID_GROUP_NAME = "creatorId" 23 | POST_ID_GROUP_NAME = "postId" 24 | API_MAX_CONCURRENT = 3 25 | ) 26 | 27 | var ( 28 | POST_URL_REGEX = regexp.MustCompile( 29 | fmt.Sprintf( 30 | `^%s%s$`, 31 | BASE_REGEX_STR, 32 | BASE_POST_SUFFIX_REGEX_STR, 33 | ), 34 | ) 35 | POST_URL_REGEX_TLD_INDEX = POST_URL_REGEX.SubexpIndex(TLD_GROUP_NAME) 36 | POST_URL_REGEX_SERVICE_INDEX = POST_URL_REGEX.SubexpIndex(SERVICE_GROUP_NAME) 37 | POST_URL_REGEX_CREATOR_ID_INDEX = POST_URL_REGEX.SubexpIndex(CREATOR_ID_GROUP_NAME) 38 | POST_URL_REGEX_POST_ID_INDEX = POST_URL_REGEX.SubexpIndex(POST_ID_GROUP_NAME) 39 | 40 | CREATOR_URL_REGEX = regexp.MustCompile( 41 | fmt.Sprintf( 42 | `^%s$`, 43 | BASE_REGEX_STR, 44 | ), 45 | ) 46 |
CREATOR_URL_REGEX_TLD_INDEX = CREATOR_URL_REGEX.SubexpIndex(TLD_GROUP_NAME) 47 | CREATOR_URL_REGEX_SERVICE_INDEX = CREATOR_URL_REGEX.SubexpIndex(SERVICE_GROUP_NAME) 48 | CREATOR_URL_REGEX_CREATOR_ID_INDEX = CREATOR_URL_REGEX.SubexpIndex(CREATOR_ID_GROUP_NAME) 49 | ) 50 | 51 | type KemonoDl struct { 52 | CreatorUrls []string 53 | CreatorPageNums []string 54 | CreatorsToDl []*models.KemonoCreatorToDl 55 | 56 | PostUrls []string 57 | PostsToDl []*models.KemonoPostToDl 58 | } 59 | 60 | // HOTFIX: looks like kemono.party is no longer up and redirects to kemono.su 61 | func changeServiceToSu(service string) string { 62 | if service == utils.KEMONO_TLD { 63 | return utils.KEMONO_BACKUP_TLD 64 | } 65 | return service 66 | } 67 | 68 | func ProcessCreatorUrls(creatorUrls []string, pageNums []string) []*models.KemonoCreatorToDl { 69 | creatorsToDl := make([]*models.KemonoCreatorToDl, len(creatorUrls)) 70 | for i, creatorUrl := range creatorUrls { 71 | matched := CREATOR_URL_REGEX.FindStringSubmatch(creatorUrl) 72 | creatorsToDl[i] = &models.KemonoCreatorToDl{ 73 | Service: matched[CREATOR_URL_REGEX_SERVICE_INDEX], 74 | CreatorId: matched[CREATOR_URL_REGEX_CREATOR_ID_INDEX], 75 | PageNum: pageNums[i], 76 | Tld: changeServiceToSu(matched[CREATOR_URL_REGEX_TLD_INDEX]), 77 | } 78 | } 79 | 80 | return creatorsToDl 81 | } 82 | 83 | func ProcessPostUrls(postUrls []string) []*models.KemonoPostToDl { 84 | postsToDl := make([]*models.KemonoPostToDl, len(postUrls)) 85 | for i, postUrl := range postUrls { 86 | matched := POST_URL_REGEX.FindStringSubmatch(postUrl) 87 | postsToDl[i] = &models.KemonoPostToDl{ 88 | Service: matched[POST_URL_REGEX_SERVICE_INDEX], 89 | CreatorId: matched[POST_URL_REGEX_CREATOR_ID_INDEX], 90 | PostId: matched[POST_URL_REGEX_POST_ID_INDEX], 91 | Tld: matched[POST_URL_REGEX_TLD_INDEX], 92 | } 93 | } 94 | 95 | return postsToDl 96 | } 97 | 98 | // RemoveDuplicates removes duplicate creators and posts from the slice 99 | func (k *KemonoDl) RemoveDuplicates() { 100 | if len(k.CreatorsToDl) > 0 { 101 | newCreatorSlice := make([]*models.KemonoCreatorToDl, 0, len(k.CreatorsToDl)) 102 | seen := make(map[string]struct{}) 103 | for _, creator := range k.CreatorsToDl { 104 | key := fmt.Sprintf("%s/%s", creator.Service, creator.CreatorId) 105 | if _, ok := seen[key]; ok { 106 | continue 107 | } 108 | seen[key] = struct{}{} 109 | newCreatorSlice = append(newCreatorSlice, creator) 110 | } 111 | k.CreatorsToDl = newCreatorSlice 112 | } 113 | 114 | if len(k.PostsToDl) == 0 { 115 | return 116 | } 117 | newPostSlice := make([]*models.KemonoPostToDl, 0, len(k.PostsToDl)) 118 | seen := make(map[string]struct{}) 119 | for _, post := range k.PostsToDl { 120 | key := fmt.Sprintf("%s/%s/%s", post.Service, post.CreatorId, post.PostId) 121 | if _, ok := seen[key]; ok { 122 | continue 123 | } 124 | seen[key] = struct{}{} 125 | newPostSlice = append(newPostSlice, post) 126 | } 127 | k.PostsToDl = newPostSlice 128 | } 129 | 130 | func (k *KemonoDl) ValidateArgs() { 131 | valid, outlier := utils.SliceMatchesRegex(CREATOR_URL_REGEX, k.CreatorUrls) 132 | if !valid { 133 | color.Red( 134 | fmt.Sprintf( 135 | "kemono error %d: invalid creator URL found for kemono party: %s", 136 | utils.INPUT_ERROR, 137 | outlier, 138 | ), 139 | ) 140 | os.Exit(1) 141 | } 142 | 143 | valid, outlier = utils.SliceMatchesRegex(POST_URL_REGEX, k.PostUrls) 144 | if !valid { 145 | color.Red( 146 | fmt.Sprintf( 147 | "kemono error %d: invalid post URL found for kemono party: %s", 148 | utils.INPUT_ERROR, 149 | outlier, 150 | ), 151 | ) 152 | 
os.Exit(1) 153 | } 154 | 155 | if len(k.CreatorUrls) > 0 { 156 | if len(k.CreatorPageNums) == 0 { 157 | k.CreatorPageNums = make([]string, len(k.CreatorUrls)) 158 | } else { 159 | utils.ValidatePageNumInput( 160 | len(k.CreatorUrls), 161 | k.CreatorPageNums, 162 | []string{ 163 | "Number of creator URL(s) and page numbers must be equal.", 164 | }, 165 | ) 166 | } 167 | creatorsToDl := ProcessCreatorUrls(k.CreatorUrls, k.CreatorPageNums) 168 | k.CreatorsToDl = append(k.CreatorsToDl, creatorsToDl...) 169 | k.CreatorUrls = nil 170 | k.CreatorPageNums = nil 171 | } 172 | if len(k.PostUrls) > 0 { 173 | postsToDl := ProcessPostUrls(k.PostUrls) 174 | k.PostsToDl = append(k.PostsToDl, postsToDl...) 175 | k.PostUrls = nil 176 | } 177 | k.RemoveDuplicates() 178 | } 179 | 180 | // KemonoDlOptions is the struct that contains the arguments for Kemono download options. 181 | type KemonoDlOptions struct { 182 | DlAttachments bool 183 | DlGdrive bool 184 | 185 | Configs *configs.Config 186 | 187 | // GdriveClient is the Google Drive client to be 188 | // used in the download process if GDrive links are detected. 189 | GdriveClient *gdrive.GDrive 190 | 191 | SessionCookieId string 192 | SessionCookies []*http.Cookie 193 | } 194 | 195 | // ValidateArgs validates the session cookie ID of the Kemono account to download from. 196 | // It also validates the Google Drive client if the user wants to download to Google Drive. 197 | // 198 | // Should be called after initialising the struct. 199 | func (k *KemonoDlOptions) ValidateArgs(userAgent string) { 200 | if k.SessionCookieId != "" { 201 | k.SessionCookies = []*http.Cookie{ 202 | api.VerifyAndGetCookie(utils.KEMONO, k.SessionCookieId, userAgent), 203 | } 204 | } else { 205 | color.Red("kemono error %d: session cookie ID is required", utils.INPUT_ERROR) 206 | os.Exit(1) 207 | } 208 | 209 | if k.DlGdrive && k.GdriveClient == nil { 210 | k.DlGdrive = false 211 | } else if !k.DlGdrive && k.GdriveClient != nil { 212 | k.GdriveClient = nil 213 | } 214 | } 215 | -------------------------------------------------------------------------------- /src/api/pixiv/ugoira/process.go: -------------------------------------------------------------------------------- 1 | package ugoira 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "net/http" 7 | "os" 8 | "os/exec" 9 | "os/signal" 10 | "path/filepath" 11 | "syscall" 12 | 13 | "github.com/KJHJason/Cultured-Downloader-CLI/api/pixiv/common" 14 | "github.com/KJHJason/Cultured-Downloader-CLI/api/pixiv/models" 15 | "github.com/KJHJason/Cultured-Downloader-CLI/configs" 16 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 17 | "github.com/KJHJason/Cultured-Downloader-CLI/spinner" 18 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 19 | ) 20 | 21 | // Map the Ugoira frame delays to their respective filenames 22 | func MapDelaysToFilename(ugoiraFramesJson models.UgoiraFramesJson) map[string]int64 { 23 | frameInfoMap := map[string]int64{} 24 | for _, frame := range ugoiraFramesJson { 25 | frameInfoMap[frame.File] = int64(frame.Delay) 26 | } 27 | return frameInfoMap 28 | } 29 | 30 | type UgoiraFfmpegArgs struct { 31 | ffmpegPath string 32 | outputPath string 33 | ugoiraQuality int 34 | } 35 | 36 | // Converts the Ugoira to the desired output path using FFmpeg 37 | func ConvertUgoira(ugoiraInfo *models.Ugoira, imagesFolderPath string, ugoiraFfmpeg *UgoiraFfmpegArgs) error { 38 | outputExt := filepath.Ext(ugoiraFfmpeg.outputPath) 39 | if !utils.SliceContains(UGOIRA_ACCEPTED_EXT, outputExt) { 40 | return fmt.Errorf( 41 | "pixiv error 
%d: Output extension %v is not allowed for ugoira conversion", 42 | utils.INPUT_ERROR, 43 | outputExt, 44 | ) 45 | } 46 | 47 | concatDelayFilePath, sortedFilenames, err := writeDelays(ugoiraInfo, imagesFolderPath) 48 | if err != nil { 49 | return err 50 | } 51 | 52 | args, err := getFfmpegFlagsForUgoira( 53 | &ffmpegOptions{ 54 | ffmpegPath: ugoiraFfmpeg.ffmpegPath, 55 | outputExt: outputExt, 56 | concatDelayFilePath: concatDelayFilePath, 57 | sortedFilenames: sortedFilenames, 58 | outputPath: ugoiraFfmpeg.outputPath, 59 | ugoiraQuality: ugoiraFfmpeg.ugoiraQuality, 60 | }, 61 | imagesFolderPath, 62 | ) 63 | if err != nil { 64 | return err 65 | } 66 | 67 | // convert the frames to a gif or a video 68 | cmd := exec.Command(ugoiraFfmpeg.ffmpegPath, args...) 69 | // cmd.Stderr = os.Stderr 70 | // cmd.Stdout = os.Stdout 71 | err = cmd.Run() 72 | if err != nil { 73 | os.Remove(ugoiraFfmpeg.outputPath) 74 | return fmt.Errorf( 75 | "pixiv error %d: failed to convert ugoira to %s, more info => %v", 76 | utils.CMD_ERROR, 77 | ugoiraFfmpeg.outputPath, 78 | err, 79 | ) 80 | } 81 | 82 | // delete unzipped folder which contains 83 | // the frames images and the delays text file 84 | os.RemoveAll(imagesFolderPath) 85 | return nil 86 | } 87 | 88 | // Returns the ugoira's zip file path and the ugoira's converted file path 89 | func GetUgoiraFilePaths(ugoireFilePath, ugoiraUrl, outputFormat string) (string, string) { 90 | filePath := filepath.Join(ugoireFilePath, utils.GetLastPartOfUrl(ugoiraUrl)) 91 | outputFilePath := utils.RemoveExtFromFilename(filePath) + outputFormat 92 | return filePath, outputFilePath 93 | } 94 | 95 | func convertMultipleUgoira(ugoiraArgs *UgoiraArgs, ugoiraOptions *UgoiraOptions, config *configs.Config) { 96 | // Create a context that can be cancelled when SIGINT/SIGTERM signal is received 97 | ctx, cancel := context.WithCancel(context.Background()) 98 | defer cancel() 99 | 100 | // Catch SIGINT/SIGTERM signal and cancel the context when received 101 | sigs := make(chan os.Signal, 1) 102 | signal.Notify(sigs, os.Interrupt, syscall.SIGTERM) 103 | go func() { 104 | <-sigs 105 | cancel() 106 | }() 107 | defer signal.Stop(sigs) 108 | 109 | var errSlice []error 110 | downloadInfoLen := len(ugoiraArgs.ToDownload) 111 | baseMsg := "Converting Ugoira to %s [%d/" + fmt.Sprintf("%d]...", downloadInfoLen) 112 | progress := spinner.New( 113 | spinner.DL_SPINNER, 114 | "fgHiYellow", 115 | fmt.Sprintf( 116 | baseMsg, 117 | 0, 118 | ), 119 | fmt.Sprintf( 120 | "Finished converting %d Ugoira to %s!", 121 | downloadInfoLen, 122 | ugoiraOptions.OutputFormat, 123 | ), 124 | fmt.Sprintf( 125 | "Something went wrong while converting %d Ugoira to %s!\nPlease refer to the logs for more details.", 126 | downloadInfoLen, 127 | ugoiraOptions.OutputFormat, 128 | ), 129 | downloadInfoLen, 130 | ) 131 | progress.Start() 132 | for i, ugoira := range ugoiraArgs.ToDownload { 133 | zipFilePath, outputPath := GetUgoiraFilePaths(ugoira.FilePath, ugoira.Url, ugoiraOptions.OutputFormat) 134 | if utils.PathExists(outputPath) { 135 | progress.MsgIncrement(baseMsg) 136 | continue 137 | } 138 | if !utils.PathExists(zipFilePath) { 139 | progress.MsgIncrement(baseMsg) 140 | continue 141 | } 142 | 143 | unzipFolderPath := filepath.Join( 144 | filepath.Dir(zipFilePath), 145 | "unzipped", 146 | ) 147 | err := utils.ExtractFiles(ctx, zipFilePath, unzipFolderPath, true) 148 | if err != nil { 149 | if err == context.Canceled { 150 | progress.KillProgram( 151 | fmt.Sprintf( 152 | "Stopped converting ugoira to %s [%d/%d]!", 153 | 
ugoiraOptions.OutputFormat, 154 | i, 155 | len(ugoiraArgs.ToDownload), 156 | ), 157 | ) 158 | } 159 | err := fmt.Errorf( 160 | "pixiv error %d: failed to unzip file %s, more info => %v", 161 | utils.OS_ERROR, 162 | zipFilePath, 163 | err, 164 | ) 165 | errSlice = append(errSlice, err) 166 | progress.MsgIncrement(baseMsg) 167 | continue 168 | } 169 | 170 | err = ConvertUgoira( 171 | ugoira, 172 | unzipFolderPath, 173 | &UgoiraFfmpegArgs{ 174 | ffmpegPath: config.FfmpegPath, 175 | outputPath: outputPath, 176 | ugoiraQuality: ugoiraOptions.Quality, 177 | }, 178 | ) 179 | if err != nil { 180 | errSlice = append(errSlice, err) 181 | } else if ugoiraOptions.DeleteZip { 182 | os.Remove(zipFilePath) 183 | } 184 | progress.MsgIncrement(baseMsg) 185 | } 186 | 187 | hasErr := false 188 | if len(errSlice) > 0 { 189 | hasErr = true 190 | utils.LogErrors(false, nil, utils.ERROR, errSlice...) 191 | } 192 | progress.Stop(hasErr) 193 | } 194 | 195 | type UgoiraArgs struct { 196 | UseMobileApi bool 197 | ToDownload []*models.Ugoira 198 | Cookies []*http.Cookie 199 | } 200 | 201 | // Downloads multiple Ugoira artworks and converts them based on the output format 202 | func DownloadMultipleUgoira(ugoiraArgs *UgoiraArgs, ugoiraOptions *UgoiraOptions, config *configs.Config, reqHandler request.RequestHandler) { 203 | var urlsToDownload []*request.ToDownload 204 | for _, ugoira := range ugoiraArgs.ToDownload { 205 | filePath, outputFilePath := GetUgoiraFilePaths( 206 | ugoira.FilePath, 207 | ugoira.Url, 208 | ugoiraOptions.OutputFormat, 209 | ) 210 | if !utils.PathExists(outputFilePath) { 211 | urlsToDownload = append(urlsToDownload, &request.ToDownload{ 212 | Url: ugoira.Url, 213 | FilePath: filePath, 214 | }) 215 | } 216 | } 217 | 218 | var useHttp3 bool 219 | var headers map[string]string 220 | if ugoiraArgs.UseMobileApi { 221 | headers = map[string]string{ 222 | "Referer": "https://app-api.pixiv.net", 223 | } 224 | } else { 225 | headers = pixivcommon.GetPixivRequestHeaders() 226 | useHttp3 = utils.IsHttp3Supported(utils.PIXIV, true) 227 | } 228 | 229 | request.DownloadUrlsWithHandler( 230 | urlsToDownload, 231 | &request.DlOptions{ 232 | MaxConcurrency: utils.PIXIV_MAX_CONCURRENT_DOWNLOADS, 233 | Headers: headers, 234 | Cookies: ugoiraArgs.Cookies, 235 | UseHttp3: useHttp3, 236 | }, 237 | config, // Note: if isMobileApi is true, custom user-agent will be ignored 238 | reqHandler, 239 | ) 240 | 241 | convertMultipleUgoira(ugoiraArgs, ugoiraOptions, config) 242 | } 243 | -------------------------------------------------------------------------------- /src/gdrive/api.go: -------------------------------------------------------------------------------- 1 | package gdrive 2 | 3 | import ( 4 | "fmt" 5 | "strconv" 6 | "net/http" 7 | 8 | "github.com/KJHJason/Cultured-Downloader-CLI/configs" 9 | "github.com/KJHJason/Cultured-Downloader-CLI/request" 10 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 11 | "github.com/KJHJason/Cultured-Downloader-CLI/gdrive/models" 12 | ) 13 | 14 | // censor the key=... part of the URL to key=<REDACTED>. 15 | // This is to prevent the API key from being leaked in the logs.
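// e.g. "https://www.googleapis.com/drive/v3/files/abc123?key=AIzaSyFakeKey"
// would be logged as "https://www.googleapis.com/drive/v3/files/abc123?key=<REDACTED>"
// (illustrative URL; the exact endpoint comes from gdrive.apiUrl).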
16 | func censorApiKeyFromStr(str string) string { 17 | return API_KEY_PARAM_REGEX.ReplaceAllString(str, "key=<REDACTED>") 18 | } 19 | 20 | // Gets the error message for a failed GDrive API call 21 | func getFailedApiCallErr(res *http.Response) error { 22 | requestUrl := res.Request.URL.String() 23 | return fmt.Errorf( 24 | "error while fetching from GDrive...\n" + 25 | "GDrive URL (May not be accurate): https://drive.google.com/file/d/%s/view?usp=sharing\n" + 26 | "Status Code: %s\nURL: %s", 27 | utils.GetLastPartOfUrl(requestUrl), 28 | res.Status, 29 | censorApiKeyFromStr(requestUrl), 30 | ) 31 | } 32 | 33 | // Returns the contents of the given GDrive folder using Google's GDrive package 34 | func (gdrive *GDrive) getFolderContentsWithClient(folderId, logPath string, config *configs.Config) ([]*models.GdriveFileToDl, error) { 35 | var pageToken string 36 | var gdriveFiles []*models.GdriveFileToDl 37 | for { 38 | action := gdrive.client.Files.List().Q(fmt.Sprintf("'%s' in parents", folderId)).Fields(GDRIVE_FOLDER_FIELDS) 39 | if pageToken != "" { 40 | action = action.PageToken(pageToken) 41 | } 42 | files, err := action.Do() 43 | if err != nil { 44 | return nil, fmt.Errorf( 45 | "gdrive error %d: failed to get folder contents with ID of %s, more info => %v", 46 | utils.CONNECTION_ERROR, 47 | folderId, 48 | err, 49 | ) 50 | } 51 | 52 | for _, file := range files.Files { 53 | gdriveFiles = append(gdriveFiles, &models.GdriveFileToDl{ 54 | Id: file.Id, 55 | Name: file.Name, 56 | Size: strconv.FormatInt(file.Size, 10), 57 | MimeType: file.MimeType, 58 | Md5Checksum: file.Md5Checksum, 59 | FilePath: "", 60 | }) 61 | } 62 | 63 | if files.NextPageToken == "" { 64 | break 65 | } else { 66 | pageToken = files.NextPageToken 67 | } 68 | } 69 | return gdriveFiles, nil 70 | } 71 | 72 | // Returns the contents of the given GDrive folder using API calls to GDrive API v3 73 | func (gdrive *GDrive) getFolderContentsWithApi(folderId, logPath string, config *configs.Config) ([]*models.GdriveFileToDl, error) { 74 | params := map[string]string{ 75 | "key": gdrive.apiKey, 76 | "q": fmt.Sprintf("'%s' in parents", folderId), 77 | "fields": GDRIVE_FOLDER_FIELDS, 78 | } 79 | var files []*models.GdriveFileToDl 80 | pageToken := "" 81 | for { 82 | if pageToken != "" { 83 | params["pageToken"] = pageToken 84 | } else { 85 | delete(params, "pageToken") 86 | } 87 | res, err := request.CallRequest( 88 | &request.RequestArgs{ 89 | Url: gdrive.apiUrl, 90 | Method: "GET", 91 | Timeout: gdrive.timeout, 92 | Params: params, 93 | UserAgent: config.UserAgent, 94 | Http2: !HTTP3_SUPPORTED, 95 | Http3: HTTP3_SUPPORTED, 96 | }, 97 | ) 98 | if err != nil { 99 | return nil, fmt.Errorf( 100 | "gdrive error %d: failed to get folder contents with ID of %s, more info => %v", 101 | utils.CONNECTION_ERROR, 102 | folderId, 103 | err, 104 | ) 105 | } 106 | defer res.Body.Close() 107 | if res.StatusCode != 200 { 108 | return nil, fmt.Errorf( 109 | "gdrive error %d: failed to get folder contents with ID of %s, more info => %s", 110 | utils.RESPONSE_ERROR, 111 | folderId, 112 | res.Status, 113 | ) 114 | } 115 | 116 | var gdriveFolder models.GDriveFolder 117 | if err := utils.LoadJsonFromResponse(res, &gdriveFolder); err != nil { 118 | return nil, err 119 | } 120 | 121 | for _, file := range gdriveFolder.Files { 122 | files = append(files, &models.GdriveFileToDl{ 123 | Id: file.Id, 124 | Name: file.Name, 125 | Size: file.Size, 126 | MimeType: file.MimeType, 127 | Md5Checksum: file.Md5Checksum, 128 | FilePath: "", 129 | }) 130 | } 131 | 132 | if
gdriveFolder.NextPageToken == "" { 133 | break 134 | } else { 135 | pageToken = gdriveFolder.NextPageToken 136 | } 137 | } 138 | return files, nil 139 | } 140 | 141 | // Returns the contents of the given GDrive folder 142 | func (gdrive *GDrive) GetFolderContents(folderId, logPath string, config *configs.Config) ([]*models.GdriveFileToDl, error) { 143 | if gdrive.client != nil { 144 | return gdrive.getFolderContentsWithClient(folderId, logPath, config) 145 | } 146 | return gdrive.getFolderContentsWithApi(folderId, logPath, config) 147 | } 148 | 149 | // Retrieves the content of a GDrive folder and its subfolders recursively using GDrive API v3 150 | func (gdrive *GDrive) GetNestedFolderContents(folderId, logPath string, config *configs.Config) ([]*models.GdriveFileToDl, error) { 151 | var files []*models.GdriveFileToDl 152 | folderContents, err := gdrive.GetFolderContents(folderId, logPath, config) 153 | if err != nil { 154 | return nil, err 155 | } 156 | 157 | for _, file := range folderContents { 158 | if file.MimeType == "application/vnd.google-apps.folder" { 159 | subFolderFiles, err := gdrive.GetNestedFolderContents(file.Id, logPath, config) 160 | if err != nil { 161 | return nil, err 162 | } 163 | files = append(files, subFolderFiles...) 164 | } else { 165 | files = append(files, file) 166 | } 167 | } 168 | return files, nil 169 | } 170 | 171 | // Retrieves the file details of the given GDrive file by making a HTTP request to GDrive API v3 172 | func (gdrive *GDrive) getFileDetailsWithAPI(gdriveInfo *models.GDriveToDl, config *configs.Config) (*models.GdriveFileToDl, error) { 173 | params := map[string]string{ 174 | "key": gdrive.apiKey, 175 | "fields": GDRIVE_FILE_FIELDS, 176 | } 177 | url := fmt.Sprintf("%s/%s", gdrive.apiUrl, gdriveInfo.Id) 178 | res, err := request.CallRequest( 179 | &request.RequestArgs{ 180 | Url: url, 181 | Method: "GET", 182 | Timeout: gdrive.timeout, 183 | Params: params, 184 | UserAgent: config.UserAgent, 185 | Http2: !HTTP3_SUPPORTED, 186 | Http3: HTTP3_SUPPORTED, 187 | }, 188 | ) 189 | if err != nil { 190 | return nil, fmt.Errorf( 191 | "gdrive error %d: failed to get file details with ID of %s, more info => %v", 192 | utils.CONNECTION_ERROR, 193 | gdriveInfo.Id, 194 | err, 195 | ) 196 | } 197 | defer res.Body.Close() 198 | if res.StatusCode != 200 { 199 | return nil, getFailedApiCallErr(res) 200 | } 201 | 202 | var gdriveFile models.GDriveFile 203 | if err := utils.LoadJsonFromResponse(res, &gdriveFile); err != nil { 204 | return nil, err 205 | } 206 | return &models.GdriveFileToDl{ 207 | Id: gdriveFile.Id, 208 | Name: gdriveFile.Name, 209 | Size: gdriveFile.Size, 210 | MimeType: gdriveFile.MimeType, 211 | Md5Checksum: gdriveFile.Md5Checksum, 212 | FilePath: gdriveInfo.FilePath, 213 | }, nil 214 | } 215 | 216 | // Retrieves the file details of the given GDrive file using Google's GDrive package 217 | func (gdrive *GDrive) getFileDetailsWithClient(gdriveInfo *models.GDriveToDl, config *configs.Config) (*models.GdriveFileToDl, error) { 218 | file, err := gdrive.client.Files.Get(gdriveInfo.Id).Fields(GDRIVE_FILE_FIELDS).Do() 219 | if err != nil { 220 | return nil, fmt.Errorf( 221 | "gdrive error %d: failed to get file details with ID of %s, more info => %v", 222 | utils.CONNECTION_ERROR, 223 | gdriveInfo.Id, 224 | err, 225 | ) 226 | } 227 | return &models.GdriveFileToDl{ 228 | Id: file.Id, 229 | Name: file.Name, 230 | Size: strconv.FormatInt(file.Size, 10), 231 | MimeType: file.MimeType, 232 | Md5Checksum: file.Md5Checksum, 233 | FilePath: 
gdriveInfo.FilePath, 234 | }, nil 235 | } 236 | 237 | // Retrieves the file details of the given GDrive file using GDrive API v3 238 | func (gdrive *GDrive) GetFileDetails(gdriveInfo *models.GDriveToDl, config *configs.Config) (*models.GdriveFileToDl, error) { 239 | if gdrive.client != nil { 240 | return gdrive.getFileDetailsWithClient(gdriveInfo, config) 241 | } 242 | return gdrive.getFileDetailsWithAPI(gdriveInfo, config) 243 | } 244 | -------------------------------------------------------------------------------- /src/request/download.go: -------------------------------------------------------------------------------- 1 | package request 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "io" 7 | "net/http" 8 | "net/url" 9 | "os" 10 | "os/signal" 11 | "path/filepath" 12 | "strings" 13 | "sync" 14 | "syscall" 15 | 16 | "github.com/KJHJason/Cultured-Downloader-CLI/configs" 17 | "github.com/KJHJason/Cultured-Downloader-CLI/spinner" 18 | "github.com/KJHJason/Cultured-Downloader-CLI/utils" 19 | ) 20 | 21 | func getFullFilePath(res *http.Response, filePath string) (string, error) { 22 | // check if filepath already has a filename attached 23 | if filepath.Ext(filePath) != "" { 24 | filePathDir := filepath.Dir(filePath) 25 | os.MkdirAll(filePathDir, 0755) 26 | filePathWithoutExt := utils.RemoveExtFromFilename(filePath) 27 | return filePathWithoutExt + strings.ToLower(filepath.Ext(filePath)), nil 28 | } 29 | 30 | os.MkdirAll(filePath, 0755) 31 | filename, err := url.PathUnescape(res.Request.URL.String()) 32 | if err != nil { 33 | // should never happen but just in case 34 | return "", fmt.Errorf( 35 | "error %d: failed to unescape URL, more info => %v\nurl: %s", 36 | utils.UNEXPECTED_ERROR, 37 | err, 38 | res.Request.URL.String(), 39 | ) 40 | } 41 | filename = utils.GetLastPartOfUrl(filename) 42 | filenameWithoutExt := utils.RemoveExtFromFilename(filename) 43 | filePath = filepath.Join( 44 | filePath, 45 | filenameWithoutExt + strings.ToLower(filepath.Ext(filename)), 46 | ) 47 | return filePath, nil 48 | } 49 | 50 | // check if the file size matches the content length 51 | // if not, then the file does not exist or is corrupted and should be re-downloaded 52 | func checkIfCanSkipDl(contentLength int64, filePath string, forceOverwrite bool) bool { 53 | fileSize, err := utils.GetFileSize(filePath) 54 | if err != nil { 55 | if err != os.ErrNotExist { 56 | // if the error wasn't because the file does not exist, 57 | // then log the error and continue with the download process 58 | utils.LogError(err, "", false, utils.ERROR) 59 | } 60 | return false 61 | } 62 | 63 | if fileSize == contentLength { 64 | // If the file already exists and the file size 65 | // matches the expected file size in the Content-Length header, 66 | // then skip the download process. 67 | return true 68 | } else if !forceOverwrite && fileSize > 0 { 69 | // If the file already exists and has more than 0 bytes 70 | // but the Content-Length header does not exist in the response, 71 | // we will assume that the file is already downloaded 72 | // and skip the download process if the overwrite flag is false.
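// (Go's net/http reports an unknown Content-Length as -1, so when the
// server omits the header, the equality check above can never match and
// any non-empty existing file lands in this branch instead.)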
73 | return true 74 | } 75 | return false 76 | } 77 | 78 | func DlToFile(res *http.Response, url, filePath string) error { 79 | file, err := os.Create(filePath) // create the file 80 | if err != nil { 81 | return fmt.Errorf( 82 | "error %d: failed to create file, more info => %v\nfile path: %s", 83 | utils.OS_ERROR, 84 | err, 85 | filePath, 86 | ) 87 | } 88 | 89 | // write the body to file 90 | // https://stackoverflow.com/a/11693049/16377492 91 | _, err = io.Copy(file, res.Body) 92 | if err != nil { 93 | file.Close() 94 | if fileErr := os.Remove(filePath); fileErr != nil { 95 | utils.LogError( 96 | fmt.Errorf( 97 | "download error %d: failed to remove file at %s, more info => %v", 98 | utils.OS_ERROR, 99 | filePath, 100 | fileErr, 101 | ), 102 | "", 103 | false, 104 | utils.ERROR, 105 | ) 106 | } 107 | 108 | if err != context.Canceled { 109 | errorMsg := fmt.Sprintf("failed to download %s due to %v", url, err) 110 | utils.LogError(err, errorMsg, false, utils.ERROR) 111 | err = nil 112 | } 113 | return err 114 | } 115 | file.Close() 116 | return nil 117 | } 118 | 119 | // DownloadUrl is used to download a file from a URL 120 | // 121 | // Note: If the file already exists, the download process will be skipped 122 | func DownloadUrl(filePath string, queue chan struct{}, reqArgs *RequestArgs, overwriteExistingFile bool) error { 123 | // Create a context that can be cancelled when SIGINT/SIGTERM signal is received 124 | ctx, cancel := context.WithCancel(context.Background()) 125 | defer cancel() 126 | 127 | // Catch SIGINT/SIGTERM signal and cancel the context when received 128 | sigs := make(chan os.Signal, 1) 129 | signal.Notify(sigs, os.Interrupt, syscall.SIGTERM) 130 | go func() { 131 | <-sigs 132 | cancel() 133 | }() 134 | defer signal.Stop(sigs) 135 | 136 | queue <- struct{}{} 137 | // Send a HEAD request first to get the expected file size from the Content-Length header. 138 | // A GET request could also work, but its Content-Length header may not be 139 | // present in the response due to chunked encoding, so HEAD is more reliable.
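// e.g. a successful HEAD response for a direct image URL typically carries
// only headers such as:
//   Content-Length: 1048576
//   Content-Type: image/jpeg
// with no body, which is all that checkIfCanSkipDl needs later on.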
140 | headRes, err := reqArgs.RequestHandler( 141 | &RequestArgs{ 142 | Url: reqArgs.Url, 143 | Method: "HEAD", 144 | Timeout: 10, 145 | Cookies: reqArgs.Cookies, 146 | Headers: reqArgs.Headers, 147 | UserAgent: reqArgs.UserAgent, 148 | CheckStatus: true, 149 | Http3: reqArgs.Http3, 150 | Http2: reqArgs.Http2, 151 | Context: ctx, 152 | }, 153 | ) 154 | if err != nil { 155 | return err 156 | } 157 | fileReqContentLength := headRes.ContentLength 158 | headRes.Body.Close() 159 | 160 | reqArgs.Context = ctx 161 | res, err := reqArgs.RequestHandler(reqArgs) 162 | if err != nil { 163 | if err != context.Canceled { 164 | err = fmt.Errorf( 165 | "error %d: failed to download file, more info => %v\nurl: %s", 166 | utils.DOWNLOAD_ERROR, 167 | err, 168 | reqArgs.Url, 169 | ) 170 | } 171 | return err 172 | } 173 | defer res.Body.Close() 174 | 175 | filePath, err = getFullFilePath(res, filePath) 176 | if err != nil { 177 | return err 178 | } 179 | 180 | if !checkIfCanSkipDl(fileReqContentLength, filePath, overwriteExistingFile) { 181 | err = DlToFile(res, reqArgs.Url, filePath) 182 | } 183 | return err 184 | } 185 | 186 | // DownloadUrls is used to download multiple files from URLs concurrently 187 | // 188 | // Note: If the file already exists, the download process will be skipped 189 | func DownloadUrlsWithHandler(urlInfoSlice []*ToDownload, dlOptions *DlOptions, config *configs.Config, reqHandler RequestHandler) { 190 | urlsLen := len(urlInfoSlice) 191 | if urlsLen == 0 { 192 | return 193 | } 194 | if urlsLen < dlOptions.MaxConcurrency { 195 | dlOptions.MaxConcurrency = urlsLen 196 | } 197 | 198 | var wg sync.WaitGroup 199 | queue := make(chan struct{}, dlOptions.MaxConcurrency) 200 | errChan := make(chan error, urlsLen) 201 | 202 | baseMsg := "Downloading files [%d/" + fmt.Sprintf("%d]...", urlsLen) 203 | progress := spinner.New( 204 | spinner.DL_SPINNER, 205 | "fgHiYellow", 206 | fmt.Sprintf( 207 | baseMsg, 208 | 0, 209 | ), 210 | fmt.Sprintf( 211 | "Finished downloading %d files", 212 | urlsLen, 213 | ), 214 | fmt.Sprintf( 215 | "Something went wrong while downloading %d files.\nPlease refer to the logs for more details.", 216 | urlsLen, 217 | ), 218 | urlsLen, 219 | ) 220 | progress.Start() 221 | for _, urlInfo := range urlInfoSlice { 222 | wg.Add(1) 223 | go func(fileUrl, filePath string) { 224 | defer func() { 225 | wg.Done() 226 | <-queue 227 | }() 228 | err := DownloadUrl( 229 | filePath, 230 | queue, 231 | &RequestArgs{ 232 | Url: fileUrl, 233 | Method: "GET", 234 | Timeout: utils.DOWNLOAD_TIMEOUT, 235 | Cookies: dlOptions.Cookies, 236 | Headers: dlOptions.Headers, 237 | Http2: !dlOptions.UseHttp3, 238 | Http3: dlOptions.UseHttp3, 239 | UserAgent: config.UserAgent, 240 | RequestHandler: reqHandler, 241 | }, 242 | config.OverwriteFiles, 243 | ) 244 | if err != nil { 245 | errChan <- err 246 | } 247 | 248 | if err != context.Canceled { 249 | progress.MsgIncrement(baseMsg) 250 | } 251 | }(urlInfo.Url, urlInfo.FilePath) 252 | } 253 | wg.Wait() 254 | close(queue) 255 | close(errChan) 256 | 257 | hasErr := false 258 | if len(errChan) > 0 { 259 | hasErr = true 260 | if kill := utils.LogErrors(false, errChan, utils.ERROR); kill { 261 | progress.KillProgram( 262 | "Stopped downloading files (incomplete downloads will be deleted)...", 263 | ) 264 | } 265 | } 266 | progress.Stop(hasErr) 267 | } 268 | 269 | // Same as DownloadUrlsWithHandler but uses the default request handler (CallRequest) 270 | func DownloadUrls(urlInfoSlice []*ToDownload, dlOptions *DlOptions, config *configs.Config) { 271 | 
DownloadUrlsWithHandler(urlInfoSlice, dlOptions, config, CallRequest) 272 | } 273 | --------------------------------------------------------------------------------
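A minimal usage sketch of the download API above (hypothetical example, not a file in this repository; the URL, file path, and concurrency value are made up for illustration):

package main

import (
    "github.com/KJHJason/Cultured-Downloader-CLI/configs"
    "github.com/KJHJason/Cultured-Downloader-CLI/request"
)

func main() {
    // one file to fetch; since FilePath has no extension, it is treated as a
    // directory and the filename is derived from the URL by getFullFilePath
    toDl := []*request.ToDownload{
        {Url: "https://example.com/sample.jpg", FilePath: "downloads"},
    }

    // DownloadUrls wraps DownloadUrlsWithHandler with the default request
    // handler (CallRequest): it downloads concurrently behind a progress
    // spinner and skips files that already exist on disk
    request.DownloadUrls(
        toDl,
        &request.DlOptions{
            MaxConcurrency: 4,     // cap on simultaneous downloads
            UseHttp3:       false, // i.e. use HTTP/2
        },
        &configs.Config{}, // zero-value config, for illustration only
    )
}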